修改处理逻辑,按照文档分别处理。

This commit is contained in:
wanglihui
2020-07-20 19:37:00 +08:00
parent 4df142cdf6
commit ddf98b5563
18 changed files with 171 additions and 258 deletions

View File

@@ -11,6 +11,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/** /**
* 获取arangoDB历史数据 * 获取arangoDB历史数据
@@ -18,45 +19,31 @@ import java.util.concurrent.ConcurrentHashMap;
public class BaseArangoData { public class BaseArangoData {
private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class); private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
public static ConcurrentHashMap<String, BaseDocument> v_Fqdn_Map = new ConcurrentHashMap<>(); public static ConcurrentHashMap<String, BaseDocument> historyVertexFqdnMap = new ConcurrentHashMap<>();
public static ConcurrentHashMap<String, BaseDocument> v_Ip_Map = new ConcurrentHashMap<>(); public static ConcurrentHashMap<String, BaseDocument> historyVertexIpMap = new ConcurrentHashMap<>();
public static ConcurrentHashMap<String, BaseDocument> v_Subscriber_Map = new ConcurrentHashMap<>(); public static ConcurrentHashMap<String, BaseDocument> historyVertexSubscriberMap = new ConcurrentHashMap<>();
public static ConcurrentHashMap<String, BaseEdgeDocument> e_Fqdn_Address_Ip_Map = new ConcurrentHashMap<>(); public static ConcurrentHashMap<String, BaseEdgeDocument> historyRelationFqdnAddressIpMap = new ConcurrentHashMap<>();
public static ConcurrentHashMap<String, BaseEdgeDocument> e_Ip_Visit_Fqdn_Map = new ConcurrentHashMap<>(); public static ConcurrentHashMap<String, BaseEdgeDocument> historyRelationIpVisitFqdnMap = new ConcurrentHashMap<>();
public static ConcurrentHashMap<String, BaseEdgeDocument> e_Subsciber_Locate_Ip_Map = new ConcurrentHashMap<>(); public static ConcurrentHashMap<String, BaseEdgeDocument> historyRelationSubsciberLocateIpMap = new ConcurrentHashMap<>();
private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance(); private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
private static ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance(); private ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
public void baseDocumentDataMap(){ <T extends BaseDocument> void readHistoryData(String table, ConcurrentHashMap<String, T> map, Class<T> type){
long startA = System.currentTimeMillis();
readHistoryData("FQDN", v_Fqdn_Map,BaseDocument.class);
readHistoryData("IP", v_Ip_Map,BaseDocument.class);
readHistoryData("SUBSCRIBER",v_Subscriber_Map,BaseDocument.class);
readHistoryData("R_LOCATE_FQDN2IP", e_Fqdn_Address_Ip_Map,BaseEdgeDocument.class);
readHistoryData("R_VISIT_IP2FQDN", e_Ip_Visit_Fqdn_Map,BaseEdgeDocument.class);
readHistoryData("R_LOCATE_SUBSCRIBER2IP",e_Subsciber_Locate_Ip_Map,BaseEdgeDocument.class);
threadPool.shutdown();
threadPool.awaitThreadTask();
LOG.info("v_Fqdn_Map大小"+v_Fqdn_Map.size());
LOG.info("v_Ip_Map大小"+v_Ip_Map.size());
LOG.info("v_Subscriber_Map大小"+v_Subscriber_Map.size());
LOG.info("e_Fqdn_Address_Ip_Map大小"+e_Fqdn_Address_Ip_Map.size());
LOG.info("e_Ip_Visit_Fqdn_Map大小"+e_Ip_Visit_Fqdn_Map.size());
LOG.info("e_Subsciber_Locate_Ip_Map大小"+e_Subsciber_Locate_Ip_Map.size());
long lastA = System.currentTimeMillis();
LOG.info("读取ArangoDb时间"+(lastA - startA));
}
private <T extends BaseDocument> void readHistoryData(String table, ConcurrentHashMap<String, T> map, Class<T> type){
try { try {
long start = System.currentTimeMillis();
CountDownLatch countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
long[] timeRange = getTimeRange(table); long[] timeRange = getTimeRange(table);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
String sql = getQuerySql(timeRange, i, table); String sql = getQuerySql(timeRange, i, table);
ReadHistoryArangoData<T> readHistoryArangoData = new ReadHistoryArangoData<>(arangoDBConnect, sql, map,type,table); ReadHistoryArangoData<T> readHistoryArangoData = new ReadHistoryArangoData<>(arangoDBConnect, sql, map,type,table,countDownLatch);
threadPool.executor(readHistoryArangoData); threadPool.executor(readHistoryArangoData);
} }
countDownLatch.await();
long last = System.currentTimeMillis();
LOG.info("读取"+table+" arangoDB 共耗时:"+(last-start));
LOG.info(table+" history Map大小为"+map.size());
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
} }

View File

@@ -23,18 +23,18 @@ public class BaseClickhouseData {
private static final Logger LOG = LoggerFactory.getLogger(BaseClickhouseData.class); private static final Logger LOG = LoggerFactory.getLogger(BaseClickhouseData.class);
private static ClickhouseConnect manger = ClickhouseConnect.getInstance(); private static ClickhouseConnect manger = ClickhouseConnect.getInstance();
static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> vFqdnMap = new HashMap<>(); static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> newVertexFqdnMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> vIpMap = new HashMap<>(); static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> newVertexIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String,ArrayList<BaseDocument>>> vSubscriberMap = new HashMap<>(); static HashMap<Integer, HashMap<String,ArrayList<BaseDocument>>> newVertexSubscriberMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> eFqdnAddressIpMap = new HashMap<>(); static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationFqdnAddressIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> eIpVisitFqdnMap = new HashMap<>(); static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationIpVisitFqdnMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> eSubsciberLocateIpMap = new HashMap<>(); static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationSubsciberLocateIpMap = new HashMap<>();
private DruidPooledConnection connection; private DruidPooledConnection connection;
private Statement statement; private Statement statement;
void baseVertexFqdn() { void baseVertexFqdn() {
initializeMap(vFqdnMap); initializeMap(newVertexFqdnMap);
LOG.info("FQDN resultMap初始化完成"); LOG.info("FQDN resultMap初始化完成");
String sql = getVertexFqdnSql(); String sql = getVertexFqdnSql();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@@ -45,7 +45,7 @@ public class BaseClickhouseData {
while (resultSet.next()) { while (resultSet.next()) {
BaseDocument newDoc = getVertexFqdnDocument(resultSet); BaseDocument newDoc = getVertexFqdnDocument(resultSet);
if (newDoc != null) { if (newDoc != null) {
putMapByHashcode(newDoc,vFqdnMap); putMapByHashcode(newDoc, newVertexFqdnMap);
} }
} }
long last = System.currentTimeMillis(); long last = System.currentTimeMillis();
@@ -58,7 +58,7 @@ public class BaseClickhouseData {
} }
void baseVertexIp() { void baseVertexIp() {
initializeMap(vIpMap); initializeMap(newVertexIpMap);
LOG.info("IP resultMap初始化完成"); LOG.info("IP resultMap初始化完成");
String sql = getVertexIpSql(); String sql = getVertexIpSql();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@@ -68,7 +68,7 @@ public class BaseClickhouseData {
ResultSet resultSet = statement.executeQuery(sql); ResultSet resultSet = statement.executeQuery(sql);
while (resultSet.next()) { while (resultSet.next()) {
BaseDocument newDoc = getVertexIpDocument(resultSet); BaseDocument newDoc = getVertexIpDocument(resultSet);
putMapByHashcode(newDoc,vIpMap); putMapByHashcode(newDoc, newVertexIpMap);
} }
long last = System.currentTimeMillis(); long last = System.currentTimeMillis();
LOG.info(sql + "\n读取clickhouse v_IP时间" + (last - start)); LOG.info(sql + "\n读取clickhouse v_IP时间" + (last - start));
@@ -80,7 +80,7 @@ public class BaseClickhouseData {
} }
void baseVertexSubscriber(){ void baseVertexSubscriber(){
initializeMap(vSubscriberMap); initializeMap(newVertexSubscriberMap);
LOG.info("SUBSCRIBER resultMap初始化完成"); LOG.info("SUBSCRIBER resultMap初始化完成");
String sql = getVertexSubscriberSql(); String sql = getVertexSubscriberSql();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@@ -90,7 +90,7 @@ public class BaseClickhouseData {
ResultSet resultSet = statement.executeQuery(sql); ResultSet resultSet = statement.executeQuery(sql);
while (resultSet.next()){ while (resultSet.next()){
BaseDocument newDoc = getVertexSubscriberDocument(resultSet); BaseDocument newDoc = getVertexSubscriberDocument(resultSet);
putMapByHashcode(newDoc,vSubscriberMap); putMapByHashcode(newDoc, newVertexSubscriberMap);
} }
long last = System.currentTimeMillis(); long last = System.currentTimeMillis();
LOG.info(sql + "\n读取clickhouse v_SUBSCRIBER时间" + (last - start)); LOG.info(sql + "\n读取clickhouse v_SUBSCRIBER时间" + (last - start));
@@ -103,7 +103,7 @@ public class BaseClickhouseData {
} }
void baseRelationshipSubscriberLocateIp(){ void baseRelationshipSubscriberLocateIp(){
initializeMap(eSubsciberLocateIpMap); initializeMap(newRelationSubsciberLocateIpMap);
LOG.info("R_LOCATE_SUBSCRIBER2IP"); LOG.info("R_LOCATE_SUBSCRIBER2IP");
String sql = getRelationshipSubsciberLocateIpSql(); String sql = getRelationshipSubsciberLocateIpSql();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@@ -113,7 +113,7 @@ public class BaseClickhouseData {
ResultSet resultSet = statement.executeQuery(sql); ResultSet resultSet = statement.executeQuery(sql);
while (resultSet.next()){ while (resultSet.next()){
BaseEdgeDocument newDoc = getRelationshipSubsciberLocateIpDocument(resultSet); BaseEdgeDocument newDoc = getRelationshipSubsciberLocateIpDocument(resultSet);
putMapByHashcode(newDoc,eSubsciberLocateIpMap); putMapByHashcode(newDoc, newRelationSubsciberLocateIpMap);
} }
long last = System.currentTimeMillis(); long last = System.currentTimeMillis();
LOG.info(sql + "\n读取clickhouse ESubsciberLocateIp时间" + (last - start)); LOG.info(sql + "\n读取clickhouse ESubsciberLocateIp时间" + (last - start));
@@ -126,7 +126,7 @@ public class BaseClickhouseData {
} }
void baseRelationshipFqdnAddressIp() { void baseRelationshipFqdnAddressIp() {
initializeMap(eFqdnAddressIpMap); initializeMap(newRelationFqdnAddressIpMap);
LOG.info("R_LOCATE_FQDN2IP resultMap初始化完成"); LOG.info("R_LOCATE_FQDN2IP resultMap初始化完成");
String sql = getRelationshipFqdnAddressIpSql(); String sql = getRelationshipFqdnAddressIpSql();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@@ -137,7 +137,7 @@ public class BaseClickhouseData {
while (resultSet.next()) { while (resultSet.next()) {
BaseEdgeDocument newDoc = getRelationFqdnAddressIpDocument(resultSet); BaseEdgeDocument newDoc = getRelationFqdnAddressIpDocument(resultSet);
putMapByHashcode(newDoc,eFqdnAddressIpMap); putMapByHashcode(newDoc, newRelationFqdnAddressIpMap);
} }
long last = System.currentTimeMillis(); long last = System.currentTimeMillis();
LOG.info(sql + "\n读取clickhouse EFqdnAddressIp时间" + (last - start)); LOG.info(sql + "\n读取clickhouse EFqdnAddressIp时间" + (last - start));
@@ -149,7 +149,7 @@ public class BaseClickhouseData {
} }
void baseRelationshipIpVisitFqdn() { void baseRelationshipIpVisitFqdn() {
initializeMap(eIpVisitFqdnMap); initializeMap(newRelationIpVisitFqdnMap);
LOG.info("R_VISIT_IP2FQDN resultMap初始化完成"); LOG.info("R_VISIT_IP2FQDN resultMap初始化完成");
String sql = getRelationshipIpVisitFqdnSql(); String sql = getRelationshipIpVisitFqdnSql();
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
@@ -159,7 +159,7 @@ public class BaseClickhouseData {
ResultSet resultSet = statement.executeQuery(sql); ResultSet resultSet = statement.executeQuery(sql);
while (resultSet.next()) { while (resultSet.next()) {
BaseEdgeDocument newDoc = getRelationIpVisitFqdnDocument(resultSet); BaseEdgeDocument newDoc = getRelationIpVisitFqdnDocument(resultSet);
putMapByHashcode(newDoc,eIpVisitFqdnMap); putMapByHashcode(newDoc, newRelationIpVisitFqdnMap);
} }
long last = System.currentTimeMillis(); long last = System.currentTimeMillis();
LOG.info(sql + "\n读取clickhouse EIpVisitFqdn时间" + (last - start)); LOG.info(sql + "\n读取clickhouse EIpVisitFqdn时间" + (last - start));

View File

@@ -1,12 +1,13 @@
package cn.ac.iie.dao; package cn.ac.iie.dao;
import cn.ac.iie.config.ApplicationConfig; import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.service.relationship.LocateFqdn2Ip; import cn.ac.iie.service.update.Document;
import cn.ac.iie.service.relationship.LocateSubscriber2Ip; import cn.ac.iie.service.update.relationship.LocateFqdn2Ip;
import cn.ac.iie.service.relationship.VisitIp2Fqdn; import cn.ac.iie.service.update.relationship.LocateSubscriber2Ip;
import cn.ac.iie.service.vertex.Fqdn; import cn.ac.iie.service.update.relationship.VisitIp2Fqdn;
import cn.ac.iie.service.vertex.Ip; import cn.ac.iie.service.update.vertex.Fqdn;
import cn.ac.iie.service.vertex.Subscriber; import cn.ac.iie.service.update.vertex.Ip;
import cn.ac.iie.service.update.vertex.Subscriber;
import cn.ac.iie.utils.ArangoDBConnect; import cn.ac.iie.utils.ArangoDBConnect;
import cn.ac.iie.utils.ExecutorThreadPool; import cn.ac.iie.utils.ExecutorThreadPool;
import com.arangodb.entity.BaseDocument; import com.arangodb.entity.BaseDocument;
@@ -18,8 +19,12 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import static cn.ac.iie.dao.BaseArangoData.*;
import static cn.ac.iie.dao.BaseClickhouseData.*;
/** /**
* 更新图数据库业务类 * 更新图数据库业务类
* @author wlh
*/ */
public class UpdateGraphData { public class UpdateGraphData {
private static final Logger LOG = LoggerFactory.getLogger(UpdateGraphData.class); private static final Logger LOG = LoggerFactory.getLogger(UpdateGraphData.class);
@@ -30,122 +35,161 @@ public class UpdateGraphData {
private CountDownLatch countDownLatch; private CountDownLatch countDownLatch;
public void updateArango(){ public void updateArango(){
long startC = System.currentTimeMillis(); long start = System.currentTimeMillis();
try { try {
BaseArangoData baseArangoData = new BaseArangoData();
baseArangoData.readHistoryData("FQDN", historyVertexFqdnMap,BaseDocument.class);
updateVertexFqdn(); updateVertexFqdn();
baseArangoData.readHistoryData("IP", historyVertexIpMap,BaseDocument.class);
updateVertexIp(); updateVertexIp();
updateRelationFqdnAddressIp();
updateRelationIpVisitFqdn(); baseArangoData.readHistoryData("SUBSCRIBER", historyVertexSubscriberMap,BaseDocument.class);
updateVertexSubscriber(); updateVertexSubscriber();
baseArangoData.readHistoryData("R_LOCATE_FQDN2IP", historyRelationFqdnAddressIpMap,BaseEdgeDocument.class);
updateRelationFqdnAddressIp();
baseArangoData.readHistoryData("R_VISIT_IP2FQDN", historyRelationIpVisitFqdnMap,BaseEdgeDocument.class);
updateRelationIpVisitFqdn();
baseArangoData.readHistoryData("R_LOCATE_SUBSCRIBER2IP", historyRelationSubsciberLocateIpMap,BaseEdgeDocument.class);
updateRelationshipSubsciberLocateIp(); updateRelationshipSubsciberLocateIp();
long last = System.currentTimeMillis();
LOG.info("更新图数据库时间共计:"+(last - start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally { }finally {
arangoManger.clean(); arangoManger.clean();
pool.shutdown();
} }
long lastC = System.currentTimeMillis();
LOG.info("更新ArangoDb时间"+(lastC - startC));
} }
private void updateVertexFqdn(){ private void updateVertexFqdn(){
baseClickhouseData.baseVertexFqdn();
try { try {
long start = System.currentTimeMillis();
baseClickhouseData.baseVertexFqdn();
countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER); countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
HashMap<String, ArrayList<BaseDocument>> stringArrayListHashMap = BaseClickhouseData.vFqdnMap.get(i); HashMap<String, ArrayList<BaseDocument>> tmpMap = newVertexFqdnMap.get(i);
LOG.info("vFqdn baseDocumentHashMap大小" + stringArrayListHashMap.size()); Document updateFqdn = new Fqdn(tmpMap, arangoManger, "FQDN", historyVertexFqdnMap,countDownLatch);
Fqdn updateFqdn = new Fqdn(stringArrayListHashMap, arangoManger, "FQDN", BaseArangoData.v_Fqdn_Map,countDownLatch); pool.executor(updateFqdn);
updateFqdn.run();
} }
countDownLatch.await(); countDownLatch.await();
LOG.info("---------FQDN vertex 更新完毕---------"); long last = System.currentTimeMillis();
LOG.info("FQDN vertex 更新完毕,共耗时:"+(last-start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally {
historyVertexFqdnMap.clear();
newVertexFqdnMap.clear();
} }
} }
private void updateVertexSubscriber(){ private void updateVertexSubscriber(){
baseClickhouseData.baseVertexSubscriber();
try { try {
long start = System.currentTimeMillis();
baseClickhouseData.baseVertexSubscriber();
countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER); countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
HashMap<String, ArrayList<BaseDocument>> stringArrayListHashMap = BaseClickhouseData.vSubscriberMap.get(i); HashMap<String, ArrayList<BaseDocument>> tmpMap = newVertexSubscriberMap.get(i);
LOG.info("vSubscriber baseDocumentHashMap大小" + stringArrayListHashMap.size()); Subscriber updateSubscriber = new Subscriber(tmpMap, arangoManger, "SUBSCRIBER", historyVertexSubscriberMap,countDownLatch);
Subscriber updateSubscriber = new Subscriber(stringArrayListHashMap, arangoManger, "SUBSCRIBER", BaseArangoData.v_Subscriber_Map,countDownLatch); pool.executor(updateSubscriber);
updateSubscriber.run();
} }
countDownLatch.await(); countDownLatch.await();
LOG.info("---------SUBSCRIBER vertex 更新完毕---------"); long last = System.currentTimeMillis();
LOG.info("SUBSCRIBER vertex 更新完毕,共耗时:"+(last-start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally {
historyVertexSubscriberMap.clear();
newVertexSubscriberMap.clear();
} }
} }
private void updateRelationshipSubsciberLocateIp(){ private void updateRelationshipSubsciberLocateIp(){
baseClickhouseData.baseRelationshipSubscriberLocateIp();
try { try {
long start = System.currentTimeMillis();
baseClickhouseData.baseRelationshipSubscriberLocateIp();
countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER); countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
HashMap<String, ArrayList<BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eSubsciberLocateIpMap.get(i); HashMap<String, ArrayList<BaseEdgeDocument>> tmpMap = newRelationSubsciberLocateIpMap.get(i);
LOG.info("ESubsciberLocateIp baseDocumentHashMap大小" + baseDocumentHashMap.size()); LocateSubscriber2Ip locateSubscriber2Ip = new LocateSubscriber2Ip(tmpMap, arangoManger, "R_LOCATE_SUBSCRIBER2IP", historyRelationSubsciberLocateIpMap, countDownLatch);
LocateSubscriber2Ip locateSubscriber2Ip = new LocateSubscriber2Ip(baseDocumentHashMap, arangoManger, "R_LOCATE_SUBSCRIBER2IP", BaseArangoData.e_Subsciber_Locate_Ip_Map, countDownLatch); pool.executor(locateSubscriber2Ip);
locateSubscriber2Ip.run();
} }
countDownLatch.await(); countDownLatch.await();
LOG.info("------------R_LOCATE_SUBSCRIBER2IP relationship 更新完毕----------------"); long last = System.currentTimeMillis();
LOG.info("R_LOCATE_SUBSCRIBER2IP relationship 更新完毕,共耗时:"+(last-start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally {
historyRelationSubsciberLocateIpMap.clear();
newRelationSubsciberLocateIpMap.clear();
} }
} }
private void updateVertexIp(){ private void updateVertexIp(){
baseClickhouseData.baseVertexIp();
try { try {
long start = System.currentTimeMillis();
baseClickhouseData.baseVertexIp();
countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER); countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
HashMap<String, ArrayList<BaseDocument>> stringArrayListHashMap = BaseClickhouseData.vIpMap.get(i); HashMap<String, ArrayList<BaseDocument>> tmpMap = newVertexIpMap.get(i);
LOG.info("vIp baseDocumentHashMap大小" + stringArrayListHashMap.size()); Ip updateIp = new Ip(tmpMap, arangoManger, "IP", historyVertexIpMap, countDownLatch);
Ip updateIp = new Ip(stringArrayListHashMap, arangoManger, "IP", BaseArangoData.v_Ip_Map, countDownLatch); pool.executor(updateIp);
updateIp.run();
} }
countDownLatch.await(); countDownLatch.await();
LOG.info("----------IP vertex 更新完毕-------------"); long last = System.currentTimeMillis();
LOG.info("IP vertex 更新完毕,共耗时:"+(last-start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally {
historyVertexIpMap.clear();
newVertexIpMap.clear();
} }
} }
private void updateRelationFqdnAddressIp(){ private void updateRelationFqdnAddressIp(){
baseClickhouseData.baseRelationshipFqdnAddressIp();
try { try {
long start = System.currentTimeMillis();
baseClickhouseData.baseRelationshipFqdnAddressIp();
countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER); countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
HashMap<String, ArrayList<BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eFqdnAddressIpMap.get(i); HashMap<String, ArrayList<BaseEdgeDocument>> tmpMap = newRelationFqdnAddressIpMap.get(i);
LOG.info("EFqdnAddressIp baseDocumentHashMap大小" + baseDocumentHashMap.size()); LocateFqdn2Ip fqdnAddressIp = new LocateFqdn2Ip(tmpMap, arangoManger, "R_LOCATE_FQDN2IP", historyRelationFqdnAddressIpMap, countDownLatch);
LocateFqdn2Ip fqdnAddressIp = new LocateFqdn2Ip(baseDocumentHashMap, arangoManger, "R_LOCATE_FQDN2IP", BaseArangoData.e_Fqdn_Address_Ip_Map, countDownLatch); pool.executor(fqdnAddressIp);
fqdnAddressIp.run();
} }
countDownLatch.await(); countDownLatch.await();
LOG.info("------------R_LOCATE_FQDN2IP relationship 更新完毕----------------"); long last = System.currentTimeMillis();
LOG.info("R_LOCATE_FQDN2IP relationship 更新完毕,共耗时:"+(last-start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally {
historyRelationFqdnAddressIpMap.clear();
newRelationFqdnAddressIpMap.clear();
} }
} }
private void updateRelationIpVisitFqdn(){ private void updateRelationIpVisitFqdn(){
baseClickhouseData.baseRelationshipIpVisitFqdn();
try { try {
long start = System.currentTimeMillis();
baseClickhouseData.baseRelationshipIpVisitFqdn();
countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER); countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) { for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
HashMap<String, ArrayList<BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eIpVisitFqdnMap.get(i); HashMap<String, ArrayList<BaseEdgeDocument>> tmpMap = newRelationIpVisitFqdnMap.get(i);
LOG.info("EIpVisitFqdn baseDocumentHashMap大小" + baseDocumentHashMap.size()); VisitIp2Fqdn ipVisitFqdn = new VisitIp2Fqdn(tmpMap,arangoManger,"R_VISIT_IP2FQDN", historyRelationIpVisitFqdnMap,countDownLatch);
VisitIp2Fqdn ipVisitFqdn = new VisitIp2Fqdn(baseDocumentHashMap,arangoManger,"R_VISIT_IP2FQDN",BaseArangoData.e_Ip_Visit_Fqdn_Map,countDownLatch); pool.executor(ipVisitFqdn);
ipVisitFqdn.run();
} }
countDownLatch.await(); countDownLatch.await();
LOG.info("---------------R_VISIT_IP2FQDN ralationship 更新完毕----------------"); long last = System.currentTimeMillis();
LOG.info("R_VISIT_IP2FQDN ralationship 更新完毕,共耗时:"+(last-start));
}catch (Exception e){ }catch (Exception e){
e.printStackTrace(); e.printStackTrace();
}finally {
historyRelationIpVisitFqdnMap.clear();
newRelationIpVisitFqdnMap.clear();
} }
} }

View File

@@ -29,7 +29,7 @@ public class UpdateEFqdnAddressIp implements Runnable {
BaseEdgeDocument newEdgeDocument = documentHashMap.getOrDefault(key, null); BaseEdgeDocument newEdgeDocument = documentHashMap.getOrDefault(key, null);
if (newEdgeDocument != null){ if (newEdgeDocument != null){
i += 1; i += 1;
BaseEdgeDocument edgeDocument = BaseArangoData.e_Fqdn_Address_Ip_Map.getOrDefault(key, null); BaseEdgeDocument edgeDocument = BaseArangoData.historyRelationFqdnAddressIpMap.getOrDefault(key, null);
if (edgeDocument != null){ if (edgeDocument != null){
Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME"); Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME");
long countTotal = Long.parseLong(newEdgeDocument.getAttribute("COUNT_TOTAL").toString()); long countTotal = Long.parseLong(newEdgeDocument.getAttribute("COUNT_TOTAL").toString());

View File

@@ -29,7 +29,7 @@ public class UpdateEIpVisitFqdn implements Runnable {
BaseEdgeDocument newEdgeDocument = documentHashMap.getOrDefault(key, null); BaseEdgeDocument newEdgeDocument = documentHashMap.getOrDefault(key, null);
if (newEdgeDocument != null){ if (newEdgeDocument != null){
i += 1; i += 1;
BaseEdgeDocument edgeDocument = BaseArangoData.e_Ip_Visit_Fqdn_Map.getOrDefault(key, null); BaseEdgeDocument edgeDocument = BaseArangoData.historyRelationIpVisitFqdnMap.getOrDefault(key, null);
if (edgeDocument != null){ if (edgeDocument != null){
Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME"); Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME");
long countTotal = Long.parseLong(newEdgeDocument.getAttribute("COUNT_TOTAL").toString()); long countTotal = Long.parseLong(newEdgeDocument.getAttribute("COUNT_TOTAL").toString());

View File

@@ -33,7 +33,7 @@ public class UpdateVFqdn implements Runnable{
if (newDocument != null){ if (newDocument != null){
i += 1; i += 1;
BaseDocument document = BaseArangoData.v_Fqdn_Map.getOrDefault(key, null); BaseDocument document = BaseArangoData.historyVertexFqdnMap.getOrDefault(key, null);
if (document != null){ if (document != null){
Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME"); Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
long fqdnCountTotal = Long.parseLong(newDocument.getAttribute("FQDN_COUNT_TOTAL").toString()); long fqdnCountTotal = Long.parseLong(newDocument.getAttribute("FQDN_COUNT_TOTAL").toString());

View File

@@ -31,7 +31,7 @@ public class UpdateVIP implements Runnable {
BaseDocument newDocument = documentHashMap.getOrDefault(key, null); BaseDocument newDocument = documentHashMap.getOrDefault(key, null);
if (newDocument != null){ if (newDocument != null){
i += 1; i += 1;
BaseDocument document = BaseArangoData.v_Ip_Map.getOrDefault(key, null); BaseDocument document = BaseArangoData.historyVertexIpMap.getOrDefault(key, null);
if (document != null){ if (document != null){
Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME"); Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
long ipCountTotal = Long.parseLong(newDocument.getAttribute("IP_COUNT_TOTAL").toString()); long ipCountTotal = Long.parseLong(newDocument.getAttribute("IP_COUNT_TOTAL").toString());

View File

@@ -9,6 +9,7 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/** /**
* @author wlh * @author wlh
@@ -22,17 +23,20 @@ public class ReadHistoryArangoData<T extends BaseDocument> extends Thread {
private ConcurrentHashMap<String, T> map; private ConcurrentHashMap<String, T> map;
private Class<T> type; private Class<T> type;
private String table; private String table;
private CountDownLatch countDownLatch;
public ReadHistoryArangoData(ArangoDBConnect arangoConnect, String query, ConcurrentHashMap<String, T> map, Class<T> type, String table) { public ReadHistoryArangoData(ArangoDBConnect arangoConnect, String query, ConcurrentHashMap<String, T> map, Class<T> type, String table,CountDownLatch countDownLatch) {
this.arangoConnect = arangoConnect; this.arangoConnect = arangoConnect;
this.query = query; this.query = query;
this.map = map; this.map = map;
this.type = type; this.type = type;
this.table = table; this.table = table;
this.countDownLatch = countDownLatch;
} }
@Override @Override
public void run() { public void run() {
try {
long s = System.currentTimeMillis(); long s = System.currentTimeMillis();
ArangoCursor<T> docs = arangoConnect.executorQuery(query, type); ArangoCursor<T> docs = arangoConnect.executorQuery(query, type);
if (docs != null) { if (docs != null) {
@@ -53,7 +57,12 @@ public class ReadHistoryArangoData<T extends BaseDocument> extends Thread {
i++; i++;
} }
long l = System.currentTimeMillis(); long l = System.currentTimeMillis();
LOG.info(query + "\n处理数据" + i + "条,运行时间:" + (l - s)); LOG.info(query + "\n读取数据" + i + "条,运行时间:" + (l - s));
}
}catch (Exception e){
e.printStackTrace();
}finally {
countDownLatch.countDown();
} }
} }

View File

@@ -39,6 +39,7 @@ public class Document<T extends BaseDocument> extends Thread{
@Override @Override
public void run() { public void run() {
LOG.info(collectionName+" new Map 大小:"+newDocumentMap.size());
Set<String> keySet = newDocumentMap.keySet(); Set<String> keySet = newDocumentMap.keySet();
ArrayList<T> resultDocumentList = new ArrayList<>(); ArrayList<T> resultDocumentList = new ArrayList<>();
int i = 0; int i = 0;

View File

@@ -1,4 +1,4 @@
package cn.ac.iie.service.relationship; package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.read.ReadClickhouseData; import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.service.update.Relationship; import cn.ac.iie.service.update.Relationship;

View File

@@ -1,4 +1,4 @@
package cn.ac.iie.service.relationship; package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.update.Relationship; import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect; import cn.ac.iie.utils.ArangoDBConnect;

View File

@@ -1,4 +1,4 @@
package cn.ac.iie.service.relationship; package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.update.Relationship; import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect; import cn.ac.iie.utils.ArangoDBConnect;

View File

@@ -1,21 +0,0 @@
package cn.ac.iie.service.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Fqdn extends Vertex {
public Fqdn(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
}
}

View File

@@ -1,79 +0,0 @@
package cn.ac.iie.service.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Vertex handler for IP documents.
 *
 * <p>Extends the generic {@link Vertex} merge/update flow with four per-direction
 * traffic counters (client/server session counts and byte sums). On update the
 * counters of the new document are added onto the history document; on merge a
 * missing or zero counter may be filled in from the already-merged properties.
 */
public class Ip extends Vertex {

    /** Counter attributes maintained for every IP vertex; shared by merge and update paths. */
    private static final String[] TRAFFIC_PROPERTIES = {
            "CLIENT_SESSION_COUNT",
            "CLIENT_BYTES_SUM",
            "SERVER_SESSION_COUNT",
            "SERVER_BYTES_SUM"
    };

    /**
     * @param newDocumentHashMap  freshly-read documents, grouped by document key
     * @param arangoManger        ArangoDB connection wrapper used for persistence
     * @param collectionName      target vertex collection name
     * @param historyDocumentMap  previously-loaded documents keyed by document key
     * @param countDownLatch      latch released when this handler finishes
     */
    public Ip(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
              ArangoDBConnect arangoManger,
              String collectionName,
              ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
              CountDownLatch countDownLatch) {
        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
    }

    /** Base update plus accumulation of the IP traffic counters into the history document. */
    @Override
    protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
        super.updateFunction(newDocument, historyDocument);
        for (String property : TRAFFIC_PROPERTIES) {
            addProperty(newDocument, historyDocument, property);
        }
    }

    /** Base merge plus defaulting/back-filling of the IP traffic counters. */
    @Override
    protected void mergeFunction(Map<String, Object> properties, BaseDocument doc) {
        super.mergeFunction(properties, doc);
        Map<String, Object> mergeProperties = doc.getProperties();
        for (String property : TRAFFIC_PROPERTIES) {
            checkIpTypeProperty(properties, mergeProperties, property);
        }
    }

    /**
     * Ensures {@code properties} carries {@code property}: a missing counter is
     * defaulted to {@code 0L}, and a zero counter is replaced by a non-zero value
     * from {@code mergeProperties} when one exists.
     *
     * <p>The original implementation recursed into itself after inserting the
     * default; since the recursion only re-ran the zero-check once, it is
     * flattened here into two sequential steps with identical semantics.
     */
    private void checkIpTypeProperty(Map<String, Object> properties,
                                     Map<String, Object> mergeProperties,
                                     String property) {
        try {
            if (!properties.containsKey(property)) {
                properties.put(property, 0L);
            }
            // NOTE(review): a null value under the key would NPE on toString()
            // and be swallowed by the catch below — confirm values are never null.
            if ("0".equals(properties.get(property).toString())
                    && mergeProperties.containsKey(property)
                    && !"0".equals(mergeProperties.get(property).toString())) {
                properties.put(property, Long.parseLong(mergeProperties.get(property).toString()));
            }
        } catch (Exception e) {
            // Best-effort: a malformed counter must not abort the whole merge.
            e.printStackTrace();
        }
    }

    /**
     * Adds the new document's counter onto the history document's counter.
     *
     * <p>NOTE(review): when the history document lacks the counter it is
     * initialised to {@code 0L} and the new document's value is dropped —
     * preserved as-is, but verify this is intended rather than seeding with
     * the new value.
     */
    private void addProperty(BaseDocument newDocument, BaseDocument historyDocument, String property) {
        try {
            if (historyDocument.getProperties().containsKey(property)) {
                long newProperty = Long.parseLong(newDocument.getAttribute(property).toString());
                long hisProperty = Long.parseLong(historyDocument.getAttribute(property).toString());
                historyDocument.updateAttribute(property, newProperty + hisProperty);
            } else {
                historyDocument.addAttribute(property, 0L);
            }
        } catch (Exception e) {
            // Swallows NPE (missing attribute on newDocument) and NumberFormatException alike.
            e.printStackTrace();
        }
    }
}

View File

@@ -1,21 +0,0 @@
package cn.ac.iie.service.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Vertex handler for subscriber documents.
 *
 * <p>Contributes no logic beyond the {@link Vertex} base class; it exists so
 * subscriber processing is a named, independently-dispatchable vertex kind
 * alongside Fqdn and Ip.
 */
public class Subscriber extends Vertex {

    /**
     * @param newDocumentHashMap  freshly-read documents, grouped by document key
     * @param arangoManger        ArangoDB connection wrapper used for persistence
     * @param collectionName      target vertex collection name
     * @param historyDocumentMap  previously-loaded documents keyed by document key
     * @param countDownLatch      latch released when this handler finishes
     */
    public Subscriber(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
                      ArangoDBConnect arangoManger,
                      String collectionName,
                      ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
                      CountDownLatch countDownLatch) {
        // Pure pass-through; all merge/update logic lives in Vertex.
        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
    }
}

View File

@@ -1,25 +1,18 @@
package cn.ac.iie.test; package cn.ac.iie.test;
import cn.ac.iie.dao.BaseArangoData;
import cn.ac.iie.dao.UpdateGraphData; import cn.ac.iie.dao.UpdateGraphData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* iplearning程序入口
* @author wlh
*/
public class IpLearningApplicationTest { public class IpLearningApplicationTest {
private static final Logger LOG = LoggerFactory.getLogger(IpLearningApplicationTest.class);
public static void main(String[] args) { public static void main(String[] args) {
long start = System.currentTimeMillis();
LOG.info("Ip Learning Application开始运行");
BaseArangoData baseArangoData = new BaseArangoData();
baseArangoData.baseDocumentDataMap();
LOG.info("历史数据读取完成,开始更新数据");
UpdateGraphData updateGraphData = new UpdateGraphData(); UpdateGraphData updateGraphData = new UpdateGraphData();
updateGraphData.updateArango(); updateGraphData.updateArango();
long last = System.currentTimeMillis();
LOG.info("共计运行时间:"+(last - start));
} }
} }

View File

@@ -20,10 +20,10 @@ public class ArangoDBConnect {
private static ArangoDB arangoDB = null; private static ArangoDB arangoDB = null;
private static ArangoDBConnect conn = null; private static ArangoDBConnect conn = null;
static { static {
getArangoDB(); getArangoDatabase();
} }
private static void getArangoDB(){ private static void getArangoDatabase(){
arangoDB = new ArangoDB.Builder() arangoDB = new ArangoDB.Builder()
.maxConnections(ApplicationConfig.THREAD_POOL_NUMBER) .maxConnections(ApplicationConfig.THREAD_POOL_NUMBER)
.host(ApplicationConfig.ARANGODB_HOST, ApplicationConfig.ARANGODB_PORT) .host(ApplicationConfig.ARANGODB_HOST, ApplicationConfig.ARANGODB_PORT)
@@ -39,7 +39,7 @@ public class ArangoDBConnect {
return conn; return conn;
} }
public ArangoDatabase getDatabase(){ private ArangoDatabase getDatabase(){
return arangoDB.db(ApplicationConfig.ARANGODB_DB_NAME); return arangoDB.db(ApplicationConfig.ARANGODB_DB_NAME);
} }

View File

@@ -1,9 +1,9 @@
#arangoDB参数配置 #arangoDB参数配置
arangoDB.host=192.168.40.127 arangoDB.host=192.168.40.182
arangoDB.port=8529 arangoDB.port=8529
arangoDB.user=root arangoDB.user=root
arangoDB.password=111111 arangoDB.password=111111
arangoDB.DB.name=ip-learning-test arangoDB.DB.name=ip-learning-test-0
arangoDB.batch=100000 arangoDB.batch=100000
arangoDB.ttl=3600 arangoDB.ttl=3600