Add a PROTOCOL_TYPE field to support the computation, revise the counting logic, and maintain per-protocol occurrence counts over the last 24 hours.
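The core of this change: the relationship edges now carry a PROTOCOL_TYPE attribute, a per-protocol running total (HTTP_CNT_TOTAL, TLS_CNT_TOTAL, DNS_CNT_TOTAL) and a 24-slot array of hourly counts (*_CNT_RECENT), where slot 0 is the current hour and each hourly pass shifts the array one slot so the count from 24 hours ago drops off the end. A minimal standalone sketch of that bookkeeping, assuming an hourly batch cadence (the class and method names below are illustrative, not part of this commit):

    import java.util.Arrays;

    public class HourlyProtocolCounter {
        // One slot per hour: index 0 is the current hour, index 23 the oldest.
        private final long[] recent = new long[24];

        // Run once per hourly batch, before merging in fresh counts:
        // every slot ages by one hour and the 24-hour-old slot is discarded.
        void shiftOneHour() {
            // arraycopy tolerates the overlap: it behaves as if it copied
            // through a temporary buffer.
            System.arraycopy(recent, 0, recent, 1, recent.length - 1);
            recent[0] = 0L;
        }

        // Fold the counts observed during the current hour into slot 0.
        void addCurrentHour(long count) {
            recent[0] += count;
        }

        // Rolling total across the last 24 hours.
        long totalLast24h() {
            return Arrays.stream(recent).sum();
        }
    }

The same shift appears in ReadHistoryArangoData.updateProtocolDocument below; checkSchemaProperty in ReadClickhouseData seeds slot 0 for newly read rows.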
BaseArangoData.java
@@ -10,8 +10,6 @@ import com.arangodb.entity.BaseEdgeDocument;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.Enumeration;
 import java.util.concurrent.ConcurrentHashMap;
 
 /**
@@ -20,25 +18,25 @@ import java.util.concurrent.ConcurrentHashMap;
 public class BaseArangoData {
     private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
 
-    static ConcurrentHashMap<String, BaseEdgeDocument> v_Fqdn_Map = new ConcurrentHashMap<>();
-    static ConcurrentHashMap<String, BaseEdgeDocument> v_Ip_Map = new ConcurrentHashMap<>();
-    static ConcurrentHashMap<String, BaseEdgeDocument> v_Subscriber_Map = new ConcurrentHashMap<>();
+    static ConcurrentHashMap<String, BaseDocument> v_Fqdn_Map = new ConcurrentHashMap<>();
+    static ConcurrentHashMap<String, BaseDocument> v_Ip_Map = new ConcurrentHashMap<>();
+    static ConcurrentHashMap<String, BaseDocument> v_Subscriber_Map = new ConcurrentHashMap<>();
     static ConcurrentHashMap<String, BaseEdgeDocument> e_Fqdn_Address_Ip_Map = new ConcurrentHashMap<>();
     static ConcurrentHashMap<String, BaseEdgeDocument> e_Ip_Visit_Fqdn_Map = new ConcurrentHashMap<>();
     static ConcurrentHashMap<String, BaseEdgeDocument> e_Subsciber_Locate_Ip_Map = new ConcurrentHashMap<>();
 
-    private static final ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
+    private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
 
-    private static final ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
+    private static ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
 
     public void baseDocumentDataMap(){
         long startA = System.currentTimeMillis();
-        readHistoryData("FQDN", v_Fqdn_Map);
-        readHistoryData("IP", v_Ip_Map);
-        readHistoryData("SUBSCRIBER",v_Subscriber_Map);
-        readHistoryData("R_LOCATE_FQDN2IP", e_Fqdn_Address_Ip_Map);
-//        readHistoryData("R_VISIT_IP2FQDN", e_Ip_Visit_Fqdn_Map);
-        readHistoryData("R_LOCATE_SUBSCRIBER2IP",e_Subsciber_Locate_Ip_Map);
+        readHistoryData("FQDN", v_Fqdn_Map,BaseDocument.class);
+        readHistoryData("IP", v_Ip_Map,BaseDocument.class);
+        readHistoryData("SUBSCRIBER",v_Subscriber_Map,BaseDocument.class);
+        readHistoryData("R_LOCATE_FQDN2IP", e_Fqdn_Address_Ip_Map,BaseEdgeDocument.class);
+//        readHistoryData("R_VISIT_IP2FQDN", e_Ip_Visit_Fqdn_Map,BaseEdgeDocument.class);
+        readHistoryData("R_LOCATE_SUBSCRIBER2IP",e_Subsciber_Locate_Ip_Map,BaseEdgeDocument.class);
         threadPool.shutdown();
         threadPool.awaitThreadTask();
         LOG.info("v_Fqdn_Map大小:"+v_Fqdn_Map.size());
@@ -51,29 +49,12 @@
         LOG.info("读取ArangoDb时间:"+(lastA - startA));
     }
 
-    public static void main(String[] args) {
-        new BaseArangoData().readHistoryData("IP", v_Ip_Map);
-        threadPool.shutdown();
-        threadPool.awaitThreadTask();
-        ArrayList<BaseEdgeDocument> baseEdgeDocuments = new ArrayList<>();
-        Enumeration<String> keys = v_Ip_Map.keys();
-        while (keys.hasMoreElements()){
-            String key = keys.nextElement();
-            BaseEdgeDocument baseEdgeDocument = v_Ip_Map.get(key);
-            baseEdgeDocument.addAttribute("COMMON_LINK_INFO","");
-            baseEdgeDocuments.add(baseEdgeDocument);
-        }
-        arangoDBConnect.overwrite(baseEdgeDocuments,"IP");
-        arangoDBConnect.clean();
-
-    }
-
-    private void readHistoryData(String table, ConcurrentHashMap<String, BaseEdgeDocument> map){
+    private <T extends BaseDocument> void readHistoryData(String collectionName, ConcurrentHashMap<String, T> map, Class<T> type){
         try {
-            long[] timeRange = getTimeRange(table);
+            long[] timeRange = getTimeRange(collectionName);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
-                String sql = getQuerySql(timeRange, i, table);
-                ReadHistoryArangoData readHistoryArangoData = new ReadHistoryArangoData(arangoDBConnect, sql, map);
+                String sql = getQuerySql(timeRange, i, collectionName);
+                ReadHistoryArangoData<T> readHistoryArangoData = new ReadHistoryArangoData<>(arangoDBConnect, sql, map,type,collectionName);
                 threadPool.executor(readHistoryArangoData);
             }
         }catch (Exception e){
BaseClickhouseData.java
@@ -12,31 +12,31 @@ import java.sql.ResultSet;
 import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Map;
 
 import static cn.ac.iie.service.read.ReadClickhouseData.*;
 
 /**
  * 读取clickhouse数据,封装到map
+ * @author wlh
  */
 public class BaseClickhouseData {
     private static final Logger LOG = LoggerFactory.getLogger(BaseClickhouseData.class);
 
-    private static final ClickhouseConnect manger = ClickhouseConnect.getInstance();
+    private static ClickhouseConnect manger = ClickhouseConnect.getInstance();
     static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> vFqdnMap = new HashMap<>();
     static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> vIpMap = new HashMap<>();
-    static HashMap<Integer,HashMap<String,ArrayList<BaseDocument>>> vSubscriberMap = new HashMap<>();
-    static HashMap<Integer, HashMap<String, HashMap<String, BaseEdgeDocument>>> eFqdnAddressIpMap = new HashMap<>();
-    static HashMap<Integer, HashMap<String, HashMap<String, BaseEdgeDocument>>> eIpVisitFqdnMap = new HashMap<>();
-    static HashMap<Integer, HashMap<String, HashMap<String, BaseEdgeDocument>>> eSubsciberLocateIpMap = new HashMap<>();
+    static HashMap<Integer, HashMap<String,ArrayList<BaseDocument>>> vSubscriberMap = new HashMap<>();
+    static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> eFqdnAddressIpMap = new HashMap<>();
+    static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> eIpVisitFqdnMap = new HashMap<>();
+    static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> eSubsciberLocateIpMap = new HashMap<>();
 
     private DruidPooledConnection connection;
     private Statement statement;
 
-    void BaseVFqdn() {
+    void baseVertexFqdn() {
         initializeMap(vFqdnMap);
         LOG.info("FQDN resultMap初始化完成");
-        String sql = getVFqdnSql();
+        String sql = getVertexFqdnSql();
         long start = System.currentTimeMillis();
         try {
             connection = manger.getConnection();
@@ -45,12 +45,7 @@ public class BaseClickhouseData {
             while (resultSet.next()) {
                 BaseDocument newDoc = getVertexFqdnDocument(resultSet);
                 if (newDoc != null) {
-                    String fqdnName = newDoc.getKey();
-                    int i = Math.abs(fqdnName.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
-                    HashMap<String, ArrayList<BaseDocument>> documentHashMap = vFqdnMap.getOrDefault(i, new HashMap<>());
-                    ArrayList<BaseDocument> documentArrayList = documentHashMap.getOrDefault(fqdnName, new ArrayList<>());
-                    documentArrayList.add(newDoc);
-                    documentHashMap.put(fqdnName,documentArrayList);
+                    putMapByHashcode(newDoc,vFqdnMap);
                 }
             }
             long last = System.currentTimeMillis();
@@ -62,10 +57,10 @@ public class BaseClickhouseData {
         }
     }
 
-    void BaseVIp() {
+    void baseVertexIp() {
         initializeMap(vIpMap);
         LOG.info("IP resultMap初始化完成");
-        String sql = getVIpSql();
+        String sql = getVertexIpSql();
         long start = System.currentTimeMillis();
         try {
             connection = manger.getConnection();
@@ -73,12 +68,7 @@ public class BaseClickhouseData {
             ResultSet resultSet = statement.executeQuery(sql);
             while (resultSet.next()) {
                 BaseDocument newDoc = getVertexIpDocument(resultSet);
-                String ip = newDoc.getKey();
-                int i = Math.abs(ip.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
-                HashMap<String, ArrayList<BaseDocument>> documentHashMap = vIpMap.getOrDefault(i, new HashMap<>());
-                ArrayList<BaseDocument> documentArrayList = documentHashMap.getOrDefault(ip, new ArrayList<>());
-                documentArrayList.add(newDoc);
-                documentHashMap.put(ip,documentArrayList);
+                putMapByHashcode(newDoc,vIpMap);
             }
             long last = System.currentTimeMillis();
             LOG.info(sql + "\n读取clickhouse v_IP时间:" + (last - start));
@@ -89,7 +79,7 @@ public class BaseClickhouseData {
         }
     }
 
-    void BaseVertexSubscriber(){
+    void baseVertexSubscriber(){
         initializeMap(vSubscriberMap);
         LOG.info("SUBSCRIBER resultMap初始化完成");
         String sql = getVertexSubscriberSql();
@@ -100,12 +90,7 @@ public class BaseClickhouseData {
             ResultSet resultSet = statement.executeQuery(sql);
             while (resultSet.next()){
                 BaseDocument newDoc = getVertexSubscriberDocument(resultSet);
-                String key = newDoc.getKey();
-                int i = Math.abs(key.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
-                HashMap<String, ArrayList<BaseDocument>> documentHashMap = vSubscriberMap.getOrDefault(i, new HashMap<>());
-                ArrayList<BaseDocument> documentArrayList = documentHashMap.getOrDefault(key, new ArrayList<>());
-                documentArrayList.add(newDoc);
-                documentHashMap.put(key,documentArrayList);
+                putMapByHashcode(newDoc,vSubscriberMap);
             }
             long last = System.currentTimeMillis();
             LOG.info(sql + "\n读取clickhouse v_SUBSCRIBER时间:" + (last - start));
@@ -117,7 +102,7 @@ public class BaseClickhouseData {
         }
     }
 
-    void BaseRelationshipSubscriberLocateIp(){
+    void baseRelationshipSubscriberLocateIp(){
         initializeMap(eSubsciberLocateIpMap);
         LOG.info("R_LOCATE_SUBSCRIBER2IP");
         String sql = getRelationshipSubsciberLocateIpSql();
@@ -128,8 +113,7 @@ public class BaseClickhouseData {
             ResultSet resultSet = statement.executeQuery(sql);
             while (resultSet.next()){
                 BaseEdgeDocument newDoc = getRelationshipSubsciberLocateIpDocument(resultSet);
-                String key = newDoc.getKey();
-                putMapByHashcode(newDoc, eSubsciberLocateIpMap,key);
+                putMapByHashcode(newDoc,eSubsciberLocateIpMap);
             }
             long last = System.currentTimeMillis();
             LOG.info(sql + "\n读取clickhouse ESubsciberLocateIp时间:" + (last - start));
@@ -141,10 +125,10 @@ public class BaseClickhouseData {
         }
     }
 
-    void BaseEFqdnAddressIp() {
+    void baseRelationshipFqdnAddressIp() {
         initializeMap(eFqdnAddressIpMap);
         LOG.info("R_LOCATE_FQDN2IP resultMap初始化完成");
-        String sql = getEFqdnAddressIpSql();
+        String sql = getRelationshipFqdnAddressIpSql();
         long start = System.currentTimeMillis();
         try {
             connection = manger.getConnection();
@@ -153,8 +137,7 @@ public class BaseClickhouseData {
 
             while (resultSet.next()) {
                 BaseEdgeDocument newDoc = getRelationFqdnAddressIpDocument(resultSet);
-                String commonSchemaType = resultSet.getString("common_schema_type");
-                putMapByHashcode(newDoc, eFqdnAddressIpMap,commonSchemaType);
+                putMapByHashcode(newDoc,eFqdnAddressIpMap);
             }
             long last = System.currentTimeMillis();
             LOG.info(sql + "\n读取clickhouse EFqdnAddressIp时间:" + (last - start));
@@ -165,10 +148,10 @@ public class BaseClickhouseData {
         }
     }
 
-    void BaseEIpVisitFqdn() {
+    void baseRelationshipIpVisitFqdn() {
         initializeMap(eIpVisitFqdnMap);
         LOG.info("R_VISIT_IP2FQDN resultMap初始化完成");
-        String sql = getEIpVisitFqdnSql();
+        String sql = getRelationshipIpVisitFqdnSql();
         long start = System.currentTimeMillis();
         try {
             connection = manger.getConnection();
@@ -176,8 +159,7 @@ public class BaseClickhouseData {
             ResultSet resultSet = statement.executeQuery(sql);
             while (resultSet.next()) {
                 BaseEdgeDocument newDoc = getRelationIpVisitFqdnDocument(resultSet);
-                String commonSchemaType = resultSet.getString("common_schema_type");
-                putMapByHashcode(newDoc, eIpVisitFqdnMap,commonSchemaType);
+                putMapByHashcode(newDoc,eIpVisitFqdnMap);
             }
             long last = System.currentTimeMillis();
             LOG.info(sql + "\n读取clickhouse EIpVisitFqdn时间:" + (last - start));
@@ -188,7 +170,7 @@ public class BaseClickhouseData {
         }
     }
 
-    private void initializeMap(Map map){
+    private <T extends BaseDocument> void initializeMap(HashMap<Integer, HashMap<String,ArrayList<T>>> map){
        try {
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
                 map.put(i, new HashMap<>());
UpdateGraphData.java
@@ -1,12 +1,8 @@
 package cn.ac.iie.dao;
 
 import cn.ac.iie.config.ApplicationConfig;
-import cn.ac.iie.service.relationship.LocateFqdn2Ip;
-import cn.ac.iie.service.relationship.LocateSubscriber2Ip;
-import cn.ac.iie.service.relationship.VisitIp2Fqdn;
-import cn.ac.iie.service.vertex.Fqdn;
-import cn.ac.iie.service.vertex.Ip;
-import cn.ac.iie.service.vertex.Subscriber;
+import cn.ac.iie.service.update.vertex.*;
+import cn.ac.iie.service.update.relationship.*;
 import cn.ac.iie.utils.ArangoDBConnect;
 import cn.ac.iie.utils.ExecutorThreadPool;
 import com.arangodb.entity.BaseDocument;
@@ -26,28 +22,17 @@ public class UpdateGraphData {
     private static ExecutorThreadPool pool = ExecutorThreadPool.getInstance();
     private static ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();
 
+    private static BaseClickhouseData baseClickhouseData = new BaseClickhouseData();
     private CountDownLatch countDownLatch;
 
     public void updateArango(){
         long startC = System.currentTimeMillis();
         try {
-            BaseClickhouseData baseClickhouseData = new BaseClickhouseData();
-            baseClickhouseData.BaseVFqdn();
             updateVertexFqdn();
-
-            baseClickhouseData.BaseVIp();
             updateVertexIp();
-
-            baseClickhouseData.BaseEFqdnAddressIp();
             updateRelationFqdnAddressIp();
-
-            // baseClickhouseData.BaseEIpVisitFqdn();
             // updateRelationIpVisitFqdn();
-
-            baseClickhouseData.BaseVertexSubscriber();
             updateVertexSubscriber();
-
-            baseClickhouseData.BaseRelationshipSubscriberLocateIp();
             updateRelationshipSubsciberLocateIp();
         }catch (Exception e){
             e.printStackTrace();
@@ -59,13 +44,14 @@
     }
 
     private void updateVertexFqdn(){
+        baseClickhouseData.baseVertexFqdn();
         try {
             countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
                 HashMap<String, ArrayList<BaseDocument>> stringArrayListHashMap = BaseClickhouseData.vFqdnMap.get(i);
                 LOG.info("vFqdn baseDocumentHashMap大小:" + stringArrayListHashMap.size());
-                Fqdn updateVFqdn = new Fqdn(stringArrayListHashMap, arangoManger, "FQDN", BaseArangoData.v_Fqdn_Map,countDownLatch);
-                updateVFqdn.run();
+                Fqdn updateFqdn = new Fqdn(stringArrayListHashMap, arangoManger, "FQDN", BaseArangoData.v_Fqdn_Map,countDownLatch);
+                updateFqdn.run();
             }
             countDownLatch.await();
             LOG.info("---------FQDN vertex 更新完毕---------");
@@ -75,13 +61,14 @@
     }
 
     private void updateVertexSubscriber(){
+        baseClickhouseData.baseVertexSubscriber();
         try {
             countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
                 HashMap<String, ArrayList<BaseDocument>> stringArrayListHashMap = BaseClickhouseData.vSubscriberMap.get(i);
                 LOG.info("vSubscriber baseDocumentHashMap大小:" + stringArrayListHashMap.size());
-                Subscriber updateVSubscriber = new Subscriber(stringArrayListHashMap, arangoManger, "SUBSCRIBER", BaseArangoData.v_Subscriber_Map,countDownLatch);
-                updateVSubscriber.run();
+                Subscriber updateSubscriber = new Subscriber(stringArrayListHashMap, arangoManger, "SUBSCRIBER", BaseArangoData.v_Subscriber_Map,countDownLatch);
+                updateSubscriber.run();
             }
             countDownLatch.await();
             LOG.info("---------SUBSCRIBER vertex 更新完毕---------");
@@ -91,13 +78,14 @@
     }
 
     private void updateRelationshipSubsciberLocateIp(){
+        baseClickhouseData.baseRelationshipSubscriberLocateIp();
         try {
             countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
-                HashMap<String, HashMap<String, BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eSubsciberLocateIpMap.get(i);
+                HashMap<String, ArrayList<BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eSubsciberLocateIpMap.get(i);
                 LOG.info("ESubsciberLocateIp baseDocumentHashMap大小:" + baseDocumentHashMap.size());
-                LocateSubscriber2Ip rLocateSubscriber2IP = new LocateSubscriber2Ip(baseDocumentHashMap, arangoManger, "R_LOCATE_SUBSCRIBER2IP", BaseArangoData.e_Subsciber_Locate_Ip_Map, countDownLatch);
-                rLocateSubscriber2IP.run();
+                LocateSubscriber2Ip locateSubscriber2Ip = new LocateSubscriber2Ip(baseDocumentHashMap, arangoManger, "R_LOCATE_SUBSCRIBER2IP", BaseArangoData.e_Subsciber_Locate_Ip_Map, countDownLatch);
+                locateSubscriber2Ip.run();
             }
             countDownLatch.await();
             LOG.info("------------R_LOCATE_SUBSCRIBER2IP relationship 更新完毕----------------");
@@ -107,13 +95,14 @@
     }
 
     private void updateVertexIp(){
+        baseClickhouseData.baseVertexIp();
         try {
             countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
                 HashMap<String, ArrayList<BaseDocument>> stringArrayListHashMap = BaseClickhouseData.vIpMap.get(i);
                 LOG.info("vIp baseDocumentHashMap大小:" + stringArrayListHashMap.size());
-                Ip updateVIp = new Ip(stringArrayListHashMap, arangoManger, "IP", BaseArangoData.v_Ip_Map, countDownLatch);
-                updateVIp.run();
+                Ip updateIp = new Ip(stringArrayListHashMap, arangoManger, "IP", BaseArangoData.v_Ip_Map, countDownLatch);
+                updateIp.run();
             }
             countDownLatch.await();
             LOG.info("----------IP vertex 更新完毕-------------");
@@ -123,13 +112,14 @@
     }
 
     private void updateRelationFqdnAddressIp(){
+        baseClickhouseData.baseRelationshipFqdnAddressIp();
         try {
             countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
-                HashMap<String, HashMap<String, BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eFqdnAddressIpMap.get(i);
+                HashMap<String, ArrayList<BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eFqdnAddressIpMap.get(i);
                 LOG.info("EFqdnAddressIp baseDocumentHashMap大小:" + baseDocumentHashMap.size());
-                LocateFqdn2Ip updateEFqdnAddressIp = new LocateFqdn2Ip(baseDocumentHashMap, arangoManger, "R_LOCATE_FQDN2IP", BaseArangoData.e_Fqdn_Address_Ip_Map, countDownLatch);
-                updateEFqdnAddressIp.run();
+                LocateFqdn2Ip fqdnAddressIp = new LocateFqdn2Ip(baseDocumentHashMap, arangoManger, "R_LOCATE_FQDN2IP", BaseArangoData.e_Fqdn_Address_Ip_Map, countDownLatch);
+                fqdnAddressIp.run();
             }
             countDownLatch.await();
             LOG.info("------------R_LOCATE_FQDN2IP relationship 更新完毕----------------");
@@ -139,13 +129,14 @@
     }
 
     private void updateRelationIpVisitFqdn(){
+        baseClickhouseData.baseRelationshipIpVisitFqdn();
         try {
             countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
-                HashMap<String, HashMap<String, BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eIpVisitFqdnMap.get(i);
+                HashMap<String, ArrayList<BaseEdgeDocument>> baseDocumentHashMap = BaseClickhouseData.eIpVisitFqdnMap.get(i);
                 LOG.info("EIpVisitFqdn baseDocumentHashMap大小:" + baseDocumentHashMap.size());
-                VisitIp2Fqdn updateEIpVisitFqdn = new VisitIp2Fqdn(baseDocumentHashMap,arangoManger,"R_VISIT_IP2FQDN",BaseArangoData.e_Ip_Visit_Fqdn_Map,countDownLatch);
-                updateEIpVisitFqdn.run();
+                VisitIp2Fqdn ipVisitFqdn = new VisitIp2Fqdn(baseDocumentHashMap,arangoManger,"R_VISIT_IP2FQDN", BaseArangoData.e_Ip_Visit_Fqdn_Map,countDownLatch);
+                ipVisitFqdn.run();
             }
             countDownLatch.await();
             LOG.info("---------------R_VISIT_IP2FQDN ralationship 更新完毕----------------");
ReadClickhouseData.java
@@ -8,9 +8,14 @@ import org.slf4j.LoggerFactory;
 
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.regex.Pattern;
 
+/**
+ * @author wlh
+ */
 public class ReadClickhouseData {
 
     public static long currentHour = System.currentTimeMillis() / (60 * 60 * 1000) * 60 * 60;
@@ -18,6 +23,16 @@ public class ReadClickhouseData {
     private static Pattern pattern = Pattern.compile("^[\\d]*$");
     private static final Logger LOG = LoggerFactory.getLogger(ReadClickhouseData.class);
 
+
+    public static HashSet<String> protocolSet;
+
+    static {
+        protocolSet = new HashSet<>();
+        protocolSet.add("HTTP");
+        protocolSet.add("TLS");
+        protocolSet.add("DNS");
+    }
+
     public static BaseDocument getVertexFqdnDocument(ResultSet resultSet) throws SQLException {
         String fqdnName = resultSet.getString("FQDN");
         BaseDocument newDoc = null;
@@ -49,17 +64,18 @@
             case "client":
                 newDoc.addAttribute("CLIENT_SESSION_COUNT", sessionCount);
                 newDoc.addAttribute("CLIENT_BYTES_SUM", bytesSum);
-                newDoc.addAttribute("SERVER_SESSION_COUNT",0L);
-                newDoc.addAttribute("SERVER_BYTES_SUM",0L);
+                newDoc.addAttribute("SERVER_SESSION_COUNT", 0L);
+                newDoc.addAttribute("SERVER_BYTES_SUM", 0L);
                 break;
             case "server":
                 newDoc.addAttribute("SERVER_SESSION_COUNT", sessionCount);
                 newDoc.addAttribute("SERVER_BYTES_SUM", bytesSum);
-                newDoc.addAttribute("CLIENT_SESSION_COUNT",0L);
-                newDoc.addAttribute("CLIENT_BYTES_SUM",0L);
+                newDoc.addAttribute("CLIENT_SESSION_COUNT", 0L);
+                newDoc.addAttribute("CLIENT_BYTES_SUM", 0L);
                 break;
+            default:
         }
-        newDoc.addAttribute("COMMON_LINK_INFO","");
+        newDoc.addAttribute("COMMON_LINK_INFO", "");
         return newDoc;
     }
@@ -102,9 +118,10 @@
         long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
         long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
         long countTotal = resultSet.getLong("COUNT_TOTAL");
+        String schemaType = resultSet.getString("schema_type");
         String[] distCipRecents = (String[]) resultSet.getArray("DIST_CIP_RECENT").getArray();
         long[] clientIpTs = new long[distCipRecents.length];
-        for (int i = 0;i < clientIpTs.length;i++){
+        for (int i = 0; i < clientIpTs.length; i++) {
            clientIpTs[i] = currentHour;
         }
 
@@ -115,10 +132,10 @@
             newDoc.setTo("IP/" + vIp);
             newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
             newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
-            newDoc.addAttribute("COUNT_TOTAL", countTotal);
             newDoc.addAttribute("DIST_CIP", distCipRecents);
-            newDoc.addAttribute("DIST_CIP_TS",clientIpTs);
+            newDoc.addAttribute("DIST_CIP_TS", clientIpTs);
+            newDoc.addAttribute("PROTOCOL_TYPE", schemaType);
+            checkSchemaProperty(newDoc, schemaType, countTotal);
         }
         return newDoc;
     }
@@ -132,6 +149,7 @@
         long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
         long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
         long countTotal = resultSet.getLong("COUNT_TOTAL");
+        String schemaType = resultSet.getString("schema_type");
 
         newDoc = new BaseEdgeDocument();
         newDoc.setKey(key);
@@ -139,20 +157,20 @@
             newDoc.setTo("FQDN/" + vFqdn);
             newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
             newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
-            newDoc.addAttribute("COUNT_TOTAL", countTotal);
+            newDoc.addAttribute("PROTOCOL_TYPE", schemaType);
+            checkSchemaProperty(newDoc, schemaType, countTotal);
         }
         return newDoc;
     }
 
-    public static void putMapByHashcode(BaseEdgeDocument newDoc, HashMap<Integer, HashMap<String, HashMap<String, BaseEdgeDocument>>> map, String schema) throws SQLException {
+    public static <T extends BaseDocument> void putMapByHashcode(T newDoc, HashMap<Integer, HashMap<String, ArrayList<T>>> map) {
         if (newDoc != null) {
             String key = newDoc.getKey();
             int i = Math.abs(key.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
-            HashMap<String, HashMap<String, BaseEdgeDocument>> documentHashMap = map.getOrDefault(i, new HashMap());
-            HashMap<String, BaseEdgeDocument> schemaHashMap = documentHashMap.getOrDefault(key, new HashMap<>());
-            schemaHashMap.put(schema, newDoc);
-            documentHashMap.put(key, schemaHashMap);
+            HashMap<String, ArrayList<T>> documentHashMap = map.getOrDefault(i, new HashMap<>());
+            ArrayList<T> documentArrayList = documentHashMap.getOrDefault(key, new ArrayList<>());
+            documentArrayList.add(newDoc);
+            documentHashMap.put(key, documentArrayList);
         }
     }
 
@@ -179,7 +197,23 @@
         return false;
     }
 
-    public static String getVFqdnSql() {
+    private static void checkSchemaProperty(BaseEdgeDocument newDoc, String schema, long countTotal) {
+        long[] recentCnt = new long[24];
+        recentCnt[0] = countTotal;
+        for (String protocol:protocolSet){
+            String protocolRecent = protocol +"_CNT_RECENT";
+            String protocolTotal = protocol + "_CNT_TOTAL";
+            if (protocol.equals(schema)){
+                newDoc.addAttribute(protocolTotal, countTotal);
+                newDoc.addAttribute(protocolRecent, recentCnt);
+            }else {
+                newDoc.addAttribute(protocolTotal, 0L);
+                newDoc.addAttribute(protocolRecent, new long[24]);
+            }
+        }
+    }
+
+    public static String getVertexFqdnSql() {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
@@ -189,7 +223,7 @@
         return "SELECT FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME FROM ((" + sslSql + ") UNION ALL (" + httpSql + ")) GROUP BY FQDN HAVING FQDN != ''";
     }
 
-    public static String getVIpSql() {
+    public static String getVertexIpSql() {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
@@ -199,23 +233,23 @@
         return "SELECT * FROM((" + clientIpSql + ") UNION ALL (" + serverIpSql + "))";
     }
 
-    public static String getEFqdnAddressIpSql() {
+    public static String getRelationshipFqdnAddressIpSql() {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
         String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
-        String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
-        String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
+        String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(10000)(common_client_ip) AS DIST_CIP_RECENT,'TLS' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
+        String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(10000)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
         return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
     }
 
-    public static String getEIpVisitFqdnSql() {
+    public static String getRelationshipIpVisitFqdnSql() {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
         String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
-        String httpSql = "SELECT http_host AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_client_ip";
-        String sslSql = "SELECT ssl_sni AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE common_schema_type = 'SSL' GROUP BY ssl_sni,common_client_ip";
+        String httpSql = "SELECT http_host AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'HTTP' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_client_ip";
+        String sslSql = "SELECT ssl_sni AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'TLS' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE common_schema_type = 'SSL' GROUP BY ssl_sni,common_client_ip";
         return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
     }
 
ReadHistoryArangoData.java
@@ -2,10 +2,11 @@ package cn.ac.iie.service.read;
 
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.ArangoCursor;
-import com.arangodb.entity.BaseEdgeDocument;
+import com.arangodb.entity.BaseDocument;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -13,33 +14,61 @@ import java.util.concurrent.ConcurrentHashMap;
  * @author wlh
  * 多线程全量读取arangoDb历史数据,封装到map
  */
-public class ReadHistoryArangoData extends Thread {
+public class ReadHistoryArangoData<T extends BaseDocument> extends Thread {
     private static final Logger LOG = LoggerFactory.getLogger(ReadHistoryArangoData.class);
 
-    private ArangoDBConnect arangoDBConnect;
+    private ArangoDBConnect arangoConnect;
     private String query;
-    private ConcurrentHashMap<String, BaseEdgeDocument> map;
+    private ConcurrentHashMap<String, T> map;
+    private Class<T> type;
+    private String collectionName;
 
-    public ReadHistoryArangoData(ArangoDBConnect arangoDBConnect, String query, ConcurrentHashMap<String, BaseEdgeDocument> map) {
-        this.arangoDBConnect = arangoDBConnect;
+    public ReadHistoryArangoData(ArangoDBConnect arangoConnect, String query, ConcurrentHashMap<String, T> map, Class<T> type, String collectionName) {
+        this.arangoConnect = arangoConnect;
         this.query = query;
         this.map = map;
+        this.type = type;
+        this.collectionName = collectionName;
     }
 
     @Override
     public void run() {
         long s = System.currentTimeMillis();
-        ArangoCursor<BaseEdgeDocument> docs = arangoDBConnect.executorQuery(query, BaseEdgeDocument.class);
-        if (docs != null){
-            List<BaseEdgeDocument> baseDocuments = docs.asListRemaining();
+        ArangoCursor<T> docs = arangoConnect.executorQuery(query, type);
+        if (docs != null) {
+            List<T> baseDocuments = docs.asListRemaining();
             int i = 0;
-            for (BaseEdgeDocument doc : baseDocuments) {
+            for (T doc : baseDocuments) {
                 String key = doc.getKey();
+                switch (collectionName) {
+                    case "R_LOCATE_FQDN2IP":
+                        updateProtocolDocument(doc);
+                        break;
+                    case "R_VISIT_IP2FQDN":
+                        updateProtocolDocument(doc);
+                        break;
+                    default:
+                }
                 map.put(key, doc);
                 i++;
             }
             long l = System.currentTimeMillis();
-            LOG.info(query+ "\n处理数据" + i + "条,运行时间:" + (l - s));
+            LOG.info(query + "\n处理数据" + i + "条,运行时间:" + (l - s));
         }
     }
 
+    private void updateProtocolDocument(T doc) {
+        if (doc.getProperties().containsKey("PROTOCOL_TYPE")) {
+            for (String protocol : ReadClickhouseData.protocolSet) {
+                String protocolRecent = protocol + "_CNT_RECENT";
+                ArrayList<Long> cntRecent = (ArrayList<Long>) doc.getAttribute(protocolRecent);
+                Long[] cntRecentsSrc = cntRecent.toArray(new Long[cntRecent.size()]);
+                Long[] cntRecentsDst = new Long[24];
+                System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
+                cntRecentsDst[0] = 0L;
+                doc.addAttribute(protocolRecent, cntRecentsDst);
+            }
+        }
+    }
 
 }
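Concretely: if an edge came back from ArangoDB with HTTP_CNT_RECENT = [5, 2, 0, ..., 7], updateProtocolDocument rewrites it as [0, 5, 2, 0, ...]. Every hourly slot ages by one position, the trailing count from 24 hours ago (the 7) falls off, and slot 0 is zeroed, ready to take the current hour's count from the freshly read ClickHouse rows.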
LocateSubscriber2Ip.java
@@ -1,38 +0,0 @@
-package cn.ac.iie.service.relationship;
-
-import cn.ac.iie.service.update.Relationship;
-import cn.ac.iie.utils.ArangoDBConnect;
-import com.arangodb.entity.BaseEdgeDocument;
-
-import java.util.HashMap;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CountDownLatch;
-
-public class LocateSubscriber2Ip extends Relationship {
-
-    public LocateSubscriber2Ip(HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap,
-                               ArangoDBConnect arangoManger,
-                               String collectionName,
-                               ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
-                               CountDownLatch countDownLatch) {
-        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
-    }
-
-    @Override
-    protected BaseEdgeDocument mergeRelationship(HashMap<String, BaseEdgeDocument> newEdgeDocumentSchemaMap) {
-        BaseEdgeDocument doc = null;
-        if (newEdgeDocumentSchemaMap.size() == 1){
-            Set<String> strings = newEdgeDocumentSchemaMap.keySet();
-            for (String key:strings){
-                doc = newEdgeDocumentSchemaMap.getOrDefault(key,null);
-            }
-        }
-        return doc;
-    }
-
-    @Override
-    protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
-        super.updateFoundTime(newEdgeDocument,historyEdgeDocument);
-    }
-}
@@ -1,19 +0,0 @@
|
|||||||
package cn.ac.iie.service.relationship;
|
|
||||||
|
|
||||||
import cn.ac.iie.service.update.Relationship;
|
|
||||||
import cn.ac.iie.utils.ArangoDBConnect;
|
|
||||||
import com.arangodb.entity.BaseEdgeDocument;
|
|
||||||
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.concurrent.ConcurrentHashMap;
|
|
||||||
import java.util.concurrent.CountDownLatch;
|
|
||||||
|
|
||||||
public class VisitIp2Fqdn extends Relationship {
|
|
||||||
public VisitIp2Fqdn(HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap,
|
|
||||||
ArangoDBConnect arangoManger,
|
|
||||||
String collectionName,
|
|
||||||
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
|
|
||||||
CountDownLatch countDownLatch) {
|
|
||||||
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Document.java
@@ -0,0 +1,118 @@
+package cn.ac.iie.service.update;
+
+import cn.ac.iie.config.ApplicationConfig;
+import cn.ac.iie.utils.ArangoDBConnect;
+import com.arangodb.entity.BaseDocument;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+
+public class Document<T extends BaseDocument> extends Thread{
+    private static final Logger LOG = LoggerFactory.getLogger(Document.class);
+    private HashMap<String, ArrayList<T>> newDocumentMap;
+    private ArangoDBConnect arangoManger;
+    private String collectionName;
+    private ConcurrentHashMap<String, T> historyDocumentMap;
+    private CountDownLatch countDownLatch;
+    private Class<T> type;
+
+    Document(HashMap<String, ArrayList<T>> newDocumentMap,
+             ArangoDBConnect arangoManger,
+             String collectionName,
+             ConcurrentHashMap<String, T> historyDocumentMap,
+             CountDownLatch countDownLatch,
+             Class<T> type) {
+        this.newDocumentMap = newDocumentMap;
+        this.arangoManger = arangoManger;
+        this.collectionName = collectionName;
+        this.historyDocumentMap = historyDocumentMap;
+        this.countDownLatch = countDownLatch;
+        this.type = type;
+    }
+
+
+    @Override
+    public void run() {
+        Set<String> keySet = newDocumentMap.keySet();
+        ArrayList<T> resultDocumentList = new ArrayList<>();
+        int i = 0;
+        try {
+            for (String key : keySet) {
+                ArrayList<T> newDocumentSchemaList = newDocumentMap.getOrDefault(key, null);
+                if (newDocumentSchemaList != null) {
+                    T newDocument = mergeDocument(newDocumentSchemaList);
+                    i += 1;
+                    T historyDocument = historyDocumentMap.getOrDefault(key, null);
+                    updateDocument(newDocument,historyDocument,resultDocumentList);
+                    if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
+                        arangoManger.overwrite(resultDocumentList, collectionName);
+                        LOG.info("更新"+collectionName+":" + i);
+                        i = 0;
+                    }
+                }
+            }
+            if (i != 0) {
+                arangoManger.overwrite(resultDocumentList, collectionName);
+                LOG.info("更新"+collectionName+":" + i);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            LOG.error(e.toString());
+        }finally {
+            countDownLatch.countDown();
+        }
+    }
+
+    private void updateDocument(T newDocument, T historyDocument, ArrayList<T> resultDocumentList) {
+        if (historyDocument != null){
+            updateFunction(newDocument,historyDocument);
+            resultDocumentList.add(historyDocument);
+        }else {
+            resultDocumentList.add(newDocument);
+        }
+    }
+
+    protected void updateFunction(T newDocument, T historyDocument) {
+        Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
+        historyDocument.addAttribute("LAST_FOUND_TIME",lastFoundTime);
+    }
+
+    private T mergeDocument(ArrayList<T> newDocumentSchemaList) throws IllegalAccessException, InstantiationException {
+        if (newDocumentSchemaList == null || newDocumentSchemaList.isEmpty()){
+            return null;
+        }else if (newDocumentSchemaList.size() == 1){
+            return newDocumentSchemaList.get(0);
+        }else {
+            T newDocument = type.newInstance();
+            Map<String, Object> newProperties = newDocument.getProperties();
+            for (T doc:newDocumentSchemaList){
+                if (newProperties.isEmpty()){
+                    newDocument = doc;
+                    newProperties = doc.getProperties();
+                }else {
+                    mergeFunction(newProperties,doc);
+                }
+            }
+            newDocument.setProperties(newProperties);
+            return newDocument;
+        }
+    }
+
+    protected void mergeFunction(Map<String, Object> newProperties, T lastDoc) {
+        long firstFoundTime = Long.parseLong(newProperties.getOrDefault("FIRST_FOUND_TIME", 0L).toString());
+        long docFirstFoundTime = Long.parseLong(lastDoc.getAttribute("FIRST_FOUND_TIME").toString());
+        newProperties.put("FIRST_FOUND_TIME",firstFoundTime<docFirstFoundTime? firstFoundTime:docFirstFoundTime);
+
+        long lastFoundTime = Long.parseLong(newProperties.getOrDefault("LAST_FOUND_TIME", 0L).toString());
+        long docLastFoundTime = Long.parseLong(lastDoc.getAttribute("LAST_FOUND_TIME").toString());
+        newProperties.put("LAST_FOUND_TIME",lastFoundTime>docLastFoundTime? lastFoundTime:docLastFoundTime);
+    }
+
+
+}
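The new generic Document<T> above centralizes the whole merge / compare-with-history / batched-overwrite loop, so concrete updaters only override updateFunction or mergeFunction (a template-method design). A hedged sketch of what a minimal subclass reduces to; the Vertex name is illustrative and assumes the class sits in cn.ac.iie.service.update so it can reach the package-private constructor:

    // Illustrative only: a vertex updater that keeps the default merge rule
    // (earliest FIRST_FOUND_TIME, latest LAST_FOUND_TIME) and batch writes.
    public class Vertex extends Document<BaseDocument> {
        public Vertex(HashMap<String, ArrayList<BaseDocument>> newDocs,
                      ArangoDBConnect arango,
                      String collectionName,
                      ConcurrentHashMap<String, BaseDocument> history,
                      CountDownLatch latch) {
            super(newDocs, arango, collectionName, history, latch, BaseDocument.class);
        }
    }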
@@ -1,173 +1,78 @@
 package cn.ac.iie.service.update;
 
-import cn.ac.iie.config.ApplicationConfig;
+import cn.ac.iie.service.read.ReadClickhouseData;
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.entity.BaseEdgeDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 
-public class Relationship extends Thread {
+public class Relationship extends Document<BaseEdgeDocument> {
-    private static final Logger LOG = LoggerFactory.getLogger(Relationship.class);
-
-    private HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap;
-    private ArangoDBConnect arangoManger;
-    private String collectionName;
-    private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
-    private CountDownLatch countDownLatch;
-
-    public Relationship(HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap,
+    public Relationship(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
                         ArangoDBConnect arangoManger,
                         String collectionName,
                         ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
                         CountDownLatch countDownLatch) {
-        this.newDocumentHashMap = newDocumentHashMap;
-        this.arangoManger = arangoManger;
-        this.collectionName = collectionName;
-        this.historyDocumentMap = historyDocumentMap;
-        this.countDownLatch = countDownLatch;
+        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch, BaseEdgeDocument.class);
     }
 
     @Override
-    public void run() {
-        Set<String> keySet = newDocumentHashMap.keySet();
-        ArrayList<BaseEdgeDocument> docInsert = new ArrayList<>();
-        int i = 0;
-        try {
-            for (String key : keySet) {
-                HashMap<String, BaseEdgeDocument> newEdgeDocumentSchemaMap = newDocumentHashMap.getOrDefault(key, null);
-                if (newEdgeDocumentSchemaMap != null) {
-                    BaseEdgeDocument newEdgeDocument = mergeRelationship(newEdgeDocumentSchemaMap);
-                    i += 1;
-                    BaseEdgeDocument historyEdgeDocument = historyDocumentMap.getOrDefault(key, null);
-                    updateRelationship(newEdgeDocument, historyEdgeDocument, docInsert);
-                    if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
-                        arangoManger.overwrite(docInsert, collectionName);
-                        LOG.info("Updated " + collectionName + ": " + i);
-                        i = 0;
-                    }
-                }
-            }
-            if (i != 0) {
-                arangoManger.overwrite(docInsert, collectionName);
-                LOG.info("Updated " + collectionName + ": " + i);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOG.error(e.toString());
-        }finally {
-            countDownLatch.countDown();
-        }
-    }
-
-    protected BaseEdgeDocument mergeRelationship(HashMap<String, BaseEdgeDocument> newEdgeDocumentSchemaMap) {
-        BaseEdgeDocument newBaseEdgeDocument = new BaseEdgeDocument();
-        Set<String> schemaSets = newEdgeDocumentSchemaMap.keySet();
-        Map<String, Object> properties = newBaseEdgeDocument.getProperties();
-        for (String schema : schemaSets) {
-            BaseEdgeDocument schemaEdgeDoc = newEdgeDocumentSchemaMap.get(schema);
-            if (!properties.isEmpty()) {
-                mergeFunction(properties, schemaEdgeDoc);
-            } else {
-                newBaseEdgeDocument = schemaEdgeDoc;
-                properties = schemaEdgeDoc.getProperties();
-            }
-            setSchemaCount(schema, schemaEdgeDoc, properties);
-        }
-        properties.remove("COUNT_TOTAL");
-        checkSchemaProperty(properties);
-        newBaseEdgeDocument.setProperties(properties);
-        return newBaseEdgeDocument;
-    }
-
-    private void updateRelationship(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument, ArrayList<BaseEdgeDocument> docInsert){
-        if (historyEdgeDocument != null && newEdgeDocument != null) {
-            updateFunction(newEdgeDocument, historyEdgeDocument);
-            docInsert.add(historyEdgeDocument);
-        } else {
-            docInsert.add(newEdgeDocument);
-        }
-    }
-
-    protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument){
-        updateFoundTime(newEdgeDocument, historyEdgeDocument);
-        setSchemaCntByHistory(historyEdgeDocument, "TLS_CNT_RECENT", "TLS_CNT_TOTAL", newEdgeDocument);
-        setSchemaCntByHistory(historyEdgeDocument, "HTTP_CNT_RECENT", "HTTP_CNT_TOTAL", newEdgeDocument);
-        setSchemaCntByHistory(historyEdgeDocument, "DNS_CNT_RECENT", "DNS_CNT_TOTAL", newEdgeDocument);
-    }
-
-    protected void updateFoundTime(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument){
-        Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME");
-        historyEdgeDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
-    }
-
-    private void setSchemaCntByHistory(BaseEdgeDocument historyEdgeDocument, String schema, String totalSchema, BaseEdgeDocument newEdgeDocument){
-        long countTotal = Long.parseLong(newEdgeDocument.getAttribute(totalSchema).toString());
-        long updateCountTotal = Long.parseLong(historyEdgeDocument.getAttribute(totalSchema).toString());
-        ArrayList<Long> cntRecent = (ArrayList<Long>) historyEdgeDocument.getAttribute(schema);
-        Long[] cntRecentsSrc = cntRecent.toArray(new Long[cntRecent.size()]);
-        Long[] cntRecentsDst = new Long[24];
-        System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
-        cntRecentsDst[0] = countTotal;
-        historyEdgeDocument.addAttribute(schema, cntRecentsDst);
-        historyEdgeDocument.addAttribute(totalSchema, countTotal + updateCountTotal);
-    }
-
-    protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
-        mergeFoundTime(properties, schemaEdgeDoc);
-    }
-
-    private void mergeFoundTime(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
-        long schemaFirstFoundTime = Long.parseLong(schemaEdgeDoc.getAttribute("FIRST_FOUND_TIME").toString());
-        long firstFoundTime = Long.parseLong(properties.get("FIRST_FOUND_TIME").toString());
-        properties.put("FIRST_FOUND_TIME", schemaFirstFoundTime < firstFoundTime ? schemaFirstFoundTime : firstFoundTime);
-        long schemaLastFoundTime = Long.parseLong(schemaEdgeDoc.getAttribute("LAST_FOUND_TIME").toString());
-        long lastFoundTime = Long.parseLong(properties.get("LAST_FOUND_TIME").toString());
-        properties.put("LAST_FOUND_TIME", schemaLastFoundTime > lastFoundTime ? schemaLastFoundTime : lastFoundTime);
-    }
-
-    private void setSchemaCount(String schema, BaseEdgeDocument schemaEdgeDoc, Map<String, Object> properties) {
-        switch (schema) {
-            case "HTTP":
-                long httpCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
-                properties.put("HTTP_CNT_TOTAL", httpCntTotal);
-                long[] httpCntRecentsDst = new long[24];
-                httpCntRecentsDst[0] = httpCntTotal;
-                properties.put("HTTP_CNT_RECENT", httpCntRecentsDst);
-                break;
-            case "SSL":
-                long tlsCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
-                properties.put("TLS_CNT_TOTAL", tlsCntTotal);
-                long[] tlsCntRecentsDst = new long[24];
-                tlsCntRecentsDst[0] = tlsCntTotal;
-                properties.put("TLS_CNT_RECENT", tlsCntRecentsDst);
-                break;
-            default:
-                break;
-        }
-    }
-
-    private void checkSchemaProperty(Map<String, Object> properties){
-        if (!properties.containsKey("TLS_CNT_TOTAL")){
-            properties.put("TLS_CNT_TOTAL", 0L);
-            properties.put("TLS_CNT_RECENT", new long[24]);
-        }
-        if (!properties.containsKey("HTTP_CNT_TOTAL")){
-            properties.put("HTTP_CNT_TOTAL", 0L);
-            properties.put("HTTP_CNT_RECENT", new long[24]);
-        }
-        if (!properties.containsKey("DNS_CNT_TOTAL")){
-            properties.put("DNS_CNT_TOTAL", 0L);
-            properties.put("DNS_CNT_RECENT", new long[24]);
-        }
-    }
+    protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument){
+        super.updateFunction(newEdgeDocument, historyEdgeDocument);
+    }
+
+    protected void updateProcotol(BaseEdgeDocument historyEdgeDocument, String schema, BaseEdgeDocument newEdgeDocument){
+        String recentSchema = schema + "_CNT_RECENT";
+        String totalSchema = schema + "_CNT_TOTAL";
+        long countTotal = Long.parseLong(newEdgeDocument.getAttribute(totalSchema).toString());
+        if (countTotal > 0L){
+            long updateCountTotal = Long.parseLong(historyEdgeDocument.getAttribute(totalSchema).toString());
+            Long[] cntRecent = (Long[]) historyEdgeDocument.getAttribute(recentSchema);
+            cntRecent[0] = countTotal;
+            historyEdgeDocument.addAttribute(recentSchema, cntRecent);
+            historyEdgeDocument.addAttribute(totalSchema, countTotal + updateCountTotal);
+            String hisProtocolType = historyEdgeDocument.getAttribute("PROTOCOL_TYPE").toString();
+            if (!hisProtocolType.contains(schema)){
+                hisProtocolType = hisProtocolType + "," + schema;
+                historyEdgeDocument.addAttribute("PROTOCOL_TYPE", hisProtocolType);
+            }
+        }
+    }
+
+    @Override
+    protected void mergeFunction(Map<String, Object> newProperties, BaseEdgeDocument lastDoc) {
+        super.mergeFunction(newProperties, lastDoc);
+    }
+
+    protected void mergeProtocol(Map<String, Object> newProperties, BaseEdgeDocument lastDoc) {
+        String schema = lastDoc.getAttribute("PROTOCOL_TYPE").toString();
+        if (ReadClickhouseData.protocolSet.contains(schema)){
+            setProtocolProperties(schema, newProperties, lastDoc);
+        }
+    }
+
+    private void setProtocolProperties(String protocol, Map<String, Object> newProperties, BaseEdgeDocument lastDoc){
+        String protocolRecent = protocol + "_CNT_RECENT";
+        String protocolTotal = protocol + "_CNT_TOTAL";
+        long httpCntTotal = Long.parseLong(lastDoc.getAttribute(protocolTotal).toString());
+        newProperties.put(protocolTotal, httpCntTotal);
+        long[] httpCntRecents = (long[]) lastDoc.getAttribute(protocolRecent);
+        newProperties.put(protocolRecent, httpCntRecents);
+        String protocolType = newProperties.get("PROTOCOL_TYPE").toString();
+        newProperties.put("PROTOCOL_TYPE", addProcotolType(protocolType, protocol));
+    }
+
+    private String addProcotolType(String protocolType, String schema){
+        if (!protocolType.contains(schema)){
+            protocolType = protocolType + "," + schema;
+        }
+        return protocolType;
+    }
 }
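Relationship.updateProcotol above is the core of the commit: for each protocol it accumulates the new hour's count into <PROTO>_CNT_TOTAL, writes it into slot 0 of the 24-element <PROTO>_CNT_RECENT array, and appends the protocol name to the comma-separated PROTOCOL_TYPE field. Two hedged observations: the Long[] cast on getAttribute(recentSchema) only holds for documents still in memory (values read back from ArangoDB typically deserialize as a List, which is why the removed code cast to ArrayList<Long>), and since only slot 0 is written, the hourly aging of the other 23 slots has to happen elsewhere. A standalone sketch of one conventional way to keep such a 24-hour window — the rotate step is my assumption, not code from this commit:

    import java.util.Arrays;

    public class RecentCounterSketch {
        // 24 hourly buckets; index 0 is the current hour (assumed layout).
        private final long[] recent = new long[24];
        private long total = 0L;

        /** Called once when a new hour begins: age every bucket by one slot,
         *  dropping the value that is now more than 24 hours old. */
        void rotate() {
            System.arraycopy(recent, 0, recent, 1, recent.length - 1);
            recent[0] = 0L;
        }

        /** Record the count observed for the current hour. */
        void add(long countThisHour) {
            recent[0] = countThisHour;
            total += countThisHour;
        }

        long last24h() {
            return Arrays.stream(recent).sum();
        }

        public static void main(String[] args) {
            RecentCounterSketch http = new RecentCounterSketch();
            http.add(42);   // hour t
            http.rotate();  // hour t+1 begins
            http.add(7);
            System.out.println(Arrays.toString(http.recent));        // [7, 42, 0, ...]
            System.out.println(http.last24h() + " / " + http.total); // 49 / 49
        }
    }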
@@ -1,16 +1,11 @@
 package cn.ac.iie.service.update;
 
-import cn.ac.iie.config.ApplicationConfig;
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.entity.BaseDocument;
-import com.arangodb.entity.BaseEdgeDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 
@@ -18,109 +13,29 @@ import java.util.concurrent.CountDownLatch;
  * @author wlh
 * Multi-threaded update of vertex data
  */
-public class Vertex extends Thread{
+public class Vertex extends Document<BaseDocument> {
-    private static final Logger LOG = LoggerFactory.getLogger(Vertex.class);
-
-    private HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap;
-    private ArangoDBConnect arangoManger;
-    private String collectionName;
-    private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
-    private CountDownLatch countDownLatch;
-
     public Vertex(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
                   ArangoDBConnect arangoManger,
                   String collectionName,
-                  ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
+                  ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
-                  CountDownLatch countDownLatch){
+                  CountDownLatch countDownLatch) {
-        this.newDocumentHashMap = newDocumentHashMap;
-        this.arangoManger = arangoManger;
-        this.collectionName = collectionName;
-        this.historyDocumentMap = historyDocumentMap;
-        this.countDownLatch = countDownLatch;
+        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch, BaseDocument.class);
     }
 
+    @Override
+    protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
+        super.updateFunction(newDocument, historyDocument);
+    }
+
+    @Override
+    protected void mergeFunction(Map<String, Object> properties, BaseDocument doc) {
+        super.mergeFunction(properties, doc);
+    }
+
     @Override
     public void run() {
-        Set<String> keySet = newDocumentHashMap.keySet();
-        ArrayList<BaseDocument> docInsert = new ArrayList<>();
-        int i = 0;
-        try {
-            for (String key:keySet){
-                ArrayList<BaseDocument> documentArrayList = newDocumentHashMap.getOrDefault(key, null);
-                BaseDocument newDocument = mergeVertex(documentArrayList);
-                if (newDocument != null){
-                    i += 1;
-                    BaseDocument historyDocument = historyDocumentMap.getOrDefault(key, null);
-                    updateVertex(newDocument, historyDocument, docInsert);
-                }
-                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH){
-                    arangoManger.overwrite(docInsert, collectionName);
-                    LOG.info("Updated " + collectionName + ": " + i);
-                    i = 0;
-                }
-            }
-            if (i != 0){
-                arangoManger.overwrite(docInsert, collectionName);
-                LOG.info("Updated " + collectionName + ": " + i);
-            }
-        }catch (Exception e){
-            e.printStackTrace();
-            LOG.error(e.toString());
-        }finally {
-            countDownLatch.countDown();
-        }
+        super.run();
     }
-
-    private void updateVertex(BaseDocument newDocument, BaseDocument historyDocument, ArrayList<BaseDocument> docInsert){
-        if (historyDocument != null){
-            updateFunction(newDocument, historyDocument);
-            docInsert.add(historyDocument);
-        }else {
-            docInsert.add(newDocument);
-        }
-    }
-
-    protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument){
-        updateFoundTime(newDocument, historyDocument);
-    }
-
-    private void updateFoundTime(BaseDocument newDocument, BaseDocument historyDocument){
-        Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
-        historyDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
-    }
-
-    private BaseDocument mergeVertex(ArrayList<BaseDocument> documentArrayList){
-        if (documentArrayList == null || documentArrayList.isEmpty()){
-            return null;
-        }else if (documentArrayList.size() == 1){
-            return documentArrayList.get(0);
-        }else {
-            BaseDocument document = new BaseDocument();
-            Map<String, Object> properties = document.getProperties();
-            for (BaseDocument doc:documentArrayList){
-                if (properties.isEmpty()){
-                    document = doc;
-                    properties = doc.getProperties();
-                }else {
-                    mergeFunction(properties, doc);
-                }
-            }
-            document.setProperties(properties);
-            return document;
-        }
-    }
-
-    protected void mergeFunction(Map<String, Object> properties, BaseDocument doc){
-        mergeFoundTime(properties, doc);
-    }
-
-    private void mergeFoundTime(Map<String, Object> properties, BaseDocument doc){
-        long firstFoundTime = Long.parseLong(properties.getOrDefault("FIRST_FOUND_TIME", 0L).toString());
-        long docFirstFoundTime = Long.parseLong(doc.getAttribute("FIRST_FOUND_TIME").toString());
-        properties.put("FIRST_FOUND_TIME", firstFoundTime < docFirstFoundTime ? firstFoundTime : docFirstFoundTime);
-
-        long lastFoundTime = Long.parseLong(properties.getOrDefault("LAST_FOUND_TIME", 0L).toString());
-        long docLastFoundTime = Long.parseLong(doc.getAttribute("LAST_FOUND_TIME").toString());
-        properties.put("LAST_FOUND_TIME", lastFoundTime > docLastFoundTime ? lastFoundTime : docLastFoundTime);
-    }
 }
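Vertex.run() now just delegates to Document.run(); the batch-flush loop it used to carry — accumulate documents, write every UPDATE_ARANGO_BATCH, then flush the remainder — lives in the base class. A generic sketch of that pattern, with a Consumer standing in for arangoManger.overwrite (names are illustrative):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Consumer;

    public class BatchFlushSketch {
        /** Hand items to `sink` in batches of `batchSize`, flushing whatever is
         *  left at the end — the shape of the old run() loop. */
        static <T> void drainInBatches(Iterable<T> items, int batchSize, Consumer<List<T>> sink) {
            List<T> buffer = new ArrayList<>(batchSize);
            for (T item : items) {
                buffer.add(item);
                if (buffer.size() >= batchSize) {
                    sink.accept(new ArrayList<>(buffer)); // copy, so the sink may keep it
                    buffer.clear();
                }
            }
            if (!buffer.isEmpty()) {
                sink.accept(buffer); // remainder, mirrors the old `if (i != 0)` branch
            }
        }

        public static void main(String[] args) {
            List<Integer> docs = new ArrayList<>();
            for (int i = 0; i < 25; i++) docs.add(i);
            drainInBatches(docs, 10, batch -> System.out.println("overwrite " + batch.size() + " docs"));
            // prints: overwrite 10 docs, overwrite 10 docs, overwrite 5 docs
        }
    }

In the committed code the buffer itself is cleared inside ArangoDBConnect.overwrite — its finally block calls docOverwrite.clear(), as the hunk further down shows — which is why the old loop only reset the counter i.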
@@ -1,4 +1,4 @@
-package cn.ac.iie.service.relationship;
+package cn.ac.iie.service.update.relationship;
 
 import cn.ac.iie.service.read.ReadClickhouseData;
 import cn.ac.iie.service.update.Relationship;
@@ -11,7 +11,7 @@ import java.util.concurrent.CountDownLatch;
 
 public class LocateFqdn2Ip extends Relationship {
 
-    public LocateFqdn2Ip(HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap,
+    public LocateFqdn2Ip(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
                          ArangoDBConnect arangoManger,
                          String collectionName,
                          ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
@@ -22,25 +22,16 @@ public class LocateFqdn2Ip extends Relationship {
     @Override
     protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
         super.mergeFunction(properties, schemaEdgeDoc);
-        mergeDistinctClientIp(properties, schemaEdgeDoc);
+        mergeProtocol(properties, schemaEdgeDoc);
     }
 
     @Override
     protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
         super.updateFunction(newEdgeDocument, historyEdgeDocument);
-        updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
-    }
-
-    private void mergeDistinctClientIp(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
-        String[] schemaDistCipRecents = (String[]) schemaEdgeDoc.getAttribute("DIST_CIP");
-        String[] distCipRecents = (String[]) properties.get("DIST_CIP");
-        Object[] mergeClientIp = distinctIp(schemaDistCipRecents, distCipRecents);
-        long[] mergeClientIpTs = new long[mergeClientIp.length];
-        for (int i = 0; i < mergeClientIpTs.length; i++){
-            mergeClientIpTs[i] = ReadClickhouseData.currentHour;
-        }
-        properties.put("DIST_CIP", mergeClientIp);
-        properties.put("DIST_CIP_TS", mergeClientIpTs);
+        for (String schema:ReadClickhouseData.protocolSet){
+            updateProcotol(historyEdgeDocument, schema, newEdgeDocument);
+        }
+        updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
     }
 
     private void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument edgeDocument){
@@ -54,7 +45,7 @@ public class LocateFqdn2Ip extends Relationship {
         }
         Object[] distCipRecent = (Object[]) newEdgeDocument.getAttribute("DIST_CIP");
         for (Object cip:distCipRecent){
-            distCipToTs.put(cip.toString(),ReadClickhouseData.currentHour);
+            distCipToTs.put(cip.toString(), ReadClickhouseData.currentHour);
         }
 
         Map<String, Long> sortDistCip = sortMapByValue(distCipToTs);
@@ -74,8 +65,8 @@ public class LocateFqdn2Ip extends Relationship {
         List<Map.Entry<String, Long>> entryList = new ArrayList<>(oriMap.entrySet());
         entryList.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
 
-        if(entryList.size() > 100){
-            for(Map.Entry<String, Long> set:entryList.subList(0, 100)){
+        if(entryList.size() > 10000){
+            for(Map.Entry<String, Long> set:entryList.subList(0, 10000)){
                 sortedMap.put(set.getKey(), set.getValue());
             }
         }else {
@@ -86,15 +77,4 @@ public class LocateFqdn2Ip extends Relationship {
         return sortedMap;
     }
-
-    private Object[] distinctIp(Object[] distCipTotalsSrc, Object[] distCipRecentsSrc){
-        HashSet<Object> dIpSet = new HashSet<>();
-        dIpSet.addAll(Arrays.asList(distCipRecentsSrc));
-        dIpSet.addAll(Arrays.asList(distCipTotalsSrc));
-        Object[] distCipTotals = dIpSet.toArray();
-        if (distCipTotals.length > 100) {
-            System.arraycopy(distCipTotals, 0, distCipTotals, 0, 100);
-        }
-        return distCipTotals;
-    }
 }
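LocateFqdn2Ip now routes per-protocol counting through the shared updateProcotol/mergeProtocol path and keeps only the most recently seen client IPs: sortMapByValue orders entries by timestamp descending and truncates, and this commit raises the cap from 100 to 10000. The removed distinctIp helper, incidentally, never truncated anything — System.arraycopy(a, 0, a, 0, 100) copies an array onto itself in place and cannot shorten it. A minimal equivalent of the sort-and-truncate step (the shape mirrors the committed method; the cap parameter is mine):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class SortMapByValueSketch {
        static Map<String, Long> sortMapByValue(Map<String, Long> oriMap, int cap) {
            List<Map.Entry<String, Long>> entryList = new ArrayList<>(oriMap.entrySet());
            entryList.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue())); // newest timestamp first
            Map<String, Long> sortedMap = new LinkedHashMap<>();                // preserves sorted order
            for (Map.Entry<String, Long> e : entryList.subList(0, Math.min(cap, entryList.size()))) {
                sortedMap.put(e.getKey(), e.getValue());
            }
            return sortedMap;
        }

        public static void main(String[] args) {
            Map<String, Long> cipToTs = new HashMap<>();
            cipToTs.put("10.0.0.1", 1594981808L);
            cipToTs.put("10.0.0.2", 1593878400L);
            cipToTs.put("10.0.0.3", 1594809098L);
            System.out.println(sortMapByValue(cipToTs, 2)); // {10.0.0.1=..., 10.0.0.3=...}
        }
    }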
@@ -0,0 +1,21 @@
+package cn.ac.iie.service.update.relationship;
+
+import cn.ac.iie.service.update.Relationship;
+import cn.ac.iie.utils.ArangoDBConnect;
+import com.arangodb.entity.BaseEdgeDocument;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+
+public class LocateSubscriber2Ip extends Relationship {
+
+    public LocateSubscriber2Ip(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
+                               ArangoDBConnect arangoManger,
+                               String collectionName,
+                               ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
+                               CountDownLatch countDownLatch) {
+        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
+    }
+}
@@ -0,0 +1,36 @@
+package cn.ac.iie.service.update.relationship;
+
+import cn.ac.iie.service.read.ReadClickhouseData;
+import cn.ac.iie.service.update.Relationship;
+import cn.ac.iie.utils.ArangoDBConnect;
+import com.arangodb.entity.BaseEdgeDocument;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CountDownLatch;
+
+public class VisitIp2Fqdn extends Relationship {
+    public VisitIp2Fqdn(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
+                        ArangoDBConnect arangoManger,
+                        String collectionName,
+                        ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
+                        CountDownLatch countDownLatch) {
+        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
+    }
+
+    @Override
+    protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
+        super.updateFunction(newEdgeDocument, historyEdgeDocument);
+        for (String schema: ReadClickhouseData.protocolSet){
+            updateProcotol(historyEdgeDocument, schema, newEdgeDocument);
+        }
+    }
+
+    @Override
+    protected void mergeFunction(Map<String, Object> newProperties, BaseEdgeDocument lastDoc) {
+        super.mergeFunction(newProperties, lastDoc);
+        mergeProtocol(newProperties, lastDoc);
+    }
+}
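Both new edge subclasses reuse the Relationship hooks: VisitIp2Fqdn overrides updateFunction to run updateProcotol over every entry in ReadClickhouseData.protocolSet and mergeFunction to fold PROTOCOL_TYPE in via mergeProtocol, while LocateSubscriber2Ip keeps the defaults. One caveat in the shared addProcotolType: membership is tested with String.contains, so a protocol whose name is a substring of one already recorded would never be appended (a hypothetical "HTTPS" entry would mask "HTTP"). A token-exact variant, as a sketch:

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.Set;

    public class ProtocolTypeSketch {
        /** Append `schema` to the comma-separated list unless already present.
         *  Uses an exact token match instead of the committed contains() check. */
        static String addProtocolType(String protocolType, String schema) {
            Set<String> tokens = new LinkedHashSet<>(Arrays.asList(protocolType.split(",")));
            tokens.add(schema);
            return String.join(",", tokens);
        }

        public static void main(String[] args) {
            String t = "DNS";
            t = addProtocolType(t, "HTTP"); // DNS,HTTP
            t = addProtocolType(t, "HTTP"); // unchanged
            t = addProtocolType(t, "TLS");  // DNS,HTTP,TLS
            System.out.println(t);
        }
    }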
@@ -1,9 +1,8 @@
-package cn.ac.iie.service.vertex;
+package cn.ac.iie.service.update.vertex;
 
 import cn.ac.iie.service.update.Vertex;
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.entity.BaseDocument;
-import com.arangodb.entity.BaseEdgeDocument;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -15,7 +14,7 @@ public class Fqdn extends Vertex {
     public Fqdn(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
                 ArangoDBConnect arangoManger,
                 String collectionName,
-                ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
+                ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
                 CountDownLatch countDownLatch) {
         super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
     }
@@ -1,9 +1,8 @@
-package cn.ac.iie.service.vertex;
+package cn.ac.iie.service.update.vertex;
 
 import cn.ac.iie.service.update.Vertex;
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.entity.BaseDocument;
-import com.arangodb.entity.BaseEdgeDocument;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -16,11 +15,17 @@ public class Ip extends Vertex {
     public Ip(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
               ArangoDBConnect arangoManger,
               String collectionName,
-              ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
+              ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
               CountDownLatch countDownLatch) {
         super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
     }
 
+    @Override
+    protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
+        super.updateFunction(newDocument, historyDocument);
+        updateIpByType(newDocument, historyDocument);
+    }
+
     @Override
     protected void mergeFunction(Map<String, Object> properties, BaseDocument doc) {
         super.mergeFunction(properties, doc);
@@ -40,8 +45,8 @@ public class Ip extends Vertex {
         if (!properties.containsKey(property)){
             properties.put(property, 0L);
             checkIpTypeProperty(properties, mergeProperties, property);
-        }else if (properties.get(property).toString().equals("0") && mergeProperties.containsKey(property)){
+        }else if ("0".equals(properties.get(property).toString()) && mergeProperties.containsKey(property)){
-            if (!mergeProperties.get(property).toString().equals("0")){
+            if (!"0".equals(mergeProperties.get(property).toString())){
                 properties.put(property, Long.parseLong(mergeProperties.get(property).toString()));
             }
         }
@@ -50,12 +55,6 @@ public class Ip extends Vertex {
         }
     }
 
-    @Override
-    protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
-        super.updateFunction(newDocument, historyDocument);
-        updateIpByType(newDocument, historyDocument);
-    }
-
     private void updateIpByType(BaseDocument newDocument, BaseDocument historyDocument){
         addProperty(newDocument, historyDocument, "CLIENT_SESSION_COUNT");
         addProperty(newDocument, historyDocument, "CLIENT_BYTES_SUM");
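The equals flips in Ip's property merge are the constant-first idiom: "0".equals(x) simply returns false when x is null, whereas x.equals("0") throws a NullPointerException. Here the lookup is already guarded by containsKey, so the change is defensive style rather than a bug fix; the difference in one line:

    public class YodaEqualsSketch {
        public static void main(String[] args) {
            String value = null; // e.g. a property lookup that came back empty
            System.out.println("0".equals(value)); // false, no exception
            // System.out.println(value.equals("0")); // would throw NullPointerException
        }
    }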
@@ -1,9 +1,8 @@
-package cn.ac.iie.service.vertex;
+package cn.ac.iie.service.update.vertex;
 
 import cn.ac.iie.service.update.Vertex;
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.entity.BaseDocument;
-import com.arangodb.entity.BaseEdgeDocument;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -15,7 +14,7 @@ public class Subscriber extends Vertex {
     public Subscriber(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
                       ArangoDBConnect arangoManger,
                       String collectionName,
-                      ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
+                      ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
                       CountDownLatch countDownLatch) {
         super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
     }
@@ -108,7 +108,6 @@ public class ArangoDBConnect {
             }
         }catch (Exception e){
             LOG.error("Update failed: " + e.toString());
-            // clean();
         }finally {
             docOverwrite.clear();
         }
@@ -1,5 +1,5 @@
 #arangoDB parameter configuration
-arangoDB.host=192.168.40.127
+arangoDB.host=192.168.40.182
 arangoDB.port=8529
 arangoDB.user=root
 arangoDB.password=111111
@@ -13,6 +13,5 @@ update.arango.batch=10000
 thread.pool.number=10
 thread.await.termination.time=10
 
-read.clickhouse.max.time=1594809098
+read.clickhouse.max.time=1594981808
-#read.clickhouse.min.time=1594622638
+read.clickhouse.min.time=1593878400
-read.clickhouse.min.time=1593792000
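The read window in the properties file is expressed in Unix epoch seconds (an assumption, but consistent with the long comparisons in the reading code): the new bounds decode to 2020-07-04T16:00:00Z through 2020-07-17T10:30:08Z, i.e. 2020-07-05 00:00 to 2020-07-17 18:30 in UTC+8. A quick check:

    import java.time.Instant;

    public class EpochWindowCheck {
        public static void main(String[] args) {
            System.out.println(Instant.ofEpochSecond(1593878400L)); // 2020-07-04T16:00:00Z (min)
            System.out.println(Instant.ofEpochSecond(1594981808L)); // 2020-07-17T10:30:08Z (max)
        }
    }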