Update client IP attributes

wanglihui
2020-07-13 20:00:59 +08:00
parent 705ed03926
commit c9010b2605
9 changed files with 142 additions and 77 deletions

View File

@@ -12,6 +12,9 @@ import org.slf4j.LoggerFactory;
 import java.util.concurrent.ConcurrentHashMap;
+/**
+ * 获取arangoDB历史数据
+ */
 public class BaseArangoData {
     private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);

View File

@@ -34,7 +34,7 @@ public class BaseClickhouseData {
     private Statement statement;
     void BaseVFqdn() {
-        initializeVertexMap(vFqdnMap);
+        initializeMap(vFqdnMap);
         LOG.info("FQDN resultMap初始化完成");
         String sql = getVFqdnSql();
         long start = System.currentTimeMillis();
@@ -63,7 +63,7 @@ public class BaseClickhouseData {
     }
     void BaseVIp() {
-        initializeVertexMap(vIpMap);
+        initializeMap(vIpMap);
         LOG.info("IP resultMap初始化完成");
         String sql = getVIpSql();
         long start = System.currentTimeMillis();
@@ -90,7 +90,7 @@ public class BaseClickhouseData {
     }
     void BaseVertexSubscriber(){
-        initializeVertexMap(vSubscriberMap);
+        initializeMap(vSubscriberMap);
         LOG.info("SUBSCRIBER resultMap初始化完成");
         String sql = getVertexSubscriberSql();
         long start = System.currentTimeMillis();
@@ -118,7 +118,7 @@ public class BaseClickhouseData {
     }
     void BaseRelationshipSubscriberLocateIp(){
-        initializeVertexMap(eSubsciberLocateIpMap);
+        initializeMap(eSubsciberLocateIpMap);
         LOG.info("R_LOCATE_SUBSCRIBER2IP");
         String sql = getRelationshipSubsciberLocateIpSql();
         long start = System.currentTimeMillis();
@@ -142,7 +142,7 @@ public class BaseClickhouseData {
     }
     void BaseEFqdnAddressIp() {
-        initializeVertexMap(eFqdnAddressIpMap);
+        initializeMap(eFqdnAddressIpMap);
         LOG.info("R_LOCATE_FQDN2IP resultMap初始化完成");
         String sql = getEFqdnAddressIpSql();
         long start = System.currentTimeMillis();
@@ -166,7 +166,7 @@ public class BaseClickhouseData {
     }
     void BaseEIpVisitFqdn() {
-        initializeVertexMap(eIpVisitFqdnMap);
+        initializeMap(eIpVisitFqdnMap);
         LOG.info("R_VISIT_IP2FQDN resultMap初始化完成");
         String sql = getEIpVisitFqdnSql();
         long start = System.currentTimeMillis();
@@ -188,7 +188,7 @@ public class BaseClickhouseData {
         }
     }
-    private void initializeVertexMap(Map map){
+    private void initializeMap(Map map){
         try {
             for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
                 map.put(i, new HashMap<>());

View File

@@ -13,6 +13,8 @@ import java.util.regex.Pattern;
 public class ReadClickhouseData {
+    public static long currentHour = System.currentTimeMillis() / (60 * 60 * 1000) * 60 * 60;
     private static Pattern pattern = Pattern.compile("^[\\d]*$");
     private static final Logger LOG = LoggerFactory.getLogger(ReadClickhouseData.class);
@@ -100,6 +102,10 @@ public class ReadClickhouseData {
             long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
             long countTotal = resultSet.getLong("COUNT_TOTAL");
             String[] distCipRecents = (String[]) resultSet.getArray("DIST_CIP_RECENT").getArray();
+            long[] clientIpTs = new long[distCipRecents.length];
+            for (int i = 0;i < clientIpTs.length;i++){
+                clientIpTs[i] = currentHour;
+            }
             String key = vFqdn + "-" + vIp;
             newDoc = new BaseEdgeDocument();
@@ -109,8 +115,8 @@ public class ReadClickhouseData {
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime); newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime); newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
newDoc.addAttribute("COUNT_TOTAL", countTotal); newDoc.addAttribute("COUNT_TOTAL", countTotal);
newDoc.addAttribute("DIST_CIP_RECENT", distCipRecents); newDoc.addAttribute("DIST_CIP", distCipRecents);
newDoc.addAttribute("DIST_CIP_TOTAL", distCipRecents); newDoc.addAttribute("DIST_CIP_TS",clientIpTs);
} }
return newDoc; return newDoc;
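
Note: DIST_CIP and DIST_CIP_TS are written as parallel arrays — index i of DIST_CIP_TS records the hour bucket in which DIST_CIP[i] was last seen, and every IP in a freshly read batch is stamped with currentHour. A minimal sketch of how the pair can be read back into an IP-to-last-seen-hour map (assumption: a BaseEdgeDocument named edgeDoc fetched from ArangoDB, whose array attributes deserialize as ArrayList values, as the update code further down also assumes):

    // Sketch only: rebuild an IP -> last-seen-hour map from the two parallel attributes.
    ArrayList<String> distCip = (ArrayList<String>) edgeDoc.getAttribute("DIST_CIP");
    ArrayList<Long> distCipTs = (ArrayList<Long>) edgeDoc.getAttribute("DIST_CIP_TS");
    Map<String, Long> lastSeenHour = new HashMap<>();
    if (distCip != null && distCipTs != null && distCip.size() == distCipTs.size()) {
        for (int i = 0; i < distCip.size(); i++) {
            lastSeenHour.put(distCip.get(i), distCipTs.get(i));
        }
    }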
@@ -176,7 +182,7 @@ public class ReadClickhouseData {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
-        String where = "common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
+        String where = "common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
         String sslSql = "SELECT ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni";
         String httpSql = "SELECT http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host";
         return "SELECT FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME FROM ((" + sslSql + ") UNION ALL (" + httpSql + ")) GROUP BY FQDN HAVING FQDN != ''";
@@ -186,7 +192,7 @@ public class ReadClickhouseData {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
-        String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
+        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
         String clientIpSql = "SELECT common_client_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_c2s_byte_num) as BYTES_SUM,'client' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
         String serverIpSql = "SELECT common_server_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_s2c_byte_num) as BYTES_SUM,'server' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
         return "SELECT * FROM((" + clientIpSql + ") UNION ALL (" + serverIpSql + "))";
@@ -196,9 +202,9 @@ public class ReadClickhouseData {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
-        String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
-        String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(30)(common_client_ip) AS DIST_CIP_RECENT,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
-        String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(30)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
+        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
+        String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
+        String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
         return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
     }
@@ -206,7 +212,7 @@ public class ReadClickhouseData {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
-        String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime;
+        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
         String httpSql = "SELECT http_host AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_client_ip";
         String sslSql = "SELECT ssl_sni AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE common_schema_type = 'SSL' GROUP BY ssl_sni,common_client_ip";
         return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
@@ -216,7 +222,7 @@ public class ReadClickhouseData {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
-        String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime + " AND common_subscriber_id != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
+        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
         return "SELECT common_subscriber_id,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id";
     }
@@ -224,12 +230,12 @@ public class ReadClickhouseData {
         long[] timeLimit = getTimeLimit();
         long maxTime = timeLimit[0];
         long minTime = timeLimit[1];
-        String where = " common_recv_time >= " + minTime + " AND common_recv_time <= " + maxTime + " AND common_subscriber_id != '' AND radius_framed_ip != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
+        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_framed_ip != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
         return "SELECT common_subscriber_id,radius_framed_ip,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME,COUNT(*) as COUNT_TOTAL FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id,radius_framed_ip";
     }
     private static long[] getTimeLimit() {
-        long maxTime = System.currentTimeMillis() / 1000;
+        long maxTime = currentHour;
         long minTime = maxTime - 3600;
         // long maxTime = ApplicationConfig.READ_CLICKHOUSE_MAX_TIME;
         // long minTime = ApplicationConfig.READ_CLICKHOUSE_MIN_TIME;
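
Note: currentHour is the current wall-clock time truncated to the start of the hour, expressed in epoch seconds, and getTimeLimit() now derives the query window from it instead of from the raw clock, so each run reads exactly one full hour. Together with the switch from <= to < in the WHERE clauses, the window is half-open, so back-to-back hourly runs cover disjoint buckets and the boundary second is not counted twice. A small sketch of the arithmetic, using the read.clickhouse.max.time value from the properties file below as the example clock:

    // Sketch: hourly bucketing behind currentHour and getTimeLimit().
    long nowMillis = 1594627747000L;                              // 2020-07-13 08:09:07 UTC
    long currentHour = nowMillis / (60 * 60 * 1000) * 60 * 60;    // 1594627200 = 08:00:00, in seconds
    long maxTime = currentHour;                                   // exclusive upper bound
    long minTime = maxTime - 3600;                                // 07:00:00, inclusive lower bound
    // rows selected: common_recv_time >= minTime AND common_recv_time < maxTime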

View File

@@ -1,11 +1,11 @@
 package cn.ac.iie.service.relationship;
+import cn.ac.iie.service.read.ReadClickhouseData;
 import cn.ac.iie.service.update.Relationship;
 import cn.ac.iie.utils.ArangoDBConnect;
 import com.arangodb.entity.BaseEdgeDocument;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
@@ -22,12 +22,79 @@ public class LocateFqdn2Ip extends Relationship {
     @Override
     protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
         super.mergeFunction(properties,schemaEdgeDoc);
-        super.mergeDistinctClientIp(properties,schemaEdgeDoc);
+        mergeDistinctClientIp(properties,schemaEdgeDoc);
     }
     @Override
     protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
         super.updateFunction(newEdgeDocument, historyEdgeDocument);
-        super.updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
+        updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
     }
+    private void mergeDistinctClientIp(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
+        String[] schemaDistCipRecents = (String[]) schemaEdgeDoc.getAttribute("DIST_CIP");
+        String[] distCipRecents = (String[]) properties.get("DIST_CIP");
+        Object[] mergeClientIp = distinctIp(schemaDistCipRecents, distCipRecents);
+        long[] mergeClientIpTs = new long[mergeClientIp.length];
+        for (int i = 0;i < mergeClientIpTs.length;i++){
+            mergeClientIpTs[i] = ReadClickhouseData.currentHour;
+        }
+        properties.put("DIST_CIP", mergeClientIp);
+        properties.put("DIST_CIP_TS",mergeClientIpTs);
+    }
+    private void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument edgeDocument){
+        ArrayList<String> distCip = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP");
+        ArrayList<Long> distCipTs = (ArrayList<Long>) edgeDocument.getAttribute("DIST_CIP_TS");
+        HashMap<String, Long> distCipToTs = new HashMap<>();
+        if (distCip.size() == distCipTs.size()){
+            for (int i = 0;i < distCip.size();i++){
+                distCipToTs.put(distCip.get(i),distCipTs.get(i));
+            }
+        }
+        Object[] distCipRecent = (Object[])newEdgeDocument.getAttribute("DIST_CIP");
+        for (Object cip:distCipRecent){
+            distCipToTs.put(cip.toString(),ReadClickhouseData.currentHour);
+        }
+        Map<String, Long> sortDistCip = sortMapByValue(distCipToTs);
+        edgeDocument.addAttribute("DIST_CIP",sortDistCip.keySet().toArray());
+        edgeDocument.addAttribute("DIST_CIP_TS",sortDistCip.values().toArray());
+    }
+    /**
+     * 使用 Map按value进行排序
+     */
+    private Map<String, Long> sortMapByValue(Map<String, Long> oriMap) {
+        if (oriMap == null || oriMap.isEmpty()) {
+            return null;
+        }
+        Map<String, Long> sortedMap = new LinkedHashMap<>();
+        List<Map.Entry<String, Long>> entryList = new ArrayList<>(oriMap.entrySet());
+        entryList.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
+        if(entryList.size() > 100){
+            for(Map.Entry<String, Long> set:entryList.subList(0, 100)){
+                sortedMap.put(set.getKey(), set.getValue());
+            }
+        }else {
+            for(Map.Entry<String, Long> set:entryList){
+                sortedMap.put(set.getKey(), set.getValue());
+            }
+        }
+        return sortedMap;
+    }
+    private Object[] distinctIp(Object[] distCipTotalsSrc,Object[] distCipRecentsSrc){
+        HashSet<Object> dIpSet = new HashSet<>();
+        dIpSet.addAll(Arrays.asList(distCipRecentsSrc));
+        dIpSet.addAll(Arrays.asList(distCipTotalsSrc));
+        Object[] distCipTotals = dIpSet.toArray();
+        if (distCipTotals.length > 100) {
+            System.arraycopy(distCipTotals, 0, distCipTotals, 0, 100);
+        }
+        return distCipTotals;
+    }
 }
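
Note on distinctIp(): System.arraycopy with the same array as both source and destination and identical offsets leaves the array exactly as it was, so the length > 100 branch above does not actually shrink the merged result; the 100-entry cap only takes effect on the update path, where sortMapByValue() trims to subList(0, 100). If the cap is also intended on the merge path, a minimal corrected sketch (same signature, Arrays.copyOf instead of the self-copy) would be:

    // Sketch: merge two distinct-IP sets and truncate to at most 100 entries.
    private Object[] distinctIp(Object[] distCipTotalsSrc, Object[] distCipRecentsSrc) {
        HashSet<Object> dIpSet = new HashSet<>();
        dIpSet.addAll(Arrays.asList(distCipRecentsSrc));
        dIpSet.addAll(Arrays.asList(distCipTotalsSrc));
        Object[] distCipTotals = dIpSet.toArray();
        if (distCipTotals.length > 100) {
            // Arrays.copyOf returns a new, shorter array; arraycopy onto itself does not.
            distCipTotals = Arrays.copyOf(distCipTotals, 100);
        }
        return distCipTotals;
    }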

View File

@@ -13,11 +13,11 @@ import java.util.concurrent.CountDownLatch;
 public class Relationship extends Thread {
     private static final Logger LOG = LoggerFactory.getLogger(Relationship.class);
-    protected HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap;
-    protected ArangoDBConnect arangoManger;
-    protected String collectionName;
-    protected ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
-    protected CountDownLatch countDownLatch;
+    private HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap;
+    private ArangoDBConnect arangoManger;
+    private String collectionName;
+    private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
+    private CountDownLatch countDownLatch;
     public Relationship(HashMap<String, HashMap<String, BaseEdgeDocument>> newDocumentHashMap,
                         ArangoDBConnect arangoManger,
@@ -98,7 +98,7 @@ public class Relationship extends Thread {
         updateFoundTime(newEdgeDocument,historyEdgeDocument);
         setSchemaCntByHistory(historyEdgeDocument,"TLS_CNT_RECENT","TLS_CNT_TOTAL",newEdgeDocument);
         setSchemaCntByHistory(historyEdgeDocument,"HTTP_CNT_RECENT","HTTP_CNT_TOTAL",newEdgeDocument);
-        // updateDistinctClientIp(newEdgeDocument,historyEdgeDocument);
+        setSchemaCntByHistory(historyEdgeDocument,"DNS_CNT_RECENT","DNS_CNT_TOTAL",newEdgeDocument);
     }
     protected void updateFoundTime(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument historyEdgeDocument){
@@ -106,13 +106,13 @@ public class Relationship extends Thread {
         historyEdgeDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
     }
-    protected void setSchemaCntByHistory(BaseEdgeDocument historyEdgeDocument,String schema,String totalSchema,BaseEdgeDocument newEdgeDocument){
+    private void setSchemaCntByHistory(BaseEdgeDocument historyEdgeDocument,String schema,String totalSchema,BaseEdgeDocument newEdgeDocument){
         long countTotal = Long.parseLong(newEdgeDocument.getAttribute(totalSchema).toString());
         long updateCountTotal = Long.parseLong(historyEdgeDocument.getAttribute(totalSchema).toString());
         ArrayList<Long> cntRecent = (ArrayList<Long>) historyEdgeDocument.getAttribute(schema);
         Long[] cntRecentsSrc = cntRecent.toArray(new Long[cntRecent.size()]);
-        Long[] cntRecentsDst = new Long[7];
+        Long[] cntRecentsDst = new Long[24];
         System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
         cntRecentsDst[0] = countTotal;
@@ -122,41 +122,9 @@ public class Relationship extends Thread {
     protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
         mergeFoundTime(properties, schemaEdgeDoc);
-        // mergeDistinctClientIp(properties,schemaEdgeDoc);
     }
-    protected void mergeDistinctClientIp(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc){
-        String[] schemaDistCipRecents = (String[]) schemaEdgeDoc.getAttribute("DIST_CIP_RECENT");
-        String[] distCipRecents = (String[]) properties.get("DIST_CIP_RECENT");
-        Object[] mergeClientIp = distinctIp(schemaDistCipRecents, distCipRecents);
-        properties.put("DIST_CIP_RECENT", mergeClientIp);
-        properties.put("DIST_CIP_TOTAL",mergeClientIp);
-    }
-    protected void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument edgeDocument){
-        ArrayList<String> distCipTotal = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP_TOTAL");
-        String[] distCipTotalsSrc = distCipTotal.toArray(new String[distCipTotal.size()]);
-        Object[] distCipRecentsSrc = (Object[])newEdgeDocument.getAttribute("DIST_CIP_RECENT");
-        if (distCipTotalsSrc.length == 30) {
-            Object[] distCipTotals = distinctIp(distCipTotalsSrc, distCipRecentsSrc);
-            edgeDocument.addAttribute("DIST_CIP_TOTAL", distCipTotals);
-        }
-        edgeDocument.addAttribute("DIST_CIP_RECENT", distCipRecentsSrc);
-    }
-    protected Object[] distinctIp(Object[] distCipTotalsSrc,Object[] distCipRecentsSrc){
-        HashSet<Object> dIpSet = new HashSet<>();
-        dIpSet.addAll(Arrays.asList(distCipRecentsSrc));
-        dIpSet.addAll(Arrays.asList(distCipTotalsSrc));
-        Object[] distCipTotals = dIpSet.toArray();
-        if (distCipTotals.length > 30) {
-            System.arraycopy(distCipTotals, 0, distCipTotals, 0, 30);
-        }
-        return distCipTotals;
-    }
-    protected void mergeFoundTime(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
+    private void mergeFoundTime(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
         long schemaFirstFoundTime = Long.parseLong(schemaEdgeDoc.getAttribute("FIRST_FOUND_TIME").toString());
         long firstFoundTime = Long.parseLong(properties.get("FIRST_FOUND_TIME").toString());
         properties.put("FIRST_FOUND_TIME", schemaFirstFoundTime < firstFoundTime ? schemaFirstFoundTime : firstFoundTime);
@@ -165,19 +133,19 @@ public class Relationship extends Thread {
properties.put("LAST_FOUND_TIME", schemaLastFoundTime > lastFoundTime ? schemaLastFoundTime : lastFoundTime); properties.put("LAST_FOUND_TIME", schemaLastFoundTime > lastFoundTime ? schemaLastFoundTime : lastFoundTime);
} }
protected void setSchemaCount(String schema, BaseEdgeDocument schemaEdgeDoc, Map<String, Object> properties) { private void setSchemaCount(String schema, BaseEdgeDocument schemaEdgeDoc, Map<String, Object> properties) {
switch (schema) { switch (schema) {
case "HTTP": case "HTTP":
long httpCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString()); long httpCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
properties.put("HTTP_CNT_TOTAL", httpCntTotal); properties.put("HTTP_CNT_TOTAL", httpCntTotal);
long[] httpCntRecentsDst = new long[7]; long[] httpCntRecentsDst = new long[24];
httpCntRecentsDst[0] = httpCntTotal; httpCntRecentsDst[0] = httpCntTotal;
properties.put("HTTP_CNT_RECENT", httpCntRecentsDst); properties.put("HTTP_CNT_RECENT", httpCntRecentsDst);
break; break;
case "SSL": case "SSL":
long tlsCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString()); long tlsCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
properties.put("TLS_CNT_TOTAL", tlsCntTotal); properties.put("TLS_CNT_TOTAL", tlsCntTotal);
long[] tlsCntRecentsDst = new long[7]; long[] tlsCntRecentsDst = new long[24];
tlsCntRecentsDst[0] = tlsCntTotal; tlsCntRecentsDst[0] = tlsCntTotal;
properties.put("TLS_CNT_RECENT", tlsCntRecentsDst); properties.put("TLS_CNT_RECENT", tlsCntRecentsDst);
break; break;
@@ -189,10 +157,15 @@ public class Relationship extends Thread {
     private void checkSchemaProperty(Map<String, Object> properties){
         if (!properties.containsKey("TLS_CNT_TOTAL")){
             properties.put("TLS_CNT_TOTAL",0L);
-            properties.put("TLS_CNT_RECENT",new long[7]);
-        }else if (!properties.containsKey("HTTP_CNT_TOTAL")){
+            properties.put("TLS_CNT_RECENT",new long[24]);
+        }
+        if (!properties.containsKey("HTTP_CNT_TOTAL")){
             properties.put("HTTP_CNT_TOTAL",0L);
-            properties.put("HTTP_CNT_RECENT",new long[7]);
+            properties.put("HTTP_CNT_RECENT",new long[24]);
+        }
+        if (!properties.containsKey("DNS_CNT_TOTAL")){
+            properties.put("DNS_CNT_TOTAL",0L);
+            properties.put("DNS_CNT_RECENT",new long[24]);
         }
     }
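
Note: the *_CNT_RECENT arrays grow from 7 to 24 slots in this file, which lines up with the job now reading one-hour windows (a 24-run history instead of 7; the "last 24 hours" reading is an assumption, the diff itself only changes the length). setSchemaCntByHistory() maintains the window by shifting the stored counts one slot towards the end and writing the newest COUNT_TOTAL into slot 0; documents that still carry the old 7-slot arrays also copy cleanly into the longer destination. Roughly:

    // Sketch of the sliding-window update in setSchemaCntByHistory(), with example values.
    Long[] history = {5L, 3L, 9L};                                   // newest-first counts from the stored document
    Long[] shifted = new Long[24];                                   // new, larger window
    System.arraycopy(history, 0, shifted, 1, history.length - 1);    // shift right, dropping the oldest entry
    shifted[0] = 7L;                                                 // this run's COUNT_TOTAL
    // shifted is now [7, 5, 3, null, ..., null]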

View File

@@ -21,11 +21,11 @@ import java.util.concurrent.CountDownLatch;
 public class Vertex extends Thread{
     private static final Logger LOG = LoggerFactory.getLogger(Vertex.class);
-    protected HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap;
-    protected ArangoDBConnect arangoManger;
-    protected String collectionName;
-    protected ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
-    protected CountDownLatch countDownLatch;
+    private HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap;
+    private ArangoDBConnect arangoManger;
+    private String collectionName;
+    private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
+    private CountDownLatch countDownLatch;
     public Vertex(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
                   ArangoDBConnect arangoManger,

View File

@@ -10,6 +10,7 @@ public class IpLearningApplicationTest {
     private static final Logger LOG = LoggerFactory.getLogger(IpLearningApplicationTest.class);
     public static void main(String[] args) {
+        long start = System.currentTimeMillis();
         LOG.info("Ip Learning Application开始运行");
         BaseArangoData baseArangoData = new BaseArangoData();
         baseArangoData.baseDocumentDataMap();
@@ -17,6 +18,8 @@ public class IpLearningApplicationTest {
LOG.info("历史数据读取完成,开始更新数据"); LOG.info("历史数据读取完成,开始更新数据");
UpdateGraphData updateGraphData = new UpdateGraphData(); UpdateGraphData updateGraphData = new UpdateGraphData();
updateGraphData.updateArango(); updateGraphData.updateArango();
long last = System.currentTimeMillis();
LOG.info("共计运行时间:"+(last - start));
} }
} }

View File

@@ -13,5 +13,6 @@ update.arango.batch=10000
 thread.pool.number=10
 thread.await.termination.time=10
-read.clickhouse.max.time=1594376834
-read.clickhouse.min.time=1593676953
+read.clickhouse.max.time=1594627747
+#read.clickhouse.min.time=1594622638
+read.clickhouse.min.time=1593676807

View File

@@ -1,5 +1,6 @@
 package cn.ac.iie;
+import java.util.ArrayList;
 import java.util.Arrays;
 public class ArrayTest {
@@ -8,6 +9,17 @@ public class ArrayTest {
         long[] longs1 = new long[7];
         System.arraycopy(longs, 0, longs1, 1, longs.length - 1);
         longs1[0] = 8;
-        System.out.println(Arrays.toString(longs1));
+        // System.out.println(Arrays.toString(longs1));
+        ArrayList<Long> longs2 = new ArrayList<>();
+        longs2.add(3L);
+        longs2.add(34L);
+        longs2.add(312L);
+        longs2.add(12433L);
+        longs2.add(34L);
+        longs2.add(4124L);
+        System.out.println(longs2.subList(0,3).size());
+        System.out.println(longs2.subList(0,3).get(3));
     }
 }
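
Note: in the scratch test above, longs2.subList(0,3) is a three-element view with valid indices 0..2, so size() prints 3 but the final get(3) throws IndexOutOfBoundsException at runtime. If the intent was to print the last element of the sub-list, a corrected probe would be:

    // subList(0, 3) covers indices 0..2 of longs2; get(2) is the last element of the view.
    System.out.println(longs2.subList(0, 3).size());   // 3
    System.out.println(longs2.subList(0, 3).get(2));   // 312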