14 Commits

Author SHA1 Message Date
wanglihui
74b1fe6616 Switch arangoDB reads to paged reads. 2020-08-20 09:21:47 +08:00
wanglihui
0cb75f3eab Distinguish link info by server vs. client IP type 2020-08-19 13:58:27 +08:00
wanglihui
611335c053 YSP analysis Spark test 2020-08-12 15:53:55 +08:00
wanglihui
3b0a0517f5 ignore scala.xml 2020-08-12 14:49:00 +08:00
wanglihui
885be09053 Integrate the YSP project 2020-08-12 14:42:32 +08:00
wanglihui
55879a2c32 YSP analysis, Spark version 2020-08-11 15:18:45 +08:00
wanglihui
ad6582893b merge from master 2020-08-11 09:30:50 +08:00
wanglihui
9a5cf0126b Merge branch 'master' into ip-learning-graph-datacenter 2020-08-11 09:28:54 +08:00
wanglihui
10c742e9c2 Merge branch 'master' into ip-learning-graph-datacenter
# Conflicts:
#	IP-learning-graph/src/main/resources/clickhouse.properties
2020-08-06 16:51:47 +08:00
wanglihui
c391fbffba First commit of the Spark version of the IP Learning tsg project 2020-08-06 16:42:35 +08:00
wanglihui
a034238679 delete directory ip-learning 2020-08-06 16:36:40 +08:00
wanglihui
95603676bb tsg iplearning data-center deployment version 2020-08-06 13:59:28 +08:00
wanglihui
c7af3cf247 tsg iplearning data-center deployment version 2020-08-06 13:39:41 +08:00
wanglihui
87010127ae tsg iplearning data-center deployment version 2020-08-05 14:24:37 +08:00
62 changed files with 315 additions and 2255 deletions

View File

@@ -13,8 +13,7 @@ public class ApplicationConfig {
public static final Integer ARANGODB_TTL = ConfigUtils.getIntProperty( "arangoDB.ttl");
public static final Integer ARANGODB_BATCH = ConfigUtils.getIntProperty( "arangoDB.batch");
public static final Integer UPDATE_ARANGO_BATCH = ConfigUtils.getIntProperty("update.arango.batch");
public static final String ARANGODB_READ_LIMIT = ConfigUtils.getStringProperty("arangoDB.read.limit");
public static final Integer UPDATE_ARANGO_BATCH =ConfigUtils.getIntProperty("update.arango.batch");
public static final Integer THREAD_POOL_NUMBER = ConfigUtils.getIntProperty( "thread.pool.number");
public static final Integer THREAD_AWAIT_TERMINATION_TIME = ConfigUtils.getIntProperty( "thread.await.termination.time");
@@ -22,11 +21,19 @@ public class ApplicationConfig {
public static final Long READ_CLICKHOUSE_MAX_TIME = ConfigUtils.getLongProperty("read.clickhouse.max.time");
public static final Long READ_CLICKHOUSE_MIN_TIME = ConfigUtils.getLongProperty("read.clickhouse.min.time");
public static final Integer TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("time.limit.type");
public static final Integer CLICKHOUSE_TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("clickhouse.time.limit.type");
public static final Integer UPDATE_INTERVAL = ConfigUtils.getIntProperty("update.interval");
public static final Integer DISTINCT_CLIENT_IP_NUM = ConfigUtils.getIntProperty("distinct.client.ip.num");
public static final Integer RECENT_COUNT_HOUR = ConfigUtils.getIntProperty("recent.count.hour");
public static final String TOP_DOMAIN_FILE_NAME = ConfigUtils.getStringProperty("top.domain.file.name");
public static final String ARANGODB_READ_LIMIT = ConfigUtils.getStringProperty("arangoDB.read.limit");
public static final Integer ARANGO_TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("arango.time.limit.type");
public static final Long READ_ARANGO_MAX_TIME = ConfigUtils.getLongProperty("read.arango.max.time");
public static final Long READ_ARANGO_MIN_TIME = ConfigUtils.getLongProperty("read.arango.min.time");
}
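The new constants map to keys added to application.properties in this same changeset; for reference, the corresponding entries are:

arango.time.limit.type=0
read.arango.max.time=1571245220
read.arango.min.time=1571245210
arangoDB.read.limit=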

View File

@@ -1,7 +1,7 @@
package cn.ac.iie.dao;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.service.ingestion.ReadHistoryArangoData;
import cn.ac.iie.service.read.ReadHistoryArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import cn.ac.iie.utils.ExecutorThreadPool;
import com.arangodb.ArangoCursor;
@@ -10,84 +10,120 @@ import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
* Fetch arangoDB history data
*
* @author wlh
*/
public class BaseArangoData {
private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseDocument>> historyVertexFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseDocument>> historyVertexIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseDocument>> historyVertexSubscriberMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnAddressIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationIpVisitFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnSameFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer,ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationSubsciberLocateIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexSubscriberMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnAddressIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationIpVisitFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnSameFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationSubsciberLocateIpMap = new ConcurrentHashMap<>();
private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
private ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
<T extends BaseDocument> void readHistoryData(String table,
ConcurrentHashMap<Integer,ConcurrentHashMap<String, T>> map,
ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> historyMap,
Class<T> type) {
try {
LOG.info("开始更新"+table);
LOG.info("开始更新" + table);
long start = System.currentTimeMillis();
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++){
map.put(i,new ConcurrentHashMap<>());
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
historyMap.put(i, new ConcurrentHashMap<>());
}
CountDownLatch countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
long[] timeRange = getTimeRange(table);
// long[] timeRange = getTimeRange(table);
Long countTotal = getCountTotal(table);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
String sql = getQuerySql(timeRange, i, table);
ReadHistoryArangoData<T> readHistoryArangoData =
new ReadHistoryArangoData<>(arangoDBConnect, sql, map,type,table,countDownLatch);
// String sql = getQuerySql(timeRange, i, table);
String sql = getQuerySql(countTotal, i, table);
ReadHistoryArangoData<T> readHistoryArangoData = new ReadHistoryArangoData<>(arangoDBConnect, sql, historyMap, type, table, countDownLatch);
threadPool.executor(readHistoryArangoData);
}
countDownLatch.await();
long last = System.currentTimeMillis();
LOG.info("读取"+table+" arangoDB 共耗时:"+(last-start));
}catch (Exception e){
LOG.info("读取" + table + " arangoDB 共耗时:" + (last - start));
} catch (Exception e) {
e.printStackTrace();
}
}
private long[] getTimeRange(String table){
private long[] getTimeRange(String table) {
long minTime = 0L;
long maxTime = 0L;
long startTime = System.currentTimeMillis();
String sql = "LET doc = (FOR doc IN "+table+" RETURN doc) return {max_time:MAX(doc[*].FIRST_FOUND_TIME),min_time:MIN(doc[*].FIRST_FOUND_TIME)}";
ArangoCursor<BaseDocument> timeDoc = arangoDBConnect.executorQuery(sql, BaseDocument.class);
try {
if (timeDoc != null){
while (timeDoc.hasNext()) {
BaseDocument doc = timeDoc.next();
maxTime = Long.parseLong(doc.getAttribute("max_time").toString()) + ApplicationConfig.THREAD_POOL_NUMBER;
minTime = Long.parseLong(doc.getAttribute("min_time").toString());
String sql = "LET doc = (FOR doc IN " + table + " RETURN doc) return {max_time:MAX(doc[*].FIRST_FOUND_TIME),min_time:MIN(doc[*].FIRST_FOUND_TIME)}";
switch (ApplicationConfig.ARANGO_TIME_LIMIT_TYPE) {
case 0:
ArangoCursor<BaseDocument> timeDoc = arangoDBConnect.executorQuery(sql, BaseDocument.class);
try {
if (timeDoc != null) {
while (timeDoc.hasNext()) {
BaseDocument doc = timeDoc.next();
maxTime = Long.parseLong(doc.getAttribute("max_time").toString()) + ApplicationConfig.THREAD_POOL_NUMBER;
minTime = Long.parseLong(doc.getAttribute("min_time").toString());
}
} else {
LOG.warn("获取ArangoDb时间范围为空");
}
} catch (Exception e) {
e.printStackTrace();
}
long lastTime = System.currentTimeMillis();
LOG.info(sql+"\n查询最大最小时间用时" + (lastTime - startTime));
}else {
LOG.warn("获取ArangoDb时间范围为空");
}
}catch (Exception e){
e.printStackTrace();
break;
case 1:
maxTime = ApplicationConfig.READ_ARANGO_MAX_TIME;
minTime = ApplicationConfig.READ_ARANGO_MIN_TIME;
break;
default:
}
long lastTime = System.currentTimeMillis();
LOG.info(sql + "\n查询最大最小时间用时" + (lastTime - startTime));
return new long[]{minTime, maxTime};
}
private String getQuerySql(long[] timeRange,int threadNumber,String table){
private Long getCountTotal(String table){
long start = System.currentTimeMillis();
Long cnt = 0L;
String sql = "RETURN LENGTH("+table+")";
try {
ArangoCursor<Long> longs = arangoDBConnect.executorQuery(sql, Long.class);
while (longs.hasNext()){
cnt = longs.next();
}
}catch (Exception e){
LOG.error(sql +"执行异常");
}
long last = System.currentTimeMillis();
LOG.info(sql+" 结果:"+cnt+" 执行时间:"+(last-start));
return cnt;
}
private String getQuerySql(long[] timeRange, int threadNumber, String table) {
long minTime = timeRange[0];
long maxTime = timeRange[1];
long diffTime = (maxTime - minTime) / ApplicationConfig.THREAD_POOL_NUMBER;
long maxThreadTime = minTime + (threadNumber + 1)* diffTime;
long maxThreadTime = minTime + (threadNumber + 1) * diffTime;
long minThreadTime = minTime + threadNumber * diffTime;
return "FOR doc IN "+table+" filter doc.FIRST_FOUND_TIME >= "+minThreadTime+" and doc.FIRST_FOUND_TIME <= "+maxThreadTime+" " + ApplicationConfig.ARANGODB_READ_LIMIT + " RETURN doc";
return "FOR doc IN " + table + " filter doc.FIRST_FOUND_TIME >= " + minThreadTime + " and doc.FIRST_FOUND_TIME <= " + maxThreadTime + " " + ApplicationConfig.ARANGODB_READ_LIMIT + " RETURN doc";
}
private String getQuerySql(Long cnt,int threadNumber, String table){
long sepNum = cnt / ApplicationConfig.THREAD_POOL_NUMBER + 1;
long offsetNum = threadNumber * sepNum;
return "FOR doc IN " + table + " limit "+offsetNum+","+sepNum+" RETURN doc";
}
}
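For reference, a minimal standalone sketch of the new count-based paging (the collection name, total count, and thread count are illustrative; only the LIMIT arithmetic mirrors getCountTotal/getQuerySql above):

public class PagingSketch {
    public static void main(String[] args) {
        long countTotal = 100_003L;                 // e.g. the result of RETURN LENGTH(FQDN)
        int threads = 10;                           // ApplicationConfig.THREAD_POOL_NUMBER
        long sepNum = countTotal / threads + 1;     // rows per thread, rounded up
        for (int threadNumber = 0; threadNumber < threads; threadNumber++) {
            long offsetNum = threadNumber * sepNum;
            // Each worker scans a disjoint LIMIT offset,count window of the collection.
            System.out.println("FOR doc IN FQDN limit " + offsetNum + "," + sepNum + " RETURN doc");
        }
    }
}

Unlike the old FIRST_FOUND_TIME range split, this partitions the collection evenly even when timestamps are skewed, which is presumably why the time-range path is commented out above.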

View File

@@ -15,7 +15,7 @@ import java.util.HashMap;
import java.util.function.Function;
import java.util.function.Supplier;
import static cn.ac.iie.service.ingestion.ReadClickhouseData.*;
import static cn.ac.iie.service.read.ReadClickhouseData.putMapByHashcode;
/**
* Read clickhouse data into maps
@@ -24,23 +24,25 @@ import static cn.ac.iie.service.ingestion.ReadClickhouseData.*;
public class BaseClickhouseData {
private static final Logger LOG = LoggerFactory.getLogger(BaseClickhouseData.class);
private static ClickhouseConnect manger = ClickhouseConnect.getInstance();
static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> newVertexFqdnMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> newVertexIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String,ArrayList<BaseDocument>>> newVertexSubscriberMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationFqdnAddressIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationIpVisitFqdnMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationSubsciberLocateIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationFqdnSameFqdnMap = new HashMap<>();
private static ClickhouseConnect manger = ClickhouseConnect.getInstance();
private DruidPooledConnection connection;
private Statement statement;
<T extends BaseDocument> void baseDocumentFromClickhouse(HashMap<Integer, HashMap<String, ArrayList<T>>> newMap,
Supplier<String> getSqlSupplier,
Function<ResultSet,T> formatResultFunc) {
Function<ResultSet,T> formatResultFunc){
long start = System.currentTimeMillis();
initializeMap(newMap);
String sql = getSqlSupplier.get();
LOG.info(sql);
try {
connection = manger.getConnection();
statement = connection.createStatement();
@@ -54,7 +56,7 @@ public class BaseClickhouseData {
}
}
long last = System.currentTimeMillis();
LOG.info(sql + "\n读取"+i+"条数据,运行时间:" + (last - start));
LOG.info("读取"+i+"条数据,运行时间:" + (last - start));
}catch (Exception e){
e.printStackTrace();
}finally {
@@ -65,7 +67,7 @@ public class BaseClickhouseData {
private <T extends BaseDocument> void initializeMap(HashMap<Integer, HashMap<String,ArrayList<T>>> map){
try {
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
map.put(i, new HashMap<>());
map.put(i, new HashMap<>(16));
}
}catch (Exception e){
e.printStackTrace();

View File

@@ -1,14 +1,13 @@
package cn.ac.iie.dao;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.service.ingestion.ReadClickhouseData;
import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.service.update.Document;
import cn.ac.iie.service.update.relationship.LocateFqdn2Ip;
import cn.ac.iie.service.update.relationship.LocateSubscriber2Ip;
import cn.ac.iie.service.update.relationship.SameFqdn2Fqdn;
import cn.ac.iie.service.update.relationship.VisitIp2Fqdn;
import cn.ac.iie.service.update.vertex.Fqdn;
import cn.ac.iie.service.update.vertex.Ip;
import cn.ac.iie.service.update.vertex.Subscriber;
import cn.ac.iie.utils.ArangoDBConnect;
import cn.ac.iie.utils.ExecutorThreadPool;
import com.arangodb.entity.BaseDocument;
@@ -36,41 +35,36 @@ public class UpdateGraphData {
private static final Logger LOG = LoggerFactory.getLogger(UpdateGraphData.class);
private static ExecutorThreadPool pool = ExecutorThreadPool.getInstance();
private static ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();
private static BaseArangoData baseArangoData = new BaseArangoData();
private static BaseClickhouseData baseClickhouseData = new BaseClickhouseData();
public void updateArango(){
long start = System.currentTimeMillis();
try {
updateDocument(newVertexFqdnMap, historyVertexFqdnMap, "FQDN",
Fqdn.class,BaseDocument.class,
ReadClickhouseData::getVertexFqdnSql,ReadClickhouseData::getVertexFqdnDocument);
ReadClickhouseData::getVertexFqdnSql, ReadClickhouseData::getVertexFqdnDocument);
updateDocument(newVertexIpMap,historyVertexIpMap,"IP",
Ip.class,BaseDocument.class,
ReadClickhouseData::getVertexIpSql,ReadClickhouseData::getVertexIpDocument);
updateDocument(newVertexSubscriberMap,historyVertexSubscriberMap,"SUBSCRIBER",
Subscriber.class,BaseDocument.class,
ReadClickhouseData::getVertexSubscriberSql,ReadClickhouseData::getVertexSubscriberDocument);
ReadClickhouseData::getVertexIpSql, ReadClickhouseData::getVertexIpDocument);
updateDocument(newRelationFqdnAddressIpMap,historyRelationFqdnAddressIpMap,"R_LOCATE_FQDN2IP",
LocateFqdn2Ip.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipFqdnAddressIpSql,ReadClickhouseData::getRelationFqdnAddressIpDocument);
ReadClickhouseData::getRelationshipFqdnAddressIpSql, ReadClickhouseData::getRelationFqdnAddressIpDocument);
// updateDocument(newRelationIpVisitFqdnMap,historyRelationIpVisitFqdnMap,"R_VISIT_IP2FQDN",
// VisitIp2Fqdn.class,BaseEdgeDocument.class,
// ReadClickhouseData::getRelationshipIpVisitFqdnSql,ReadClickhouseData::getRelationIpVisitFqdnDocument);
updateDocument(newRelationIpVisitFqdnMap,historyRelationIpVisitFqdnMap,"R_VISIT_IP2FQDN",
VisitIp2Fqdn.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipIpVisitFqdnSql, ReadClickhouseData::getRelationIpVisitFqdnDocument);
updateDocument(newRelationSubsciberLocateIpMap,historyRelationSubsciberLocateIpMap,"R_LOCATE_SUBSCRIBER2IP",
LocateSubscriber2Ip.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipSubsciberLocateIpSql,ReadClickhouseData::getRelationshipSubsciberLocateIpDocument);
updateDocument(newRelationFqdnSameFqdnMap,historyRelationFqdnSameFqdnMap,"R_SAME_ORIGIN_FQDN2FQDN",
SameFqdn2Fqdn.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipFqdnSameFqdnSql, ReadClickhouseData::getRelationshipFqdnSameFqdnDocument);
long last = System.currentTimeMillis();
LOG.info("iplearning application运行完毕用时"+(last - start));
LOG.info("更新图数据库时间共计"+(last - start));
}catch (Exception e){
e.printStackTrace();
}finally {
@@ -79,15 +73,13 @@ public class UpdateGraphData {
}
}
private <T extends BaseDocument> void updateDocument(HashMap<Integer, HashMap<String, ArrayList<T>>> newMap,
ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> historyMap,
String collection,
Class<? extends Document<T>> taskType,
Class<T> docmentType,
Supplier<String> getSqlSupplier,
Function<ResultSet,T> formatResultFunc
) {
Function<ResultSet,T> formatResultFunc) {
try {
baseArangoData.readHistoryData(collection,historyMap,docmentType);

View File

@@ -1,313 +0,0 @@
package cn.ac.iie.service.ingestion;
import cn.ac.iie.config.ApplicationConfig;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.regex.Pattern;
/**
* @author wlh
*/
public class ReadClickhouseData {
public static long currentHour = System.currentTimeMillis() / (60 * 60 * 1000) * 60 * 60;
private static Pattern pattern = Pattern.compile("^[\\d]*$");
private static final Logger LOG = LoggerFactory.getLogger(ReadClickhouseData.class);
private static long[] timeLimit = getTimeLimit();
private static long maxTime = timeLimit[0];
private static long minTime = timeLimit[1];
public static final Integer DISTINCT_CLIENT_IP_NUM = ApplicationConfig.DISTINCT_CLIENT_IP_NUM;
static final Integer RECENT_COUNT_HOUR = ApplicationConfig.RECENT_COUNT_HOUR;
public static final HashSet<String> PROTOCOL_SET;
static {
PROTOCOL_SET = new HashSet<>();
PROTOCOL_SET.add("HTTP");
PROTOCOL_SET.add("TLS");
PROTOCOL_SET.add("DNS");
}
public static BaseDocument getVertexFqdnDocument(ResultSet resultSet) {
BaseDocument newDoc = null;
try {
String fqdnName = resultSet.getString("FQDN");
if (isDomain(fqdnName)) {
long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
newDoc = new BaseDocument();
newDoc.setKey(fqdnName);
newDoc.addAttribute("FQDN_NAME", fqdnName);
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
}
} catch (Exception e) {
e.printStackTrace();
}
return newDoc;
}
public static BaseDocument getVertexIpDocument(ResultSet resultSet) {
BaseDocument newDoc = new BaseDocument();
try {
String ip = resultSet.getString("IP");
long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
long sessionCount = resultSet.getLong("SESSION_COUNT");
long bytesSum = resultSet.getLong("BYTES_SUM");
String ipType = resultSet.getString("ip_type");
String[] commonLinkInfos = (String[]) resultSet.getArray("common_link_info").getArray();
String commonLinkInfo = "";
if (commonLinkInfos.length > 1){
commonLinkInfo = commonLinkInfos[1];
}
newDoc.setKey(ip);
newDoc.addAttribute("IP", ip);
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
switch (ipType) {
case "client":
newDoc.addAttribute("CLIENT_SESSION_COUNT", sessionCount);
newDoc.addAttribute("CLIENT_BYTES_SUM", bytesSum);
newDoc.addAttribute("SERVER_SESSION_COUNT", 0L);
newDoc.addAttribute("SERVER_BYTES_SUM", 0L);
break;
case "server":
newDoc.addAttribute("SERVER_SESSION_COUNT", sessionCount);
newDoc.addAttribute("SERVER_BYTES_SUM", bytesSum);
newDoc.addAttribute("CLIENT_SESSION_COUNT", 0L);
newDoc.addAttribute("CLIENT_BYTES_SUM", 0L);
break;
default:
}
newDoc.addAttribute("COMMON_LINK_INFO", commonLinkInfo);
} catch (Exception e) {
e.printStackTrace();
}
return newDoc;
}
public static BaseDocument getVertexSubscriberDocument(ResultSet resultSet) {
BaseDocument newDoc = new BaseDocument();
try {
String subscriberId = resultSet.getString("common_subscriber_id");
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
newDoc.setKey(subscriberId);
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
newDoc.addAttribute("SUBSCRIBER_ID",subscriberId);
} catch (Exception e) {
e.printStackTrace();
}
return newDoc;
}
public static BaseEdgeDocument getRelationshipSubsciberLocateIpDocument(ResultSet resultSet) {
BaseEdgeDocument newDoc = new BaseEdgeDocument();
try {
String subscriberId = resultSet.getString("common_subscriber_id");
String framedIp = resultSet.getString("radius_framed_ip");
long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
long countTotal = resultSet.getLong("COUNT_TOTAL");
String key = subscriberId + "-" + framedIp;
newDoc.setKey(key);
newDoc.setFrom("SUBSCRIBER/" + subscriberId);
newDoc.setTo("IP/" + framedIp);
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
newDoc.addAttribute("COUNT_TOTAL", countTotal);
} catch (Exception e) {
e.printStackTrace();
}
return newDoc;
}
public static BaseEdgeDocument getRelationFqdnAddressIpDocument(ResultSet resultSet) {
BaseEdgeDocument newDoc = null;
try {
String vFqdn = resultSet.getString("FQDN");
if (isDomain(vFqdn)) {
String vIp = resultSet.getString("common_server_ip");
long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
long countTotal = resultSet.getLong("COUNT_TOTAL");
String schemaType = resultSet.getString("schema_type");
String[] distCipRecents = (String[]) resultSet.getArray("DIST_CIP_RECENT").getArray();
long[] clientIpTs = new long[distCipRecents.length];
for (int i = 0; i < clientIpTs.length; i++) {
clientIpTs[i] = currentHour;
}
String key = vFqdn + "-" + vIp;
newDoc = new BaseEdgeDocument();
newDoc.setKey(key);
newDoc.setFrom("FQDN/" + vFqdn);
newDoc.setTo("IP/" + vIp);
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
newDoc.addAttribute("DIST_CIP", distCipRecents);
newDoc.addAttribute("DIST_CIP_TS", clientIpTs);
newDoc.addAttribute("PROTOCOL_TYPE", schemaType);
checkSchemaProperty(newDoc, schemaType, countTotal);
}
} catch (Exception e) {
e.printStackTrace();
}
return newDoc;
}
public static BaseEdgeDocument getRelationIpVisitFqdnDocument(ResultSet resultSet) {
BaseEdgeDocument newDoc = null;
try {
String vFqdn = resultSet.getString("FQDN");
if (isDomain(vFqdn)) {
String vIp = resultSet.getString("common_client_ip");
String key = vIp + "-" + vFqdn;
long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
long countTotal = resultSet.getLong("COUNT_TOTAL");
String schemaType = resultSet.getString("schema_type");
newDoc = new BaseEdgeDocument();
newDoc.setKey(key);
newDoc.setFrom("IP/" + vIp);
newDoc.setTo("FQDN/" + vFqdn);
newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
newDoc.addAttribute("PROTOCOL_TYPE", schemaType);
checkSchemaProperty(newDoc, schemaType, countTotal);
}
} catch (Exception e) {
e.printStackTrace();
}
return newDoc;
}
public static <T extends BaseDocument> void putMapByHashcode(T newDoc, HashMap<Integer, HashMap<String, ArrayList<T>>> map) {
if (newDoc != null) {
String key = newDoc.getKey();
int hashCode = Math.abs(key.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
HashMap<String, ArrayList<T>> documentHashMap = map.getOrDefault(hashCode, new HashMap<>());
ArrayList<T> documentArrayList = documentHashMap.getOrDefault(key, new ArrayList<>());
documentArrayList.add(newDoc);
documentHashMap.put(key, documentArrayList);
}
}
private static boolean isDomain(String fqdn) {
try {
if (fqdn == null || fqdn.length() == 0){
return false;
}
if (fqdn.contains(":")){
String s = fqdn.split(":")[0];
if (s.contains(":")){
return false;
}
}
String[] fqdnArr = fqdn.split("\\.");
if (fqdnArr.length != 4) {
return true;
}
for (String f : fqdnArr) {
if (pattern.matcher(f).matches()) {
long i = Long.parseLong(f);
if (i < 0 || i > 255) {
return true;
}
} else {
return true;
}
}
} catch (Exception e) {
LOG.error("解析域名 " + fqdn + " 失败:\n" + e.toString());
}
return false;
}
private static void checkSchemaProperty(BaseEdgeDocument newDoc, String schema, long countTotal) {
long[] recentCnt = new long[RECENT_COUNT_HOUR];
recentCnt[0] = countTotal;
for (String protocol : PROTOCOL_SET) {
String protocolRecent = protocol + "_CNT_RECENT";
String protocolTotal = protocol + "_CNT_TOTAL";
if (protocol.equals(schema)) {
newDoc.addAttribute(protocolTotal, countTotal);
newDoc.addAttribute(protocolRecent, recentCnt);
} else {
newDoc.addAttribute(protocolTotal, 0L);
newDoc.addAttribute(protocolRecent, new long[RECENT_COUNT_HOUR]);
}
}
}
public static String getVertexFqdnSql() {
String where = "common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String sslSql = "SELECT ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni";
String httpSql = "SELECT http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host";
return "SELECT FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME FROM ((" + sslSql + ") UNION ALL (" + httpSql + ")) GROUP BY FQDN HAVING FQDN != ''";
}
public static String getVertexIpSql() {
String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String clientIpSql = "SELECT common_client_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_c2s_byte_num) as BYTES_SUM,groupUniqArray(2)(common_link_info) as common_link_info,'client' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
String serverIpSql = "SELECT common_server_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_s2c_byte_num) as BYTES_SUM,groupUniqArray(2)(common_link_info) as common_link_info,'server' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
return "SELECT * FROM((" + clientIpSql + ") UNION ALL (" + serverIpSql + "))";
}
public static String getRelationshipFqdnAddressIpSql() {
String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray("+DISTINCT_CLIENT_IP_NUM+")(common_client_ip) AS DIST_CIP_RECENT,'TLS' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray("+DISTINCT_CLIENT_IP_NUM+")(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
}
public static String getRelationshipIpVisitFqdnSql() {
String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
String httpSql = "SELECT http_host AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'HTTP' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_client_ip";
String sslSql = "SELECT ssl_sni AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'TLS' AS schema_type FROM tsg_galaxy_v3.connection_record_log WHERE common_schema_type = 'SSL' GROUP BY ssl_sni,common_client_ip";
return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
}
public static String getVertexSubscriberSql() {
String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
return "SELECT common_subscriber_id,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id";
}
public static String getRelationshipSubsciberLocateIpSql() {
String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_framed_ip != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
return "SELECT common_subscriber_id,radius_framed_ip,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME,COUNT(*) as COUNT_TOTAL FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id,radius_framed_ip";
}
private static long[] getTimeLimit() {
long maxTime = 0L;
long minTime = 0L;
switch (ApplicationConfig.TIME_LIMIT_TYPE) {
case 0:
maxTime = currentHour;
minTime = maxTime - ApplicationConfig.UPDATE_INTERVAL;
break;
case 1:
maxTime = ApplicationConfig.READ_CLICKHOUSE_MAX_TIME;
minTime = ApplicationConfig.READ_CLICKHOUSE_MIN_TIME;
break;
default:
}
return new long[]{maxTime, minTime};
}
}
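Two helpers above deserve a note. putMapByHashcode shards documents by Math.abs(key.hashCode()) % THREAD_POOL_NUMBER so that each worker thread owns one partition, and isDomain treats a value as a domain unless it is empty or parses as a dotted-quad IPv4 address with all four octets in 0..255. Illustrative calls (hypothetical, since the method is private; inputs are invented):

// isDomain("www.example.com") -> true   (three labels, kept as a domain)
// isDomain("10.0.0.1")        -> false  (valid IPv4, filtered out)
// isDomain("10.0.0.999")      -> true   (four labels, but an octet is out of range)
// isDomain("")                -> false  (empty input rejected)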

View File

@@ -1,115 +0,0 @@
package cn.ac.iie.service.ingestion;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.ArangoCursor;
import com.arangodb.entity.BaseDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import static cn.ac.iie.service.ingestion.ReadClickhouseData.*;
/**
* @author wlh
* Multithreaded full read of arangoDB history data into maps
*/
@SuppressWarnings("unchecked")
public class ReadHistoryArangoData<T extends BaseDocument> extends Thread {
private static final Logger LOG = LoggerFactory.getLogger(ReadHistoryArangoData.class);
private ArangoDBConnect arangoConnect;
private String query;
private ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> map;
private Class<T> type;
private String table;
private CountDownLatch countDownLatch;
public ReadHistoryArangoData(ArangoDBConnect arangoConnect,
String query,
ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> map,
Class<T> type,
String table,
CountDownLatch countDownLatch) {
this.arangoConnect = arangoConnect;
this.query = query;
this.map = map;
this.type = type;
this.table = table;
this.countDownLatch = countDownLatch;
}
@Override
public void run() {
try {
long s = System.currentTimeMillis();
ArangoCursor<T> docs = arangoConnect.executorQuery(query, type);
if (docs != null) {
List<T> baseDocuments = docs.asListRemaining();
int i = 0;
for (T doc : baseDocuments) {
String key = doc.getKey();
switch (table) {
case "R_LOCATE_FQDN2IP":
updateProtocolDocument(doc);
deleteDistinctClientIpByTime(doc);
break;
case "R_VISIT_IP2FQDN":
updateProtocolDocument(doc);
break;
default:
}
int hashCode = Math.abs(key.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
ConcurrentHashMap<String, T> tmpMap = map.get(hashCode);
tmpMap.put(key, doc);
i++;
}
long l = System.currentTimeMillis();
LOG.info(query + "\n读取" + i + "条数据,运行时间:" + (l - s));
}
} catch (Exception e) {
e.printStackTrace();
} finally {
countDownLatch.countDown();
LOG.info("本线程读取完毕,剩余线程数量:" + countDownLatch.getCount());
}
}
private void updateProtocolDocument(T doc) {
if (doc.getProperties().containsKey("PROTOCOL_TYPE")) {
for (String protocol : ReadClickhouseData.PROTOCOL_SET) {
String protocolRecent = protocol + "_CNT_RECENT";
ArrayList<Long> cntRecent = (ArrayList<Long>) doc.getAttribute(protocolRecent);
Long[] cntRecentsSrc = cntRecent.toArray(new Long[0]);
Long[] cntRecentsDst = new Long[RECENT_COUNT_HOUR];
System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
cntRecentsDst[0] = 0L;
doc.addAttribute(protocolRecent, cntRecentsDst);
}
}
}
private void deleteDistinctClientIpByTime(T doc) {
ArrayList<String> distCip = (ArrayList<String>) doc.getAttribute("DIST_CIP");
ArrayList<Long> distCipTs = (ArrayList<Long>) doc.getAttribute("DIST_CIP_TS");
distCipTs.add(currentHour - RECENT_COUNT_HOUR * 3600);
Collections.sort(distCipTs);
int index = distCipTs.indexOf(currentHour - RECENT_COUNT_HOUR * 3600);
String[] distCipArr = new String[index];
long[] disCipTsArr = new long[index];
if (distCip.size() + 1 == distCipTs.size()){
for (int i = 0; i < index; i++) {
distCipArr[i] = distCip.get(i);
disCipTsArr[i] = distCipTs.get(i);
}
}
doc.updateAttribute("DIST_CIP", distCipArr);
doc.updateAttribute("DIST_CIP_TS", disCipTsArr);
}
}
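updateProtocolDocument above implements an hourly sliding window: each run shifts a protocol's *_CNT_RECENT array right by one slot, drops the oldest bucket, and zeroes slot 0 for the new hour. A minimal sketch of just the shift, with RECENT_COUNT_HOUR shortened to 4 for readability:

public class RecentWindowSketch {
    public static void main(String[] args) {
        Long[] cntRecentsSrc = {5L, 3L, 2L, 1L};    // newest bucket first
        Long[] cntRecentsDst = new Long[4];
        System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
        cntRecentsDst[0] = 0L;                      // fresh bucket for the new hour
        System.out.println(java.util.Arrays.toString(cntRecentsDst));  // [0, 5, 3, 2]
    }
}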

View File

@@ -124,10 +124,5 @@ public class Document<T extends BaseDocument> extends Thread{
lastDoc.addAttribute(attribute,firstSumAttribute+lastSumAttribute);
}
protected void replaceAttribute(T firstDoc,T lastDoc,String attribute){
Object attributeObj = firstDoc.getAttribute(attribute);
lastDoc.addAttribute(attribute,attributeObj);
}
}

View File

@@ -1,6 +1,6 @@
package cn.ac.iie.service.update;
import cn.ac.iie.service.ingestion.ReadClickhouseData;
import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
@@ -24,11 +24,6 @@ public class Relationship extends Document<BaseEdgeDocument> {
super.updateFunction(newEdgeDocument,historyEdgeDocument);
}
@Override
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
}
protected void updateProcotol(BaseEdgeDocument historyEdgeDocument, String schema, BaseEdgeDocument newEdgeDocument){
String recentSchema = schema +"_CNT_RECENT";
String totalSchema = schema + "_CNT_TOTAL";
@@ -49,6 +44,11 @@ public class Relationship extends Document<BaseEdgeDocument> {
}
}
@Override
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
}
protected void mergeProtocol(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
String schema = lastDoc.getAttribute("PROTOCOL_TYPE").toString();
if (ReadClickhouseData.PROTOCOL_SET.contains(schema)){

View File

@@ -22,4 +22,19 @@ public class Vertex extends Document<BaseDocument> {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
}
@Override
protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
super.updateFunction(newDocument, historyDocument);
}
@Override
protected void mergeFunction(BaseDocument lastDoc,BaseDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
}
@Override
public void run() {
super.run();
}
}

View File

@@ -1,6 +1,5 @@
package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.ingestion.ReadClickhouseData;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
@@ -9,8 +8,8 @@ import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import static cn.ac.iie.service.ingestion.ReadClickhouseData.DISTINCT_CLIENT_IP_NUM;
import static cn.ac.iie.service.ingestion.ReadClickhouseData.currentHour;
import static cn.ac.iie.service.read.ReadClickhouseData.DISTINCT_CLIENT_IP_NUM;
import static cn.ac.iie.service.read.ReadClickhouseData.currentHour;
public class LocateFqdn2Ip extends Relationship {
@@ -26,7 +25,7 @@ public class LocateFqdn2Ip extends Relationship {
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument){
super.mergeFunction(lastDoc, newDocument);
mergeDistinctClientIp(lastDoc, newDocument);
mergeProtocol(lastDoc, newDocument);
putSumAttribute(lastDoc, newDocument,"CNT_TOTAL");
}
private void mergeDistinctClientIp(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument){
@@ -46,19 +45,17 @@ public class LocateFqdn2Ip extends Relationship {
@Override
protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
super.updateFunction(newEdgeDocument, historyEdgeDocument);
for (String schema:ReadClickhouseData.PROTOCOL_SET){
updateProcotol(historyEdgeDocument,schema,newEdgeDocument);
}
updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
putSumAttribute(newEdgeDocument, historyEdgeDocument,"CNT_TOTAL");
}
private void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument edgeDocument){
String[] distCip = (String[]) edgeDocument.getAttribute("DIST_CIP");
long[] distCipTs = (long[]) edgeDocument.getAttribute("DIST_CIP_TS");
ArrayList<String> distCip = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP");
ArrayList<Long> distCipTs = (ArrayList<Long>) edgeDocument.getAttribute("DIST_CIP_TS");
HashMap<String, Long> distCipToTs = new HashMap<>();
if (distCip.length == distCipTs.length){
for (int i = 0;i < distCip.length;i++){
distCipToTs.put(distCip[i],distCipTs[i]);
if (distCip.size() == distCipTs.size()){
for (int i = 0;i < distCip.size();i++){
distCipToTs.put(distCip.get(i),distCipTs.get(i));
}
}
Object[] distCipRecent = (Object[])newEdgeDocument.getAttribute("DIST_CIP");
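The cast change in updateDistinctClientIp reflects that attributes read back from ArangoDB arrive as java.util.ArrayList rather than Java arrays once documents round-trip through the driver's JSON deserialization (a likely motivation; the hunk itself only shows the cast swap). A self-contained sketch of the pairing logic with invented values:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;

public class DistCipSketch {
    public static void main(String[] args) {
        ArrayList<String> distCip = new ArrayList<>(Arrays.asList("1.2.3.4", "5.6.7.8"));
        ArrayList<Long> distCipTs = new ArrayList<>(Arrays.asList(1596425769L, 1596684142L));
        HashMap<String, Long> distCipToTs = new HashMap<>();
        if (distCip.size() == distCipTs.size()) {   // guard mirrored from the hunk above
            for (int i = 0; i < distCip.size(); i++) {
                distCipToTs.put(distCip.get(i), distCipTs.get(i));  // client IP -> timestamp
            }
        }
        System.out.println(distCipToTs);
    }
}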

View File

@@ -1,13 +1,11 @@
package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.ingestion.ReadClickhouseData;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
@@ -23,14 +21,12 @@ public class VisitIp2Fqdn extends Relationship {
@Override
protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
super.updateFunction(newEdgeDocument, historyEdgeDocument);
for (String schema: ReadClickhouseData.PROTOCOL_SET){
updateProcotol(historyEdgeDocument,schema,newEdgeDocument);
}
putSumAttribute(newEdgeDocument,historyEdgeDocument,"CNT_TOTAL");
}
@Override
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
mergeProtocol(lastDoc,newDocument);
putSumAttribute(lastDoc,newDocument,"CNT_TOTAL");
}
}

View File

@@ -16,20 +16,26 @@ public class Ip extends Vertex {
String collectionName,
ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
}
@Override
protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
super.updateFunction(newDocument, historyDocument);
updateIpByType(newDocument, historyDocument);
super.replaceAttribute(newDocument,historyDocument,"COMMON_LINK_INFO");
}
@Override
protected void mergeFunction(BaseDocument lastDoc, BaseDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
updateIpByType(lastDoc, newDocument);
mergeIpByType(lastDoc, newDocument);
}
private void mergeIpByType(BaseDocument lastDoc, BaseDocument newDocument) {
putSumAttribute(lastDoc,newDocument,"CLIENT_SESSION_COUNT");
putSumAttribute(lastDoc,newDocument,"CLIENT_BYTES_SUM");
putSumAttribute(lastDoc,newDocument,"SERVER_SESSION_COUNT");
putSumAttribute(lastDoc,newDocument,"SERVER_BYTES_SUM");
}
private void updateIpByType(BaseDocument newDocument, BaseDocument historyDocument) {

View File

@@ -2,9 +2,15 @@ package cn.ac.iie.test;
import cn.ac.iie.dao.UpdateGraphData;
/**
* iplearning program entry point
* @author wlh
*/
public class IpLearningApplicationTest {
public static void main(String[] args) {
UpdateGraphData updateGraphData = new UpdateGraphData();
updateGraphData.updateArango();

View File

@@ -16,7 +16,6 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
public class ArangoDBConnect {
@@ -24,10 +23,10 @@ public class ArangoDBConnect {
private static ArangoDB arangoDB = null;
private static ArangoDBConnect conn = null;
static {
getArangoDB();
getArangoDatabase();
}
private static void getArangoDB(){
private static void getArangoDatabase(){
arangoDB = new ArangoDB.Builder()
.maxConnections(ApplicationConfig.THREAD_POOL_NUMBER)
.host(ApplicationConfig.ARANGODB_HOST, ApplicationConfig.ARANGODB_PORT)
@@ -53,26 +52,45 @@ public class ArangoDBConnect {
arangoDB.shutdown();
}
}catch (Exception e){
LOG.error(e.getMessage());
e.printStackTrace();
}
}
public <T> ArangoCursor<T> executorQuery(String query,Class<T> type){
ArangoDatabase database = getDatabase();
Map<String, Object> bindVars = new MapBuilder().get();
AqlQueryOptions options = new AqlQueryOptions()
.ttl(ApplicationConfig.ARANGODB_TTL);
AqlQueryOptions options = new AqlQueryOptions().ttl(ApplicationConfig.ARANGODB_TTL);
try {
return database.query(query, bindVars, options, type);
}catch (Exception e){
LOG.error(e.getMessage());
e.printStackTrace();
return null;
}finally {
bindVars.clear();
}
}
public <T> void overwrite(List<T> docOverwrite, String collectionName){
@Deprecated
public <T> void insertAndUpdate(ArrayList<T> docInsert,ArrayList<T> docUpdate,String collectionName){
ArangoDatabase database = getDatabase();
try {
ArangoCollection collection = database.collection(collectionName);
if (!docInsert.isEmpty()){
collection.importDocuments(docInsert);
}
if (!docUpdate.isEmpty()){
collection.replaceDocuments(docUpdate);
}
}catch (Exception e){
System.out.println("更新失败");
e.printStackTrace();
}finally {
docInsert.clear();
docUpdate.clear();
}
}
public <T> void overwrite(ArrayList<T> docOverwrite,String collectionName){
ArangoDatabase database = getDatabase();
try {
ArangoCollection collection = database.collection(collectionName);
@@ -83,14 +101,16 @@ public class ArangoDBConnect {
MultiDocumentEntity<DocumentCreateEntity<T>> documentCreateEntityMultiDocumentEntity = collection.insertDocuments(docOverwrite, documentCreateOptions);
Collection<ErrorEntity> errors = documentCreateEntityMultiDocumentEntity.getErrors();
for (ErrorEntity errorEntity:errors){
LOG.error("写入arangoDB异常"+errorEntity.getErrorMessage());
LOG.debug("写入arangoDB异常"+errorEntity.getErrorMessage());
}
}
}catch (Exception e){
LOG.error("更新失败:"+e.toString());
System.out.println("更新失败:"+e.toString());
}finally {
docOverwrite.clear();
}
}
}
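A sketch of how a caller would use the new overwrite(ArrayList, String) signature; it assumes documentCreateOptions (configured outside this hunk) enables overwrite semantics so that inserts replace documents with existing keys:

import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;

public class OverwriteSketch {
    public static void main(String[] args) {
        ArangoDBConnect conn = ArangoDBConnect.getInstance();
        ArrayList<BaseDocument> batch = new ArrayList<>();
        BaseDocument doc = new BaseDocument();
        doc.setKey("www.example.com");                      // an existing key gets replaced
        doc.addAttribute("LAST_FOUND_TIME", 1596684142L);
        batch.add(doc);
        conn.overwrite(batch, "FQDN");                      // batch is cleared in the finally block
    }
}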

View File

@@ -2,8 +2,6 @@ package cn.ac.iie.utils;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.ResultSet;
@@ -12,7 +10,6 @@ import java.sql.Statement;
import java.util.Properties;
public class ClickhouseConnect {
private static final Logger LOG = LoggerFactory.getLogger(ClickhouseConnect.class);
private static DruidDataSource dataSource = null;
private static ClickhouseConnect dbConnect = null;
private static Properties props = new Properties();
@@ -46,7 +43,7 @@ public class ClickhouseConnect {
dataSource.setKeepAlive(true);
}
} catch (Exception e) {
LOG.error(e.getMessage());
e.printStackTrace();
}
}
@@ -88,7 +85,7 @@ public class ClickhouseConnect {
connection.close();
}
} catch (SQLException e) {
LOG.error(e.getMessage());
e.printStackTrace();
}
}
@@ -101,7 +98,7 @@ public class ClickhouseConnect {
pstm = connection.createStatement();
return pstm.executeQuery(query);
}catch (Exception e){
LOG.error(e.getMessage());
e.printStackTrace();
return null;
}
}

View File

@@ -1,12 +1,8 @@
package cn.ac.iie.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Properties;
public class ConfigUtils {
private static final Logger LOG = LoggerFactory.getLogger(ConfigUtils.class);
private static Properties propCommon = new Properties();
public static String getStringProperty(String key) {
@@ -29,12 +25,12 @@ public class ConfigUtils {
static {
try {
propCommon.load(ConfigUtils.class.getClassLoader().getResourceAsStream("application.properties"));
LOG.info("application.properties加载成功");
System.out.println("application.properties加载成功");
} catch (Exception e) {
propCommon = null;
LOG.error("配置加载失败");
System.err.println("配置加载失败");
}
}
}

View File

@@ -2,13 +2,14 @@ package cn.ac.iie.utils;
import cn.ac.iie.config.ApplicationConfig;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.*;
/**
* Thread pool management
* @author wlh
*/
public class ExecutorThreadPool {
private static final Logger LOG = LoggerFactory.getLogger(ExecutorThreadPool.class);
private static ExecutorService pool = null ;
private static ExecutorThreadPool poolExecutor = null;
@@ -19,9 +20,13 @@ public class ExecutorThreadPool {
private static void getThreadPool(){
ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
.setNameFormat("iplearning-application-pool-%d").build();
pool = new ThreadPoolExecutor(ApplicationConfig.THREAD_POOL_NUMBER, ApplicationConfig.THREAD_POOL_NUMBER,
0L, TimeUnit.SECONDS,
//Common Thread Pool
pool = new ThreadPoolExecutor(ApplicationConfig.THREAD_POOL_NUMBER, ApplicationConfig.THREAD_POOL_NUMBER*2,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<>(1024), namedThreadFactory, new ThreadPoolExecutor.AbortPolicy());
// pool = Executors.newFixedThreadPool(ApplicationConfig.THREAD_POOL_NUMBER);
}
public static ExecutorThreadPool getInstance(){
@@ -39,7 +44,7 @@ public class ExecutorThreadPool {
public void awaitThreadTask(){
try {
while (!pool.awaitTermination(ApplicationConfig.THREAD_AWAIT_TERMINATION_TIME, TimeUnit.SECONDS)) {
LOG.warn("线程池没有关闭");
System.out.println("线程池没有关闭");
}
} catch (InterruptedException e) {
e.printStackTrace();
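One subtlety of the new pool parameters: with core size THREAD_POOL_NUMBER, max size THREAD_POOL_NUMBER*2, and a bounded LinkedBlockingQueue(1024), threads beyond the core count are created only after the queue is completely full, and AbortPolicy then rejects further submissions. A scaled-down illustration (sizes invented):

import java.util.concurrent.*;

class PoolSketch {
    static final ExecutorService POOL = new ThreadPoolExecutor(
            2, 4,                                   // core, max: threads 3-4 start only once the queue is full
            0L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<>(8),           // 8 tasks queue up before the pool grows past core
            new ThreadPoolExecutor.AbortPolicy());  // past max + full queue: RejectedExecutionException
}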

View File

@@ -4,7 +4,10 @@ import cn.ac.iie.config.ApplicationConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.HashMap;

View File

@@ -1,27 +1,33 @@
#arangoDB configuration
arangoDB.host=192.168.40.182
#arangoDB.host=192.168.40.224
arangoDB.port=8529
arangoDB.user=upsert
arangoDB.password=ceiec2018
arangoDB.DB.name=ip-learning-test
#arangoDB.DB.name=tsg_galaxy_v3
arangoDB.batch=100000
arangoDB.ttl=3600
arangoDB.read.limit=
update.arango.batch=10000
thread.pool.number=10
thread.await.termination.time=10
#ClickHouse read time-range mode: 0 = read the past hour, 1 = use the explicit range below
time.limit.type=1
read.clickhouse.max.time=1596684142
read.clickhouse.min.time=1596425769
clickhouse.time.limit.type=0
read.clickhouse.max.time=1571245230
read.clickhouse.min.time=1571245220
#arangoDB read time-range mode: 0 = normal full read, 1 = use the explicit range below
arango.time.limit.type=0
read.arango.max.time=1571245220
read.arango.min.time=1571245210
update.interval=3600
distinct.client.ip.num=10000
recent.count.hour=24
top.domain.file.name=topDomain.txt
arangoDB.read.limit=

View File

@@ -1,9 +1,8 @@
drivers=ru.yandex.clickhouse.ClickHouseDriver
db.id=192.168.40.193:8123/tsg_galaxy_zx?socket_timeout=3600000
#db.id=192.168.40.186:8123/tsg_galaxy_v3?socket_timeout=300000
mdb.user=default
db.id=192.168.40.186:8123/tsg_galaxy_v3?socket_timeout=300000
mdb.password=111111
#db.id=192.168.40.224:8123/tsg_galaxy_v3?socket_timeout=300000
#mdb.password=ceiec2019
initialsize=1
minidle=1
maxactive=50

View File

@@ -4,18 +4,18 @@ log4j.logger.org.apache.http.wire=OFF
#Log4j
log4j.rootLogger=info,console,file
# Console log configuration
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
# File log configuration
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
# Log file path: set it to a directory that exists on the deployment host
#log4j.appender.file.file=/home/ceiec/iplearning/logs/ip-learning-application.log
#log4j.appender.file.file=/home/ceiec/iplearning/testLog/ip-learning-application.log
log4j.appender.file.file=./logs/ip-learning-application.log

View File

@@ -1,10 +1,27 @@
package cn.ac.iie;
import cn.ac.iie.dao.BaseArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.ArangoCursor;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
public class readHistoryDataTest {
public static void main(String[] args) {
BaseArangoData baseArangoData = new BaseArangoData();
ArangoDBConnect instance = ArangoDBConnect.getInstance();
// ArangoCursor<Long> baseDocuments = instance.executorQuery("RETURN LENGTH(R_LOCATE_FQDN2IP)", Long.class);
// while (baseDocuments.hasNext()){
// Long next = baseDocuments.next();
// System.out.println(next.toString());
// }
// instance.clean();
String sql = "FOR doc IN FQDN filter doc.FIRST_FOUND_TIME >= 1595423493 and doc.FIRST_FOUND_TIME <= 1595809766 limit 763,10 RETURN doc";
ArangoCursor<BaseDocument> baseDocuments = instance.executorQuery(sql, BaseDocument.class);
while (baseDocuments.hasNext()){
BaseDocument next = baseDocuments.next();
System.out.println(next.toString());
}
instance.clean();
}
}

View File

@@ -1,9 +0,0 @@
# Created by .ignore support plugin (hsz.mobi)
### Example user template template
### Example user template
# IntelliJ project files
.idea
*.iml
target
logs/

View File

@@ -1,90 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>cn.ac.iie</groupId>
<artifactId>ip-learning-java-test</artifactId>
<version>1.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.21</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.21</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.2.4</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.10</version>
</dependency>
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>config</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>com.arangodb</groupId>
<artifactId>arangodb-java-driver</artifactId>
<version>6.6.3</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.6</version>
<configuration>
<archive>
<manifest>
<mainClass>cn.ac.iie.test.IpLearningApplicationTest</mainClass>
</manifest>
</archive>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -1,39 +0,0 @@
package cn.ac.iie.config;
import cn.ac.iie.utils.ConfigUtils;
public class ApplicationConfig {
public static final String ARANGODB_HOST = ConfigUtils.getStringProperty( "arangoDB.host");
public static final Integer ARANGODB_PORT = ConfigUtils.getIntProperty("arangoDB.port");
public static final String ARANGODB_USER = ConfigUtils.getStringProperty( "arangoDB.user");
public static final String ARANGODB_PASSWORD = ConfigUtils.getStringProperty( "arangoDB.password");
public static final String ARANGODB_DB_NAME = ConfigUtils.getStringProperty( "arangoDB.DB.name");
public static final Integer ARANGODB_TTL = ConfigUtils.getIntProperty( "arangoDB.ttl");
public static final Integer ARANGODB_BATCH = ConfigUtils.getIntProperty( "arangoDB.batch");
public static final Integer UPDATE_ARANGO_BATCH =ConfigUtils.getIntProperty("update.arango.batch");
public static final Integer THREAD_POOL_NUMBER = ConfigUtils.getIntProperty( "thread.pool.number");
public static final Integer THREAD_AWAIT_TERMINATION_TIME = ConfigUtils.getIntProperty( "thread.await.termination.time");
public static final Long READ_CLICKHOUSE_MAX_TIME = ConfigUtils.getLongProperty("read.clickhouse.max.time");
public static final Long READ_CLICKHOUSE_MIN_TIME = ConfigUtils.getLongProperty("read.clickhouse.min.time");
public static final Integer CLICKHOUSE_TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("clickhouse.time.limit.type");
public static final Integer UPDATE_INTERVAL = ConfigUtils.getIntProperty("update.interval");
public static final Integer DISTINCT_CLIENT_IP_NUM = ConfigUtils.getIntProperty("distinct.client.ip.num");
public static final Integer RECENT_COUNT_HOUR = ConfigUtils.getIntProperty("recent.count.hour");
public static final String TOP_DOMAIN_FILE_NAME = ConfigUtils.getStringProperty("top.domain.file.name");
public static final String ARANGODB_READ_LIMIT = ConfigUtils.getStringProperty("arangoDB.read.limit");
public static final Integer ARANGO_TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("arango.time.limit.type");
public static final Long READ_ARANGO_MAX_TIME = ConfigUtils.getLongProperty("read.arango.max.time");
public static final Long READ_ARANGO_MIN_TIME = ConfigUtils.getLongProperty("read.arango.min.time");
}

View File

@@ -1,103 +0,0 @@
package cn.ac.iie.dao;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.service.read.ReadHistoryArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import cn.ac.iie.utils.ExecutorThreadPool;
import com.arangodb.ArangoCursor;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
* Fetch arangoDB history data
*
* @author wlh
*/
public class BaseArangoData {
private static final Logger LOG = LoggerFactory.getLogger(BaseArangoData.class);
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseDocument>> historyVertexSubscriberMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnAddressIpMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationIpVisitFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationFqdnSameFqdnMap = new ConcurrentHashMap<>();
static ConcurrentHashMap<Integer, ConcurrentHashMap<String, BaseEdgeDocument>> historyRelationSubsciberLocateIpMap = new ConcurrentHashMap<>();
private static ArangoDBConnect arangoDBConnect = ArangoDBConnect.getInstance();
private ExecutorThreadPool threadPool = ExecutorThreadPool.getInstance();
<T extends BaseDocument> void readHistoryData(String table,
ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> historyMap,
Class<T> type) {
try {
LOG.info("开始更新" + table);
long start = System.currentTimeMillis();
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
historyMap.put(i, new ConcurrentHashMap<>());
}
CountDownLatch countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
long[] timeRange = getTimeRange(table);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
String sql = getQuerySql(timeRange, i, table);
ReadHistoryArangoData<T> readHistoryArangoData = new ReadHistoryArangoData<>(arangoDBConnect, sql, historyMap, type, table, countDownLatch);
threadPool.executor(readHistoryArangoData);
}
countDownLatch.await();
long last = System.currentTimeMillis();
LOG.info("读取" + table + " arangoDB 共耗时:" + (last - start));
} catch (Exception e) {
e.printStackTrace();
}
}
private long[] getTimeRange(String table) {
long minTime = 0L;
long maxTime = 0L;
long startTime = System.currentTimeMillis();
String sql = "LET doc = (FOR doc IN " + table + " RETURN doc) return {max_time:MAX(doc[*].FIRST_FOUND_TIME),min_time:MIN(doc[*].FIRST_FOUND_TIME)}";
switch (ApplicationConfig.ARANGO_TIME_LIMIT_TYPE) {
case 0:
ArangoCursor<BaseDocument> timeDoc = arangoDBConnect.executorQuery(sql, BaseDocument.class);
try {
if (timeDoc != null) {
while (timeDoc.hasNext()) {
BaseDocument doc = timeDoc.next();
maxTime = Long.parseLong(doc.getAttribute("max_time").toString()) + ApplicationConfig.THREAD_POOL_NUMBER; // pad so the integer-division split below still reaches the true maximum
minTime = Long.parseLong(doc.getAttribute("min_time").toString());
}
} else {
LOG.warn("获取ArangoDb时间范围为空");
}
} catch (Exception e) {
e.printStackTrace();
}
break;
case 1:
maxTime = ApplicationConfig.READ_ARANGO_MAX_TIME;
minTime = ApplicationConfig.READ_ARANGO_MIN_TIME;
break;
default:
}
long lastTime = System.currentTimeMillis();
LOG.info(sql + "\nQuerying min/max time took " + (lastTime - startTime));
return new long[]{minTime, maxTime};
}
private String getQuerySql(long[] timeRange, int threadNumber, String table) {
long minTime = timeRange[0];
long maxTime = timeRange[1];
long diffTime = (maxTime - minTime) / ApplicationConfig.THREAD_POOL_NUMBER;
long maxThreadTime = minTime + (threadNumber + 1) * diffTime;
long minThreadTime = minTime + threadNumber * diffTime;
return "FOR doc IN " + table + " filter doc.FIRST_FOUND_TIME >= " + minThreadTime + " and doc.FIRST_FOUND_TIME <= " + maxThreadTime + " " + ApplicationConfig.ARANGODB_READ_LIMIT + " RETURN doc";
}
}
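
getQuerySql splits the [minTime, maxTime] window evenly across THREAD_POOL_NUMBER workers, each filtering on FIRST_FOUND_TIME. A minimal standalone sketch of that split, assuming illustrative values (collection "FQDN", four threads, invented timestamps) rather than the project's real config:

// Hypothetical illustration of the per-thread AQL windows produced above.
public class TimeSplitSketch {
    public static void main(String[] args) {
        long minTime = 1571241600L;          // invented
        long maxTime = 1571245200L;          // invented
        int threads = 4;                     // stands in for THREAD_POOL_NUMBER
        long diffTime = (maxTime - minTime) / threads;
        for (int t = 0; t < threads; t++) {
            long lo = minTime + t * diffTime;
            long hi = minTime + (t + 1) * diffTime;
            System.out.println("FOR doc IN FQDN filter doc.FIRST_FOUND_TIME >= "
                    + lo + " and doc.FIRST_FOUND_TIME <= " + hi + " RETURN doc");
        }
    }
}

Both bounds are inclusive, so a document sitting exactly on a window boundary is read by two adjacent threads; the keyed maps downstream should absorb such duplicates.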

View File

@@ -1,79 +0,0 @@
package cn.ac.iie.dao;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.utils.ClickhouseConnect;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.function.Function;
import java.util.function.Supplier;
import static cn.ac.iie.service.read.ReadClickhouseData.putMapByHashcode;
/**
* 读取clickhouse数据封装到map
* @author wlh
*/
public class BaseClickhouseData {
private static final Logger LOG = LoggerFactory.getLogger(BaseClickhouseData.class);
static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> newVertexFqdnMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseDocument>>> newVertexIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String,ArrayList<BaseDocument>>> newVertexSubscriberMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationFqdnAddressIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationIpVisitFqdnMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationSubsciberLocateIpMap = new HashMap<>();
static HashMap<Integer, HashMap<String, ArrayList<BaseEdgeDocument>>> newRelationFqdnSameFqdnMap = new HashMap<>();
private static ClickhouseConnect manger = ClickhouseConnect.getInstance();
private DruidPooledConnection connection;
private Statement statement;
<T extends BaseDocument> void baseDocumentFromClickhouse(HashMap<Integer, HashMap<String, ArrayList<T>>> newMap,
Supplier<String> getSqlSupplier,
Function<ResultSet,T> formatResultFunc){
long start = System.currentTimeMillis();
initializeMap(newMap);
String sql = getSqlSupplier.get();
LOG.info(sql);
try {
connection = manger.getConnection();
statement = connection.createStatement();
ResultSet resultSet = statement.executeQuery(sql);
int i = 0;
while (resultSet.next()) {
T newDoc = formatResultFunc.apply(resultSet);
if (newDoc != null) {
i+=1;
putMapByHashcode(newDoc, newMap);
}
}
long last = System.currentTimeMillis();
LOG.info("读取"+i+"条数据,运行时间:" + (last - start));
}catch (Exception e){
e.printStackTrace();
}finally {
manger.clear(statement,connection);
}
}
private <T extends BaseDocument> void initializeMap(HashMap<Integer, HashMap<String,ArrayList<T>>> map){
try {
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
map.put(i, new HashMap<>(16));
}
}catch (Exception e){
e.printStackTrace();
LOG.error("初始化数据失败");
}
}
}

View File

@@ -1,116 +0,0 @@
package cn.ac.iie.dao;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.service.update.Document;
import cn.ac.iie.service.update.relationship.LocateFqdn2Ip;
import cn.ac.iie.service.update.relationship.SameFqdn2Fqdn;
import cn.ac.iie.service.update.relationship.VisitIp2Fqdn;
import cn.ac.iie.service.update.vertex.Fqdn;
import cn.ac.iie.service.update.vertex.Ip;
import cn.ac.iie.utils.ArangoDBConnect;
import cn.ac.iie.utils.ExecutorThreadPool;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;
import java.util.function.Supplier;
import static cn.ac.iie.dao.BaseArangoData.*;
import static cn.ac.iie.dao.BaseClickhouseData.*;
/**
* Business class that updates the graph database
* @author wlh
*/
public class UpdateGraphData {
private static final Logger LOG = LoggerFactory.getLogger(UpdateGraphData.class);
private static ExecutorThreadPool pool = ExecutorThreadPool.getInstance();
private static ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();
private static BaseArangoData baseArangoData = new BaseArangoData();
private static BaseClickhouseData baseClickhouseData = new BaseClickhouseData();
public void updateArango(){
long start = System.currentTimeMillis();
try {
updateDocument(newVertexFqdnMap, historyVertexFqdnMap, "FQDN",
Fqdn.class,BaseDocument.class,
ReadClickhouseData::getVertexFqdnSql,ReadClickhouseData::getVertexFqdnDocument);
updateDocument(newVertexIpMap,historyVertexIpMap,"IP",
Ip.class,BaseDocument.class,
ReadClickhouseData::getVertexIpSql,ReadClickhouseData::getVertexIpDocument);
updateDocument(newRelationFqdnAddressIpMap,historyRelationFqdnAddressIpMap,"R_LOCATE_FQDN2IP",
LocateFqdn2Ip.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipFqdnAddressIpSql,ReadClickhouseData::getRelationFqdnAddressIpDocument);
updateDocument(newRelationIpVisitFqdnMap,historyRelationIpVisitFqdnMap,"R_VISIT_IP2FQDN",
VisitIp2Fqdn.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipIpVisitFqdnSql,ReadClickhouseData::getRelationIpVisitFqdnDocument);
updateDocument(newRelationFqdnSameFqdnMap,historyRelationFqdnSameFqdnMap,"R_SAME_ORIGIN_FQDN2FQDN",
SameFqdn2Fqdn.class,BaseEdgeDocument.class,
ReadClickhouseData::getRelationshipFqdnSameFqdnSql,ReadClickhouseData::getRelationshipFqdnSameFqdnDocument);
long last = System.currentTimeMillis();
LOG.info("更新图数据库时间共计:"+(last - start));
}catch (Exception e){
e.printStackTrace();
}finally {
arangoManger.clean();
pool.shutdown();
}
}
private <T extends BaseDocument> void updateDocument(HashMap<Integer, HashMap<String, ArrayList<T>>> newMap,
ConcurrentHashMap<Integer, ConcurrentHashMap<String, T>> historyMap,
String collection,
Class<? extends Document<T>> taskType,
Class<T> docmentType,
Supplier<String> getSqlSupplier,
Function<ResultSet,T> formatResultFunc) {
try {
baseArangoData.readHistoryData(collection,historyMap,docmentType);
LOG.info(collection+" 读取clickhouse,封装结果集");
baseClickhouseData.baseDocumentFromClickhouse(newMap, getSqlSupplier,formatResultFunc);
LOG.info(collection+" 开始更新");
long start = System.currentTimeMillis();
CountDownLatch countDownLatch = new CountDownLatch(ApplicationConfig.THREAD_POOL_NUMBER);
for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++){
HashMap<String, ArrayList<T>> tmpNewMap = newMap.get(i);
ConcurrentHashMap<String, T> tmpHisMap = historyMap.get(i);
Constructor constructor = taskType.getConstructor(
HashMap.class,
ArangoDBConnect.class,
String.class,
ConcurrentHashMap.class,
CountDownLatch.class);
Document docTask = (Document)constructor.newInstance(tmpNewMap, arangoManger, collection, tmpHisMap, countDownLatch);
pool.executor(docTask);
}
countDownLatch.await();
long last = System.currentTimeMillis();
LOG.info(collection+" 更新完毕,共耗时:"+(last-start));
}catch (Exception e){
e.printStackTrace();
}finally {
newMap.clear();
historyMap.clear();
}
}
}

View File

@@ -1,128 +0,0 @@
package cn.ac.iie.service.update;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Document<T extends BaseDocument> extends Thread{
private static final Logger LOG = LoggerFactory.getLogger(Document.class);
private HashMap<String, ArrayList<T>> newDocumentMap;
private ArangoDBConnect arangoManger;
private String collectionName;
private ConcurrentHashMap<String, T> historyDocumentMap;
private CountDownLatch countDownLatch;
Document(HashMap<String, ArrayList<T>> newDocumentMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, T> historyDocumentMap,
CountDownLatch countDownLatch) {
this.newDocumentMap = newDocumentMap;
this.arangoManger = arangoManger;
this.collectionName = collectionName;
this.historyDocumentMap = historyDocumentMap;
this.countDownLatch = countDownLatch;
}
@Override
public void run() {
long start = System.currentTimeMillis();
LOG.info("新读取数据"+newDocumentMap.size()+"条,历史数据"+historyDocumentMap.size()+"");
try {
Set<String> keySet = newDocumentMap.keySet();
ArrayList<T> resultDocumentList = new ArrayList<>();
int i = 0;
for (String key : keySet) {
ArrayList<T> newDocumentSchemaList = newDocumentMap.getOrDefault(key, null);
if (newDocumentSchemaList != null) {
T newDocument = mergeDocument(newDocumentSchemaList);
i += 1;
T historyDocument = historyDocumentMap.getOrDefault(key, null);
updateDocument(newDocument,historyDocument,resultDocumentList);
if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
arangoManger.overwrite(resultDocumentList, collectionName);
LOG.info("更新"+collectionName+":" + i);
i = 0;
}
}
}
if (i != 0) {
arangoManger.overwrite(resultDocumentList, collectionName);
LOG.info("更新"+collectionName+":" + i);
}
} catch (Exception e) {
e.printStackTrace();
LOG.error(e.toString());
}finally {
countDownLatch.countDown();
long last = System.currentTimeMillis();
LOG.info("本线程更新完毕,用时:"+(last-start)+",剩余线程数量:"+countDownLatch.getCount());
}
}
private void updateDocument(T newDocument, T historyDocument, ArrayList<T> resultDocumentList) {
if (historyDocument != null){
updateFunction(newDocument,historyDocument);
resultDocumentList.add(historyDocument);
}else {
resultDocumentList.add(newDocument);
}
}
protected void updateFunction(T newDocument, T historyDocument) {
Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
historyDocument.addAttribute("LAST_FOUND_TIME",lastFoundTime);
}
private T mergeDocument(ArrayList<T> newDocumentSchemaList){
if (newDocumentSchemaList == null || newDocumentSchemaList.isEmpty()){
return null;
}else if (newDocumentSchemaList.size() == 1){
return newDocumentSchemaList.get(0);
}else {
T newDocument = null;
for (T lastDoc:newDocumentSchemaList){
if (newDocument == null){
newDocument = lastDoc;
}else {
mergeFunction(lastDoc,newDocument);
}
}
return newDocument;
}
}
protected void mergeFunction(T lastDoc,T newDocument) {
putMinAttribute(lastDoc,newDocument,"FIRST_FOUND_TIME");
putMaxAttribute(lastDoc,newDocument,"LAST_FOUND_TIME");
}
protected void putMinAttribute(T firstDoc,T lastDoc,String attribute){
long firstMinAttribute = Long.parseLong(firstDoc.getAttribute(attribute).toString());
long lastMinAttribute = Long.parseLong(lastDoc.getAttribute(attribute).toString());
lastDoc.addAttribute(attribute,firstMinAttribute<lastMinAttribute? firstMinAttribute:lastMinAttribute);
}
protected void putMaxAttribute(T firstDoc,T lastDoc,String attribute){
long firstMaxAttribute = Long.parseLong(firstDoc.getAttribute(attribute).toString());
long lastMaxAttribute = Long.parseLong(lastDoc.getAttribute(attribute).toString());
lastDoc.addAttribute(attribute,firstMaxAttribute>lastMaxAttribute? firstMaxAttribute:lastMaxAttribute);
}
protected void putSumAttribute(T firstDoc,T lastDoc,String attribute){
long firstSumAttribute = Long.parseLong(firstDoc.getAttribute(attribute).toString());
long lastSumAttribute = Long.parseLong(lastDoc.getAttribute(attribute).toString());
lastDoc.addAttribute(attribute,firstSumAttribute+lastSumAttribute);
}
}
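
mergeDocument collapses same-key documents inside one batch through mergeFunction: FIRST_FOUND_TIME takes the minimum, LAST_FOUND_TIME the maximum. A standalone sketch of that rule on two BaseDocument instances (key and timestamps invented):

import com.arangodb.entity.BaseDocument;

// Demonstrates the min/max merge applied by mergeFunction; all values invented.
public class MergeSketch {
    public static void main(String[] args) {
        BaseDocument a = new BaseDocument("www.example.com");
        a.addAttribute("FIRST_FOUND_TIME", 1571241600L);
        a.addAttribute("LAST_FOUND_TIME", 1571241700L);
        BaseDocument b = new BaseDocument("www.example.com");
        b.addAttribute("FIRST_FOUND_TIME", 1571241500L);
        b.addAttribute("LAST_FOUND_TIME", 1571241800L);
        long first = Math.min(Long.parseLong(a.getAttribute("FIRST_FOUND_TIME").toString()),
                Long.parseLong(b.getAttribute("FIRST_FOUND_TIME").toString()));
        long last = Math.max(Long.parseLong(a.getAttribute("LAST_FOUND_TIME").toString()),
                Long.parseLong(b.getAttribute("LAST_FOUND_TIME").toString()));
        a.addAttribute("FIRST_FOUND_TIME", first); // 1571241500
        a.addAttribute("LAST_FOUND_TIME", last);   // 1571241800
        System.out.println(a.getProperties());
    }
}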

View File

@@ -1,76 +0,0 @@
package cn.ac.iie.service.update;
import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Relationship extends Document<BaseEdgeDocument> {
public Relationship(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap,arangoManger,collectionName,historyDocumentMap,countDownLatch);
}
@Override
protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument){
super.updateFunction(newEdgeDocument,historyEdgeDocument);
}
protected void updateProcotol(BaseEdgeDocument historyEdgeDocument, String schema, BaseEdgeDocument newEdgeDocument){
String recentSchema = schema +"_CNT_RECENT";
String totalSchema = schema + "_CNT_TOTAL";
long countTotal = Long.parseLong(newEdgeDocument.getAttribute(totalSchema).toString());
if (countTotal > 0L){
long updateCountTotal = Long.parseLong(historyEdgeDocument.getAttribute(totalSchema).toString());
Long[] cntRecent = (Long[]) historyEdgeDocument.getAttribute(recentSchema);
cntRecent[0] = countTotal;
historyEdgeDocument.addAttribute(recentSchema, cntRecent);
historyEdgeDocument.addAttribute(totalSchema, countTotal + updateCountTotal);
String hisProtocolType = historyEdgeDocument.getAttribute("PROTOCOL_TYPE").toString();
if (!hisProtocolType.contains(schema)){
hisProtocolType = hisProtocolType + "," + schema;
historyEdgeDocument.addAttribute("PROTOCOL_TYPE",hisProtocolType);
}
}
}
@Override
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
}
protected void mergeProtocol(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
String schema = lastDoc.getAttribute("PROTOCOL_TYPE").toString();
if (ReadClickhouseData.PROTOCOL_SET.contains(schema)){
setProtocolProperties(schema,newDocument,lastDoc);
}
}
private void setProtocolProperties(String protocol,BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument){
String protocolRecent = protocol +"_CNT_RECENT";
String protocolTotal = protocol + "_CNT_TOTAL";
putSumAttribute(lastDoc,newDocument,protocolTotal);
long[] cntRecents = (long[]) lastDoc.getAttribute(protocolRecent);
newDocument.addAttribute(protocolRecent, cntRecents);
String protocolType = newDocument.getAttribute("PROTOCOL_TYPE").toString();
newDocument.addAttribute("PROTOCOL_TYPE",addProcotolType(protocolType,protocol));
}
private String addProcotolType(String protocolType,String schema){
if (!protocolType.contains(schema)){
protocolType = protocolType + "," + schema;
}
return protocolType;
}
}
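
For every protocol P this class maintains a P_CNT_TOTAL scalar, a P_CNT_RECENT array whose slot 0 holds the newest hour's count, and a comma-separated PROTOCOL_TYPE list. A minimal sketch of one update step, assuming an array length of 24 (matching recent.count.hour) and invented counts:

// Sketch of the per-protocol bookkeeping performed by updateProcotol; values invented.
public class ProtocolCounterSketch {
    public static void main(String[] args) {
        long[] httpCntRecent = new long[24]; // 24 assumed from recent.count.hour
        long httpCntTotal = 120L;            // historical total (invented)
        long countThisHour = 7L;             // new batch count (invented)
        httpCntRecent[0] = countThisHour;    // newest slot
        httpCntTotal += countThisHour;
        String protocolType = "DNS";
        if (!protocolType.contains("HTTP")) {
            protocolType = protocolType + ",HTTP"; // record each protocol once
        }
        System.out.println(protocolType + ", HTTP_CNT_TOTAL=" + httpCntTotal);
    }
}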

View File

@@ -1,39 +0,0 @@
package cn.ac.iie.service.update;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
* @author wlh
* Update vertex data with multiple threads
*/
public class Vertex extends Document<BaseDocument> {
public Vertex(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
}
@Override
protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
super.updateFunction(newDocument, historyDocument);
}
@Override
protected void mergeFunction(BaseDocument lastDoc,BaseDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
}
@Override
public void run() {
super.run();
}
}

View File

@@ -1,94 +0,0 @@
package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import static cn.ac.iie.service.read.ReadClickhouseData.*;
public class LocateFqdn2Ip extends Relationship {
public LocateFqdn2Ip(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
}
@Override
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument){
super.mergeFunction(lastDoc, newDocument);
mergeDistinctClientIp(lastDoc, newDocument);
putSumAttribute(lastDoc, newDocument,"CNT_TOTAL");
}
private void mergeDistinctClientIp(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument){
HashSet<String> clientIpSet = new HashSet<>();
String[] distCips = (String[]) newDocument.getAttribute("DIST_CIP");
String[] lastDistCips = (String[]) lastDoc.getAttribute("DIST_CIP");
clientIpSet.addAll(Arrays.asList(distCips));
clientIpSet.addAll(Arrays.asList(lastDistCips));
long[] clientIpTs = new long[clientIpSet.size()];
for (int i = 0; i < clientIpTs.length; i++) {
clientIpTs[i] = currentHour;
}
newDocument.addAttribute("DIST_CIP", clientIpSet.toArray());
newDocument.addAttribute("DIST_CIP_TS", clientIpTs);
}
@Override
protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
super.updateFunction(newEdgeDocument, historyEdgeDocument);
updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
putSumAttribute(newEdgeDocument, historyEdgeDocument,"CNT_TOTAL");
}
private void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument edgeDocument){
ArrayList<String> distCip = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP");
ArrayList<Long> distCipTs = (ArrayList<Long>) edgeDocument.getAttribute("DIST_CIP_TS");
HashMap<String, Long> distCipToTs = new HashMap<>();
if (distCip.size() == distCipTs.size()){
for (int i = 0;i < distCip.size();i++){
distCipToTs.put(distCip.get(i),distCipTs.get(i));
}
}
Object[] distCipRecent = (Object[])newEdgeDocument.getAttribute("DIST_CIP");
for (Object cip:distCipRecent){
distCipToTs.put(cip.toString(), currentHour);
}
Map<String, Long> sortDistCip = sortMapByValue(distCipToTs);
edgeDocument.addAttribute("DIST_CIP",sortDistCip.keySet().toArray());
edgeDocument.addAttribute("DIST_CIP_TS",sortDistCip.values().toArray());
}
/**
* Sort the map entries by value in descending order
*/
private Map<String, Long> sortMapByValue(Map<String, Long> oriMap) {
if (oriMap == null || oriMap.isEmpty()) {
return null;
}
Map<String, Long> sortedMap = new LinkedHashMap<>();
List<Map.Entry<String, Long>> entryList = new ArrayList<>(oriMap.entrySet());
entryList.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
if(entryList.size() > DISTINCT_CLIENT_IP_NUM){
for(Map.Entry<String, Long> set:entryList.subList(0, DISTINCT_CLIENT_IP_NUM)){
sortedMap.put(set.getKey(), set.getValue());
}
}else {
for(Map.Entry<String, Long> set:entryList){
sortedMap.put(set.getKey(), set.getValue());
}
}
return sortedMap;
}
}
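
sortMapByValue orders the client IPs by their last-seen timestamp and updateDistinctClientIp keeps at most DISTINCT_CLIENT_IP_NUM of them. A standalone sketch of that sort-and-truncate step (the cap of 3 and the sample data are illustrative):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Sort client IPs by timestamp, newest first, then keep the top N.
public class DistCipSketch {
    public static void main(String[] args) {
        Map<String, Long> distCipToTs = new LinkedHashMap<>();
        distCipToTs.put("10.0.0.1", 1571241600L);
        distCipToTs.put("10.0.0.2", 1571245200L);
        distCipToTs.put("10.0.0.3", 1571238000L);
        distCipToTs.put("10.0.0.4", 1571244000L);
        int cap = 3; // stands in for DISTINCT_CLIENT_IP_NUM
        List<Map.Entry<String, Long>> entries = new ArrayList<>(distCipToTs.entrySet());
        entries.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
        Map<String, Long> kept = new LinkedHashMap<>();
        for (Map.Entry<String, Long> e : entries.subList(0, Math.min(cap, entries.size()))) {
            kept.put(e.getKey(), e.getValue());
        }
        System.out.println(kept.keySet()); // [10.0.0.2, 10.0.0.4, 10.0.0.1]
    }
}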

View File

@@ -1,21 +0,0 @@
package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class LocateSubscriber2Ip extends Relationship {
public LocateSubscriber2Ip(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
}
}

View File

@@ -1,32 +0,0 @@
package cn.ac.iie.service.update.relationship;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class VisitIp2Fqdn extends Relationship {
public VisitIp2Fqdn(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
}
@Override
protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
super.updateFunction(newEdgeDocument, historyEdgeDocument);
putSumAttribute(newEdgeDocument,historyEdgeDocument,"CNT_TOTAL");
}
@Override
protected void mergeFunction(BaseEdgeDocument lastDoc,BaseEdgeDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
putSumAttribute(lastDoc,newDocument,"CNT_TOTAL");
}
}

View File

@@ -1,21 +0,0 @@
package cn.ac.iie.service.update.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Fqdn extends Vertex {
public Fqdn(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
}
}

View File

@@ -1,48 +0,0 @@
package cn.ac.iie.service.update.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Ip extends Vertex {
public Ip(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
}
@Override
protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
super.updateFunction(newDocument, historyDocument);
updateIpByType(newDocument, historyDocument);
}
@Override
protected void mergeFunction(BaseDocument lastDoc, BaseDocument newDocument) {
super.mergeFunction(lastDoc, newDocument);
mergeIpByType(lastDoc, newDocument);
}
private void mergeIpByType(BaseDocument lastDoc, BaseDocument newDocument) {
putSumAttribute(lastDoc,newDocument,"CLIENT_SESSION_COUNT");
putSumAttribute(lastDoc,newDocument,"CLIENT_BYTES_SUM");
putSumAttribute(lastDoc,newDocument,"SERVER_SESSION_COUNT");
putSumAttribute(lastDoc,newDocument,"SERVER_BYTES_SUM");
}
private void updateIpByType(BaseDocument newDocument, BaseDocument historyDocument) {
putSumAttribute(newDocument, historyDocument, "CLIENT_SESSION_COUNT");
putSumAttribute(newDocument, historyDocument, "CLIENT_BYTES_SUM");
putSumAttribute(newDocument, historyDocument, "SERVER_SESSION_COUNT");
putSumAttribute(newDocument, historyDocument, "SERVER_BYTES_SUM");
}
}

View File

@@ -1,21 +0,0 @@
package cn.ac.iie.service.update.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Subscriber extends Vertex {
public Subscriber(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
}
}

View File

@@ -1,18 +0,0 @@
package cn.ac.iie.test;
import cn.ac.iie.dao.UpdateGraphData;
/**
* Entry point of the ip-learning application
* @author wlh
*/
public class IpLearningApplicationTest {
public static void main(String[] args) {
UpdateGraphData updateGraphData = new UpdateGraphData();
updateGraphData.updateArango();
}
}

View File

@@ -1,116 +0,0 @@
package cn.ac.iie.utils;
import cn.ac.iie.config.ApplicationConfig;
import com.arangodb.ArangoCollection;
import com.arangodb.ArangoCursor;
import com.arangodb.ArangoDB;
import com.arangodb.ArangoDatabase;
import com.arangodb.entity.DocumentCreateEntity;
import com.arangodb.entity.ErrorEntity;
import com.arangodb.entity.MultiDocumentEntity;
import com.arangodb.model.AqlQueryOptions;
import com.arangodb.model.DocumentCreateOptions;
import com.arangodb.util.MapBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
public class ArangoDBConnect {
private static final Logger LOG = LoggerFactory.getLogger(ArangoDBConnect.class);
private static ArangoDB arangoDB = null;
private static ArangoDBConnect conn = null;
static {
getArangoDatabase();
}
private static void getArangoDatabase(){
arangoDB = new ArangoDB.Builder()
.maxConnections(ApplicationConfig.THREAD_POOL_NUMBER)
.host(ApplicationConfig.ARANGODB_HOST, ApplicationConfig.ARANGODB_PORT)
.user(ApplicationConfig.ARANGODB_USER)
.password(ApplicationConfig.ARANGODB_PASSWORD)
.build();
}
public static synchronized ArangoDBConnect getInstance(){
if (null == conn){
conn = new ArangoDBConnect();
}
return conn;
}
private ArangoDatabase getDatabase(){
return arangoDB.db(ApplicationConfig.ARANGODB_DB_NAME);
}
public void clean(){
try {
if (arangoDB != null){
arangoDB.shutdown();
}
}catch (Exception e){
e.printStackTrace();
}
}
public <T> ArangoCursor<T> executorQuery(String query,Class<T> type){
ArangoDatabase database = getDatabase();
Map<String, Object> bindVars = new MapBuilder().get();
AqlQueryOptions options = new AqlQueryOptions().ttl(ApplicationConfig.ARANGODB_TTL);
try {
return database.query(query, bindVars, options, type);
}catch (Exception e){
e.printStackTrace();
return null;
}finally {
bindVars.clear();
}
}
@Deprecated
public <T> void insertAndUpdate(ArrayList<T> docInsert,ArrayList<T> docUpdate,String collectionName){
ArangoDatabase database = getDatabase();
try {
ArangoCollection collection = database.collection(collectionName);
if (!docInsert.isEmpty()){
collection.importDocuments(docInsert);
}
if (!docUpdate.isEmpty()){
collection.replaceDocuments(docUpdate);
}
}catch (Exception e){
System.out.println("更新失败");
e.printStackTrace();
}finally {
docInsert.clear();
docUpdate.clear();
}
}
public <T> void overwrite(ArrayList<T> docOverwrite,String collectionName){
ArangoDatabase database = getDatabase();
try {
ArangoCollection collection = database.collection(collectionName);
if (!docOverwrite.isEmpty()){
DocumentCreateOptions documentCreateOptions = new DocumentCreateOptions();
documentCreateOptions.overwrite(true);
documentCreateOptions.silent(true);
MultiDocumentEntity<DocumentCreateEntity<T>> documentCreateEntityMultiDocumentEntity = collection.insertDocuments(docOverwrite, documentCreateOptions);
Collection<ErrorEntity> errors = documentCreateEntityMultiDocumentEntity.getErrors();
for (ErrorEntity errorEntity:errors){
LOG.debug("写入arangoDB异常"+errorEntity.getErrorMessage());
}
}
}catch (Exception e){
System.out.println("更新失败:"+e.toString());
}finally {
docOverwrite.clear();
}
}
}

View File

@@ -1,103 +0,0 @@
package cn.ac.iie.utils;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import java.sql.*;
import java.util.Properties;
public class ClickhouseConnect {
private static DruidDataSource dataSource = null;
private static ClickhouseConnect dbConnect = null;
private static Properties props = new Properties();
static {
getDbConnect();
}
private static void getDbConnect() {
try {
if (dataSource == null) {
dataSource = new DruidDataSource();
props.load(ClickhouseConnect.class.getClassLoader().getResourceAsStream("clickhouse.properties"));
//set connection parameters
dataSource.setUrl("jdbc:clickhouse://" + props.getProperty("db.id"));
dataSource.setDriverClassName(props.getProperty("drivers"));
dataSource.setUsername(props.getProperty("mdb.user"));
dataSource.setPassword(props.getProperty("mdb.password"));
//configure initial, minimum and maximum pool size
dataSource.setInitialSize(Integer.parseInt(props.getProperty("initialsize")));
dataSource.setMinIdle(Integer.parseInt(props.getProperty("minidle")));
dataSource.setMaxActive(Integer.parseInt(props.getProperty("maxactive")));
//maximum wait time (ms) to obtain a connection
dataSource.setMaxWait(30000);
//interval (ms) between eviction runs that close idle connections
dataSource.setTimeBetweenEvictionRunsMillis(2000);
//keep connections from going stale
dataSource.setValidationQuery("SELECT 1");
dataSource.setTestWhileIdle(true);
dataSource.setTestOnBorrow(true);
dataSource.setKeepAlive(true);
}
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Connection pool singleton
*
* @return dbConnect
*/
public static synchronized ClickhouseConnect getInstance() {
if (null == dbConnect) {
dbConnect = new ClickhouseConnect();
}
return dbConnect;
}
/**
* Return a pooled Druid connection
*
* @return connection
* @throws SQLException on SQL errors
*/
public DruidPooledConnection getConnection() throws SQLException {
return dataSource.getConnection();
}
/**
* Close the given Statement and Connection; null references are skipped.
*
* @param pstmt Statement object
* @param connection Connection object
*/
public void clear(Statement pstmt, Connection connection) {
try {
if (pstmt != null) {
pstmt.close();
}
if (connection != null) {
connection.close();
}
} catch (SQLException e) {
e.printStackTrace();
}
}
public ResultSet executorQuery(String query,Connection connection,Statement pstm){
// NOTE: Java passes references by value, so the Connection/Statement created
// below never reach the caller through these parameters and cannot be closed
// by it; prefer creating both in the caller, as BaseClickhouseData does.
try {
connection = getConnection();
pstm = connection.createStatement();
return pstm.executeQuery(query);
}catch (Exception e){
e.printStackTrace();
return null;
}
}
}
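
A typical borrow/query/release cycle against this pool, mirroring how BaseClickhouseData uses it; the table connection_record_log in the query is only an example target:

import cn.ac.iie.utils.ClickhouseConnect;
import com.alibaba.druid.pool.DruidPooledConnection;
import java.sql.ResultSet;
import java.sql.Statement;

// Borrow a pooled connection, run one query, and always release via clear().
public class ClickhouseUsageSketch {
    public static void main(String[] args) {
        ClickhouseConnect manager = ClickhouseConnect.getInstance();
        DruidPooledConnection connection = null;
        Statement statement = null;
        try {
            connection = manager.getConnection();
            statement = connection.createStatement();
            ResultSet rs = statement.executeQuery("SELECT count(*) FROM connection_record_log");
            while (rs.next()) {
                System.out.println(rs.getLong(1));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            manager.clear(statement, connection); // returns both to the pool
        }
    }
}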

View File

@@ -1,36 +0,0 @@
package cn.ac.iie.utils;
import java.util.Properties;
public class ConfigUtils {
private static Properties propCommon = new Properties();
public static String getStringProperty(String key) {
return propCommon.getProperty(key);
}
public static Integer getIntProperty(String key) {
return Integer.parseInt(propCommon.getProperty(key));
}
public static Long getLongProperty(String key) {
return Long.parseLong(propCommon.getProperty(key));
}
public static Boolean getBooleanProperty(String key) {
return "true".equals(propCommon.getProperty(key).toLowerCase().trim());
}
static {
try {
propCommon.load(ConfigUtils.class.getClassLoader().getResourceAsStream("application.properties"));
System.out.println("application.properties加载成功");
} catch (Exception e) {
propCommon = null;
System.err.println("配置加载失败");
}
}
}

View File

@@ -1,67 +0,0 @@
package cn.ac.iie.utils;
import cn.ac.iie.config.ApplicationConfig;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.concurrent.*;
/**
* Thread pool management
* @author wlh
*/
public class ExecutorThreadPool {
private static ExecutorService pool = null ;
private static ExecutorThreadPool poolExecutor = null;
static {
getThreadPool();
}
private static void getThreadPool(){
ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
.setNameFormat("iplearning-application-pool-%d").build();
//Common Thread Pool
pool = new ThreadPoolExecutor(ApplicationConfig.THREAD_POOL_NUMBER, ApplicationConfig.THREAD_POOL_NUMBER*2,
0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<>(1024), namedThreadFactory, new ThreadPoolExecutor.AbortPolicy());
// pool = Executors.newFixedThreadPool(ApplicationConfig.THREAD_POOL_NUMBER);
}
public static ExecutorThreadPool getInstance(){
if (null == poolExecutor){
poolExecutor = new ExecutorThreadPool();
}
return poolExecutor;
}
public void executor(Runnable command){
pool.execute(command);
}
@Deprecated
public void awaitThreadTask(){
try {
while (!pool.awaitTermination(ApplicationConfig.THREAD_AWAIT_TERMINATION_TIME, TimeUnit.SECONDS)) {
System.out.println("线程池没有关闭");
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public void shutdown(){
pool.shutdown();
}
@Deprecated
public static Long getThreadNumber(){
String name = Thread.currentThread().getName();
String[] split = name.split("-");
return Long.parseLong(split[3]);
}
}
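
readHistoryData coordinates with this pool by submitting one task per worker and blocking on a shared CountDownLatch. A self-contained sketch of that pattern with three tasks (the count is illustrative):

import cn.ac.iie.utils.ExecutorThreadPool;
import java.util.concurrent.CountDownLatch;

// Submit N tasks, have each count the latch down, and wait for all of them.
public class PoolUsageSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorThreadPool pool = ExecutorThreadPool.getInstance();
        int tasks = 3; // illustrative task count
        CountDownLatch latch = new CountDownLatch(tasks);
        for (int i = 0; i < tasks; i++) {
            final int id = i;
            pool.executor(() -> {
                try {
                    System.out.println("task " + id + " running");
                } finally {
                    latch.countDown(); // always release the latch
                }
            });
        }
        latch.await(); // returns once every task has counted down
        pool.shutdown();
    }
}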

View File

@@ -1,34 +0,0 @@
#ArangoDB connection settings
arangoDB.host=192.168.40.182
arangoDB.port=8529
arangoDB.user=root
arangoDB.password=111111
#arangoDB.DB.name=ip-learning-test
arangoDB.DB.name=insert_iplearn_index
arangoDB.batch=100000
arangoDB.ttl=3600
update.arango.batch=10000
thread.pool.number=10
thread.await.termination.time=10
#How the ClickHouse read window is chosen: 0 = the past hour, 1 = the explicit range below
clickhouse.time.limit.type=1
read.clickhouse.max.time=1571245220
read.clickhouse.min.time=1571245210
#How the arangoDB read window is chosen: 0 = normal full read, 1 = the explicit range below
arango.time.limit.type=1
read.arango.max.time=1571245220
read.arango.min.time=1571245210
update.interval=3600
distinct.client.ip.num=10000
recent.count.hour=24
top.domain.file.name=topDomain.txt
arangoDB.read.limit=

View File

@@ -1,8 +0,0 @@
drivers=ru.yandex.clickhouse.ClickHouseDriver
db.id=192.168.40.193:8123/av_miner?socket_timeout=300000
#db.id=192.168.40.186:8123/tsg_galaxy_v3?socket_timeout=300000
mdb.user=default
mdb.password=111111
initialsize=1
minidle=1
maxactive=50

View File

@@ -1,25 +0,0 @@
######################### logger ##############################
log4j.logger.org.apache.http=OFF
log4j.logger.org.apache.http.wire=OFF
#Log4j
log4j.rootLogger=info,console,file
# Console log output
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
# File log output
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
# Log file output path; point it at the matching directory for each deployment
#log4j.appender.file.file=/home/ceiec/iplearning/logs/ip-learning-application.log
#log4j.appender.file.file=/home/ceiec/iplearning/testLog/ip-learning-application.log
log4j.appender.file.file=./logs/ip-learning-application.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n

View File

@@ -1,29 +0,0 @@
package cn.ac.iie;
import java.lang.reflect.Array;
import java.util.ArrayList;
public class Test {
public static void main(String args[]) throws Exception {
Tester test = new Tester();
Tester[] tests = new Tester[0];
// ArrayList<Tester> testers = new ArrayList<>();
// testers.add(test);
Class<?> c1 = tests.getClass().getComponentType();
Class<?> c2 = Tester.class;
Class<?> c3 = test.getClass();
System.out.println(c1.getName());
System.out.println(c2.getName());
System.out.println(c3.getName());
// Tester[] newTesters = (Tester[]) Array.newInstance(c1, 10);
// Tester newTester = (Tester) c1.newInstance();
// System.out.println(newTesters.length);
}
}
class Tester {
private String name;
private String mem;
}

View File

@@ -1,37 +0,0 @@
package cn.ac.iie;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.ArangoCursor;
import com.arangodb.ArangoDatabase;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.List;
public class TestArango {
public static void main(String[] args) {
ArangoDBConnect instance = ArangoDBConnect.getInstance();
/*
String query = "FOR doc IN IP filter doc.FIRST_FOUND_TIME >= 1592996080 and doc.FIRST_FOUND_TIME <= 1593112913 RETURN doc";
ArangoCursor<BaseEdgeDocument> baseEdgeDocuments = instance.executorQuery(query, BaseEdgeDocument.class);
while (baseEdgeDocuments.hasNext()){
BaseEdgeDocument next = baseEdgeDocuments.next();
System.out.println(next.toString());
}
*/
BaseEdgeDocument baseEdgeDocument = new BaseEdgeDocument();
baseEdgeDocument.setKey("192.168.50.6-www.liftopia.com");
baseEdgeDocument.setFrom("IP/192.168.50.6");
baseEdgeDocument.setTo("FQDN/www.liftopia.com");
baseEdgeDocument.addAttribute("HTTP_CNT_TOTAL",3L);
baseEdgeDocument.addAttribute("DNS_CNT_RECENT",new long[24]);
baseEdgeDocument.addAttribute("PROTOCOL_TYPE","HTTP");
ArrayList<BaseEdgeDocument> baseEdgeDocuments = new ArrayList<>();
baseEdgeDocuments.add(baseEdgeDocument);
instance.overwrite(baseEdgeDocuments,"R_LOCATE_FQDN2IP");
instance.clean();
}
}

View File

@@ -1,30 +0,0 @@
package cn.ac.iie;
import java.io.*;
import java.net.URL;
import java.util.TreeMap;
import java.util.regex.Pattern;
public class TestReadLine {
public static void main(String[] args) throws Exception {
Pattern pattern = Pattern.compile("^[^.]*$");
String encoding = "UTF-8";
// File file = new File("C:\\Users\\94976\\Desktop\\test.txt");
URL url = TestReadLine.class.getClassLoader().getResource("topDomain.txt");
assert url != null;
File file = new File(url.getFile());
InputStreamReader read = new InputStreamReader(
new FileInputStream(file), encoding);
BufferedReader bufferedReader = new BufferedReader(read);
String lineTxt;
int cnt = 0;
while ((lineTxt = bufferedReader.readLine()) != null){
if (pattern.matcher(lineTxt).matches()){
cnt += 1;
System.out.println(lineTxt);
}
}
System.out.println(cnt);
System.out.println(url.getFile());
}
}

View File

@@ -1,20 +0,0 @@
package cn.ac.iie;
import cn.ac.iie.service.update.vertex.Fqdn;
import cn.ac.iie.utils.ArangoDBConnect;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class TestReflect {
public static void main(String[] args) throws Exception {
Class<Fqdn> fqdnClass = Fqdn.class;
// for(Constructor constructor : fqdnClass.getConstructors()){
// System.out.println(constructor);
// }
Constructor<Fqdn> constructor = fqdnClass.getConstructor(HashMap.class, ArangoDBConnect.class, String.class, ConcurrentHashMap.class, CountDownLatch.class);
System.out.println(constructor);
}
}

View File

@@ -1,10 +0,0 @@
package cn.ac.iie;
import java.util.regex.Pattern;
public class TestRegex {
public static void main(String[] args) {
Pattern pattern = Pattern.compile("^[^.]*$");
System.out.println(pattern.matcher("com.dz").matches());
}
}

View File

@@ -1,162 +0,0 @@
<component name="libraryTable">
<library name="scala-sdk-2.11.7" type="Scala">
<properties>
<language-level>Scala_2_11</language-level>
<compiler-classpath>
<root url="file://D:/soft/scala/scala-2.11.7/lib/scala-compiler.jar" />
<root url="file://D:/soft/scala/scala-2.11.7/lib/scala-library.jar" />
<root url="file://D:/soft/scala/scala-2.11.7/lib/scala-reflect.jar" />
</compiler-classpath>
</properties>
<CLASSES>
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-actors-2.11.0.jar!/" />
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-actors-migration_2.11-1.1.0.jar!/" />
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-library.jar!/" />
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-parser-combinators_2.11-1.0.4.jar!/" />
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-reflect.jar!/" />
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-swing_2.11-1.0.2.jar!/" />
<root url="jar://D:/soft/scala/scala-2.11.7/lib/scala-xml_2.11-1.0.4.jar!/" />
</CLASSES>
<JAVADOC>
<root url="http://www.scala-lang.org/api/2.11.7/" />
</JAVADOC>
<SOURCES>
<root url="file://D:/tar/scala-2.11.7" />
<root url="file://D:/tar/scala-2.11.7/src/actors" />
<root url="file://D:/tar/scala-2.11.7/src/forkjoin" />
<root url="file://D:/tar/scala-2.11.7/src/library" />
<root url="file://D:/tar/scala-2.11.7/src/partest-extras" />
<root url="file://D:/tar/scala-2.11.7/src/partest-javaagent" />
<root url="file://D:/tar/scala-2.11.7/src/repl" />
<root url="file://D:/tar/scala-2.11.7/test/disabled/pos/t1737" />
<root url="file://D:/tar/scala-2.11.7/test/disabled/presentation/akka/src" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/deprecation" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/duration-java" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t1143-2" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t1342" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t1464" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t2163" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t2470" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t2570" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t2585" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t3003" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t3415" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/t7253" />
<root url="file://D:/tar/scala-2.11.7/test/files/jvm/varargs" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/abstract-class-error" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/primitive-sigs-1" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/raw-types-stubs" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t0673" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t2442" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t4851" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t6013" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t6289" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t750" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t750b" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t8244" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t8244e" />
<root url="file://D:/tar/scala-2.11.7/test/files/neg/t8376" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/ilya2" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/super" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t0695" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1101" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1102" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1150" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1152" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1176" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1186" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1196" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1197" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1230" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1231" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1232" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1235" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1254" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1409" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1642" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1711" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1745" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1751" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1782" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t1836" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t2377" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t2409" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t2433" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t2464" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t294" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t2940" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t2956" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3120" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3249" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3349" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3404" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3429" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3486" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3521" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3567" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3642" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3938" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t3943" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t4603" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t4744" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t5165" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t5703" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t5957" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t6169" />
<root url="file://D:/tar/scala-2.11.7/test/files/pos/t942" />
<root url="file://D:/tar/scala-2.11.7/test/files/presentation/ide-bug-1000469/src" />
<root url="file://D:/tar/scala-2.11.7/test/files/presentation/ide-bug-1000531/src" />
<root url="file://D:/tar/scala-2.11.7/test/files/res/t6613" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/bcodeInlinerMixed" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/reflection-fancy-java-classes" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/reflection-java-annotations" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/reflection-java-crtp" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3452a" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3452b" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3452b-bcode" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3452d" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3452e" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3452g" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t3897" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t4238" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t4317" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t4729" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t4788" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t4788-separate-compilation" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t4891" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t6168" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t6168b" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t6240a" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t6240b" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t6548" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7008" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7246" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7246b" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7359" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7374" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7439" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7455" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7741a" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t7741b" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t8442" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t8601e" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t9268" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t9298" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t9298b" />
<root url="file://D:/tar/scala-2.11.7/test/files/run/t9359" />
<root url="file://D:/tar/scala-2.11.7/test/files/t8449" />
<root url="file://D:/tar/scala-2.11.7/test/flaky/pos/t2868" />
<root url="file://D:/tar/scala-2.11.7/test/instrumented/library" />
<root url="file://D:/tar/scala-2.11.7/test/junit" />
<root url="file://D:/tar/scala-2.11.7/test/pending/jvm/t2705" />
<root url="file://D:/tar/scala-2.11.7/test/pending/pos/misc" />
<root url="file://D:/tar/scala-2.11.7/test/pending/pos/t3943" />
<root url="file://D:/tar/scala-2.11.7/test/pending/pos/t7778" />
<root url="file://D:/tar/scala-2.11.7/test/pending/run/t3899" />
<root url="file://D:/tar/scala-2.11.7/test/pending/run/t4713" />
<root url="file://D:/tar/scala-2.11.7/test/support/annotations" />
</SOURCES>
</library>
</component>

View File

@@ -66,12 +66,9 @@ public class ReadHistoryArangoData<T extends BaseDocument> extends Thread {
String key = doc.getKey();
switch (table) {
case "R_LOCATE_FQDN2IP":
updateProtocolDocument(doc);
// updateProtocolDocument(doc);
deleteDistinctClientIpByTime(doc);
break;
case "R_VISIT_IP2FQDN":
updateProtocolDocument(doc);
break;
default:
}
int hashCode = Math.abs(key.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;

View File

@@ -0,0 +1,25 @@
package cn.ac.iie.utils;
public class TopDomainUtils {
/**
* Generic helper: given a URL, return its domain. The returned domain never
* contains a port; if a ':' remains, the address must be IPv6.
* @param oriUrl original URL
* @return the domain extracted from the URL
*/
public static String getDomainFromUrl(String oriUrl) {
String url = oriUrl.split("[?]")[0];
url = url.replaceAll("https://", "").replaceAll("http://", "");
String domain;
if (url.split("/")[0].split(":").length <= 2) {
domain = url
.split("/")[0]
.split(":")[0];
} else {
domain = url.split("/")[0];
}
return domain;
}
}
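
A few example calls to getDomainFromUrl (all URLs invented):

import cn.ac.iie.utils.TopDomainUtils;

// Ports are stripped from hostnames and IPv4; IPv6 literals pass through whole.
public class TopDomainSketch {
    public static void main(String[] args) {
        System.out.println(TopDomainUtils.getDomainFromUrl("https://www.example.com:8080/a/b?q=1")); // www.example.com
        System.out.println(TopDomainUtils.getDomainFromUrl("http://10.1.2.3/path"));                 // 10.1.2.3
        System.out.println(TopDomainUtils.getDomainFromUrl("2001:db8::1/index.html"));               // 2001:db8::1
    }
}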

View File

@@ -7,13 +7,14 @@ repartitionNumber=36
spark.serializer=org.apache.spark.serializer.KryoSerializer
master=local[*]
#Spark ClickHouse read settings
spark.read.clickhouse.url=jdbc:clickhouse://192.168.40.186:8123/tsg_galaxy_v3
#spark.read.clickhouse.url=jdbc:clickhouse://192.168.40.186:8123/tsg_galaxy_v3
spark.read.clickhouse.url=jdbc:clickhouse://192.168.40.193:8123/tsg_galaxy_zx
spark.read.clickhouse.driver=ru.yandex.clickhouse.ClickHouseDriver
spark.read.clickhouse.user=default
spark.read.clickhouse.password=111111
spark.read.clickhouse.numPartitions=144
spark.read.clickhouse.fetchsize=10000
spark.read.clickhouse.partitionColumn=common_recv_time
spark.read.clickhouse.partitionColumn=recv_time
clickhouse.socket.timeout=300000
#arangoDB配置
arangoDB.host=192.168.40.182
@@ -27,9 +28,9 @@ arangoDB.ttl=3600
thread.pool.number=5
#How the ClickHouse read window is chosen: 0 = the past hour, 1 = the explicit range below
clickhouse.time.limit.type=0
read.clickhouse.max.time=1571245220
read.clickhouse.min.time=1571245210
clickhouse.time.limit.type=1
read.clickhouse.max.time=1571241640
read.clickhouse.min.time=1571241600
#How the arangoDB read window is chosen: 0 = normal full read, 1 = the explicit range below
arango.time.limit.type=0

View File

@@ -2,6 +2,7 @@ package cn.ac.iie.dao
import cn.ac.iie.config.ApplicationConfig
import cn.ac.iie.utils.SparkSessionUtil.spark
import cn.ac.iie.utils.TopDomainUtils
import org.apache.spark.sql.DataFrame
import org.slf4j.LoggerFactory
@@ -31,13 +32,13 @@ object BaseClickhouseData {
}
def loadConnectionDataFromCk(): Unit ={
val where = "common_recv_time >= " + timeLimit._2 + " AND common_recv_time < " + timeLimit._1
val where = "recv_time >= " + timeLimit._2 + " AND recv_time < " + timeLimit._1
val sql =
s"""
|(SELECT
| ssl_sni,http_host,common_client_ip,common_server_ip,common_recv_time,common_c2s_byte_num,common_s2c_byte_num,common_schema_type
| s1_domain,s1_referer,s1_s_ip,s1_d_ip,recv_time,media_len
|FROM
| connection_record_log
| media_expire_patch
|WHERE $where) as dbtable
""".stripMargin
@@ -48,8 +49,8 @@ object BaseClickhouseData {
private def loadRadiusDataFromCk(): Unit ={
val where =
s"""
| common_recv_time >= ${timeLimit._2}
| AND common_recv_time < ${timeLimit._1}
| common_start_time >= ${timeLimit._2}
| AND common_start_time < ${timeLimit._1}
| AND common_subscriber_id != ''
| AND radius_framed_ip != ''
| AND radius_packet_type = 4
@@ -58,7 +59,7 @@ object BaseClickhouseData {
val sql =
s"""
|(SELECT
| common_subscriber_id,radius_framed_ip,common_recv_time
| common_subscriber_id,radius_framed_ip,common_start_time
|FROM
| tsg_galaxy_v3.radius_record_log
|WHERE
@@ -68,28 +69,31 @@ object BaseClickhouseData {
initClickhouseData(sql)
}
def getDomain(url:String): String ={
TopDomainUtils.getDomainFromUrl(url)
}
def getVertexFqdnDf: DataFrame ={
loadConnectionDataFromCk()
spark.udf.register("getDomain",TopDomainUtils.getDomainFromUrl _)
val sql =
"""
|SELECT
| FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME
| FQDN,MAX(LAST_FOUND_TIME) AS LAST_FOUND_TIME,MIN(FIRST_FOUND_TIME) AS FIRST_FOUND_TIME
|FROM
| (
| (SELECT
| ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
| s1_domain AS FQDN,MAX(recv_time) AS LAST_FOUND_TIME,MIN(recv_time) AS FIRST_FOUND_TIME
| FROM
| global_temp.dbtable
| WHERE
| common_schema_type = 'SSL' GROUP BY ssl_sni
| GROUP BY s1_domain
| )
| UNION ALL
| (SELECT
| http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME
| getDomain(s1_referer) AS FQDN,MAX(recv_time) AS LAST_FOUND_TIME,MIN(recv_time) AS FIRST_FOUND_TIME
| FROM
| global_temp.dbtable
| WHERE
| common_schema_type = 'HTTP' GROUP BY http_host
| GROUP BY getDomain(s1_referer)
| )
| )
|GROUP BY
@@ -103,6 +107,11 @@ object BaseClickhouseData {
vertexFqdnDf
}
def main(args: Array[String]): Unit = {
val df = getRelationFqdnLocateIpDf
df.show(10)
}
def getVertexIpDf: DataFrame ={
loadConnectionDataFromCk()
val sql =
@@ -113,11 +122,11 @@ object BaseClickhouseData {
| (
| (
| SELECT
| common_client_ip AS IP,
| MIN(common_recv_time) AS FIRST_FOUND_TIME,
| MAX(common_recv_time) AS LAST_FOUND_TIME,
| s1_s_ip AS IP,
| MIN(recv_time) AS FIRST_FOUND_TIME,
| MAX(recv_time) AS LAST_FOUND_TIME,
| count(*) as SESSION_COUNT,
| sum(common_c2s_byte_num) as BYTES_SUM,
| sum(media_len) as BYTES_SUM,
| 'client' as ip_type
| FROM
| global_temp.dbtable
@@ -127,11 +136,11 @@ object BaseClickhouseData {
| UNION ALL
| (
| SELECT
| common_server_ip AS IP,
| MIN(common_recv_time) AS FIRST_FOUND_TIME,
| MAX(common_recv_time) AS LAST_FOUND_TIME,
| s1_d_ip AS IP,
| MIN(recv_time) AS FIRST_FOUND_TIME,
| MAX(recv_time) AS LAST_FOUND_TIME,
| count(*) as SESSION_COUNT,
| sum(common_s2c_byte_num) as BYTES_SUM,
| sum(media_len) as BYTES_SUM,
| 'server' as ip_type
| FROM
| global_temp.dbtable
@@ -148,42 +157,23 @@ object BaseClickhouseData {
def getRelationFqdnLocateIpDf: DataFrame ={
loadConnectionDataFromCk()
val sslSql =
"""
|SELECT
| ssl_sni AS FQDN,
| common_server_ip,
| MAX(common_recv_time) AS LAST_FOUND_TIME,
| MIN(common_recv_time) AS FIRST_FOUND_TIME,
| COUNT(*) AS COUNT_TOTAL,
| collect_set(common_client_ip) AS DIST_CIP_RECENT,
| 'TLS' AS schema_type
|FROM
| global_temp.dbtable
|WHERE
| common_schema_type = 'SSL'
|GROUP BY
| ssl_sni,common_server_ip
""".stripMargin
val httpSql =
val sql =
"""
|SELECT
| http_host AS FQDN,
| common_server_ip,
| MAX(common_recv_time) AS LAST_FOUND_TIME,
| MIN(common_recv_time) AS FIRST_FOUND_TIME,
| s1_domain AS FQDN,
| s1_d_ip AS common_server_ip,
| MAX(recv_time) AS LAST_FOUND_TIME,
| MIN(recv_time) AS FIRST_FOUND_TIME,
| COUNT(*) AS COUNT_TOTAL,
| collect_set(common_client_ip) AS DIST_CIP_RECENT,
| 'HTTP' AS schema_type
| collect_set(s1_s_ip) AS DIST_CIP_RECENT
|FROM
| global_temp.dbtable
|WHERE
| common_schema_type = 'HTTP'
| s1_domain != ''
|GROUP BY
| http_host,common_server_ip
| s1_domain,s1_d_ip
""".stripMargin
val sql = s"SELECT * FROM (($sslSql) UNION ALL ($httpSql)) WHERE FQDN != ''"
LOG.warn(sql)
val relationFqdnLocateIpDf = spark.sql(sql)
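
For reference, the single aggregation that replaced the separate SSL and HTTP queries can also be written with the DataFrame API. A sketch, assuming global_temp.dbtable exposes s1_domain, s1_d_ip, s1_s_ip and recv_time as above:

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

def relationFqdnLocateIp(spark: SparkSession): DataFrame =
  spark.table("global_temp.dbtable")
    .filter(col("s1_domain") =!= "")
    .groupBy(col("s1_domain").as("FQDN"), col("s1_d_ip").as("common_server_ip"))
    .agg(
      max("recv_time").as("LAST_FOUND_TIME"),
      min("recv_time").as("FIRST_FOUND_TIME"),
      count(lit(1)).as("COUNT_TOTAL"),
      collect_set("s1_s_ip").as("DIST_CIP_RECENT")
    )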


@@ -35,16 +35,9 @@ object MergeDataFrame {
}
def mergeRelationFqdnLocateIp(): RDD[Row] ={
val frame = BaseClickhouseData.getRelationFqdnLocateIpDf.filter(row => isDomain(row.getAs[String]("FQDN")))
.groupBy("FQDN", "common_server_ip")
.agg(
min("FIRST_FOUND_TIME").alias("FIRST_FOUND_TIME"),
max("LAST_FOUND_TIME").alias("LAST_FOUND_TIME"),
collect_list("COUNT_TOTAL").alias("COUNT_TOTAL_LIST"),
collect_list("schema_type").alias("schema_type_list"),
collect_set("DIST_CIP_RECENT").alias("DIST_CIP_RECENT")
)
frame.rdd.map(row => {
BaseClickhouseData.getRelationFqdnLocateIpDf
.filter(row => isDomain(row.getAs[String]("FQDN")))
.rdd.map(row => {
val fqdn = row.getAs[String]("FQDN")
val serverIp = row.getAs[String]("common_server_ip")
val key = fqdn.concat("-"+serverIp)
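
The Spark-side groupBy/agg pass could be dropped because the ClickHouse query now groups by (s1_domain, s1_d_ip) itself, so each incoming Row is already a unique FQDN/server-IP pair. A sketch of the surviving mapping step, with isDomain assumed to be the project's own validator:

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row}

def mergeRelationFqdnLocateIp(df: DataFrame, isDomain: String => Boolean): RDD[(String, Row)] =
  df.filter(row => isDomain(row.getAs[String]("FQDN")))
    .rdd
    .map { row =>
      val fqdn = row.getAs[String]("FQDN")
      val serverIp = row.getAs[String]("common_server_ip")
      (fqdn + "-" + serverIp, row) // key mirrors the ArangoDB edge key format
    }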


@@ -93,8 +93,8 @@ object UpdateDocHandler {
doc.addAttribute("PROTOCOL_TYPE",protocolTypeBuilder.toString().replaceFirst(",",""))
}
def mergeDistinctIp(distCipRecent:ofRef[ofRef[String]]): Array[String] ={
distCipRecent.flatten.distinct.take(ApplicationConfig.DISTINCT_CLIENT_IP_NUM).toArray
def mergeDistinctIp(distCipRecent:ofRef[String]): Array[String] ={
distCipRecent.distinct.take(ApplicationConfig.DISTINCT_CLIENT_IP_NUM).toArray
}
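
collect_set over a scalar column materialises as a flat WrappedArray.ofRef[String], which is why the nested ofRef[ofRef[String]] signature and its .flatten step disappear. A sketch of pulling that array out of a Row and capping it; the column name follows DIST_CIP_RECENT above, and the cap stands in for the configured limit:

import org.apache.spark.sql.Row
import scala.collection.mutable.WrappedArray.ofRef

def distinctIpsFrom(row: Row, cap: Int): Array[String] = {
  val distCipRecent = row.getAs[ofRef[String]]("DIST_CIP_RECENT") // flat array now
  distCipRecent.distinct.take(cap).toArray
}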
def putDistinctIp(doc:BaseEdgeDocument,newDistinctIp:Array[String]): Unit ={


@@ -131,18 +131,16 @@ object UpdateDocument {
val serverIp = row.getAs[String]("common_server_ip")
val firstFoundTime = row.getAs[Long]("FIRST_FOUND_TIME")
val lastFoundTime = row.getAs[Long]("LAST_FOUND_TIME")
val countTotalList = row.getAs[ofRef[AnyRef]]("COUNT_TOTAL_LIST")
val schemaTypeList = row.getAs[ofRef[AnyRef]]("schema_type_list")
val distCipRecent = row.getAs[ofRef[ofRef[String]]]("DIST_CIP_RECENT")
val countTotal = row.getAs[Long]("COUNT_TOTAL")
val distCipRecent = row.getAs[ofRef[String]]("DIST_CIP_RECENT")
val sepAttritubeMap: Map[String, Long] = separateAttributeByProtocol(schemaTypeList, countTotalList)
val distinctIp: Array[String] = mergeDistinctIp(distCipRecent)
val key = fqdn.concat("-" + serverIp)
var document = dictionaryMap.getOrDefault(key, null)
if (document != null) {
updateMaxAttribute(document, lastFoundTime, "LAST_FOUND_TIME")
updateProtocolAttritube(document, sepAttritubeMap)
updateSumAttribute(document,countTotal,"CNT_TOTAL")
updateDistinctIp(document, distinctIp)
} else {
document = new BaseEdgeDocument()
@@ -151,7 +149,7 @@ object UpdateDocument {
document.setTo("IP/" + serverIp)
document.addAttribute("FIRST_FOUND_TIME", firstFoundTime)
document.addAttribute("LAST_FOUND_TIME", lastFoundTime)
putProtocolAttritube(document, sepAttritubeMap)
document.addAttribute("CNT_TOTAL",countTotal)
putDistinctIp(document, distinctIp)
}
document
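
The update path above is an in-memory upsert keyed by FQDN-serverIp: on a cache hit it pushes LAST_FOUND_TIME forward and accumulates CNT_TOTAL, otherwise it creates the FQDN-to-IP edge. A condensed sketch of the same pattern against the ArangoDB Java driver's BaseEdgeDocument, with the update helpers inlined and the surrounding cache assumed:

import com.arangodb.entity.BaseEdgeDocument
import java.util.{HashMap => JHashMap}

def upsertEdge(dictionaryMap: JHashMap[String, BaseEdgeDocument],
               fqdn: String, serverIp: String,
               firstFoundTime: Long, lastFoundTime: Long,
               countTotal: Long): BaseEdgeDocument = {
  val key = fqdn + "-" + serverIp
  var document = dictionaryMap.getOrDefault(key, null)
  if (document != null) {
    // existing edge: push LAST_FOUND_TIME forward, accumulate the session counter
    if (lastFoundTime > document.getAttribute("LAST_FOUND_TIME").asInstanceOf[Long])
      document.updateAttribute("LAST_FOUND_TIME", lastFoundTime)
    val prev = document.getAttribute("CNT_TOTAL").asInstanceOf[Long]
    document.updateAttribute("CNT_TOTAL", prev + countTotal)
  } else {
    // new edge: wire the FQDN vertex to the IP vertex
    document = new BaseEdgeDocument()
    document.setKey(key)
    document.setFrom("FQDN/" + fqdn)
    document.setTo("IP/" + serverIp)
    document.addAttribute("FIRST_FOUND_TIME", firstFoundTime)
    document.addAttribute("LAST_FOUND_TIME", lastFoundTime)
    document.addAttribute("CNT_TOTAL", countTotal)
  }
  document
}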