抽象document父类

This commit is contained in:
wanglihui
2020-07-15 19:33:59 +08:00
parent e763270d4e
commit a301d6b402
29 changed files with 1074 additions and 664 deletions

View File

@@ -0,0 +1,58 @@
package cn.ac.iie.service;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.dao.BaseArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
/**
 * Runnable worker that merges freshly aggregated FQDN-&gt;IP "address" edges with
 * the in-memory history snapshot and writes them to the
 * E_ADDRESS_V_FQDN_TO_V_IP collection in batches of
 * {@code ApplicationConfig.UPDATE_ARANGO_BATCH}.
 */
public class UpdateEFqdnAddressIp implements Runnable {

    /** Freshly aggregated edge documents, keyed by edge key. */
    private final HashMap<String, BaseEdgeDocument> documentHashMap;
    private static final ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();

    public UpdateEFqdnAddressIp(HashMap<String, BaseEdgeDocument> documentHashMap) {
        this.documentHashMap = documentHashMap;
    }

    @Override
    public void run() {
        ArrayList<BaseDocument> docInsert = new ArrayList<>();
        ArrayList<BaseDocument> docUpdate = new ArrayList<>();
        int i = 0;
        try {
            for (String key : documentHashMap.keySet()) {
                BaseEdgeDocument newEdgeDocument = documentHashMap.get(key);
                if (newEdgeDocument != null) {
                    i += 1;
                    BaseEdgeDocument edgeDocument = BaseArangoData.e_Fqdn_Address_Ip_Map.get(key);
                    if (edgeDocument != null) {
                        // Known edge: keep the newest LAST_FOUND_TIME and accumulate COUNT_TOTAL.
                        Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME");
                        long countTotal = Long.parseLong(newEdgeDocument.getAttribute("COUNT_TOTAL").toString());
                        long updateCountTotal = Long.parseLong(edgeDocument.getAttribute("COUNT_TOTAL").toString());
                        edgeDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
                        edgeDocument.addAttribute("COUNT_TOTAL", countTotal + updateCountTotal);
                        // FIX: merged history documents belong in the update list (was docInsert),
                        // matching the convention used by UpdateVFqdn / UpdateVIP.
                        docUpdate.add(edgeDocument);
                    } else {
                        // FIX: previously unseen documents belong in the insert list (was docUpdate).
                        docInsert.add(newEdgeDocument);
                    }
                }
                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
                    arangoManger.insertAndUpdate(docInsert, docUpdate, "E_ADDRESS_V_FQDN_TO_V_IP");
                    System.out.println("更新" + i);
                    // FIX: clear the batch buffers so already-flushed documents are not resent
                    // with every subsequent batch. (NOTE(review): assumes insertAndUpdate does
                    // not clear its arguments itself — confirm against ArangoDBConnect.)
                    docInsert.clear();
                    docUpdate.clear();
                    i = 0;
                }
            }
            if (i != 0) {
                // Flush the final partial batch.
                arangoManger.insertAndUpdate(docInsert, docUpdate, "E_ADDRESS_V_FQDN_TO_V_IP");
                System.out.println("更新" + i);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -0,0 +1,58 @@
package cn.ac.iie.service;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.dao.BaseArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
/**
 * Runnable worker that merges freshly aggregated IP-&gt;FQDN "visit" edges with
 * the in-memory history snapshot and writes them to the
 * E_VISIT_V_IP_TO_V_FQDN collection in batches of
 * {@code ApplicationConfig.UPDATE_ARANGO_BATCH}.
 */
public class UpdateEIpVisitFqdn implements Runnable {

    /** Freshly aggregated edge documents, keyed by edge key. */
    private final HashMap<String, BaseEdgeDocument> documentHashMap;
    private static final ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();

    public UpdateEIpVisitFqdn(HashMap<String, BaseEdgeDocument> documentHashMap) {
        this.documentHashMap = documentHashMap;
    }

    @Override
    public void run() {
        ArrayList<BaseDocument> docInsert = new ArrayList<>();
        ArrayList<BaseDocument> docUpdate = new ArrayList<>();
        int i = 0;
        try {
            for (String key : documentHashMap.keySet()) {
                BaseEdgeDocument newEdgeDocument = documentHashMap.get(key);
                if (newEdgeDocument != null) {
                    i += 1;
                    BaseEdgeDocument edgeDocument = BaseArangoData.e_Ip_Visit_Fqdn_Map.get(key);
                    if (edgeDocument != null) {
                        // Known edge: keep the newest LAST_FOUND_TIME and accumulate COUNT_TOTAL.
                        Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME");
                        long countTotal = Long.parseLong(newEdgeDocument.getAttribute("COUNT_TOTAL").toString());
                        long updateCountTotal = Long.parseLong(edgeDocument.getAttribute("COUNT_TOTAL").toString());
                        edgeDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
                        edgeDocument.addAttribute("COUNT_TOTAL", countTotal + updateCountTotal);
                        // FIX: merged history documents belong in the update list (was docInsert),
                        // matching the convention used by UpdateVFqdn / UpdateVIP.
                        docUpdate.add(edgeDocument);
                    } else {
                        // FIX: previously unseen documents belong in the insert list (was docUpdate).
                        docInsert.add(newEdgeDocument);
                    }
                }
                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
                    arangoManger.insertAndUpdate(docInsert, docUpdate, "E_VISIT_V_IP_TO_V_FQDN");
                    System.out.println("更新" + i);
                    // FIX: clear the batch buffers so already-flushed documents are not resent
                    // with every subsequent batch.
                    docInsert.clear();
                    docUpdate.clear();
                    i = 0;
                }
            }
            if (i != 0) {
                // Flush the final partial batch.
                arangoManger.insertAndUpdate(docInsert, docUpdate, "E_VISIT_V_IP_TO_V_FQDN");
                System.out.println("更新" + i);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -0,0 +1,94 @@
package cn.ac.iie.service;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.dao.BaseArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Runnable worker that merges the partial V_FQDN vertex documents produced for
 * each FQDN in this batch, combines them with the in-memory history snapshot,
 * and writes the result to the V_FQDN collection in batches of
 * {@code ApplicationConfig.UPDATE_ARANGO_BATCH}.
 */
public class UpdateVFqdn implements Runnable {

    /** Fresh partial documents grouped by vertex key (one FQDN may yield several). */
    private final HashMap<String, ArrayList<BaseDocument>> documentHashMap;
    private static final ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();

    public UpdateVFqdn(HashMap<String, ArrayList<BaseDocument>> documentHashMap) {
        this.documentHashMap = documentHashMap;
    }

    @Override
    public void run() {
        ArrayList<BaseDocument> docInsert = new ArrayList<>();
        ArrayList<BaseDocument> docUpdate = new ArrayList<>();
        int i = 0;
        try {
            for (String key : documentHashMap.keySet()) {
                BaseDocument newDocument = mergeVFqdn(documentHashMap.get(key));
                if (newDocument != null) {
                    i += 1;
                    BaseDocument document = BaseArangoData.v_Fqdn_Map.get(key);
                    if (document != null) {
                        // Known vertex: refresh LAST_FOUND_TIME and accumulate the counter.
                        Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
                        long fqdnCountTotal = Long.parseLong(newDocument.getAttribute("FQDN_COUNT_TOTAL").toString());
                        long countTotal = Long.parseLong(document.getAttribute("FQDN_COUNT_TOTAL").toString());
                        document.addAttribute("LAST_FOUND_TIME", lastFoundTime);
                        document.addAttribute("FQDN_COUNT_TOTAL", countTotal + fqdnCountTotal);
                        docUpdate.add(document);
                    } else {
                        docInsert.add(newDocument);
                    }
                }
                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
                    arangoManger.insertAndUpdate(docInsert, docUpdate, "V_FQDN");
                    System.out.println("更新" + i);
                    // FIX: clear the batch buffers so already-flushed documents are not
                    // resent with every subsequent batch.
                    docInsert.clear();
                    docUpdate.clear();
                    i = 0;
                }
            }
            if (i != 0) {
                // Flush the final partial batch.
                arangoManger.insertAndUpdate(docInsert, docUpdate, "V_FQDN");
                System.out.println("更新" + i);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Collapses the partial documents for one FQDN into a single document:
     * earliest FIRST_FOUND_TIME, latest LAST_FOUND_TIME, summed FQDN_COUNT_TOTAL.
     *
     * @param documentArrayList partial documents for one key; may be null/empty
     * @return the merged document, or {@code null} when the input is null/empty.
     *         NOTE: with 2+ elements the first document is mutated in place and
     *         returned as the merge target.
     */
    private BaseDocument mergeVFqdn(ArrayList<BaseDocument> documentArrayList) {
        if (documentArrayList == null || documentArrayList.isEmpty()) {
            return null;
        } else if (documentArrayList.size() == 1) {
            return documentArrayList.get(0);
        } else {
            BaseDocument document = new BaseDocument();
            Map<String, Object> properties = document.getProperties();
            for (BaseDocument doc : documentArrayList) {
                if (properties.isEmpty()) {
                    // First document with non-empty properties becomes the merge target.
                    document = doc;
                    properties = doc.getProperties();
                } else {
                    long firstFoundTime = Long.parseLong(properties.getOrDefault("FIRST_FOUND_TIME", 0L).toString());
                    long docFirstFoundTime = Long.parseLong(doc.getAttribute("FIRST_FOUND_TIME").toString());
                    properties.put("FIRST_FOUND_TIME", Math.min(firstFoundTime, docFirstFoundTime));
                    long lastFoundTime = Long.parseLong(properties.getOrDefault("LAST_FOUND_TIME", 0L).toString());
                    long docLastFoundTime = Long.parseLong(doc.getAttribute("LAST_FOUND_TIME").toString());
                    properties.put("LAST_FOUND_TIME", Math.max(lastFoundTime, docLastFoundTime));
                    long fqdnCountTotal = Long.parseLong(properties.getOrDefault("FQDN_COUNT_TOTAL", 0L).toString());
                    long docFqdnCountTotal = Long.parseLong(doc.getAttribute("FQDN_COUNT_TOTAL").toString());
                    properties.put("FQDN_COUNT_TOTAL", fqdnCountTotal + docFqdnCountTotal);
                }
            }
            document.setProperties(properties);
            return document;
        }
    }
}

View File

@@ -0,0 +1,60 @@
package cn.ac.iie.service;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.dao.BaseArangoData;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
/**
 * Runnable worker that merges freshly aggregated V_IP vertex documents with
 * the in-memory history snapshot and writes them to the V_IP collection in
 * batches of {@code ApplicationConfig.UPDATE_ARANGO_BATCH}.
 */
public class UpdateVIP implements Runnable {

    /** Freshly aggregated vertex documents, keyed by IP. */
    private final HashMap<String, BaseDocument> documentHashMap;
    private static final ArangoDBConnect arangoManger = ArangoDBConnect.getInstance();

    public UpdateVIP(HashMap<String, BaseDocument> documentHashMap) {
        this.documentHashMap = documentHashMap;
    }

    @Override
    public void run() {
        ArrayList<BaseDocument> docInsert = new ArrayList<>();
        ArrayList<BaseDocument> docUpdate = new ArrayList<>();
        int i = 0;
        try {
            for (String key : documentHashMap.keySet()) {
                BaseDocument newDocument = documentHashMap.get(key);
                if (newDocument != null) {
                    i += 1;
                    BaseDocument document = BaseArangoData.v_Ip_Map.get(key);
                    if (document != null) {
                        // Known vertex: refresh LAST_FOUND_TIME and accumulate the counter.
                        Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
                        long ipCountTotal = Long.parseLong(newDocument.getAttribute("IP_COUNT_TOTAL").toString());
                        long countTotal = Long.parseLong(document.getAttribute("IP_COUNT_TOTAL").toString());
                        document.addAttribute("LAST_FOUND_TIME", lastFoundTime);
                        document.addAttribute("IP_COUNT_TOTAL", countTotal + ipCountTotal);
                        docUpdate.add(document);
                    } else {
                        docInsert.add(newDocument);
                    }
                }
                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
                    arangoManger.insertAndUpdate(docInsert, docUpdate, "V_IP");
                    System.out.println("更新" + i);
                    // FIX: clear the batch buffers so already-flushed documents are not
                    // resent with every subsequent batch.
                    docInsert.clear();
                    docUpdate.clear();
                    i = 0;
                }
            }
            if (i != 0) {
                // Flush the final partial batch.
                arangoManger.insertAndUpdate(docInsert, docUpdate, "V_IP");
                System.out.println("更新" + i);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

View File

@@ -0,0 +1,251 @@
package cn.ac.iie.service.read;
import cn.ac.iie.config.ApplicationConfig;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.regex.Pattern;
/**
* @author wlh
*/
/**
 * Builds the ClickHouse aggregation queries for the FQDN / IP / subscriber
 * graph and converts their {@link ResultSet} rows into ArangoDB vertex and
 * edge documents.
 *
 * @author wlh
 */
public class ReadClickhouseData {

    /** Current wall-clock hour in epoch seconds, aligned down to the hour. */
    public static long currentHour = System.currentTimeMillis() / (60 * 60 * 1000) * 60 * 60;

    /** Matches strings made only of decimal digits; used to detect IPv4 octets. */
    private static final Pattern pattern = Pattern.compile("^[\\d]*$");

    private static final Logger LOG = LoggerFactory.getLogger(ReadClickhouseData.class);

    /**
     * Builds a V_FQDN vertex document from one row of {@link #getVertexFqdnSql()}.
     *
     * @return the document, or {@code null} when the FQDN column is an IPv4 literal
     */
    public static BaseDocument getVertexFqdnDocument(ResultSet resultSet) throws SQLException {
        String fqdnName = resultSet.getString("FQDN");
        BaseDocument newDoc = null;
        if (isDomain(fqdnName)) {
            long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
            long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
            newDoc = new BaseDocument();
            newDoc.setKey(fqdnName);
            newDoc.addAttribute("FQDN_NAME", fqdnName);
            newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
            newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
        }
        return newDoc;
    }

    /**
     * Builds a V_IP vertex document from one row of {@link #getVertexIpSql()}.
     * The row's {@code ip_type} decides whether the session/byte counters are
     * recorded on the client or the server side; the other side is zeroed.
     */
    public static BaseDocument getVertexIpDocument(ResultSet resultSet) throws SQLException {
        BaseDocument newDoc = new BaseDocument();
        String ip = resultSet.getString("IP");
        long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
        long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
        long sessionCount = resultSet.getLong("SESSION_COUNT");
        long bytesSum = resultSet.getLong("BYTES_SUM");
        String ipType = resultSet.getString("ip_type");
        newDoc.setKey(ip);
        newDoc.addAttribute("IP", ip);
        newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
        newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
        switch (ipType) {
            case "client":
                newDoc.addAttribute("CLIENT_SESSION_COUNT", sessionCount);
                newDoc.addAttribute("CLIENT_BYTES_SUM", bytesSum);
                newDoc.addAttribute("SERVER_SESSION_COUNT", 0L);
                newDoc.addAttribute("SERVER_BYTES_SUM", 0L);
                break;
            case "server":
                newDoc.addAttribute("SERVER_SESSION_COUNT", sessionCount);
                newDoc.addAttribute("SERVER_BYTES_SUM", bytesSum);
                newDoc.addAttribute("CLIENT_SESSION_COUNT", 0L);
                newDoc.addAttribute("CLIENT_BYTES_SUM", 0L);
                break;
            default:
                // Unknown ip_type: leave all four counters unset.
        }
        newDoc.addAttribute("COMMON_LINK_INFO", "");
        return newDoc;
    }

    /** Builds a subscriber vertex document from one row of {@link #getVertexSubscriberSql()}. */
    public static BaseDocument getVertexSubscriberDocument(ResultSet resultSet) throws SQLException {
        String subscriberId = resultSet.getString("common_subscriber_id");
        long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
        long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
        BaseDocument newDoc = new BaseDocument();
        newDoc.setKey(subscriberId);
        newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
        newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
        return newDoc;
    }

    /**
     * Builds a subscriber-&gt;IP edge (key "subscriberId-framedIp") from one row of
     * {@link #getRelationshipSubsciberLocateIpSql()}.
     */
    public static BaseEdgeDocument getRelationshipSubsciberLocateIpDocument(ResultSet resultSet) throws SQLException {
        String subscriberId = resultSet.getString("common_subscriber_id");
        String framedIp = resultSet.getString("radius_framed_ip");
        long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
        long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
        long countTotal = resultSet.getLong("COUNT_TOTAL");
        String key = subscriberId + "-" + framedIp;
        BaseEdgeDocument newDoc = new BaseEdgeDocument();
        newDoc.setKey(key);
        newDoc.setFrom("SUBSCRIBER/" + subscriberId);
        newDoc.setTo("IP/" + framedIp);
        newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
        newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
        newDoc.addAttribute("COUNT_TOTAL", countTotal);
        return newDoc;
    }

    /**
     * Builds an FQDN-&gt;IP "address" edge (key "fqdn-ip") from one row of
     * {@link #getRelationshipFqdnAddressIpSql()}. Every recent distinct client IP
     * is stamped with {@link #currentHour} in the parallel DIST_CIP_TS array.
     *
     * @return the edge, or {@code null} when the FQDN column is an IPv4 literal
     */
    public static BaseEdgeDocument getRelationFqdnAddressIpDocument(ResultSet resultSet) throws SQLException {
        String vFqdn = resultSet.getString("FQDN");
        BaseEdgeDocument newDoc = null;
        if (isDomain(vFqdn)) {
            String vIp = resultSet.getString("common_server_ip");
            long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
            long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
            long countTotal = resultSet.getLong("COUNT_TOTAL");
            String[] distCipRecents = (String[]) resultSet.getArray("DIST_CIP_RECENT").getArray();
            // One timestamp per distinct client IP, all set to the current hour.
            long[] clientIpTs = new long[distCipRecents.length];
            for (int i = 0; i < clientIpTs.length; i++) {
                clientIpTs[i] = currentHour;
            }
            String key = vFqdn + "-" + vIp;
            newDoc = new BaseEdgeDocument();
            newDoc.setKey(key);
            newDoc.setFrom("FQDN/" + vFqdn);
            newDoc.setTo("IP/" + vIp);
            newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
            newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
            newDoc.addAttribute("COUNT_TOTAL", countTotal);
            newDoc.addAttribute("DIST_CIP", distCipRecents);
            newDoc.addAttribute("DIST_CIP_TS", clientIpTs);
        }
        return newDoc;
    }

    /**
     * Builds an IP-&gt;FQDN "visit" edge (key "ip-fqdn") from one row of
     * {@link #getRelationshipIpVisitFqdnSql()}.
     *
     * @return the edge, or {@code null} when the FQDN column is an IPv4 literal
     */
    public static BaseEdgeDocument getRelationIpVisitFqdnDocument(ResultSet resultSet) throws SQLException {
        BaseEdgeDocument newDoc = null;
        String vFqdn = resultSet.getString("FQDN");
        if (isDomain(vFqdn)) {
            String vIp = resultSet.getString("common_client_ip");
            String key = vIp + "-" + vFqdn;
            long firstFoundTime = resultSet.getLong("FIRST_FOUND_TIME");
            long lastFoundTime = resultSet.getLong("LAST_FOUND_TIME");
            long countTotal = resultSet.getLong("COUNT_TOTAL");
            newDoc = new BaseEdgeDocument();
            newDoc.setKey(key);
            newDoc.setFrom("IP/" + vIp);
            newDoc.setTo("FQDN/" + vFqdn);
            newDoc.addAttribute("FIRST_FOUND_TIME", firstFoundTime);
            newDoc.addAttribute("LAST_FOUND_TIME", lastFoundTime);
            newDoc.addAttribute("COUNT_TOTAL", countTotal);
        }
        return newDoc;
    }

    /**
     * Shards {@code newDoc} into one of THREAD_POOL_NUMBER per-thread maps by the
     * hash of its key, appending it to the per-key list. No-op for {@code null}.
     * NOTE(review): the inner map created by getOrDefault is only visible to the
     * caller if it was already present in {@code map} — callers are expected to
     * pre-populate the outer map; confirm against the call sites.
     */
    public static <T extends BaseDocument> void putMapByHashcode(T newDoc, HashMap<Integer, HashMap<String, ArrayList<T>>> map) {
        if (newDoc != null) {
            String key = newDoc.getKey();
            int i = Math.abs(key.hashCode()) % ApplicationConfig.THREAD_POOL_NUMBER;
            HashMap<String, ArrayList<T>> documentHashMap = map.getOrDefault(i, new HashMap<>());
            ArrayList<T> documentArrayList = documentHashMap.getOrDefault(key, new ArrayList<>());
            documentArrayList.add(newDoc);
            documentHashMap.put(key, documentArrayList);
        }
    }

    /**
     * Returns {@code true} when {@code fqdn} is treated as a domain name, i.e.
     * anything that is NOT a well-formed dotted-quad IPv4 literal (four numeric
     * parts, each 0-255). Returns {@code false} for IPv4 literals and for inputs
     * that raise (e.g. null), which are then skipped by the callers.
     */
    private static boolean isDomain(String fqdn) {
        try {
            String[] fqdnArr = fqdn.split("\\.");
            if (fqdnArr.length != 4) {
                return true;
            }
            for (String f : fqdnArr) {
                if (pattern.matcher(f).matches()) {
                    int i = Integer.parseInt(f);
                    if (i < 0 || i > 255) {
                        return true;
                    }
                } else {
                    return true;
                }
            }
        } catch (Exception e) {
            LOG.error("解析域名 " + fqdn + " 失败:\n" + e.toString());
        }
        return false;
    }

    /** SQL: distinct FQDNs (SSL SNI + HTTP host) with first/last seen times. */
    public static String getVertexFqdnSql() {
        long[] timeLimit = getTimeLimit();
        long maxTime = timeLimit[0];
        long minTime = timeLimit[1];
        String where = "common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
        String sslSql = "SELECT ssl_sni AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni";
        String httpSql = "SELECT http_host AS FQDN,MAX( common_recv_time ) AS LAST_FOUND_TIME,MIN( common_recv_time ) AS FIRST_FOUND_TIME FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host";
        return "SELECT FQDN,MAX( LAST_FOUND_TIME ) AS LAST_FOUND_TIME,MIN( FIRST_FOUND_TIME ) AS FIRST_FOUND_TIME FROM ((" + sslSql + ") UNION ALL (" + httpSql + ")) GROUP BY FQDN HAVING FQDN != ''";
    }

    /** SQL: per-IP session/byte aggregates, tagged 'client' or 'server'. */
    public static String getVertexIpSql() {
        long[] timeLimit = getTimeLimit();
        long maxTime = timeLimit[0];
        long minTime = timeLimit[1];
        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
        String clientIpSql = "SELECT common_client_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_c2s_byte_num) as BYTES_SUM,'client' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
        String serverIpSql = "SELECT common_server_ip AS IP, MIN(common_recv_time) AS FIRST_FOUND_TIME,MAX(common_recv_time) AS LAST_FOUND_TIME,count(*) as SESSION_COUNT,sum(common_s2c_byte_num) as BYTES_SUM,'server' as ip_type FROM tsg_galaxy_v3.connection_record_log where " + where + " group by IP";
        return "SELECT * FROM((" + clientIpSql + ") UNION ALL (" + serverIpSql + "))";
    }

    /** SQL: FQDN-&gt;server-IP edges with the 100 most recent distinct client IPs. */
    public static String getRelationshipFqdnAddressIpSql() {
        long[] timeLimit = getTimeLimit();
        long maxTime = timeLimit[0];
        long minTime = timeLimit[1];
        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
        String sslSql = "SELECT ssl_sni AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_server_ip";
        String httpSql = "SELECT http_host AS FQDN,common_server_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,groupUniqArray(100)(common_client_ip) AS DIST_CIP_RECENT,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_server_ip";
        return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
    }

    /** SQL: client-IP-&gt;FQDN visit edges. */
    public static String getRelationshipIpVisitFqdnSql() {
        long[] timeLimit = getTimeLimit();
        long maxTime = timeLimit[0];
        long minTime = timeLimit[1];
        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime;
        String httpSql = "SELECT http_host AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'HTTP' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'HTTP' GROUP BY http_host,common_client_ip";
        // FIX: the SSL branch was missing the time-range predicate that every
        // sibling query applies, scanning the whole table and returning rows
        // outside [minTime, maxTime).
        String sslSql = "SELECT ssl_sni AS FQDN,common_client_ip,MAX(common_recv_time) AS LAST_FOUND_TIME,MIN(common_recv_time) AS FIRST_FOUND_TIME,COUNT(*) AS COUNT_TOTAL,'SSL' AS common_schema_type FROM tsg_galaxy_v3.connection_record_log WHERE " + where + " and common_schema_type = 'SSL' GROUP BY ssl_sni,common_client_ip";
        return "SELECT * FROM ((" + sslSql + ") UNION ALL (" + httpSql + "))WHERE FQDN != ''";
    }

    /** SQL: subscriber vertices from RADIUS accounting-start records. */
    public static String getVertexSubscriberSql() {
        long[] timeLimit = getTimeLimit();
        long maxTime = timeLimit[0];
        long minTime = timeLimit[1];
        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
        return "SELECT common_subscriber_id,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id";
    }

    /** SQL: subscriber-&gt;framed-IP edges from RADIUS accounting-start records. */
    public static String getRelationshipSubsciberLocateIpSql() {
        long[] timeLimit = getTimeLimit();
        long maxTime = timeLimit[0];
        long minTime = timeLimit[1];
        String where = " common_recv_time >= " + minTime + " AND common_recv_time < " + maxTime + " AND common_subscriber_id != '' AND radius_framed_ip != '' AND radius_packet_type = 4 AND radius_acct_status_type = 1";
        return "SELECT common_subscriber_id,radius_framed_ip,MAX(common_recv_time) as LAST_FOUND_TIME,MIN(common_recv_time) as FIRST_FOUND_TIME,COUNT(*) as COUNT_TOTAL FROM radius_record_log WHERE" + where + " GROUP BY common_subscriber_id,radius_framed_ip";
    }

    /**
     * Returns {maxTime, minTime} for the query window, taken from configuration.
     *
     * @return array with the exclusive upper bound at index 0 and the inclusive
     *         lower bound at index 1
     */
    private static long[] getTimeLimit() {
        // long maxTime = currentHour;
        // long minTime = maxTime - 3600;
        long maxTime = ApplicationConfig.READ_CLICKHOUSE_MAX_TIME;
        long minTime = ApplicationConfig.READ_CLICKHOUSE_MIN_TIME;
        return new long[]{maxTime, minTime};
    }
}

View File

@@ -0,0 +1,50 @@
package cn.ac.iie.service.read;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.ArangoCursor;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author wlh
* 多线程全量读取arangoDb历史数据封装到map
*/
/**
 * Worker thread that reads the complete history of one ArangoDB collection
 * (via the supplied AQL query) and loads every document into a shared
 * ConcurrentHashMap keyed by document key.
 *
 * @author wlh
 * @param <T> concrete document type returned by the query
 */
public class ReadHistoryArangoData<T extends BaseDocument> extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(ReadHistoryArangoData.class);

    private final ArangoDBConnect arangoDBConnect;
    private final String query;
    /** Shared destination map; safe for concurrent writes from several readers. */
    private final ConcurrentHashMap<String, T> map;
    /** Target deserialization type handed to the driver. */
    private final Class<T> type;

    public ReadHistoryArangoData(ArangoDBConnect arangoDBConnect, String query,
                                 ConcurrentHashMap<String, T> map, Class<T> type) {
        this.arangoDBConnect = arangoDBConnect;
        this.query = query;
        this.map = map;
        this.type = type;
    }

    @Override
    public void run() {
        long startMillis = System.currentTimeMillis();
        ArangoCursor<T> cursor = arangoDBConnect.executorQuery(query, type);
        if (cursor == null) {
            // Query failed upstream; nothing to load and nothing to log.
            return;
        }
        List<T> documents = cursor.asListRemaining();
        int loaded = 0;
        for (T document : documents) {
            map.put(document.getKey(), document);
            loaded++;
        }
        long elapsed = System.currentTimeMillis() - startMillis;
        LOG.info(query + "\n处理数据" + loaded + "条,运行时间:" + elapsed);
    }
}

View File

@@ -0,0 +1,76 @@
package cn.ac.iie.service.relationship;
import cn.ac.iie.service.read.ReadClickhouseData;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Relationship updater for FQDN-&gt;IP "address" edges. In addition to the base
 * merge it maintains the edge's recent-distinct-client-IP list (DIST_CIP) and
 * the parallel timestamp list (DIST_CIP_TS), keeping only the newest entries.
 */
public class LocateFqdn2Ip extends Relationship {

    /** Max distinct client IPs kept per edge; mirrors groupUniqArray(100) in the ClickHouse query. */
    private static final int MAX_DIST_CIP = 100;

    public LocateFqdn2Ip(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
                         ArangoDBConnect arangoManger,
                         String collectionName,
                         ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
                         CountDownLatch countDownLatch) {
        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
    }

    // NOTE: the previous mergeFunction override only delegated to super and was removed.

    @Override
    protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
        super.updateFunction(newEdgeDocument, historyEdgeDocument);
        updateDistinctClientIp(newEdgeDocument, historyEdgeDocument);
    }

    /**
     * Folds the new edge's recent distinct client IPs into the history edge:
     * every incoming IP is (re)stamped with the current hour, then the combined
     * map is sorted by timestamp descending and truncated to MAX_DIST_CIP.
     */
    private void updateDistinctClientIp(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument edgeDocument) {
        // NOTE(review): assumes the stored attributes deserialize as ArrayList and
        // the fresh ones as Object[] — confirm against the driver's VPack mapping.
        ArrayList<String> distCip = (ArrayList<String>) edgeDocument.getAttribute("DIST_CIP");
        ArrayList<Long> distCipTs = (ArrayList<Long>) edgeDocument.getAttribute("DIST_CIP_TS");
        HashMap<String, Long> distCipToTs = new HashMap<>();
        if (distCip.size() == distCipTs.size()) {
            // Zip the parallel history lists back into ip -> timestamp.
            for (int i = 0; i < distCip.size(); i++) {
                distCipToTs.put(distCip.get(i), distCipTs.get(i));
            }
        }
        Object[] distCipRecent = (Object[]) newEdgeDocument.getAttribute("DIST_CIP");
        for (Object cip : distCipRecent) {
            distCipToTs.put(cip.toString(), ReadClickhouseData.currentHour);
        }
        Map<String, Long> sortDistCip = sortMapByValue(distCipToTs);
        edgeDocument.addAttribute("DIST_CIP", sortDistCip.keySet().toArray());
        edgeDocument.addAttribute("DIST_CIP_TS", sortDistCip.values().toArray());
    }

    /**
     * Sorts the map by value (timestamp) descending and keeps at most
     * MAX_DIST_CIP entries, preserving that order via a LinkedHashMap.
     * FIX: returns an empty map instead of {@code null} for null/empty input —
     * the caller dereferences the result unconditionally and would have NPE'd.
     */
    private Map<String, Long> sortMapByValue(Map<String, Long> oriMap) {
        Map<String, Long> sortedMap = new LinkedHashMap<>();
        if (oriMap == null || oriMap.isEmpty()) {
            return sortedMap;
        }
        List<Map.Entry<String, Long>> entryList = new ArrayList<>(oriMap.entrySet());
        entryList.sort((o1, o2) -> o2.getValue().compareTo(o1.getValue()));
        int keep = Math.min(MAX_DIST_CIP, entryList.size());
        for (Map.Entry<String, Long> entry : entryList.subList(0, keep)) {
            sortedMap.put(entry.getKey(), entry.getValue());
        }
        return sortedMap;
    }
}

View File

@@ -0,0 +1,32 @@
package cn.ac.iie.service.relationship;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Relationship updater for subscriber-&gt;IP "locate" edges. Uses the base
 * class's batching and merge machinery unchanged; only the per-edge update
 * step is narrowed (see {@link #updateFunction}).
 */
public class LocateSubscriber2Ip extends Relationship {

    public LocateSubscriber2Ip(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
                               ArangoDBConnect arangoManger,
                               String collectionName,
                               ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
                               CountDownLatch countDownLatch) {
        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
    }

    // The previous mergeRelationship override only delegated to super and was removed.

    /**
     * Only refreshes LAST_FOUND_TIME on the stored edge, deliberately skipping
     * the per-schema counter merge performed by the base implementation.
     */
    @Override
    protected void updateFunction(BaseEdgeDocument newEdgeDocument, BaseEdgeDocument historyEdgeDocument) {
        super.updateFoundTime(newEdgeDocument, historyEdgeDocument);
    }
}

View File

@@ -0,0 +1,20 @@
package cn.ac.iie.service.relationship;
import cn.ac.iie.service.update.Relationship;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Relationship updater for IP-&gt;FQDN "visit" edges. Inherits all batching,
 * merging and update behavior from {@link Relationship} unchanged; this class
 * only binds the constructor arguments (target collection, history map, latch).
 */
public class VisitIp2Fqdn extends Relationship {
/**
 * @param newDocumentHashMap fresh partial edge documents grouped by edge key
 * @param arangoManger       ArangoDB connection wrapper used for writes
 * @param collectionName     target edge collection
 * @param historyDocumentMap shared snapshot of already-stored edges
 * @param countDownLatch     signalled by the base class when the run finishes
 */
public VisitIp2Fqdn(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap,countDownLatch);
}
}

View File

@@ -0,0 +1,118 @@
package cn.ac.iie.service.update;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Generic worker thread that merges a batch of freshly aggregated documents
 * with the in-memory history snapshot and overwrites the result into one
 * ArangoDB collection in batches of
 * {@code ApplicationConfig.UPDATE_ARANGO_BATCH}.
 *
 * @param <T> concrete document type (vertex document or an edge subtype)
 */
public class Document<T extends BaseDocument> extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(Document.class);

    /** Fresh partial documents grouped by document key. */
    private final HashMap<String, ArrayList<T>> newDocumentMap;
    private final ArangoDBConnect arangoManger;
    private final String collectionName;
    /** History snapshot keyed by document key, shared across worker threads. */
    private final ConcurrentHashMap<String, T> historyDocumentMap;
    /** Counted down exactly once when run() finishes (success or failure). */
    private final CountDownLatch countDownLatch;
    /** Used reflectively to create the merge target in mergeDocument. */
    private final Class<T> type;

    Document(HashMap<String, ArrayList<T>> newDocumentMap,
             ArangoDBConnect arangoManger,
             String collectionName,
             ConcurrentHashMap<String, T> historyDocumentMap,
             CountDownLatch countDownLatch,
             Class<T> type) {
        this.newDocumentMap = newDocumentMap;
        this.arangoManger = arangoManger;
        this.collectionName = collectionName;
        this.historyDocumentMap = historyDocumentMap;
        this.countDownLatch = countDownLatch;
        this.type = type;
    }

    @Override
    public void run() {
        ArrayList<T> resultDocumentList = new ArrayList<>();
        int i = 0;
        try {
            for (String key : newDocumentMap.keySet()) {
                ArrayList<T> newDocumentSchemaList = newDocumentMap.get(key);
                if (newDocumentSchemaList == null) {
                    continue;
                }
                T newDocument = mergeDocument(newDocumentSchemaList);
                if (newDocument == null) {
                    // FIX: an empty group previously fell through and added null
                    // to the write list; skip it instead.
                    continue;
                }
                i += 1;
                updateDocument(newDocument, historyDocumentMap.get(key), resultDocumentList);
                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
                    arangoManger.overwrite(resultDocumentList, collectionName);
                    LOG.info("更新" + collectionName + ":" + i);
                    // FIX: clear the buffer so already-flushed documents are not
                    // rewritten with every subsequent batch.
                    resultDocumentList.clear();
                    i = 0;
                }
            }
            if (i != 0) {
                // Flush the final partial batch.
                arangoManger.overwrite(resultDocumentList, collectionName);
                LOG.info("更新" + collectionName + ":" + i);
            }
        } catch (Exception e) {
            e.printStackTrace();
            LOG.error(e.toString());
        } finally {
            countDownLatch.countDown();
        }
    }

    /**
     * Chooses the document to write: the (mutated) history document when one
     * exists for this key, otherwise the freshly merged document.
     */
    private void updateDocument(T newDocument, T historyDocument, ArrayList<T> resultDocumentList) {
        if (historyDocument != null) {
            updateFunction(newDocument, historyDocument);
            resultDocumentList.add(historyDocument);
        } else {
            resultDocumentList.add(newDocument);
        }
    }

    /**
     * Default per-document update: carry the new LAST_FOUND_TIME over to the
     * stored document. Subclasses may extend this.
     */
    protected void updateFunction(T newDocument, T historyDocument) {
        Object lastFoundTime = newDocument.getAttribute("LAST_FOUND_TIME");
        historyDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
    }

    /**
     * Collapses the partial documents for one key into a single document via
     * {@link #mergeFunction}.
     *
     * @return the merged document, or {@code null} when the list is null/empty.
     *         NOTE: with 2+ elements the first document with non-empty
     *         properties becomes the merge target and is mutated in place.
     */
    private T mergeDocument(ArrayList<T> newDocumentSchemaList) throws IllegalAccessException, InstantiationException {
        if (newDocumentSchemaList == null || newDocumentSchemaList.isEmpty()) {
            return null;
        } else if (newDocumentSchemaList.size() == 1) {
            return newDocumentSchemaList.get(0);
        } else {
            T document = type.newInstance();
            Map<String, Object> properties = document.getProperties();
            for (T doc : newDocumentSchemaList) {
                if (properties.isEmpty()) {
                    document = doc;
                    properties = doc.getProperties();
                } else {
                    mergeFunction(properties, doc);
                }
            }
            document.setProperties(properties);
            return document;
        }
    }

    /**
     * Default pairwise merge: keep the earliest FIRST_FOUND_TIME and the latest
     * LAST_FOUND_TIME. Subclasses may extend this with collection-specific fields.
     */
    protected void mergeFunction(Map<String, Object> properties, T doc) {
        long firstFoundTime = Long.parseLong(properties.getOrDefault("FIRST_FOUND_TIME", 0L).toString());
        long docFirstFoundTime = Long.parseLong(doc.getAttribute("FIRST_FOUND_TIME").toString());
        properties.put("FIRST_FOUND_TIME", Math.min(firstFoundTime, docFirstFoundTime));
        long lastFoundTime = Long.parseLong(properties.getOrDefault("LAST_FOUND_TIME", 0L).toString());
        long docLastFoundTime = Long.parseLong(doc.getAttribute("LAST_FOUND_TIME").toString());
        properties.put("LAST_FOUND_TIME", Math.max(lastFoundTime, docLastFoundTime));
    }
}

View File

@@ -0,0 +1,158 @@
package cn.ac.iie.service.update;
import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
public class Relationship extends Thread {
private static final Logger LOG = LoggerFactory.getLogger(Relationship.class);
private HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap;
private ArangoDBConnect arangoManger;
private String collectionName;
private ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap;
private CountDownLatch countDownLatch;
/**
 * @param newDocumentHashMap fresh partial edge documents grouped by edge key
 * @param arangoManger       ArangoDB connection wrapper used for batched overwrites
 * @param collectionName     target edge collection
 * @param historyDocumentMap shared snapshot of already-stored edges, keyed by edge key
 * @param countDownLatch     counted down exactly once when run() finishes
 */
public Relationship(HashMap<String, ArrayList<BaseEdgeDocument>> newDocumentHashMap,
ArangoDBConnect arangoManger,
String collectionName,
ConcurrentHashMap<String, BaseEdgeDocument> historyDocumentMap,
CountDownLatch countDownLatch) {
this.newDocumentHashMap = newDocumentHashMap;
this.arangoManger = arangoManger;
this.collectionName = collectionName;
this.historyDocumentMap = historyDocumentMap;
this.countDownLatch = countDownLatch;
}
@Override
public void run() {
    // Merge each key's partial edges, combine with history, and overwrite the
    // collection in batches of ApplicationConfig.UPDATE_ARANGO_BATCH.
    ArrayList<BaseEdgeDocument> docInsert = new ArrayList<>();
    int i = 0;
    try {
        for (String key : newDocumentHashMap.keySet()) {
            ArrayList<BaseEdgeDocument> newEdgeDocumentSchemaList = newDocumentHashMap.get(key);
            if (newEdgeDocumentSchemaList != null) {
                BaseEdgeDocument newEdgeDocument = mergeRelationship(newEdgeDocumentSchemaList);
                i += 1;
                BaseEdgeDocument historyEdgeDocument = historyDocumentMap.get(key);
                updateRelationship(newEdgeDocument, historyEdgeDocument, docInsert);
                if (i >= ApplicationConfig.UPDATE_ARANGO_BATCH) {
                    arangoManger.overwrite(docInsert, collectionName);
                    LOG.info("更新" + collectionName + ":" + i);
                    // FIX: clear the batch buffer so already-flushed edges are not
                    // rewritten with every subsequent batch.
                    docInsert.clear();
                    i = 0;
                }
            }
        }
        if (i != 0) {
            // Flush the final partial batch.
            arangoManger.overwrite(docInsert, collectionName);
            LOG.info("更新" + collectionName + ":" + i);
        }
    } catch (Exception e) {
        e.printStackTrace();
        LOG.error(e.toString());
    } finally {
        countDownLatch.countDown();
    }
}
protected BaseEdgeDocument mergeRelationship(ArrayList<BaseEdgeDocument> newEdgeDocumentSchemaMap) {
return new BaseEdgeDocument();
}
private void updateRelationship(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument historyEdgeDocument,ArrayList<BaseEdgeDocument> docInsert){
if (historyEdgeDocument != null && newEdgeDocument != null) {
updateFunction(newEdgeDocument, historyEdgeDocument);
docInsert.add(historyEdgeDocument);
} else {
docInsert.add(newEdgeDocument);
}
}
protected void updateFunction(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument historyEdgeDocument){
updateFoundTime(newEdgeDocument,historyEdgeDocument);
setSchemaCntByHistory(historyEdgeDocument,"TLS_CNT_RECENT","TLS_CNT_TOTAL",newEdgeDocument);
setSchemaCntByHistory(historyEdgeDocument,"HTTP_CNT_RECENT","HTTP_CNT_TOTAL",newEdgeDocument);
setSchemaCntByHistory(historyEdgeDocument,"DNS_CNT_RECENT","DNS_CNT_TOTAL",newEdgeDocument);
}
protected void updateFoundTime(BaseEdgeDocument newEdgeDocument,BaseEdgeDocument historyEdgeDocument){
Object lastFoundTime = newEdgeDocument.getAttribute("LAST_FOUND_TIME");
historyEdgeDocument.addAttribute("LAST_FOUND_TIME", lastFoundTime);
}
private void setSchemaCntByHistory(BaseEdgeDocument historyEdgeDocument,String schema,String totalSchema,BaseEdgeDocument newEdgeDocument){
long countTotal = Long.parseLong(newEdgeDocument.getAttribute(totalSchema).toString());
long updateCountTotal = Long.parseLong(historyEdgeDocument.getAttribute(totalSchema).toString());
ArrayList<Long> cntRecent = (ArrayList<Long>) historyEdgeDocument.getAttribute(schema);
Long[] cntRecentsSrc = cntRecent.toArray(new Long[cntRecent.size()]);
Long[] cntRecentsDst = new Long[24];
System.arraycopy(cntRecentsSrc, 0, cntRecentsDst, 1, cntRecentsSrc.length - 1);
cntRecentsDst[0] = countTotal;
historyEdgeDocument.addAttribute(schema, cntRecentsDst);
historyEdgeDocument.addAttribute(totalSchema, countTotal + updateCountTotal);
}
protected void mergeFunction(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
mergeFoundTime(properties, schemaEdgeDoc);
}
private void mergeFoundTime(Map<String, Object> properties, BaseEdgeDocument schemaEdgeDoc) {
long schemaFirstFoundTime = Long.parseLong(schemaEdgeDoc.getAttribute("FIRST_FOUND_TIME").toString());
long firstFoundTime = Long.parseLong(properties.get("FIRST_FOUND_TIME").toString());
properties.put("FIRST_FOUND_TIME", schemaFirstFoundTime < firstFoundTime ? schemaFirstFoundTime : firstFoundTime);
long schemaLastFoundTime = Long.parseLong(schemaEdgeDoc.getAttribute("LAST_FOUND_TIME").toString());
long lastFoundTime = Long.parseLong(properties.get("LAST_FOUND_TIME").toString());
properties.put("LAST_FOUND_TIME", schemaLastFoundTime > lastFoundTime ? schemaLastFoundTime : lastFoundTime);
}
private void setSchemaCount(String schema, BaseEdgeDocument schemaEdgeDoc, Map<String, Object> properties) {
switch (schema) {
case "HTTP":
long httpCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
properties.put("HTTP_CNT_TOTAL", httpCntTotal);
long[] httpCntRecentsDst = new long[24];
httpCntRecentsDst[0] = httpCntTotal;
properties.put("HTTP_CNT_RECENT", httpCntRecentsDst);
break;
case "SSL":
long tlsCntTotal = Long.parseLong(schemaEdgeDoc.getAttribute("COUNT_TOTAL").toString());
properties.put("TLS_CNT_TOTAL", tlsCntTotal);
long[] tlsCntRecentsDst = new long[24];
tlsCntRecentsDst[0] = tlsCntTotal;
properties.put("TLS_CNT_RECENT", tlsCntRecentsDst);
break;
default:
break;
}
}
private void checkSchemaProperty(Map<String, Object> properties){
if (!properties.containsKey("TLS_CNT_TOTAL")){
properties.put("TLS_CNT_TOTAL",0L);
properties.put("TLS_CNT_RECENT",new long[24]);
}
if (!properties.containsKey("HTTP_CNT_TOTAL")){
properties.put("HTTP_CNT_TOTAL",0L);
properties.put("HTTP_CNT_RECENT",new long[24]);
}
if (!properties.containsKey("DNS_CNT_TOTAL")){
properties.put("DNS_CNT_TOTAL",0L);
properties.put("DNS_CNT_RECENT",new long[24]);
}
}
}

View File

@@ -0,0 +1,40 @@
package cn.ac.iie.service.update;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Multi-threaded updater for vertex documents.
 *
 * <p>Concrete {@link BaseDocument} specialisation of {@code Document}; the
 * batched update cycle, merge of found-time windows and latch count-down are
 * all inherited unchanged.</p>
 *
 * @author wlh
 */
public class Vertex extends Document<BaseDocument> {
    /**
     * @param newDocumentHashMap  freshly aggregated documents, keyed by document key
     * @param arangoManger        shared ArangoDB connection manager
     * @param collectionName      target vertex collection
     * @param historyDocumentMap  previously persisted documents, keyed by document key
     * @param countDownLatch      latch counted down when this worker finishes
     */
    public Vertex(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
                  ArangoDBConnect arangoManger,
                  String collectionName,
                  ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
                  CountDownLatch countDownLatch) {
        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch, BaseDocument.class);
    }
    // NOTE(review): the previous updateFunction/mergeFunction/run overrides only
    // delegated to super and were removed as redundant; behaviour is identical.
}

View File

@@ -0,0 +1,21 @@
package cn.ac.iie.service.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Updater thread for the FQDN vertex collection.
 *
 * <p>Pure marker subclass: every piece of merge/update behaviour is inherited
 * from {@link Vertex} unchanged.</p>
 */
public class Fqdn extends Vertex {

    /**
     * @param freshDocuments    newly aggregated FQDN documents, keyed by document key
     * @param arangoConnection  shared ArangoDB connection manager
     * @param collection        name of the target vertex collection
     * @param historyDocuments  previously persisted documents, keyed by document key
     * @param finishLatch       latch counted down when this worker completes
     */
    public Fqdn(HashMap<String, ArrayList<BaseDocument>> freshDocuments,
                ArangoDBConnect arangoConnection,
                String collection,
                ConcurrentHashMap<String, BaseDocument> historyDocuments,
                CountDownLatch finishLatch) {
        super(freshDocuments, arangoConnection, collection, historyDocuments, finishLatch);
    }
}

View File

@@ -0,0 +1,79 @@
package cn.ac.iie.service.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Updater thread for the IP vertex collection.
 *
 * <p>Extends the generic {@link Vertex} update with IP-specific traffic
 * counters (client/server session counts and byte sums), which are summed
 * across update cycles and reconciled during merges.</p>
 */
public class Ip extends Vertex {

    /**
     * @param newDocumentHashMap  freshly aggregated IP documents, keyed by document key
     * @param arangoManger        shared ArangoDB connection manager
     * @param collectionName      target vertex collection
     * @param historyDocumentMap  previously persisted documents, keyed by document key
     * @param countDownLatch      latch counted down when this worker finishes
     */
    public Ip(HashMap<String, ArrayList<BaseDocument>> newDocumentHashMap,
              ArangoDBConnect arangoManger,
              String collectionName,
              ConcurrentHashMap<String, BaseDocument> historyDocumentMap,
              CountDownLatch countDownLatch) {
        super(newDocumentHashMap, arangoManger, collectionName, historyDocumentMap, countDownLatch);
    }

    /** Applies the generic update, then accumulates the IP traffic counters. */
    @Override
    protected void updateFunction(BaseDocument newDocument, BaseDocument historyDocument) {
        super.updateFunction(newDocument, historyDocument);
        updateIpByType(newDocument, historyDocument);
    }

    /** Applies the generic merge, then reconciles the IP traffic counters. */
    @Override
    protected void mergeFunction(Map<String, Object> properties, BaseDocument doc) {
        super.mergeFunction(properties, doc);
        mergeIpByType(properties, doc);
    }

    // Reconciles each traffic counter in the in-flight property map against the
    // document being merged in.
    private void mergeIpByType(Map<String, Object> properties, BaseDocument doc) {
        Map<String, Object> mergeProperties = doc.getProperties();
        checkIpTypeProperty(properties, mergeProperties, "CLIENT_SESSION_COUNT");
        checkIpTypeProperty(properties, mergeProperties, "CLIENT_BYTES_SUM");
        checkIpTypeProperty(properties, mergeProperties, "SERVER_SESSION_COUNT");
        checkIpTypeProperty(properties, mergeProperties, "SERVER_BYTES_SUM");
    }

    // Ensures `properties` carries the counter; a zero (or absent) value is
    // replaced by the non-zero value from `mergeProperties` when one exists.
    // Flattened to straight-line code: the previous version recursed exactly
    // once after inserting the 0L default, with identical effect.
    private void checkIpTypeProperty(Map<String, Object> properties, Map<String, Object> mergeProperties, String property) {
        try {
            if (!properties.containsKey(property)) {
                properties.put(property, 0L);
            }
            if ("0".equals(properties.get(property).toString())
                    && mergeProperties.containsKey(property)
                    && !"0".equals(mergeProperties.get(property).toString())) {
                properties.put(property, Long.parseLong(mergeProperties.get(property).toString()));
            }
        } catch (Exception e) {
            // Best-effort: a malformed numeric attribute must not abort the merge.
            e.printStackTrace();
        }
    }

    // Adds every traffic counter of the new document onto the history document.
    private void updateIpByType(BaseDocument newDocument, BaseDocument historyDocument) {
        addProperty(newDocument, historyDocument, "CLIENT_SESSION_COUNT");
        addProperty(newDocument, historyDocument, "CLIENT_BYTES_SUM");
        addProperty(newDocument, historyDocument, "SERVER_SESSION_COUNT");
        addProperty(newDocument, historyDocument, "SERVER_BYTES_SUM");
    }

    // Accumulates one counter: history += new.
    private void addProperty(BaseDocument newDocument, BaseDocument historyDocument, String property) {
        try {
            // Treat a missing attribute on the new document as 0 instead of
            // letting getAttribute(..) NPE into the catch block.
            long newProperty = newDocument.getProperties().containsKey(property)
                    ? Long.parseLong(newDocument.getAttribute(property).toString())
                    : 0L;
            if (historyDocument.getProperties().containsKey(property)) {
                long hisProperty = Long.parseLong(historyDocument.getAttribute(property).toString());
                historyDocument.updateAttribute(property, newProperty + hisProperty);
            } else {
                // BUG FIX: the old code stored a constant 0L here, discarding the
                // new document's count the first time the attribute appears on a
                // history document. Carry the new value over instead.
                historyDocument.addAttribute(property, newProperty);
            }
        } catch (Exception e) {
            // Best-effort: a malformed numeric attribute must not abort the update.
            e.printStackTrace();
        }
    }
}

View File

@@ -0,0 +1,21 @@
package cn.ac.iie.service.vertex;
import cn.ac.iie.service.update.Vertex;
import cn.ac.iie.utils.ArangoDBConnect;
import com.arangodb.entity.BaseDocument;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
/**
 * Updater thread for the subscriber vertex collection.
 *
 * <p>Pure marker subclass: every piece of merge/update behaviour is inherited
 * from {@link Vertex} unchanged.</p>
 */
public class Subscriber extends Vertex {

    /**
     * @param freshDocuments    newly aggregated subscriber documents, keyed by document key
     * @param arangoConnection  shared ArangoDB connection manager
     * @param collection        name of the target vertex collection
     * @param historyDocuments  previously persisted documents, keyed by document key
     * @param finishLatch       latch counted down when this worker completes
     */
    public Subscriber(HashMap<String, ArrayList<BaseDocument>> freshDocuments,
                      ArangoDBConnect arangoConnection,
                      String collection,
                      ConcurrentHashMap<String, BaseDocument> historyDocuments,
                      CountDownLatch finishLatch) {
        super(freshDocuments, arangoConnection, collection, historyDocuments, finishLatch);
    }
}