diff --git a/pom.xml b/pom.xml
index 2e03234..6a42ada 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,19 +3,22 @@
4.0.0
cn.ac.iie
- log-stream-completion
- 0.0.1-SNAPSHOT
+ log-stream-completion-schema
+ v3.20.12.12-ack-ratelimit-con
jar
- log-stream-completion
+ log-stream-completion-schema
http://maven.apache.org
+
nexus
Team Nexus Repository
- http://192.168.10.125:8099/content/groups/public
+ http://192.168.40.125:8099/content/groups/public
+
+
@@ -71,7 +74,7 @@
false
- src/main/java
+ properties
log4j.properties
@@ -84,6 +87,8 @@
UTF-8
1.0.0
1.0.2
+ 2.2.3
+ 2.7.1
@@ -129,12 +134,6 @@
${storm.version}
-
- redis.clients
- jedis
- 2.8.1
-
-
junit
junit
@@ -145,13 +144,19 @@
com.alibaba
fastjson
- 1.2.47
+ 1.2.70
+
+
+
+ cglib
+ cglib-nodep
+ 3.2.4
com.zdjizhi
galaxy
- 1.0.1
+ 1.0.3
slf4j-log4j12
@@ -181,6 +186,122 @@
+
+
+ org.apache.hbase
+ hbase-client
+ ${hbase.version}
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ log4j-over-slf4j
+ org.slf4j
+
+
+
+
+
+
+ org.apache.hbase
+ hbase-server
+ ${hbase.version}
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ log4j-over-slf4j
+ org.slf4j
+
+
+
+
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ log4j-over-slf4j
+ org.slf4j
+
+
+
+
+
+
+ org.apache.hadoop
+ hadoop-client
+ ${hadoop.version}
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ log4j-over-slf4j
+ org.slf4j
+
+
+
+
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+
+
+ slf4j-log4j12
+ org.slf4j
+
+
+ log4j-over-slf4j
+ org.slf4j
+
+
+
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+ 5.3.2
+ compile
+
+
+
+
+ org.apache.httpcomponents
+ httpclient
+ 4.5.2
+
+
+
+ org.apache.httpcomponents
+ httpcore
+ 4.4.1
+
+
+
+ com.jayway.jsonpath
+ json-path
+ 2.4.0
+
+
+
+ io.prometheus
+ simpleclient_pushgateway
+ 0.9.0
+
diff --git a/properties/hbase-site.xml b/properties/hbase-site.xml
new file mode 100644
index 0000000..c12866e
--- /dev/null
+++ b/properties/hbase-site.xml
@@ -0,0 +1,48 @@
+
+
+
+
+
+ hbase.rootdir
+ /opt/hbase-2.2.1/data
+
+
+ hbase.zookeeper.property.clientPort
+ 2181
+
+
+ hbase.zookeeper.quorum
+ 192.168.40.203
+
+
+hbase.zookeeper.property.dataDir
+/opt/hbase-2.2.1/zk
+
+
+ zookeeper.znode.parent
+ /hbase
+
+
+ hbase.cluster.distributed
+ true
+
+
diff --git a/properties/kafka_config.properties b/properties/kafka_config.properties
new file mode 100644
index 0000000..10ddd4f
--- /dev/null
+++ b/properties/kafka_config.properties
@@ -0,0 +1,17 @@
+#producer重试的次数设置
+retries=0
+
+#他的含义就是说一个Batch被创建之后,最多过多久,不管这个Batch有没有写满,都必须发送出去了
+linger.ms=5
+
+#如果在超时之前未收到响应,客户端将在必要时重新发送请求
+request.timeout.ms=30000
+
+#producer都是按照batch进行发送的,批次大小,默认:16384
+batch.size=262144
+
+#Producer端用于缓存消息的缓冲区大小
+buffer.memory=67108864
+
+#这个参数决定了每次发送给Kafka服务器请求的最大大小,默认1048576
+max.request.size=5242880
diff --git a/properties/redis_config.properties b/properties/redis_config.properties
deleted file mode 100644
index f99d396..0000000
--- a/properties/redis_config.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-#*****************jedis杩炴帴鍙傛暟璁剧疆*********************
-#redis鏈嶅姟鍣╥p
-redis.ip=192.168.40.123
-#redis鏈嶅姟鍣ㄧ鍙e彿
-redis.port=6379
-#涓庢湇鍔″櫒寤虹珛杩炴帴鐨勮秴鏃舵椂闂
-redis.timeout=3000
-#************************jedis姹犲弬鏁拌缃*******************
-#jedis鐨勬渶澶ф椿璺冭繛鎺ユ暟
-redis.pool.maxActive=200
-#jedis鏈澶х┖闂茶繛鎺ユ暟
-redis.pool.maxIdle=5
-#jedis姹犳病鏈夎繛鎺ュ璞¤繑鍥炴椂锛岀瓑寰呭彲鐢ㄨ繛鎺ョ殑鏈澶ф椂闂达紝鍗曚綅姣锛岄粯璁ゅ间负-1锛岃〃绀烘案涓嶈秴鏃躲
-#濡傛灉瓒呰繃绛夊緟鏃堕棿锛屽垯鐩存帴鎶涘嚭JedisConnectionException
-redis.pool.maxWait=-1
-#浠庢睜涓幏鍙栬繛鎺ョ殑鏃跺欙紝鏄惁杩涜鏈夋晥妫鏌
-redis.pool.testOnBorrow=true
-#褰掕繕杩炴帴鐨勬椂鍊欙紝鏄惁杩涜鏈夋晥妫鏌
-redis.pool.testOnReturn=true
diff --git a/properties/service_flow_config.properties b/properties/service_flow_config.properties
index 3077812..e76a47a 100644
--- a/properties/service_flow_config.properties
+++ b/properties/service_flow_config.properties
@@ -1,42 +1,67 @@
+#--------------------------------鍦板潃閰嶇疆------------------------------#
+
#绠$悊kafka鍦板潃
-#bootstrap.servers=10.4.35.7:9092,10.4.35.8:9092,10.4.35.9:9092
-bootstrap.servers=192.168.6.200:9093,192.168.6.200:9094,192.168.6.200:9095
+input.kafka.servers=192.168.44.12:9092
-#zookeeper 鍦板潃
-zookeeper.servers=192.168.6.200:2181
-#zookeeper.servers=192.168.40.207:2181
+#绠$悊杈撳嚭kafka鍦板潃
+output.kafka.servers=192.168.44.12:9092
-#latest/earliest
-auto.offset.reset=latest
+#zookeeper 鍦板潃 鐢ㄤ簬閰嶇疆log_id
+zookeeper.servers=192.168.44.12:2181
+
+#hbase zookeeper鍦板潃 鐢ㄤ簬杩炴帴HBase
+hbase.zookeeper.servers=192.168.44.12:2181
+
+#瀹氫綅搴撳湴鍧
+ip.library=/home/bigdata/topology/dat/
+
+#缃戝叧鐨剆chema浣嶇疆
+schema.http=http://192.168.44.67:9999/metadata/schema/v1/fields/connection_record_log
#kafka broker涓嬬殑topic鍚嶇О
-kafka.topic=SESSION-RECORD-LOG
-#kafka.topic=Snowflake-test
+#kafka.topic=CONNECTION-RECORD-LOG
+kafka.topic=CONNECTION-RECORD-LOG
#璇诲彇topic,瀛樺偍璇pout id鐨勬秷璐筼ffset淇℃伅锛屽彲閫氳繃璇ユ嫇鎵戝懡鍚;鍏蜂綋瀛樺偍offset鐨勪綅缃紝纭畾涓嬫璇诲彇涓嶉噸澶嶇殑鏁版嵁锛
-group.id=session-record-log-z
+group.id=connection-record-log-20200818-1-test
+
+#鎺ユ敹鑷猭afka鐨勬秷璐硅 client-id
+consumer.client.id=consumer-connection-record
+#鍥炲啓缁檏afka鐨勭敓浜ц client-id
+producer.client.id=producer-connection-record
+
+#鐢熶骇鑰呭帇缂╂ā寮 none or snappy
+producer.kafka.compression.type=snappy
+
+#鐢熶骇鑰卆ck
+producer.ack=1
+
+#latest/earliest 浠庡綋鍓嶆秷 or 浠庡ご娑堣垂
+auto.offset.reset=latest
#杈撳嚭topic
-#results.output.topic=SESSION-TEST-COMPLETED-LOG
-results.output.topic=SESSION-RECORD-COMPLETED-LOG
+#results.output.topic=CONNECTION-RECORD-COMPLETED-LOG
+results.output.topic=CONNECTION-RECORD-COMPLETED-LOG
+
+#--------------------------------topology閰嶇疆------------------------------#
#storm topology workers
-topology.workers=1
+topology.workers=2
#spout骞惰搴 寤鸿涓巏afka鍒嗗尯鏁扮浉鍚
spout.parallelism=3
#澶勭悊琛ュ叏鎿嶄綔鐨刡olt骞惰搴-worker鐨勫嶆暟
-datacenter.bolt.parallelism=3
+completion.bolt.parallelism=6
-#鍐欏叆kafkad鐨勫苟琛屽害
-kafka.bolt.parallelism=3
+#鍐欏叆kafka鐨勫苟琛屽害10
+kafka.bolt.parallelism=6
-#瀹氫綅搴撳湴鍧
-ip.library=/dat/
+#ack璁剧疆 1鍚姩ack 0涓嶅惎鍔╝ck
+topology.num.acks=0
#kafka鎵归噺鏉℃暟
-batch.insert.num=5000
+batch.insert.num=2000
#鏁版嵁涓績锛圲ID锛
data.center.id.num=15
@@ -44,21 +69,38 @@ data.center.id.num=15
#tick鏃堕挓棰戠巼
topology.tick.tuple.freq.secs=5
+#hbase 鏇存柊鏃堕棿
+hbase.tick.tuple.freq.secs=60
+
+
+#--------------------------------榛樿鍊奸厤缃------------------------------#
+
#褰揵olt鎬ц兘鍙楅檺鏃讹紝闄愬埗spout鎺ユ敹閫熷害锛岀悊璁虹湅ack寮鍚墠鏈夋晥
topology.config.max.spout.pending=150000
-#ack璁剧疆 1鍚姩ack 0涓嶅惎鍔╝ck
-topology.num.acks=0
+#hbase table name
+hbase.table.name=subscriber_info
#spout鎺ユ敹鐫$湢鏃堕棿
topology.spout.sleep.time=1
-#鐢ㄤ簬杩囨护瀵瑰噯鐢ㄦ埛鍚
-check.ip.scope=10,100,192
-
#鍏佽鍙戦乲afka鏈澶уけ璐ユ暟
max.failure.num=20
+#閭欢榛樿缂栫爜
+mail.default.charset=UTF-8
+
+#鍘嬬缉妯″紡 none or snappy
+kafka.compression.type=none
+
+#闇涓嶉渶瑕佽ˉsubscriber_id,闇瑕佸垯涓簓es锛屼笉闇瑕佷负no
+need.complete.subid=yes
+
+#闇涓嶈琛ュ叏锛屼笉闇瑕佸垯鍘熸牱鏃ュ織杈撳嚭
+log.need.complete=yes
+
+
+#--------------------------------influx------------------------------#
#influx鍦板潃
influx.ip=http://192.168.40.151:8086
@@ -66,4 +108,4 @@ influx.ip=http://192.168.40.151:8086
influx.username=admin
#influx瀵嗙爜
-influx.password=admin
\ No newline at end of file
+influx.password=admin
diff --git a/src/main/java/cn/ac/iie/bean/SessionRecordLog.java b/src/main/java/cn/ac/iie/bean/SessionRecordLog.java
deleted file mode 100644
index baa3f81..0000000
--- a/src/main/java/cn/ac/iie/bean/SessionRecordLog.java
+++ /dev/null
@@ -1,672 +0,0 @@
-package cn.ac.iie.bean;
-
-import com.alibaba.fastjson.annotation.JSONField;
-import com.alibaba.fastjson.support.spring.annotation.FastJsonFilter;
-
-/**
- * @author qidaijie
- */
-public class SessionRecordLog {
- private long uid;
- private int policy_id;
- private long action;
- private int start_time;
- private int end_time;
- private long recv_time;
- private String trans_proto;
- private String app_proto;
- private int addr_type;
- private String server_ip;
- private String client_ip;
- private int server_port;
- private int client_port;
- private int service;
- private int entrance_id;
- private int device_id;
- private int Link_id;
- private String isp;
- private int encap_type;
- private int direction;
- private int stream_dir;
- private String cap_ip;
- private String addr_list;
- private String server_location;
- private String client_location;
- private String client_asn;
- private String server_asn;
- private String subscribe_id;
- private long con_duration_ms;
- private String url;
- private String host;
- private String domain;
- private String category;
- private String req_line;
- private String res_line;
- private String cookie;
- private String referer;
- private String user_agent;
- private String content_len;
- private String content_type;
- private String set_cookie;
- private String req_header;
- private String resp_header;
- private String req_body_key;
- private String req_body;
- private String res_body_key;
- private String resp_body;
- private String version;
- private String sni;
- private String san;
- private String cn;
- private int app_id;
- private int protocol_id;
- private long con_latency_ms;
- private int pinningst;
- private int intercept_state;
- private long ssl_server_side_latency;
- private long ssl_client_side_latency;
- private String ssl_server_side_version;
- private String ssl_client_side_version;
- private int ssl_cert_verify;
- private String stream_trace_id;
- private String ssl_error;
- private long c2s_pkt_num;
- private long S2c_pkt_num;
- private long c2s_byte_num;
- private long s2c_byte_num;
- private String nas_ip;
- private String framed_ip;
- private String account;
- private int packet_type;
- private int has_dup_traffic;
- private String stream_error;
-
-
- public SessionRecordLog() {
- }
-
- public long getUid() {
- return uid;
- }
-
- public void setUid(long uid) {
- this.uid = uid;
- }
-
- public int getPolicy_id() {
- return policy_id;
- }
-
- public void setPolicy_id(int policy_id) {
- this.policy_id = policy_id;
- }
-
- public long getAction() {
- return action;
- }
-
- public void setAction(long action) {
- this.action = action;
- }
-
- public int getStart_time() {
- return start_time;
- }
-
- public void setStart_time(int start_time) {
- this.start_time = start_time;
- }
-
- public int getEnd_time() {
- return end_time;
- }
-
- public void setEnd_time(int end_time) {
- this.end_time = end_time;
- }
-
- public String getSsl_error() {
- return ssl_error;
- }
-
- public void setSsl_error(String ssl_error) {
- this.ssl_error = ssl_error;
- }
-
- public String getApp_proto() {
- return app_proto;
- }
-
- public void setApp_proto(String app_proto) {
- this.app_proto = app_proto;
- }
-
- public long getRecv_time() {
- return recv_time;
- }
-
- public void setRecv_time(long recv_time) {
- this.recv_time = recv_time;
- }
-
- public String getTrans_proto() {
- return trans_proto;
- }
-
- public void setTrans_proto(String trans_proto) {
- this.trans_proto = trans_proto;
- }
-
- public int getAddr_type() {
- return addr_type;
- }
-
- public void setAddr_type(int addr_type) {
- this.addr_type = addr_type;
- }
-
- public String getServer_ip() {
- return server_ip;
- }
-
- public void setServer_ip(String server_ip) {
- this.server_ip = server_ip;
- }
-
- public String getClient_ip() {
- return client_ip;
- }
-
- public void setClient_ip(String client_ip) {
- this.client_ip = client_ip;
- }
-
- public int getServer_port() {
- return server_port;
- }
-
- public void setServer_port(int server_port) {
- this.server_port = server_port;
- }
-
- public int getClient_port() {
- return client_port;
- }
-
- public void setClient_port(int client_port) {
- this.client_port = client_port;
- }
-
- public int getService() {
- return service;
- }
-
- public void setService(int service) {
- this.service = service;
- }
-
- public int getEntrance_id() {
- return entrance_id;
- }
-
- public void setEntrance_id(int entrance_id) {
- this.entrance_id = entrance_id;
- }
-
- public int getDevice_id() {
- return device_id;
- }
-
- public void setDevice_id(int device_id) {
- this.device_id = device_id;
- }
-
- public int getLink_id() {
- return Link_id;
- }
-
- public void setLink_id(int link_id) {
- Link_id = link_id;
- }
-
- public String getIsp() {
- return isp;
- }
-
- public void setIsp(String isp) {
- this.isp = isp;
- }
-
- public int getEncap_type() {
- return encap_type;
- }
-
- public void setEncap_type(int encap_type) {
- this.encap_type = encap_type;
- }
-
- public int getDirection() {
- return direction;
- }
-
- public void setDirection(int direction) {
- this.direction = direction;
- }
-
- public int getStream_dir() {
- return stream_dir;
- }
-
- public void setStream_dir(int stream_dir) {
- this.stream_dir = stream_dir;
- }
-
- public String getCap_ip() {
- return cap_ip;
- }
-
- public void setCap_ip(String cap_ip) {
- this.cap_ip = cap_ip;
- }
-
- public String getAddr_list() {
- return addr_list;
- }
-
- public void setAddr_list(String addr_list) {
- this.addr_list = addr_list;
- }
-
- public String getServer_location() {
- return server_location;
- }
-
- public void setServer_location(String server_location) {
- this.server_location = server_location;
- }
-
- public String getClient_location() {
- return client_location;
- }
-
- public void setClient_location(String client_location) {
- this.client_location = client_location;
- }
-
- public String getClient_asn() {
- return client_asn;
- }
-
- public void setClient_asn(String client_asn) {
- this.client_asn = client_asn;
- }
-
- public String getServer_asn() {
- return server_asn;
- }
-
- public void setServer_asn(String server_asn) {
- this.server_asn = server_asn;
- }
-
- public String getSubscribe_id() {
- return subscribe_id;
- }
-
- public void setSubscribe_id(String subscribe_id) {
- this.subscribe_id = subscribe_id;
- }
-
- public long getCon_duration_ms() {
- return con_duration_ms;
- }
-
- public void setCon_duration_ms(long con_duration_ms) {
- this.con_duration_ms = con_duration_ms;
- }
-
- public String getUrl() {
- return url;
- }
-
- public void setUrl(String url) {
- this.url = url;
- }
-
- public String getHost() {
- return host;
- }
-
- public void setHost(String host) {
- this.host = host;
- }
-
- public String getDomain() {
- return domain;
- }
-
- public void setDomain(String domain) {
- this.domain = domain;
- }
-
- public String getCategory() {
- return category;
- }
-
- public void setCategory(String category) {
- this.category = category;
- }
-
- public String getReq_line() {
- return req_line;
- }
-
- public void setReq_line(String req_line) {
- this.req_line = req_line;
- }
-
- public String getRes_line() {
- return res_line;
- }
-
- public void setRes_line(String res_line) {
- this.res_line = res_line;
- }
-
- public String getCookie() {
- return cookie;
- }
-
- public void setCookie(String cookie) {
- this.cookie = cookie;
- }
-
- public String getReferer() {
- return referer;
- }
-
- public void setReferer(String referer) {
- this.referer = referer;
- }
-
- public String getUser_agent() {
- return user_agent;
- }
-
- public void setUser_agent(String user_agent) {
- this.user_agent = user_agent;
- }
-
- public String getContent_len() {
- return content_len;
- }
-
- public void setContent_len(String content_len) {
- this.content_len = content_len;
- }
-
- public String getContent_type() {
- return content_type;
- }
-
- public void setContent_type(String content_type) {
- this.content_type = content_type;
- }
-
- public String getSet_cookie() {
- return set_cookie;
- }
-
- public void setSet_cookie(String set_cookie) {
- this.set_cookie = set_cookie;
- }
-
- public String getReq_header() {
- return req_header;
- }
-
- public void setReq_header(String req_header) {
- this.req_header = req_header;
- }
-
- public String getResp_header() {
- return resp_header;
- }
-
- public void setResp_header(String resp_header) {
- this.resp_header = resp_header;
- }
-
- public String getReq_body_key() {
- return req_body_key;
- }
-
- public void setReq_body_key(String req_body_key) {
- this.req_body_key = req_body_key;
- }
-
- public String getReq_body() {
- return req_body;
- }
-
- public void setReq_body(String req_body) {
- this.req_body = req_body;
- }
-
- public String getRes_body_key() {
- return res_body_key;
- }
-
- public void setRes_body_key(String res_body_key) {
- this.res_body_key = res_body_key;
- }
-
- public String getResp_body() {
- return resp_body;
- }
-
- public void setResp_body(String resp_body) {
- this.resp_body = resp_body;
- }
-
- public String getVersion() {
- return version;
- }
-
- public void setVersion(String version) {
- this.version = version;
- }
-
- public String getSni() {
- return sni;
- }
-
- public void setSni(String sni) {
- this.sni = sni;
- }
-
- public String getSan() {
- return san;
- }
-
- public void setSan(String san) {
- this.san = san;
- }
-
- public String getCn() {
- return cn;
- }
-
- public void setCn(String cn) {
- this.cn = cn;
- }
-
- public int getApp_id() {
- return app_id;
- }
-
- public void setApp_id(int app_id) {
- this.app_id = app_id;
- }
-
- public int getProtocol_id() {
- return protocol_id;
- }
-
- public void setProtocol_id(int protocol_id) {
- this.protocol_id = protocol_id;
- }
-
- public int getIntercept_state() {
- return intercept_state;
- }
-
- public void setIntercept_state(int intercept_state) {
- this.intercept_state = intercept_state;
- }
-
- public long getSsl_server_side_latency() {
- return ssl_server_side_latency;
- }
-
- public void setSsl_server_side_latency(long ssl_server_side_latency) {
- this.ssl_server_side_latency = ssl_server_side_latency;
- }
-
- public long getSsl_client_side_latency() {
- return ssl_client_side_latency;
- }
-
- public void setSsl_client_side_latency(long ssl_client_side_latency) {
- this.ssl_client_side_latency = ssl_client_side_latency;
- }
-
- public String getSsl_server_side_version() {
- return ssl_server_side_version;
- }
-
- public void setSsl_server_side_version(String ssl_server_side_version) {
- this.ssl_server_side_version = ssl_server_side_version;
- }
-
- public String getSsl_client_side_version() {
- return ssl_client_side_version;
- }
-
- public void setSsl_client_side_version(String ssl_client_side_version) {
- this.ssl_client_side_version = ssl_client_side_version;
- }
-
- public int getSsl_cert_verify() {
- return ssl_cert_verify;
- }
-
- public void setSsl_cert_verify(int ssl_cert_verify) {
- this.ssl_cert_verify = ssl_cert_verify;
- }
-
- public String getStream_trace_id() {
- return stream_trace_id;
- }
-
- public void setStream_trace_id(String stream_trace_id) {
- this.stream_trace_id = stream_trace_id;
- }
-
- public long getCon_latency_ms() {
- return con_latency_ms;
- }
-
- public void setCon_latency_ms(long con_latency_ms) {
- this.con_latency_ms = con_latency_ms;
- }
-
- public int getPinningst() {
- return pinningst;
- }
-
- public void setPinningst(int pinningst) {
- this.pinningst = pinningst;
- }
-
-
- public long getC2s_pkt_num() {
- return c2s_pkt_num;
- }
-
- public void setC2s_pkt_num(long c2s_pkt_num) {
- this.c2s_pkt_num = c2s_pkt_num;
- }
-
- public long getS2c_pkt_num() {
- return S2c_pkt_num;
- }
-
- public void setS2c_pkt_num(long s2c_pkt_num) {
- S2c_pkt_num = s2c_pkt_num;
- }
-
- public long getC2s_byte_num() {
- return c2s_byte_num;
- }
-
- public void setC2s_byte_num(long c2s_byte_num) {
- this.c2s_byte_num = c2s_byte_num;
- }
-
- public long getS2c_byte_num() {
- return s2c_byte_num;
- }
-
- public void setS2c_byte_num(long s2c_byte_num) {
- this.s2c_byte_num = s2c_byte_num;
- }
-
- public String getNas_ip() {
- return nas_ip;
- }
-
- public void setNas_ip(String nas_ip) {
- this.nas_ip = nas_ip;
- }
-
- public String getFramed_ip() {
- return framed_ip;
- }
-
- public void setFramed_ip(String framed_ip) {
- this.framed_ip = framed_ip;
- }
-
- public String getAccount() {
- return account;
- }
-
- public void setAccount(String account) {
- this.account = account;
- }
-
- public int getPacket_type() {
- return packet_type;
- }
-
- public void setPacket_type(int packet_type) {
- this.packet_type = packet_type;
- }
-
- public int getHas_dup_traffic() {
- return has_dup_traffic;
- }
-
- public void setHas_dup_traffic(int has_dup_traffic) {
- this.has_dup_traffic = has_dup_traffic;
- }
-
- public String getStream_error() {
- return stream_error;
- }
-
- public void setStream_error(String stream_error) {
- this.stream_error = stream_error;
- }
-}
diff --git a/src/main/java/cn/ac/iie/bolt/CompletionBolt.java b/src/main/java/cn/ac/iie/bolt/CompletionBolt.java
new file mode 100644
index 0000000..0053291
--- /dev/null
+++ b/src/main/java/cn/ac/iie/bolt/CompletionBolt.java
@@ -0,0 +1,72 @@
+package cn.ac.iie.bolt;
+
+import cn.ac.iie.common.FlowWriteConfig;
+import cn.ac.iie.utils.general.SnowflakeId;
+import cn.ac.iie.utils.hbase.HBaseUtils;
+import cn.ac.iie.utils.system.TupleUtils;
+import com.zdjizhi.utils.FormatUtils;
+import com.zdjizhi.utils.StringUtil;
+import org.apache.log4j.Logger;
+import org.apache.storm.task.TopologyContext;
+import org.apache.storm.topology.BasicOutputCollector;
+import org.apache.storm.topology.OutputFieldsDeclarer;
+import org.apache.storm.topology.base.BaseBasicBolt;
+import org.apache.storm.tuple.Fields;
+import org.apache.storm.tuple.Tuple;
+import org.apache.storm.tuple.Values;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static cn.ac.iie.utils.general.TransFormUtils.dealCommonMessage;
+
+/**
+ * @author qidaijie
+ */
+
+public class CompletionBolt extends BaseBasicBolt {
+ private final static Logger logger = Logger.getLogger(CompletionBolt.class);
+ private static final long serialVersionUID = 9006119186526123734L;
+ private static final String IS = "yes";
+
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context) {
+
+ }
+
+
+ @Override
+ public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
+ try {
+// if (TupleUtils.isTick(tuple)) {
+// if (IS.equals(FlowWriteConfig.NEED_COMPLETE_SUBID)) {
+// HBaseUtils.change();
+// }
+// } else {
+ String message = tuple.getString(0);
+ if (StringUtil.isNotBlank(message)) {
+ basicOutputCollector.emit(new Values(dealCommonMessage(message)));
+ }
+// }
+ } catch (Exception e) {
+ logger.error(FlowWriteConfig.KAFKA_TOPIC + "鎺ユ敹/瑙f瀽杩囩▼鍑虹幇寮傚父");
+ e.printStackTrace();
+ }
+ }
+
+// @Override
+// public Map getComponentConfiguration() {
+// Map conf = new HashMap(16);
+// conf.put(org.apache.storm.Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS,
+// FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS);
+// return conf;
+// }
+
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
+ outputFieldsDeclarer.declare(new Fields("tsgLog"));
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/bolt/ConnCompletionBolt.java b/src/main/java/cn/ac/iie/bolt/ConnCompletionBolt.java
deleted file mode 100644
index e67b6cf..0000000
--- a/src/main/java/cn/ac/iie/bolt/ConnCompletionBolt.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package cn.ac.iie.bolt;
-
-import com.zdjizhi.utils.StringUtil;
-import org.apache.log4j.Logger;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.BasicOutputCollector;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseBasicBolt;
-import org.apache.storm.tuple.Fields;
-import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
-
-import java.util.Map;
-
-import static cn.ac.iie.utils.general.TransFormUtils.getJsonMessage;
-
-/**
- * 閫氳仈鍏崇郴鏃ュ織琛ュ叏
- *
- * @author qidaijie
- */
-public class ConnCompletionBolt extends BaseBasicBolt {
- private static final long serialVersionUID = -1059151670138465894L;
- private final static Logger logger = Logger.getLogger(ConnCompletionBolt.class);
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
-
- }
-
- @Override
- public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
- try {
- String message = tuple.getString(0);
- if (StringUtil.isNotBlank(message)) {
- basicOutputCollector.emit(new Values(getJsonMessage(message)));
- }
- } catch (Exception e) {
- logger.error("鎺ユ敹瑙f瀽杩囩▼鍑虹幇寮傚父", e);
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
- outputFieldsDeclarer.declare(new Fields("connLog"));
- }
-
-}
diff --git a/src/main/java/cn/ac/iie/bolt/SummaryBolt.java b/src/main/java/cn/ac/iie/bolt/SummaryBolt.java
deleted file mode 100644
index a3844b2..0000000
--- a/src/main/java/cn/ac/iie/bolt/SummaryBolt.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package cn.ac.iie.bolt;
-
-import cn.ac.iie.common.FlowWriteConfig;
-import cn.ac.iie.utils.influxdb.InfluxDbUtils;
-import cn.ac.iie.utils.system.TupleUtils;
-import org.apache.log4j.Logger;
-import org.apache.storm.task.TopologyContext;
-import org.apache.storm.topology.BasicOutputCollector;
-import org.apache.storm.topology.OutputFieldsDeclarer;
-import org.apache.storm.topology.base.BaseBasicBolt;
-import org.apache.storm.tuple.Tuple;
-import org.influxdb.InfluxDB;
-import org.influxdb.InfluxDBFactory;
-import org.influxdb.dto.Point;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * 缁熻鎬绘暟bolt锛岀敤浜庡皢缁熻鍚庣殑鏁板叆influxDB
- *
- * @author antlee
- * @date 2018/8/14
- */
-public class SummaryBolt extends BaseBasicBolt {
- private static final long serialVersionUID = 4614020687381536301L;
- private static Logger logger = Logger.getLogger(SummaryBolt.class);
- private static long sum = 0L;
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
-
- }
-
- @Override
- public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
- try {
- if (TupleUtils.isTick(tuple)) {
- InfluxDbUtils.sendKafkaSuccess(sum);
- sum = 0L;
- } else {
- long successfulSum = tuple.getLong(0);
- sum += successfulSum;
- }
- } catch (Exception e) {
- logger.error("璁℃暟鍐欏叆influxDB鍑虹幇寮傚父 ", e);
- e.printStackTrace();
- }
- }
-
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
-
- }
-
- @Override
- public Map getComponentConfiguration() {
- Map conf = new HashMap(16);
- conf.put(org.apache.storm.Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 60);
- return conf;
- }
-
-
-}
diff --git a/src/main/java/cn/ac/iie/bolt/NtcLogSendBolt.java b/src/main/java/cn/ac/iie/bolt/kafka/LogSendBolt.java
similarity index 62%
rename from src/main/java/cn/ac/iie/bolt/NtcLogSendBolt.java
rename to src/main/java/cn/ac/iie/bolt/kafka/LogSendBolt.java
index c19acff..35bda1a 100644
--- a/src/main/java/cn/ac/iie/bolt/NtcLogSendBolt.java
+++ b/src/main/java/cn/ac/iie/bolt/kafka/LogSendBolt.java
@@ -1,18 +1,15 @@
-package cn.ac.iie.bolt;
+package cn.ac.iie.bolt.kafka;
import cn.ac.iie.common.FlowWriteConfig;
-import cn.ac.iie.utils.influxdb.InfluxDbUtils;
+import cn.ac.iie.utils.kafka.KafkaLogSend;
import cn.ac.iie.utils.system.TupleUtils;
-import cn.ac.iie.utils.kafka.KafkaLogNtc;
import com.zdjizhi.utils.StringUtil;
import org.apache.log4j.Logger;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseBasicBolt;
-import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
-import org.apache.storm.tuple.Values;
import java.util.HashMap;
import java.util.LinkedList;
@@ -23,18 +20,17 @@ import java.util.Map;
* @author qidaijie
* @date 2018/8/14
*/
-public class NtcLogSendBolt extends BaseBasicBolt {
- private static final long serialVersionUID = 3940515789830317517L;
- private static Logger logger = Logger.getLogger(NtcLogSendBolt.class);
+public class LogSendBolt extends BaseBasicBolt {
+ private static final long serialVersionUID = -3663610927224396615L;
+ private static Logger logger = Logger.getLogger(LogSendBolt.class);
private List list;
- private KafkaLogNtc kafkaLogNtc;
- private static long successfulSum = 0;
+ private KafkaLogSend kafkaLogSend;
@Override
public void prepare(Map stormConf, TopologyContext context) {
list = new LinkedList<>();
- kafkaLogNtc = KafkaLogNtc.getInstance();
+ kafkaLogSend = KafkaLogSend.getInstance();
}
@Override
@@ -42,25 +38,21 @@ public class NtcLogSendBolt extends BaseBasicBolt {
try {
if (TupleUtils.isTick(tuple)) {
if (list.size() != 0) {
- kafkaLogNtc.sendMessage(list);
- successfulSum += list.size();
+ kafkaLogSend.sendMessage(list);
list.clear();
}
- basicOutputCollector.emit(new Values(successfulSum));
- successfulSum = 0L;
} else {
- String message = tuple.getString(0);
+ String message = tuple.getValue(0).toString();
if (StringUtil.isNotBlank(message)) {
list.add(message);
}
if (list.size() == FlowWriteConfig.BATCH_INSERT_NUM) {
- kafkaLogNtc.sendMessage(list);
- successfulSum += list.size();
+ kafkaLogSend.sendMessage(list);
list.clear();
}
}
} catch (Exception e) {
- logger.error("鏃ュ織鍙戦並afka杩囩▼鍑虹幇寮傚父 ", e);
+ logger.error(FlowWriteConfig.KAFKA_TOPIC + "鏃ュ織鍙戦並afka杩囩▼鍑虹幇寮傚父");
e.printStackTrace();
}
}
@@ -74,7 +66,6 @@ public class NtcLogSendBolt extends BaseBasicBolt {
@Override
public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) {
- outputFieldsDeclarer.declare(new Fields("suc"));
}
}
diff --git a/src/main/java/cn/ac/iie/common/FlowWriteConfig.java b/src/main/java/cn/ac/iie/common/FlowWriteConfig.java
index 26e2173..731b12d 100644
--- a/src/main/java/cn/ac/iie/common/FlowWriteConfig.java
+++ b/src/main/java/cn/ac/iie/common/FlowWriteConfig.java
@@ -8,25 +8,29 @@ import cn.ac.iie.utils.system.FlowWriteConfigurations;
*/
public class FlowWriteConfig {
- public static final String LOG_STRING_SPLITTER = "\t";
- public static final String SQL_STRING_SPLITTER = "#";
- public static final String SEGMENTATION = ",";
-
+ public static final int IF_PARAM_LENGTH = 3;
+ public static final String VISIBILITY = "disabled";
+ public static final String FORMAT_SPLITTER = ",";
+ public static final String IS_JSON_KEY_TAG = "$.";
+ public static final String IF_CONDITION_SPLITTER = "=";
/**
* System
*/
public static final Integer SPOUT_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "spout.parallelism");
- public static final Integer DATACENTER_BOLT_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "datacenter.bolt.parallelism");
+ public static final Integer COMPLETION_BOLT_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "completion.bolt.parallelism");
public static final Integer TOPOLOGY_WORKERS = FlowWriteConfigurations.getIntProperty(0, "topology.workers");
public static final Integer KAFKA_BOLT_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "kafka.bolt.parallelism");
public static final Integer TOPOLOGY_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "topology.tick.tuple.freq.secs");
+ public static final Integer HBASE_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "hbase.tick.tuple.freq.secs");
public static final Integer TOPOLOGY_CONFIG_MAX_SPOUT_PENDING = FlowWriteConfigurations.getIntProperty(0, "topology.config.max.spout.pending");
public static final Integer TOPOLOGY_NUM_ACKS = FlowWriteConfigurations.getIntProperty(0, "topology.num.acks");
public static final Integer TOPOLOGY_SPOUT_SLEEP_TIME = FlowWriteConfigurations.getIntProperty(0, "topology.spout.sleep.time");
public static final Integer BATCH_INSERT_NUM = FlowWriteConfigurations.getIntProperty(0, "batch.insert.num");
public static final Integer DATA_CENTER_ID_NUM = FlowWriteConfigurations.getIntProperty(0, "data.center.id.num");
- public static final String CHECK_IP_SCOPE = FlowWriteConfigurations.getStringProperty(0, "check.ip.scope");
public static final Integer MAX_FAILURE_NUM = FlowWriteConfigurations.getIntProperty(0, "max.failure.num");
+ public static final String MAIL_DEFAULT_CHARSET = FlowWriteConfigurations.getStringProperty(0, "mail.default.charset");
+ public static final String NEED_COMPLETE_SUBID = FlowWriteConfigurations.getStringProperty(0, "need.complete.subid");
+ public static final String LOG_NEED_COMPLETE = FlowWriteConfigurations.getStringProperty(0, "log.need.complete");
/**
* influxDB
@@ -38,26 +42,30 @@ public class FlowWriteConfig {
/**
* kafka
*/
- public static final String BOOTSTRAP_SERVERS = FlowWriteConfigurations.getStringProperty(0, "bootstrap.servers");
+ public static final String INPUT_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0, "input.kafka.servers");
+ public static final String OUTPUT_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0, "output.kafka.servers");
public static final String ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0, "zookeeper.servers");
+ public static final String HBASE_ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0, "hbase.zookeeper.servers");
+ public static final String HBASE_TABLE_NAME = FlowWriteConfigurations.getStringProperty(0, "hbase.table.name");
public static final String GROUP_ID = FlowWriteConfigurations.getStringProperty(0, "group.id");
public static final String RESULTS_OUTPUT_TOPIC = FlowWriteConfigurations.getStringProperty(0, "results.output.topic");
public static final String KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "kafka.topic");
public static final String AUTO_OFFSET_RESET = FlowWriteConfigurations.getStringProperty(0, "auto.offset.reset");
+ public static final String PRODUCER_ACK = FlowWriteConfigurations.getStringProperty(0, "producer.ack");
public static final String IP_LIBRARY = FlowWriteConfigurations.getStringProperty(0, "ip.library");
-
- /***
- * Redis
+ /**
+ * kafka闄愭祦閰嶇疆-20201117
*/
- public static final String REDIS_IP = "redis.ip";
- public static final String REDIS_PORT = "redis.port";
- public static final String REDIS_TIMEOUT = "redis.timeout";
- public static final String REDIS_POOL_MAXACTIVE = "redis.pool.maxActive";
- public static final String REDIS_POOL_MAXIDLE = "redis.pool.maxIdle";
- public static final String REDIS_POOL_MAXWAIT = "redis.pool.maxWait";
- public static final String REDIS_POOL_TESTONBORROW = "redis.pool.testOnBorrow";
- public static final String REDIS_POOL_TESTONRETURN = "redis.pool.testOnReturn";
+ public static final String PRODUCER_KAFKA_COMPRESSION_TYPE = FlowWriteConfigurations.getStringProperty(0, "producer.kafka.compression.type");
+ public static final String CONSUMER_CLIENT_ID = FlowWriteConfigurations.getStringProperty(0, "consumer.client.id");
+ public static final String PRODUCER_CLIENT_ID = FlowWriteConfigurations.getStringProperty(0, "producer.client.id");
+
+
+ /**
+ * http
+ */
+ public static final String SCHEMA_HTTP = FlowWriteConfigurations.getStringProperty(0, "schema.http");
}
\ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/common/KafkaProConfig.java b/src/main/java/cn/ac/iie/common/KafkaProConfig.java
new file mode 100644
index 0000000..eb14465
--- /dev/null
+++ b/src/main/java/cn/ac/iie/common/KafkaProConfig.java
@@ -0,0 +1,20 @@
+package cn.ac.iie.common;
+
+
+import cn.ac.iie.utils.system.FlowWriteConfigurations;
+
+/**
+ * Kafka producer tuning options (retries, linger, request timeout, batch/buffer sizes) loaded from properties source index 1.
+ */
+public class KafkaProConfig {
+
+
+ public static final String RETRIES = FlowWriteConfigurations.getStringProperty(1, "retries");
+ public static final String LINGER_MS = FlowWriteConfigurations.getStringProperty(1, "linger.ms");
+ public static final Integer REQUEST_TIMEOUT_MS = FlowWriteConfigurations.getIntProperty(1, "request.timeout.ms");
+ public static final Integer BATCH_SIZE = FlowWriteConfigurations.getIntProperty(1, "batch.size");
+ public static final Integer BUFFER_MEMORY = FlowWriteConfigurations.getIntProperty(1, "buffer.memory");
+ public static final Integer MAX_REQUEST_SIZE = FlowWriteConfigurations.getIntProperty(1, "max.request.size");
+
+
+}
\ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java b/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java
index 0f806fa..9333955 100644
--- a/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java
+++ b/src/main/java/cn/ac/iie/spout/CustomizedKafkaSpout.java
@@ -1,6 +1,7 @@
package cn.ac.iie.spout;
import cn.ac.iie.common.FlowWriteConfig;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
@@ -31,14 +32,45 @@ public class CustomizedKafkaSpout extends BaseRichSpout {
private static Properties createConsumerConfig() {
Properties props = new Properties();
- props.put("bootstrap.servers", FlowWriteConfig.BOOTSTRAP_SERVERS);
+ props.put("bootstrap.servers", FlowWriteConfig.INPUT_KAFKA_SERVERS);
props.put("group.id", FlowWriteConfig.GROUP_ID);
props.put("session.timeout.ms", "60000");
props.put("max.poll.records", 3000);
props.put("max.partition.fetch.bytes", 31457280);
props.put("auto.offset.reset", FlowWriteConfig.AUTO_OFFSET_RESET);
+// switch (FlowWriteConfig.KAFKA_TOPIC) {
+// case "PROXY-EVENT-LOG":
+// props.put("client.id", "proxy");
+// break;
+// case "RADIUS-RECORD-LOG":
+// props.put("client.id", "radius");
+// break;
+// case "CONNECTION-RECORD-LOG":
+// props.put("client.id", "connection");
+// break;
+// case "SECURITY-EVENT-LOG":
+// props.put("client.id", "security");
+// break;
+// case "CONNECTION-SKETCH":
+// props.put("client.id", "sketch");
+// break;
+// case "ACTIVE-DEFENCE-EVENT-LOG":
+// props.put("client.id", "active");
+// break;
+// case "SYS-PACKET-CAPTURE-LOG":
+// props.put("client.id", "packet");
+// break;
+//
+// default:
+// }
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
+
+ /**
+ * kafka闄愭祦閰嶇疆-20201117
+ */
+ props.put(ConsumerConfig.CLIENT_ID_CONFIG, FlowWriteConfig.CONSUMER_CLIENT_ID);
+
return props;
}
diff --git a/src/main/java/cn/ac/iie/topology/LogFlowWriteTopology.java b/src/main/java/cn/ac/iie/topology/LogFlowWriteTopology.java
index ede06c3..7b2d68c 100644
--- a/src/main/java/cn/ac/iie/topology/LogFlowWriteTopology.java
+++ b/src/main/java/cn/ac/iie/topology/LogFlowWriteTopology.java
@@ -1,9 +1,8 @@
package cn.ac.iie.topology;
-import cn.ac.iie.bolt.ConnCompletionBolt;
-import cn.ac.iie.bolt.NtcLogSendBolt;
-import cn.ac.iie.bolt.SummaryBolt;
+import cn.ac.iie.bolt.CompletionBolt;
+import cn.ac.iie.bolt.kafka.LogSendBolt;
import cn.ac.iie.common.FlowWriteConfig;
import cn.ac.iie.spout.CustomizedKafkaSpout;
import org.apache.log4j.Logger;
@@ -31,10 +30,10 @@ public class LogFlowWriteTopology {
private LogFlowWriteTopology(String topologyName) {
this.topologyName = topologyName;
- topologyConfig = createTopologConfig();
+ topologyConfig = createTopologyConfig();
}
- private Config createTopologConfig() {
+ private Config createTopologyConfig() {
Config conf = new Config();
conf.setDebug(false);
conf.setMessageTimeoutSecs(60);
@@ -56,33 +55,40 @@ public class LogFlowWriteTopology {
}
private void buildTopology() {
+ String need = "yes";
builder = new TopologyBuilder();
builder.setSpout("LogFlowWriteSpout", new CustomizedKafkaSpout(), FlowWriteConfig.SPOUT_PARALLELISM);
- builder.setBolt("ConnCompletionBolt", new ConnCompletionBolt(), FlowWriteConfig.DATACENTER_BOLT_PARALLELISM).localOrShuffleGrouping("LogFlowWriteSpout");
- builder.setBolt("NtcLogSendBolt", new NtcLogSendBolt(), FlowWriteConfig.KAFKA_BOLT_PARALLELISM).localOrShuffleGrouping("ConnCompletionBolt");
-// builder.setBolt("SummaryBolt", new SummaryBolt(), 1).localOrShuffleGrouping("NtcLogSendBolt");
+ if (need.equals(FlowWriteConfig.LOG_NEED_COMPLETE)) {
+ builder.setBolt("LogCompletionBolt", new CompletionBolt(),
+ FlowWriteConfig.COMPLETION_BOLT_PARALLELISM).localOrShuffleGrouping("LogFlowWriteSpout");
+ builder.setBolt("CompletionLogSendBolt", new LogSendBolt(),
+ FlowWriteConfig.KAFKA_BOLT_PARALLELISM).localOrShuffleGrouping("LogCompletionBolt");
+ } else {
+ builder.setBolt("LogSendBolt", new LogSendBolt(),
+ FlowWriteConfig.KAFKA_BOLT_PARALLELISM).localOrShuffleGrouping("LogFlowWriteSpout");
+ }
}
public static void main(String[] args) throws Exception {
- LogFlowWriteTopology csst = null;
+ LogFlowWriteTopology flowWriteTopology;
boolean runLocally = true;
String parameter = "remote";
int size = 2;
if (args.length >= size && parameter.equalsIgnoreCase(args[1])) {
runLocally = false;
- csst = new LogFlowWriteTopology(args[0]);
+ flowWriteTopology = new LogFlowWriteTopology(args[0]);
} else {
- csst = new LogFlowWriteTopology();
+ flowWriteTopology = new LogFlowWriteTopology();
}
- csst.buildTopology();
+ flowWriteTopology.buildTopology();
if (runLocally) {
logger.info("鎵ц鏈湴妯″紡...");
- csst.runLocally();
+ flowWriteTopology.runLocally();
} else {
logger.info("鎵ц杩滅▼閮ㄧ讲妯″紡...");
- csst.runRemotely();
+ flowWriteTopology.runRemotely();
}
}
}
diff --git a/src/main/java/cn/ac/iie/utils/general/EncryptionUtils.java b/src/main/java/cn/ac/iie/utils/general/EncryptionUtils.java
deleted file mode 100644
index 5c400cc..0000000
--- a/src/main/java/cn/ac/iie/utils/general/EncryptionUtils.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package cn.ac.iie.utils.general;
-
-import org.apache.log4j.Logger;
-
-import java.security.MessageDigest;
-
-/**
- * 鎻忚堪:杞崲MD5宸ュ叿绫
- *
- * @author Administrator
- * @create 2018-08-13 15:11
- */
-public class EncryptionUtils {
- private static Logger logger = Logger.getLogger(EncryptionUtils.class);
-
- public static String md5Encode(String msg) throws Exception {
- try {
- byte[] msgBytes = msg.getBytes("utf-8");
- /*
- * 澹版槑浣跨敤Md5绠楁硶,鑾峰緱MessaDigest瀵硅薄
- */
- MessageDigest md5 = MessageDigest.getInstance("MD5");
- /*
- * 浣跨敤鎸囧畾鐨勫瓧鑺傛洿鏂版憳瑕
- */
- md5.update(msgBytes);
- /*
- * 瀹屾垚鍝堝笇璁$畻,鑾峰緱瀵嗘枃
- */
- byte[] digest = md5.digest();
- /*
- * 浠ヤ笂涓よ浠g爜绛夊悓浜
- * byte[] digest = md5.digest(msgBytes);
- */
- return byteArr2hexString(digest);
- } catch (Exception e) {
- logger.error("Error in conversion MD5! " + msg);
- return "";
- }
- }
-
- /**
- * 灏哹yte鏁扮粍杞寲涓16杩涘埗瀛楃涓插舰寮
- *
- * @param bys 瀛楄妭鏁扮粍
- * @return 瀛楃涓
- */
- private static String byteArr2hexString(byte[] bys) {
- StringBuilder hexVal = new StringBuilder();
- int val = 0;
- for (byte by : bys) {
- //灏哹yte杞寲涓篿nt 濡傛灉byte鏄竴涓礋鏁板氨蹇呴』瑕佸拰16杩涘埗鐨0xff鍋氫竴娆′笌杩愮畻
- val = ((int) by) & 0xff;
- if (val < 16) {
- hexVal.append("0");
- }
- hexVal.append(Integer.toHexString(val));
- }
-
- return hexVal.toString();
-
- }
-
-
- public static void main(String[] args) {
-
- }
-}
diff --git a/src/main/java/cn/ac/iie/utils/system/SnowflakeId.java b/src/main/java/cn/ac/iie/utils/general/SnowflakeId.java
similarity index 58%
rename from src/main/java/cn/ac/iie/utils/system/SnowflakeId.java
rename to src/main/java/cn/ac/iie/utils/general/SnowflakeId.java
index 5f77996..0b80158 100644
--- a/src/main/java/cn/ac/iie/utils/system/SnowflakeId.java
+++ b/src/main/java/cn/ac/iie/utils/general/SnowflakeId.java
@@ -1,8 +1,9 @@
-package cn.ac.iie.utils.system;
+package cn.ac.iie.utils.general;
import cn.ac.iie.common.FlowWriteConfig;
import cn.ac.iie.utils.zookeeper.DistributedLock;
import cn.ac.iie.utils.zookeeper.ZookeeperUtils;
+import com.zdjizhi.utils.ZooKeeperLock;
import org.apache.log4j.Logger;
/**
@@ -13,11 +14,18 @@ import org.apache.log4j.Logger;
public class SnowflakeId {
private static Logger logger = Logger.getLogger(SnowflakeId.class);
- // ==============================Fields===========================================
/**
- * 寮濮嬫椂闂存埅 (2018-08-01 00:00:00) max 17years
+ * 鍏64浣 绗竴浣嶄负绗﹀彿浣 榛樿0
+ * 鏃堕棿鎴 39浣(17 year), centerId:(鍏宠仈姣忎釜鐜鎴栦换鍔℃暟) :7浣(0-127),
+ * workerId(鍏宠仈杩涚▼):6(0-63) ,搴忓垪鍙凤細11浣(2047/ms)
+ *
+ * 搴忓垪鍙 /ms = (-1L ^ (-1L << 11))
+ * 鏈澶т娇鐢ㄥ勾 = (1L << 39) / (1000L * 60 * 60 * 24 * 365)
*/
- private final long twepoch = 1564588800000L;
+ /**
+ * 寮濮嬫椂闂存埅 (2020-11-14 00:00:00) max 17years
+ */
+ private final long twepoch = 1605283200000L;
/**
* 鏈哄櫒id鎵鍗犵殑浣嶆暟
@@ -27,22 +35,23 @@ public class SnowflakeId {
/**
* 鏁版嵁鏍囪瘑id鎵鍗犵殑浣嶆暟
*/
- private final long dataCenterIdBits = 4L;
+ private final long dataCenterIdBits = 7L;
/**
- * 鏀寔鐨勬渶澶ф満鍣╥d锛岀粨鏋滄槸3 (杩欎釜绉讳綅绠楁硶鍙互寰堝揩鐨勮绠楀嚭鍑犱綅浜岃繘鍒舵暟鎵鑳借〃绀虹殑鏈澶у崄杩涘埗鏁)
+ * 鏀寔鐨勬渶澶ф満鍣╥d锛岀粨鏋滄槸63 (杩欎釜绉讳綅绠楁硶鍙互寰堝揩鐨勮绠楀嚭鍑犱綅浜岃繘鍒舵暟鎵鑳借〃绀虹殑鏈澶у崄杩涘埗鏁)
+ * M << n = M * 2^n
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
- * 鏀寔鐨勬渶澶ф暟鎹爣璇唅d锛岀粨鏋滄槸15
+ * 鏀寔鐨勬渶澶ф暟鎹爣璇唅d锛岀粨鏋滄槸127
*/
private final long maxDataCenterId = -1L ^ (-1L << dataCenterIdBits);
/**
* 搴忓垪鍦╥d涓崰鐨勪綅鏁
*/
- private final long sequenceBits = 14L;
+ private final long sequenceBits = 11L;
/**
* 鏈哄櫒ID鍚戝乏绉12浣
@@ -60,7 +69,7 @@ public class SnowflakeId {
private final long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits;
/**
- * 鐢熸垚搴忓垪鐨勬帺鐮侊紝杩欓噷涓16383
+ * 鐢熸垚搴忓垪鐨勬帺鐮侊紝杩欓噷涓2047
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
@@ -70,12 +79,12 @@ public class SnowflakeId {
private long workerId;
/**
- * 鏁版嵁涓績ID(0~15)
+ * 鏁版嵁涓績ID(0~127)
*/
private long dataCenterId;
/**
- * 姣鍐呭簭鍒(0~16383)
+ * 姣鍐呭簭鍒(0~2047)
*/
private long sequence = 0L;
@@ -85,12 +94,18 @@ public class SnowflakeId {
private long lastTimestamp = -1L;
+ /**
+ * 璁剧疆鍏佽鏃堕棿鍥炴嫧鐨勬渶澶ч檺鍒10s
+ */
+ private static final long rollBackTime = 10000L;
+
+
private static SnowflakeId idWorker;
private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
static {
- idWorker = new SnowflakeId();
+ idWorker = new SnowflakeId(FlowWriteConfig.ZOOKEEPER_SERVERS, FlowWriteConfig.DATA_CENTER_ID_NUM);
}
//==============================Constructors=====================================
@@ -98,19 +113,46 @@ public class SnowflakeId {
/**
* 鏋勯犲嚱鏁
*/
- private SnowflakeId() {
+ private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
DistributedLock lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
+ try {
lock.lock();
- int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + FlowWriteConfig.KAFKA_TOPIC);
+ int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
- int dataCenterId = FlowWriteConfig.DATA_CENTER_ID_NUM;
- if (dataCenterId > maxDataCenterId || dataCenterId < 0) {
- throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDataCenterId));
+ if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
+ throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than ", maxDataCenterId));
}
this.workerId = tmpWorkerId;
- this.dataCenterId = dataCenterId;
+ this.dataCenterId = dataCenterIdNum;
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error("This is not usual error!!!===>>>" + e + "<<<===");
+ }finally {
+ lock.unlock();
+ }
+
+// ZooKeeperLock lock = new ZooKeeperLock(zookeeperIp, "/locks", "disLocks1");
+// if (lock.lock()) {
+// int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
+// if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
+// throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
+// }
+// if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
+// throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than ", maxDataCenterId));
+// }
+// this.workerId = tmpWorkerId;
+// this.dataCenterId = dataCenterIdNum;
+// try {
+// lock.unlock();
+// } catch (InterruptedException ie) {
+// ie.printStackTrace();
+// } catch (Exception e) {
+// e.printStackTrace();
+// logger.error("This is not usual error!!!===>>>" + e + "<<<===");
+// }
+// }
}
// ==============================Methods==========================================
@@ -122,7 +164,10 @@ public class SnowflakeId {
*/
private synchronized long nextId() {
long timestamp = timeGen();
-
+ //璁剧疆涓涓厑璁稿洖鎷ㄩ檺鍒舵椂闂达紝绯荤粺鏃堕棿鍥炴嫧鑼冨洿鍦╮ollBackTime鍐呭彲浠ョ瓑寰呮牎鍑
+ if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
+ timestamp = tilNextMillis(lastTimestamp);
+ }
//濡傛灉褰撳墠鏃堕棿灏忎簬涓婁竴娆D鐢熸垚鐨勬椂闂存埑锛岃鏄庣郴缁熸椂閽熷洖閫杩囪繖涓椂鍊欏簲褰撴姏鍑哄紓甯
if (timestamp < lastTimestamp) {
throw new RuntimeException(
diff --git a/src/main/java/cn/ac/iie/utils/general/TransFormUtils.java b/src/main/java/cn/ac/iie/utils/general/TransFormUtils.java
index 9c0bb8f..45346f3 100644
--- a/src/main/java/cn/ac/iie/utils/general/TransFormUtils.java
+++ b/src/main/java/cn/ac/iie/utils/general/TransFormUtils.java
@@ -1,123 +1,165 @@
package cn.ac.iie.utils.general;
-import cn.ac.iie.bean.SessionRecordLog;
-import cn.ac.iie.common.FlowWriteConfig;
-import cn.ac.iie.utils.redis.RedisPollUtils;
-import cn.ac.iie.utils.system.SnowflakeId;
-import cn.ac.iie.utils.zookeeper.DistributedLock;
-import cn.ac.iie.utils.zookeeper.ZookeeperUtils;
-import com.alibaba.fastjson.JSONObject;
-import com.zdjizhi.utils.IpLookup;
-import com.zdjizhi.utils.StringUtil;
-import org.apache.log4j.Logger;
-import redis.clients.jedis.Jedis;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import cn.ac.iie.common.FlowWriteConfig;
+import cn.ac.iie.utils.json.JsonParseUtil;
+import com.alibaba.fastjson.JSONObject;
+import com.zdjizhi.utils.Encodes;
+import com.zdjizhi.utils.FormatUtils;
+import com.zdjizhi.utils.IpLookup;
+import com.zdjizhi.utils.JsonMapper;
+import org.apache.log4j.Logger;
+
+import java.util.*;
+
+import static cn.ac.iie.utils.general.TransFunction.*;
/**
* 鎻忚堪:杞崲鎴栬ˉ鍏ㄥ伐鍏风被
*
* @author qidaijie
- * @create 2018-08-13 15:11
*/
public class TransFormUtils {
private static Logger logger = Logger.getLogger(TransFormUtils.class);
- private static Pattern WEB_PATTERN = Pattern.compile("[^\\\\.]+(\\.com\\.cn|\\.net\\.cn|\\.org\\.cn|\\.gov\\.cn|\\.com|\\.net|\\.cn|\\.org|\\.cc|\\.me|\\.tel|\\.mobi|\\.asia|\\.biz|\\.info|\\.name|\\.tv|\\.hk|\\.鍏徃|\\.涓浗|\\.缃戠粶)");
+
+ /**
+ * 鍦ㄥ唴瀛樹腑鍔犺浇鍙嶅皠绫荤敤鐨刴ap
+ */
+ private static HashMap map = JsonParseUtil.getMapFromHttp(FlowWriteConfig.SCHEMA_HTTP);
+
+ /**
+ * 鍙嶅皠鎴愪竴涓被
+ */
+ private static Object mapObject = JsonParseUtil.generateObject(map);
+
+ /**
+ * 鑾峰彇浠诲姟鍒楄〃
+ * list鐨勬瘡涓厓绱犳槸涓涓洓鍏冨瓧绗︿覆鏁扮粍 (鏈塮ormat鏍囪瘑鐨勫瓧娈碉紝琛ュ叏鐨勫瓧娈碉紝鐢ㄥ埌鐨勫姛鑳藉嚱鏁帮紝鐢ㄥ埌鐨勫弬鏁)锛屼緥濡傦細
+ * (mail_subject mail_subject decode_of_base64 mail_subject_charset)
+ */
+ private static ArrayList jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
+
+ /**
+ * 琛ュ叏宸ュ叿绫
+ */
+// private static FormatUtils build = new FormatUtils.Builder(false).build();
+
+ /**
+ * IP瀹氫綅搴撳伐鍏风被
+ */
private static IpLookup ipLookup = new IpLookup.Builder(false)
- .loadDataFileV4(FlowWriteConfig.IP_LIBRARY + "Kazakhstan.mmdb")
- .loadDataFileV6(FlowWriteConfig.IP_LIBRARY + "Kazakhstan.mmdb")
- .loadAsnDataFileV4(FlowWriteConfig.IP_LIBRARY + "asn_v4.mmdb")
+ .loadDataFileV4(FlowWriteConfig.IP_LIBRARY + "ip_v4.mmdb")
+ .loadDataFileV6(FlowWriteConfig.IP_LIBRARY + "ip_v6.mmdb")
+ .loadDataFilePrivateV4(FlowWriteConfig.IP_LIBRARY + "ip_private_v4.mmdb")
+ .loadDataFilePrivateV6(FlowWriteConfig.IP_LIBRARY + "ip_private_v6.mmdb")
+ .loadAsnDataFile(FlowWriteConfig.IP_LIBRARY + "asn_v4.mmdb")
.loadAsnDataFileV6(FlowWriteConfig.IP_LIBRARY + "asn_v6.mmdb")
.build();
-// private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
-// private static SnowflakeId snowflakeId = new SnowflakeId();
/**
* 瑙f瀽鏃ュ織锛屽苟琛ュ叏
*
- * @param message 鍘熷鏃ュ織
+ * @param message kafka Topic鍘熷鏃ュ織
* @return 琛ュ叏鍚庣殑鏃ュ織
*/
- public static String getJsonMessage(String message) {
- SessionRecordLog sessionRecordLog = JSONObject.parseObject(message, SessionRecordLog.class);
- String serverIp = sessionRecordLog.getServer_ip();
- String clientIp = sessionRecordLog.getClient_ip();
+ public static String dealCommonMessage(String message) {
+
+ Object object = JSONObject.parseObject(message, mapObject.getClass());
+
try {
- sessionRecordLog.setUid(SnowflakeId.generateId());
- sessionRecordLog.setServer_location(ipLookup.countryLookup(serverIp));
- sessionRecordLog.setClient_location(ipLookup.cityLookupDetail(clientIp));
- sessionRecordLog.setClient_asn(ipLookup.asnLookup(clientIp, true));
- sessionRecordLog.setServer_asn(ipLookup.asnLookup(serverIp, true));
- sessionRecordLog.setDomain(getTopDomain(sessionRecordLog.getSni(), sessionRecordLog.getHost()));
- sessionRecordLog.setRecv_time(System.currentTimeMillis() / 1000);
-// sessionRecordLog.setSubscribe_id(getSubscribeId(clientIp));
- return JSONObject.toJSONString(sessionRecordLog);
- } catch (Exception e) {
- logger.error("鏃ュ織瑙f瀽杩囩▼鍑虹幇寮傚父", e);
- return "";
- }
- }
+ for (String[] strings : jobList) {
+ //鐢ㄥ埌鐨勫弬鏁扮殑鍊
+ Object name = JsonParseUtil.getValue(object, strings[0]);
+ //闇瑕佽ˉ鍏ㄧ殑瀛楁鐨刱ey
+ String appendToKeyName = strings[1];
+ //闇瑕佽ˉ鍏ㄧ殑瀛楁鐨勫
+ Object appendTo = JsonParseUtil.getValue(object, appendToKeyName);
+ //鍖归厤鎿嶄綔鍑芥暟鐨勫瓧娈
+ String function = strings[2];
+ //棰濆鐨勫弬鏁扮殑鍊
+ String param = strings[3];
- /**
- * 鏈塻ni閫氳繃sni鑾峰彇鍩熷悕锛屾湁hots鏍规嵁host鑾峰彇鍩熷悕
- *
- * @param sni sni
- * @param host host
- * @return 椤剁骇鍩熷悕
- */
- private static String getTopDomain(String sni, String host) {
- if (StringUtil.isNotBlank(sni)) {
- return getDomain(sni);
- } else if (StringUtil.isNotBlank(host)) {
- return getDomain(host);
- } else {
- return "";
- }
- }
-
- /**
- * 鑾峰彇鐢ㄦ埛鍚
- *
- * @param key Sip
- * @return SubscribeId
- */
- private static String getSubscribeId(String key) {
- String sub = "";
- try (Jedis jedis = RedisPollUtils.getJedis()) {
- if (jedis != null) {
- sub = jedis.get(key);
- }
- } catch (Exception e) {
- logger.error("閫氳繃Redis鑾峰彇鐢ㄦ埛鍚嶅嚭鐜板紓甯", e);
- }
- return sub;
- }
-
-
- /**
- * 鏍规嵁url鎴彇椤剁骇鍩熷悕
- *
- * @param url 缃戠珯url
- * @return 椤剁骇鍩熷悕
- */
- private static String getDomain(String url) {
- try {
- Matcher matcher = WEB_PATTERN.matcher(url);
- if (matcher.find()) {
- return matcher.group();
+ functionSet(function, object, appendToKeyName, appendTo, name, param);
}
+ return JSONObject.toJSONString(object);
} catch (Exception e) {
+ logger.error(FlowWriteConfig.KAFKA_TOPIC + "鏃ュ織棰勫鐞嗚繃绋嬪嚭鐜板紓甯");
e.printStackTrace();
+ return "";
}
- return "";
- }
-
- public static void main(String[] args) {
- String s = ipLookup.countryLookup("192.168.10.207");
- System.out.println(s);
}
-}
+ /**
+ * 鏍规嵁schema鎻忚堪瀵瑰簲瀛楁杩涜鎿嶄綔鐨 鍑芥暟闆嗗悎
+ *
+ * @param function 鍖归厤鎿嶄綔鍑芥暟鐨勫瓧娈
+ * @param object 鍔ㄦ丳OJO Object
+ * @param appendToKeyName 闇瑕佽ˉ鍏ㄧ殑瀛楁鐨刱ey
+ * @param appendTo 闇瑕佽ˉ鍏ㄧ殑瀛楁鐨勫
+ * @param name 鐢ㄥ埌鐨勫弬鏁扮殑鍊
+ * @param param 棰濆鐨勫弬鏁扮殑鍊
+ */
+ private static void functionSet(String function, Object object, String appendToKeyName, Object appendTo, Object name, String param) {
+ switch (function) {
+ case "current_timestamp":
+ if ((long) appendTo == 0L) {
+ JsonParseUtil.setValue(object, appendToKeyName, getCurrentTime());
+ }
+ break;
+ case "snowflake_id":
+// JsonParseUtil.setValue(object, appendToKeyName,
+// build.getSnowflakeId(FlowWriteConfig.ZOOKEEPER_SERVERS, FlowWriteConfig.DATA_CENTER_ID_NUM));
+ JsonParseUtil.setValue(object, appendToKeyName, SnowflakeId.generateId());
+ break;
+ case "geo_ip_detail":
+ if (name != null && appendTo == null) {
+ JsonParseUtil.setValue(object, appendToKeyName, getGeoIpDetail(ipLookup, name.toString()));
+ }
+ break;
+ case "geo_asn":
+ if (name != null && appendTo == null) {
+ JsonParseUtil.setValue(object, appendToKeyName, getGeoAsn(ipLookup, name.toString()));
+ }
+ break;
+ case "geo_ip_country":
+ if (name != null && appendTo == null) {
+ JsonParseUtil.setValue(object, appendToKeyName, getGeoIpCountry(ipLookup, name.toString()));
+ }
+ break;
+ case "get_value":
+ if (name != null) {
+ JsonParseUtil.setValue(object, appendToKeyName, name);
+ }
+ break;
+ case "if":
+ if (param != null) {
+ JsonParseUtil.setValue(object, appendToKeyName, condition(object, param));
+ }
+ break;
+ case "sub_domain":
+ if (appendTo == null && name != null) {
+ JsonParseUtil.setValue(object, appendToKeyName, getTopDomain(name.toString()));
+ }
+ break;
+ case "radius_match":
+ if (name != null) {
+ JsonParseUtil.setValue(object, appendToKeyName, radiusMatch(name.toString()));
+ }
+ break;
+ case "decode_of_base64":
+ if (name != null) {
+ JsonParseUtil.setValue(object, appendToKeyName, decodeBase64(name.toString(), isJsonValue(object, param)));
+ }
+ break;
+ case "flattenSpec":
+ if (name != null && param != null) {
+ JsonParseUtil.setValue(object, appendToKeyName, flattenSpec(name.toString(), isJsonValue(object, param)));
+ }
+ break;
+ default:
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/utils/general/TransFunction.java b/src/main/java/cn/ac/iie/utils/general/TransFunction.java
new file mode 100644
index 0000000..2ee2f4b
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/general/TransFunction.java
@@ -0,0 +1,184 @@
+package cn.ac.iie.utils.general;
+
+import cn.ac.iie.common.FlowWriteConfig;
+import cn.ac.iie.utils.hbase.HBaseUtils;
+import cn.ac.iie.utils.json.JsonParseUtil;
+import com.jayway.jsonpath.InvalidPathException;
+import com.jayway.jsonpath.JsonPath;
+import com.jayway.jsonpath.PathNotFoundException;
+import com.zdjizhi.utils.Encodes;
+import com.zdjizhi.utils.FormatUtils;
+import com.zdjizhi.utils.IpLookup;
+import com.zdjizhi.utils.StringUtil;
+import org.apache.log4j.Logger;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+/**
+ * Field-level enrichment functions invoked by the schema-driven transformer (TransFormUtils). @author qidaijie
+ */
+class TransFunction {
+ private static Logger logger = Logger.getLogger(TransFunction.class);
+
+ /**
+ * Returns the current time as epoch seconds.
+ */
+ static long getCurrentTime() {
+ return System.currentTimeMillis() / 1000;
+ }
+
+ /**
+ * Resolves city-level location detail for an IP.
+ *
+ * @param ip client IP
+ * @return detailed location string from the IP library
+ */
+ static String getGeoIpDetail(IpLookup ipLookup, String ip) {
+
+ return ipLookup.cityLookupDetail(ip);
+ }
+
+ /**
+ * Resolves the ASN for an IP.
+ *
+ * @param ip client/server IP
+ * @return ASN
+ */
+ static String getGeoAsn(IpLookup ipLookup, String ip) {
+
+ return ipLookup.asnLookup(ip);
+ }
+
+ /**
+ * Resolves the country for an IP.
+ *
+ * @param ip server IP
+ * @return country name
+ */
+ static String getGeoIpCountry(IpLookup ipLookup, String ip) {
+
+ return ipLookup.countryLookup(ip);
+ }
+
+
+ /**
+ * Completes the radius account field by looking the IP up in the HBase-backed in-memory cache.
+ *
+ * @param ip client IP
+ * @return account from the cache; may be null/blank when the IP has no cached entry (only a warning is logged)
+ */
+ static String radiusMatch(String ip) {
+ String account = HBaseUtils.getAccount(ip.trim());
+ if (StringUtil.isBlank(account)) {
+ logger.warn("HashMap get Account is null,IP is :" + ip);
+ }
+ return account;
+ }
+
+ /**
+ * Extracts the top-level private domain from a raw domain name.
+ *
+ * @param domain raw domain
+ * @return top private domain, or "" when FormatUtils throws StringIndexOutOfBoundsException
+ */
+ static String getTopDomain(String domain) {
+ try {
+ return FormatUtils.getTopPrivateDomain(domain);
+ } catch (StringIndexOutOfBoundsException outException) {
+ logger.error("瑙f瀽椤剁骇鍩熷悕寮傚父,寮傚父鍩熷悕:" + domain, outException);
+ return "";
+ }
+ }
+
+ /**
+ * Decodes a Base64 string using the given charset.
+ *
+ * @param message Base64 text; blank input yields ""
+ * @param charset charset name; falls back to MAIL_DEFAULT_CHARSET when blank
+ * @return decoded string, or "" on blank input or decode failure
+ */
+ static String decodeBase64(String message, String charset) {
+ String result = "";
+ try {
+ if (StringUtil.isNotBlank(message)) {
+ if (StringUtil.isNotBlank(charset)) {
+ result = Encodes.decodeBase64String(message, charset);
+ } else {
+ result = Encodes.decodeBase64String(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
+ }
+ }
+ } catch (Exception e) {
+ logger.error("瑙f瀽 Base64 寮傚父,寮傚父淇℃伅:" + e);
+ }
+ return result;
+ }
+
+ /**
+ * Evaluates a JsonPath expression against a JSON message and returns the first match.
+ *
+ * @param message JSON text
+ * @param expr JsonPath expression; blank expr yields ""
+ * @return first matched value, or "" when the path is invalid or the result type does not fit
+ */
+ static String flattenSpec(String message, String expr) {
+ String flattenResult = "";
+ try {
+ if (StringUtil.isNotBlank(expr)) {
+ ArrayList read = JsonPath.parse(message).read(expr);
+ flattenResult = read.get(0);
+ }
+ } catch (ClassCastException | InvalidPathException e) {
+ logger.error("璁惧鏍囩瑙f瀽寮傚父锛孾 " + expr + " ]瑙f瀽琛ㄨ揪寮忛敊璇", e);
+ }
+ return flattenResult;
+ }
+
+
+ /**
+ * If param carries the log-field tag, returns that field's value from the object; otherwise returns param itself.
+ *
+ * @param object in-memory POJO holding the parsed log
+ * @param param field reference (IS_JSON_KEY_TAG prefix, tag assumed 2 chars - see substring(2)) or a plain literal
+ * @return the referenced field value ("" when absent) or the literal string
+ */
+ static String isJsonValue(Object object, String param) {
+ if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
+ Object value = JsonParseUtil.getValue(object, param.substring(2));
+ if (value != null) {
+ return value.toString();
+ } else {
+ return "";
+ }
+ } else {
+ return param;
+ }
+ }
+
+ /**
+ * IF-function: evaluates a ternary expression described by ifParam against the log object.
+ *
+ * @param object in-memory POJO holding the parsed log
+ * @param ifParam "field==value,resultA,resultB"-style expression split by FORMAT_SPLITTER / IF_CONDITION_SPLITTER
+ * @return resultA when the integer condition matches, resultB otherwise, "" when it cannot be evaluated
+ */
+ static String condition(Object object, String ifParam) {
+ String result = "";
+ try {
+ String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
+ String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
+ String direction = isJsonValue(object, norms[0]);
+ if (StringUtil.isNotBlank(direction)) {
+ if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
+ String resultA = isJsonValue(object, split[1]);
+ String resultB = isJsonValue(object, split[2]);
+ result = (Integer.parseInt(direction) == Integer.parseInt(norms[1])) ? resultA : resultB;
+ }
+ }
+ } catch (Exception e) {
+ logger.error("IF 鍑芥暟鎵ц寮傚父,寮傚父淇℃伅:" + e);
+ e.printStackTrace();
+ }
+ return result;
+ }
+}
diff --git a/src/main/java/cn/ac/iie/utils/hbase/HBaseUtils.java b/src/main/java/cn/ac/iie/utils/hbase/HBaseUtils.java
new file mode 100644
index 0000000..5371b06
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/hbase/HBaseUtils.java
@@ -0,0 +1,210 @@
+package cn.ac.iie.utils.hbase;
+
+import cn.ac.iie.common.FlowWriteConfig;
+import com.zdjizhi.utils.StringUtil;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Caches the HBase table "sub:&lt;table&gt;" (rowkey -&gt; value; presumably client IP -&gt; account, TODO confirm against writers)
+ * in a ConcurrentHashMap and refreshes it periodically via a time-range scan.
+ * @author qidaijie
+ */
+
+public class HBaseUtils {
+ private final static Logger logger = Logger.getLogger(HBaseUtils.class);
+ private static Map subIdMap = new ConcurrentHashMap<>(83334);
+// NOTE(review): dead commented-out HashMap variant (typo "subId/Map"); consider deleting
+ private static Connection connection;
+ private static Long time;
+
+ private static String zookeeperIp;
+ private static String hBaseTable;
+
+ private static HBaseUtils hBaseUtils;
+
+ private static void getHBaseInstance() {
+ hBaseUtils = new HBaseUtils();
+ }
+
+
+ /**
+ * Constructor: opens the HBase connection, loads the full table into the cache, then schedules the periodic refresh.
+ */
+ private HBaseUtils() {
+ zookeeperIp = FlowWriteConfig.HBASE_ZOOKEEPER_SERVERS;
+ hBaseTable = FlowWriteConfig.HBASE_TABLE_NAME;
+ //open the HBase connection
+ getHBaseConn();
+ //initial full load of the table
+ getAll();
+ //schedule periodic incremental refresh
+ updateHabaseCache();
+ }
+
+ private static void getHBaseConn() {
+ try {
+ // HBase client configuration
+ Configuration configuration = HBaseConfiguration.create();
+ // zookeeper quorum used by the HBase client
+ configuration.set("hbase.zookeeper.quorum", zookeeperIp);
+ configuration.set("hbase.client.retries.number", "3");
+ configuration.set("hbase.bulkload.retries.number", "3");
+ configuration.set("zookeeper.recovery.retry", "3");
+ connection = ConnectionFactory.createConnection(configuration);
+ time = System.currentTimeMillis();
+ logger.warn("HBaseUtils get HBase connection,now to getAll().");
+// getAll();
+ } catch (IOException ioe) {
+ logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
+ ioe.printStackTrace();
+ } catch (Exception e) {
+ logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Merges rows changed since the last refresh into the cache (lazy-inits the singleton first).
+ */
+ public static void change() {
+ if (hBaseUtils == null) {
+ getHBaseInstance();
+ }
+ long nowTime = System.currentTimeMillis();
+ timestampsFilter(time - 1000, nowTime + 500);
+ }
+
+
+ public static void main(String[] args) {
+ change();
+ }
+
+ /**
+ * Scans the table for cells whose timestamp lies in [startTime, endTime) and merges them into the cache.
+ *
+ * @param startTime scan window start (epoch ms); overlaps 1s with the previous window
+ * @param endTime scan window end (epoch ms); stored into `time` on success
+ */
+ private static void timestampsFilter(Long startTime, Long endTime) {
+ Long begin = System.currentTimeMillis();
+ Table table = null;
+ ResultScanner scanner = null;
+ Scan scan2 = new Scan();
+ try {
+ table = connection.getTable(TableName.valueOf("sub:" + hBaseTable));
+ scan2.setTimeRange(startTime, endTime);
+ scanner = table.getScanner(scan2);
+ for (Result result : scanner) {
+ Cell[] cells = result.rawCells();
+ for (Cell cell : cells) {
+ String key = Bytes.toString(CellUtil.cloneRow(cell)).trim();
+ String value = Bytes.toString(CellUtil.cloneValue(cell)).trim();
+ if (subIdMap.containsKey(key)) {
+ if (!value.equals(subIdMap.get(key))) {
+ subIdMap.put(key, value);
+ }
+ } else {
+ subIdMap.put(key, value);
+ }
+ }
+ }
+ Long end = System.currentTimeMillis();
+ logger.warn("HBaseUtils Now subIdMap.keySet().size() is: " + subIdMap.keySet().size());
+ logger.warn("HBaseUtils Update cache timeConsuming is: " + (end - begin) + ",BeginTime: " + startTime + ",EndTime: " + endTime);
+ time = endTime;
+ } catch (IOException ioe) {
+ logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
+ ioe.printStackTrace();
+ } catch (Exception e) {
+ logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
+ e.printStackTrace();
+ } finally {
+ if (scanner != null) {
+ scanner.close();
+ }
+ if (table != null) {
+ try {
+ table.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ /**
+ * Full scan: loads every rowkey/value pair of the table into the cache.
+ */
+ private static void getAll() {
+ long begin = System.currentTimeMillis();
+ try {
+ Table table = connection.getTable(TableName.valueOf("sub:" + hBaseTable));
+ Scan scan2 = new Scan();
+ ResultScanner scanner = table.getScanner(scan2);
+ for (Result result : scanner) {
+ Cell[] cells = result.rawCells();
+ for (Cell cell : cells) {
+ subIdMap.put(Bytes.toString(CellUtil.cloneRow(cell)), Bytes.toString(CellUtil.cloneValue(cell)));
+ }
+ }
+ logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size(): " + subIdMap.size());
+ logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size() timeConsuming is: " + (System.currentTimeMillis() - begin));
+ scanner.close();
+ } catch (IOException ioe) {
+ logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
+ ioe.printStackTrace();
+ } catch (Exception e) {
+ logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Schedules the periodic cache refresh on a Timer (first run after 1 ms, then every HBASE_TICK_TUPLE_FREQ_SECS seconds).
+ */
+ private void updateHabaseCache() {
+ Timer timer = new Timer();
+ timer.scheduleAtFixedRate(new TimerTask() {
+ @Override
+ public void run() {
+ try {
+ change();
+ } catch (Exception e) {
+ logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
+ e.printStackTrace();
+ }
+ }
+ }, 1, 1000 * FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS);//first run after 1ms, then every HBASE_TICK_TUPLE_FREQ_SECS seconds
+ }
+
+
+ /**
+ * Looks up the cached value for a client IP (lazy-inits the singleton on first call).
+ *
+ * @param clientIp client_ip
+ * @return cached account, or null when the IP has no entry
+ */
+ public static String getAccount(String clientIp) {
+
+ if (hBaseUtils == null) {
+ getHBaseInstance();
+ }
+ return subIdMap.get(clientIp);
+
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/utils/http/HttpClientUtil.java b/src/main/java/cn/ac/iie/utils/http/HttpClientUtil.java
new file mode 100644
index 0000000..347a69b
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/http/HttpClientUtil.java
@@ -0,0 +1,55 @@
+package cn.ac.iie.utils.http;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+/**
+ * 鑾峰彇缃戝叧schema鐨勫伐鍏风被
+ *
+ * @author qidaijie
+ */
+public class HttpClientUtil {
+
+ /**
+ * 璇锋眰缃戝叧鑾峰彇schema
+ * @param http 缃戝叧url
+ * @return schema
+ */
+ public static String requestByGetMethod(String http) {
+ CloseableHttpClient httpClient = HttpClients.createDefault();
+ StringBuilder entityStringBuilder = null;
+ try {
+ HttpGet get = new HttpGet(http);
+ try (CloseableHttpResponse httpResponse = httpClient.execute(get)) {
+ HttpEntity entity = httpResponse.getEntity();
+ entityStringBuilder = new StringBuilder();
+ if (null != entity) {
+ BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
+ String line = null;
+ while ((line = bufferedReader.readLine()) != null) {
+ entityStringBuilder.append(line);
+ }
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ try {
+ if (httpClient != null) {
+ httpClient.close();
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ return entityStringBuilder.toString();
+ }
+
+}
diff --git a/src/main/java/cn/ac/iie/utils/influxdb/InfluxDbUtils.java b/src/main/java/cn/ac/iie/utils/influxdb/InfluxDbUtils.java
index 124344f..c51589b 100644
--- a/src/main/java/cn/ac/iie/utils/influxdb/InfluxDbUtils.java
+++ b/src/main/java/cn/ac/iie/utils/influxdb/InfluxDbUtils.java
@@ -45,6 +45,22 @@ public class InfluxDbUtils {
}
}
+ /**
+ * 璁板綍瀵瑰噯澶辫触娆℃暟-鍗冲唴瀛樹腑娌℃湁瀵瑰簲鐨刱ey
+ *
+ * @param failure 瀵瑰噯澶辫触閲
+ */
+ public static void sendHBaseFailure(int failure) {
+ if (failure != 0) {
+ InfluxDB client = InfluxDBFactory.connect(FlowWriteConfig.INFLUX_IP, FlowWriteConfig.INFLUX_USERNAME, FlowWriteConfig.INFLUX_PASSWORD);
+ Point point1 = Point.measurement("sendHBaseFailure")
+ .tag("topic", FlowWriteConfig.KAFKA_TOPIC)
+ .field("failure", failure)
+ .build();
+ client.write("BusinessMonitor", "", point1);
+ }
+ }
+
/**
* 鑾峰彇鏈満IP
*
diff --git a/src/main/java/cn/ac/iie/utils/json/JsonParseUtil.java b/src/main/java/cn/ac/iie/utils/json/JsonParseUtil.java
new file mode 100644
index 0000000..4b16a06
--- /dev/null
+++ b/src/main/java/cn/ac/iie/utils/json/JsonParseUtil.java
@@ -0,0 +1,240 @@
+package cn.ac.iie.utils.json;
+
+import cn.ac.iie.common.FlowWriteConfig;
+import cn.ac.iie.utils.http.HttpClientUtil;
+import com.alibaba.fastjson.JSON;
+import com.alibaba.fastjson.JSONArray;
+import com.alibaba.fastjson.JSONObject;
+import com.jayway.jsonpath.JsonPath;
+import com.zdjizhi.utils.StringUtil;
+import net.sf.cglib.beans.BeanGenerator;
+import net.sf.cglib.beans.BeanMap;
+import org.apache.log4j.Logger;
+import scala.annotation.meta.field;
+
+
+import java.util.*;
+
+/**
+ * 浣跨敤FastJson瑙f瀽json鐨勫伐鍏风被
+ *
+ * @author qidaijie
+ */
+public class JsonParseUtil {
+ private static Logger logger = Logger.getLogger(JsonParseUtil.class);
+
+ /**
+ * 妯″紡鍖归厤锛岀粰瀹氫竴涓被鍨嬪瓧绗︿覆杩斿洖涓涓被绫诲瀷
+ *
+ * @param type 绫诲瀷
+ * @return 绫荤被鍨
+ */
+
+ private static Class getClassName(String type) {
+ Class clazz;
+
+ switch (type) {
+ case "int":
+ clazz = Integer.class;
+ break;
+ case "String":
+ clazz = String.class;
+ break;
+ case "long":
+ clazz = long.class;
+ break;
+ case "Integer":
+ clazz = Integer.class;
+ break;
+ case "double":
+ clazz = double.class;
+ break;
+ case "float":
+ clazz = float.class;
+ break;
+ case "char":
+ clazz = char.class;
+ break;
+ case "byte":
+ clazz = byte.class;
+ break;
+ case "boolean":
+ clazz = boolean.class;
+ break;
+ case "short":
+ clazz = short.class;
+ break;
+ default:
+ clazz = String.class;
+ }
+ return clazz;
+ }
+
+ /**
+ * 鏍规嵁鍙嶅皠鐢熸垚瀵硅薄鐨勬柟娉
+ *
+ * @param properties 鍙嶅皠绫荤敤鐨刴ap
+ * @return 鐢熸垚鐨凮bject绫诲瀷鐨勫璞
+ */
+ public static Object generateObject(Map properties) {
+ BeanGenerator generator = new BeanGenerator();
+ Set keySet = properties.keySet();
+ for (Object aKeySet : keySet) {
+ String key = (String) aKeySet;
+ generator.addProperty(key, (Class) properties.get(key));
+ }
+ return generator.create();
+ }
+
+ /**
+ * 鑾峰彇灞炴у肩殑鏂规硶
+ *
+ * @param obj 瀵硅薄
+ * @param property key
+ * @return 灞炴х殑鍊
+ */
+ public static Object getValue(Object obj, String property) {
+ BeanMap beanMap = BeanMap.create(obj);
+ return beanMap.get(property);
+ }
+
+ /**
+ * 鏇存柊灞炴у肩殑鏂规硶
+ *
+ * @param obj 瀵硅薄
+ * @param property 鏇存柊鐨刱ey
+ * @param value 鏇存柊鐨勫
+ */
+ public static void setValue(Object obj, String property, Object value) {
+ try {
+ BeanMap beanMap = BeanMap.create(obj);
+ beanMap.put(property, value);
+ } catch (ClassCastException e) {
+ logger.error("璧嬩簣瀹炰綋绫婚敊璇被鍨嬫暟鎹", e);
+ }
+ }
+
+ /**
+ * 閫氳繃鑾峰彇String绫诲瀷鐨勭綉鍏硈chema閾炬帴鏉ヨ幏鍙杕ap锛岀敤浜庣敓鎴愪竴涓狾bject绫诲瀷鐨勫璞
+ *
+ * @param http 缃戝叧schema鍦板潃
+ * @return 鐢ㄤ簬鍙嶅皠鐢熸垚schema绫诲瀷鐨勫璞$殑涓涓猰ap闆嗗悎
+ */
+ public static HashMap getMapFromHttp(String http) {
+ HashMap map = new HashMap<>();
+
+ String schema = HttpClientUtil.requestByGetMethod(http);
+ Object data = JSON.parseObject(schema).get("data");
+
+ //鑾峰彇fields锛屽苟杞寲涓烘暟缁勶紝鏁扮粍鐨勬瘡涓厓绱犻兘鏄竴涓猲ame doc type
+ JSONObject schemaJson = JSON.parseObject(data.toString());
+ JSONArray fields = (JSONArray) schemaJson.get("fields");
+
+ for (Object field : fields) {
+ String filedStr = field.toString();
+ if (checkKeepField(filedStr)) {
+ String name = JsonPath.read(filedStr, "$.name").toString();
+ String type = JsonPath.read(filedStr, "$.type").toString();
+// boolean contains = type.contains("[");
+// if (contains) {
+// map.put(name, Integer.class);
+// } else {
+ //缁勫悎鐢ㄦ潵鐢熸垚瀹炰綋绫荤殑map
+ map.put(name, getClassName(type));
+// }
+ }
+ }
+ return map;
+ }
+
+ /**
+ * 鍒ゆ柇瀛楁鏄惁闇瑕佷繚鐣
+ *
+ * @param message 鍗曚釜field-json
+ * @return true or false
+ */
+ private static boolean checkKeepField(String message) {
+ boolean isKeepField = true;
+ boolean isHiveDoc = JSON.parseObject(message).containsKey("doc");
+ if (isHiveDoc) {
+ boolean isHiveVi = JsonPath.read(message, "$.doc").toString().contains("visibility");
+ if (isHiveVi) {
+ String visibility = JsonPath.read(message, "$.doc.visibility").toString();
+ if (FlowWriteConfig.VISIBILITY.equals(visibility)) {
+ isKeepField = false;
+ }
+ }
+ }
+ return isKeepField;
+ }
+
+ /**
+ * 鏍规嵁http閾炬帴鑾峰彇schema锛岃В鏋愪箣鍚庤繑鍥炰竴涓换鍔″垪琛 (useList toList funcList paramlist)
+ *
+ * @param http 缃戝叧url
+ * @return 浠诲姟鍒楄〃
+ */
+ public static ArrayList getJobListFromHttp(String http) {
+ ArrayList list = new ArrayList<>();
+
+ String schema = HttpClientUtil.requestByGetMethod(http);
+ //瑙f瀽data
+ Object data = JSON.parseObject(schema).get("data");
+
+ //鑾峰彇fields锛屽苟杞寲涓烘暟缁勶紝鏁扮粍鐨勬瘡涓厓绱犻兘鏄竴涓猲ame doc type
+ JSONObject schemaJson = JSON.parseObject(data.toString());
+ JSONArray fields = (JSONArray) schemaJson.get("fields");
+
+ for (Object field : fields) {
+
+ if (JSON.parseObject(field.toString()).containsKey("doc")) {
+ Object doc = JSON.parseObject(field.toString()).get("doc");
+
+ if (JSON.parseObject(doc.toString()).containsKey("format")) {
+ String name = JSON.parseObject(field.toString()).get("name").toString();
+ Object format = JSON.parseObject(doc.toString()).get("format");
+ JSONObject formatObject = JSON.parseObject(format.toString());
+
+ String functions = formatObject.get("functions").toString();
+ String appendTo = null;
+ String params = null;
+
+ if (formatObject.containsKey("appendTo")) {
+ appendTo = formatObject.get("appendTo").toString();
+ }
+
+ if (formatObject.containsKey("param")) {
+ params = formatObject.get("param").toString();
+ }
+
+
+ if (StringUtil.isNotBlank(appendTo) && StringUtil.isBlank(params)) {
+ String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
+ String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
+
+ for (int i = 0; i < functionArray.length; i++) {
+ list.add(new String[]{name, appendToArray[i], functionArray[i], null});
+ }
+
+ } else if (StringUtil.isNotBlank(appendTo) && StringUtil.isNotBlank(params)) {
+ String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
+ String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
+ String[] paramArray = params.split(FlowWriteConfig.FORMAT_SPLITTER);
+
+ for (int i = 0; i < functionArray.length; i++) {
+ list.add(new String[]{name, appendToArray[i], functionArray[i], paramArray[i]});
+
+ }
+ } else {
+ list.add(new String[]{name, name, functions, params});
+ }
+
+ }
+ }
+
+ }
+ return list;
+ }
+
+
+}
\ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/utils/kafka/KafkaLogNtc.java b/src/main/java/cn/ac/iie/utils/kafka/KafkaLogSend.java
similarity index 58%
rename from src/main/java/cn/ac/iie/utils/kafka/KafkaLogNtc.java
rename to src/main/java/cn/ac/iie/utils/kafka/KafkaLogSend.java
index 11ae57a..26e2e43 100644
--- a/src/main/java/cn/ac/iie/utils/kafka/KafkaLogNtc.java
+++ b/src/main/java/cn/ac/iie/utils/kafka/KafkaLogSend.java
@@ -1,11 +1,10 @@
package cn.ac.iie.utils.kafka;
import cn.ac.iie.common.FlowWriteConfig;
-import cn.ac.iie.utils.influxdb.InfluxDbUtils;
+import cn.ac.iie.common.KafkaProConfig;
import org.apache.kafka.clients.producer.*;
import org.apache.log4j.Logger;
-import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
@@ -16,8 +15,8 @@ import java.util.Properties;
* @create 2018-08-13 15:11
*/
-public class KafkaLogNtc {
- private static Logger logger = Logger.getLogger(KafkaLogNtc.class);
+public class KafkaLogSend {
+ private static Logger logger = Logger.getLogger(KafkaLogSend.class);
/**
* kafka鐢熶骇鑰咃紝鐢ㄤ簬鍚慿afka涓彂閫佹秷鎭
@@ -27,17 +26,17 @@ public class KafkaLogNtc {
/**
* kafka鐢熶骇鑰呴傞厤鍣紙鍗曚緥锛夛紝鐢ㄦ潵浠g悊kafka鐢熶骇鑰呭彂閫佹秷鎭
*/
- private static KafkaLogNtc kafkaLogNtc;
+ private static KafkaLogSend kafkaLogSend;
- private KafkaLogNtc() {
+ private KafkaLogSend() {
initKafkaProducer();
}
- public static KafkaLogNtc getInstance() {
- if (kafkaLogNtc == null) {
- kafkaLogNtc = new KafkaLogNtc();
+ public static KafkaLogSend getInstance() {
+ if (kafkaLogSend == null) {
+ kafkaLogSend = new KafkaLogSend();
}
- return kafkaLogNtc;
+ return kafkaLogSend;
}
@@ -59,7 +58,7 @@ public class KafkaLogNtc {
}
}
kafkaProducer.flush();
- logger.warn("Log sent to National Center successfully!!!!!");
+ logger.debug("Log sent to National Center successfully!!!!!");
}
/**
@@ -67,15 +66,25 @@ public class KafkaLogNtc {
*/
private void initKafkaProducer() {
Properties properties = new Properties();
- properties.put("bootstrap.servers", FlowWriteConfig.BOOTSTRAP_SERVERS);
+ properties.put("bootstrap.servers", FlowWriteConfig.OUTPUT_KAFKA_SERVERS);
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
- properties.put("acks", "1");
- properties.put("linger.ms", "2");
- properties.put("request.timeout.ms", 60000);
- properties.put("batch.size", 262144);
- properties.put("buffer.memory", 33554432);
- properties.put("compression.type", "snappy");
+ properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
+ properties.put("retries", KafkaProConfig.RETRIES);
+ properties.put("linger.ms", KafkaProConfig.LINGER_MS);
+ properties.put("request.timeout.ms", KafkaProConfig.REQUEST_TIMEOUT_MS);
+ properties.put("batch.size", KafkaProConfig.BATCH_SIZE);
+ properties.put("buffer.memory", KafkaProConfig.BUFFER_MEMORY);
+ properties.put("max.request.size", KafkaProConfig.MAX_REQUEST_SIZE);
+// properties.put("compression.type", FlowWriteConfig.KAFKA_COMPRESSION_TYPE);
+
+ /**
+ * kafka闄愭祦閰嶇疆-20201117
+ */
+ properties.put(ProducerConfig.CLIENT_ID_CONFIG, FlowWriteConfig.PRODUCER_CLIENT_ID);
+ properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
+
+
kafkaProducer = new KafkaProducer<>(properties);
}
diff --git a/src/main/java/cn/ac/iie/utils/redis/RedisClusterUtils.java b/src/main/java/cn/ac/iie/utils/redis/RedisClusterUtils.java
deleted file mode 100644
index e7f67d9..0000000
--- a/src/main/java/cn/ac/iie/utils/redis/RedisClusterUtils.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package cn.ac.iie.utils.redis;
-
-import cn.ac.iie.common.FlowWriteConfig;
-import org.apache.log4j.Logger;
-import redis.clients.jedis.HostAndPort;
-import redis.clients.jedis.JedisCluster;
-import redis.clients.jedis.JedisPoolConfig;
-
-import java.io.IOException;
-import java.util.LinkedHashSet;
-import java.util.Properties;
-import java.util.Set;
-
-/**
- * 棰勭敤浜庡鍑咺P瀵瑰簲鐨勭敤鎴峰悕鐨 Redis杩炴帴姹
- *
- * @author my
- * @date 2018-07-04
- */
-public final class RedisClusterUtils {
- private static final Logger logger = Logger.getLogger(RedisClusterUtils.class);
- private static JedisCluster jedisCluster;
- private static Properties props = new Properties();
-
- static {
- try {
- String redisConfigFile = "redis_config.properties";
- props.load(RedisClusterUtils.class.getClassLoader().getResourceAsStream(redisConfigFile));
- } catch (IOException e) {
- props = null;
- logger.error("鍔犺浇Redis閰嶇疆鏂囦欢澶辫触锛", e);
- }
- }
-
- /**
- * 涓嶅厑璁搁氳繃new鍒涘缓璇ョ被鐨勫疄渚
- */
- private RedisClusterUtils() {
- }
-
- /**
- * 鍒濆鍖朢edis杩炴帴姹
- */
- private static JedisCluster getJedisCluster() {
- if (jedisCluster == null) {
- JedisPoolConfig poolConfig = new JedisPoolConfig();
- poolConfig.setMaxTotal(Integer.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_MAXACTIVE)));
- poolConfig.setMaxIdle(Integer.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_MAXIDLE)));
- poolConfig.setMaxWaitMillis(Long.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_MAXWAIT)));
- poolConfig.setTestOnReturn(Boolean.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_TESTONRETURN)));
- poolConfig.setTestOnBorrow(Boolean.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_TESTONBORROW)));
- Set nodes = new LinkedHashSet();
- for (String port : props.getProperty(FlowWriteConfig.REDIS_PORT).split(FlowWriteConfig.SEGMENTATION)) {
- for (String ip : props.getProperty(FlowWriteConfig.REDIS_IP).split(FlowWriteConfig.SEGMENTATION)) {
- nodes.add(new HostAndPort(ip, Integer.parseInt(port)));
- }
- }
- jedisCluster = new JedisCluster(nodes, poolConfig);
- }
- return jedisCluster;
- }
-
- /**
- * 鑾峰彇鐢ㄦ埛鍚
- *
- * @param key service_ip
- * @return Subscribe_id
- */
- public static String get(String key) {
- String s = key.split("\\.")[0];
- if (!FlowWriteConfig.CHECK_IP_SCOPE.contains(s)) {
- jedisCluster = getJedisCluster();
- return jedisCluster.get(key);
- }
- return "";
- }
-
-
-}
diff --git a/src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java b/src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java
deleted file mode 100644
index 378bef5..0000000
--- a/src/main/java/cn/ac/iie/utils/redis/RedisPollUtils.java
+++ /dev/null
@@ -1,115 +0,0 @@
-package cn.ac.iie.utils.redis;
-
-import cn.ac.iie.common.FlowWriteConfig;
-import com.zdjizhi.utils.StringUtil;
-import org.apache.commons.lang3.RandomUtils;
-import org.apache.log4j.Logger;
-import redis.clients.jedis.Jedis;
-import redis.clients.jedis.JedisPool;
-import redis.clients.jedis.JedisPoolConfig;
-
-import java.util.Properties;
-
-/**
- * @author qidaijie
- */
-public class RedisPollUtils {
- private static final Logger logger = Logger.getLogger(RedisPollUtils.class);
- private static JedisPool jedisPool = null;
- private static Properties props = new Properties();
-
-
- private RedisPollUtils() {
- }
-
- static {
- initialPool();
-
- }
-
- /**
- * 鍒濆鍖朢edis杩炴帴姹
- */
- private static void initialPool() {
- try {
- //鍔犺浇杩炴帴姹犻厤缃枃浠
- props.load(RedisPollUtils.class.getClassLoader().getResourceAsStream("redis_config.properties"));
- // 鍒涘缓jedis姹犻厤缃疄渚
- JedisPoolConfig poolConfig = new JedisPoolConfig();
- poolConfig.setMaxTotal(Integer.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_MAXACTIVE)));
- poolConfig.setMaxIdle(Integer.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_MAXIDLE)));
- poolConfig.setMaxWaitMillis(Long.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_MAXWAIT)));
- poolConfig.setTestOnReturn(Boolean.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_TESTONRETURN)));
- poolConfig.setTestOnBorrow(Boolean.valueOf(props.getProperty(FlowWriteConfig.REDIS_POOL_TESTONBORROW)));
- // 鏍规嵁閰嶇疆瀹炰緥鍖杍edis姹
- jedisPool = new JedisPool(poolConfig, props.getProperty(FlowWriteConfig.REDIS_IP),
- Integer.valueOf(props.getProperty(FlowWriteConfig.REDIS_PORT)));
- } catch (Exception e) {
- logger.error("Redis杩炴帴姹犲垵濮嬪寲閿欒", e);
- }
- }
-
- /**
- * 鑾峰彇Jedis瀹炰緥
- *
- * @return Jedis瀹炰緥
- */
- public static Jedis getJedis() {
- Jedis jedis = null;
- try {
- if (jedisPool == null) {
- initialPool();
- }
- jedis = jedisPool.getResource();
- } catch (Exception e) {
- logger.error("Redis杩炴帴姹犻敊璇,鏃犳硶鑾峰彇杩炴帴", e);
- }
- return jedis;
- }
-
-// /**
-// * @param key redis key
-// * @return value
-// */
-// public static Integer getWorkerId(String key) {
-// int workId = 0;
-// int maxId = 32;
-// try (Jedis jedis = RedisPollUtils.getJedis()) {
-// if (jedis != null) {
-// String work = jedis.get(key);
-// if (StringUtil.isBlank(work)) {
-// jedis.set(key, "0");
-// } else {
-// workId = Integer.parseInt(work);
-// }
-// if (workId < maxId) {
-// jedis.set(key, String.valueOf(workId + 1));
-// } else {
-// workId = 0;
-// jedis.set(key, "1");
-// }
-// }
-// } catch (Exception e) {
-// logger.error("閫氳繃Redis鑾峰彇鐢ㄦ埛鍚嶅嚭鐜板紓甯", e);
-// workId = RandomUtils.nextInt(0, 31);
-// }
-// return workId;
-// }
-
- public static Integer getWorkerId(String key) {
- int workId = 0;
- try (Jedis jedis = RedisPollUtils.getJedis()) {
- if (jedis != null) {
- workId = Integer.parseInt(jedis.get(key));
- jedis.set(key, String.valueOf(workId + 2));
- logger.error("\n宸ヤ綔id鏄細" + workId + "\n");
- }
- } catch (Exception e) {
- logger.error("閫氳繃Redis鑾峰彇鐢ㄦ埛鍚嶅嚭鐜板紓甯", e);
- workId = RandomUtils.nextInt(0, 31);
- }
- return workId;
- }
-
-
-}
diff --git a/src/main/java/cn/ac/iie/utils/system/FlowWriteConfigurations.java b/src/main/java/cn/ac/iie/utils/system/FlowWriteConfigurations.java
index 273a5f8..3078b66 100644
--- a/src/main/java/cn/ac/iie/utils/system/FlowWriteConfigurations.java
+++ b/src/main/java/cn/ac/iie/utils/system/FlowWriteConfigurations.java
@@ -9,15 +9,15 @@ import java.util.Properties;
public final class FlowWriteConfigurations {
- // private static Properties propCommon = new Properties();
+ private static Properties propKafka = new Properties();
private static Properties propService = new Properties();
public static String getStringProperty(Integer type, String key) {
if (type == 0) {
return propService.getProperty(key);
-// } else if (type == 1) {
-// return propCommon.getProperty(key);
+ } else if (type == 1) {
+ return propKafka.getProperty(key);
} else {
return null;
}
@@ -27,8 +27,8 @@ public final class FlowWriteConfigurations {
public static Integer getIntProperty(Integer type, String key) {
if (type == 0) {
return Integer.parseInt(propService.getProperty(key));
-// } else if (type == 1) {
-// return Integer.parseInt(propCommon.getProperty(key));
+ } else if (type == 1) {
+ return Integer.parseInt(propKafka.getProperty(key));
} else {
return null;
}
@@ -37,8 +37,8 @@ public final class FlowWriteConfigurations {
public static Long getLongProperty(Integer type, String key) {
if (type == 0) {
return Long.parseLong(propService.getProperty(key));
-// } else if (type == 1) {
-// return Long.parseLong(propCommon.getProperty(key));
+ } else if (type == 1) {
+ return Long.parseLong(propKafka.getProperty(key));
} else {
return null;
}
@@ -47,8 +47,8 @@ public final class FlowWriteConfigurations {
public static Boolean getBooleanProperty(Integer type, String key) {
if (type == 0) {
return "true".equals(propService.getProperty(key).toLowerCase().trim());
-// } else if (type == 1) {
-// return "true".equals(propCommon.getProperty(key).toLowerCase().trim());
+ } else if (type == 1) {
+ return "true".equals(propKafka.getProperty(key).toLowerCase().trim());
} else {
return null;
}
@@ -57,8 +57,9 @@ public final class FlowWriteConfigurations {
static {
try {
propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
+ propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("kafka_config.properties"));
} catch (Exception e) {
-// propCommon = null;
+ propKafka = null;
propService = null;
}
}
diff --git a/src/main/java/cn/ac/iie/utils/zookeeper/DistributedLock.java b/src/main/java/cn/ac/iie/utils/zookeeper/DistributedLock.java
index 15f4506..6df8c4b 100644
--- a/src/main/java/cn/ac/iie/utils/zookeeper/DistributedLock.java
+++ b/src/main/java/cn/ac/iie/utils/zookeeper/DistributedLock.java
@@ -1,7 +1,5 @@
package cn.ac.iie.utils.zookeeper;
-import cn.ac.iie.common.FlowWriteConfig;
-import cn.ac.iie.utils.system.SnowflakeId;
import org.apache.log4j.Logger;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
@@ -188,32 +186,4 @@ public class DistributedLock implements Lock, Watcher {
}
}
- public static void main(String[] args) {
- ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
-
- Runnable runnable = new Runnable() {
- @Override
- public void run() {
- DistributedLock lock = null;
- try {
- lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
- lock.lock();
-// System.out.println(SnowflakeId.generateId());
- System.out.println(1);
- Thread.sleep(3000);
- } catch (InterruptedException e) {
- e.printStackTrace();
- } finally {
- if (lock != null) {
- lock.unlock();
- }
- }
- }
- };
-
- for (int i = 0; i < 10; i++) {
- Thread t = new Thread(runnable);
- t.start();
- }
- }
}
\ No newline at end of file
diff --git a/src/main/java/cn/ac/iie/utils/zookeeper/ZookeeperUtils.java b/src/main/java/cn/ac/iie/utils/zookeeper/ZookeeperUtils.java
index 639b50c..e81885a 100644
--- a/src/main/java/cn/ac/iie/utils/zookeeper/ZookeeperUtils.java
+++ b/src/main/java/cn/ac/iie/utils/zookeeper/ZookeeperUtils.java
@@ -1,7 +1,5 @@
package cn.ac.iie.utils.zookeeper;
-import cn.ac.iie.common.FlowWriteConfig;
-import org.apache.commons.lang3.RandomUtils;
import org.apache.log4j.Logger;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
@@ -13,9 +11,12 @@ import java.util.concurrent.CountDownLatch;
/**
* @author qidaijie
+ * @Package cn.ac.iie.utils.zookeeper
+ * @Description:
+ * @date 2020/11/1411:28
*/
public class ZookeeperUtils implements Watcher {
- private static Logger logger = Logger.getLogger(ZookeeperUtils.class);
+ private static Logger logger = Logger.getLogger(com.zdjizhi.utils.ZookeeperUtils.class);
private ZooKeeper zookeeper;
@@ -25,7 +26,7 @@ public class ZookeeperUtils implements Watcher {
@Override
public void process(WatchedEvent event) {
- if (event.getState() == Watcher.Event.KeeperState.SyncConnected) {
+ if (event.getState() == Event.KeeperState.SyncConnected) {
countDownLatch.countDown();
}
}
@@ -36,15 +37,14 @@ public class ZookeeperUtils implements Watcher {
*
* @param path 鑺傜偣璺緞
*/
- public int modifyNode(String path) {
- createNode("/Snowflake", null, ZooDefs.Ids.OPEN_ACL_UNSAFE);
- createNode("/Snowflake/" + FlowWriteConfig.KAFKA_TOPIC, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE);
- int workerId;
+ public int modifyNode(String path, String zookeeperIp) {
+ createNode(path, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, zookeeperIp);
+ int workerId = 0;
try {
- connectZookeeper();
+ connectZookeeper(zookeeperIp);
Stat stat = zookeeper.exists(path, true);
workerId = Integer.parseInt(getNodeDate(path));
- if (workerId > 55) {
+ if (workerId > 63) {
workerId = 0;
zookeeper.setData(path, "1".getBytes(), stat.getVersion());
} else {
@@ -56,22 +56,22 @@ public class ZookeeperUtils implements Watcher {
}
}
} catch (KeeperException | InterruptedException e) {
- e.printStackTrace();
- workerId = RandomUtils.nextInt(56, 63);
+ logger.error("modify error Can't modify," + e.getMessage());
} finally {
closeConn();
}
- logger.error("宸ヤ綔ID鏄細" + workerId);
+ logger.warn("workerID is锛" + workerId);
return workerId;
}
/**
* 杩炴帴zookeeper
*
+ * @param host 鍦板潃
*/
- private void connectZookeeper() {
+ public void connectZookeeper(String host) {
try {
- zookeeper = new ZooKeeper(FlowWriteConfig.ZOOKEEPER_SERVERS, SESSION_TIME_OUT, this);
+ zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await();
} catch (IOException | InterruptedException e) {
e.printStackTrace();
@@ -81,7 +81,7 @@ public class ZookeeperUtils implements Watcher {
/**
* 鍏抽棴杩炴帴
*/
- private void closeConn() {
+ public void closeConn() {
try {
if (zookeeper != null) {
zookeeper.close();
@@ -97,7 +97,7 @@ public class ZookeeperUtils implements Watcher {
* @param path 鑺傜偣璺緞
* @return 鍐呭/寮傚父null
*/
- private String getNodeDate(String path) {
+ public String getNodeDate(String path) {
String result = null;
Stat stat = new Stat();
try {
@@ -115,14 +115,18 @@ public class ZookeeperUtils implements Watcher {
* @param date 鑺傜偣鎵瀛樺偍鐨勬暟鎹殑byte[]
* @param acls 鎺у埗鏉冮檺绛栫暐
*/
- private void createNode(String path, byte[] date, List acls) {
+ public void createNode(String path, byte[] date, List acls, String zookeeperIp) {
try {
- connectZookeeper();
+ connectZookeeper(zookeeperIp);
Stat exists = zookeeper.exists(path, true);
if (exists == null) {
+ Stat existsSnowflakeld = zookeeper.exists("/Snowflake", true);
+ if (existsSnowflakeld == null) {
+ zookeeper.create("/Snowflake", null, acls, CreateMode.PERSISTENT);
+ }
zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
} else {
- logger.warn("Node already exists!,Don't need to create");
+ logger.warn("Node already exists ! Don't need to create");
}
} catch (KeeperException | InterruptedException e) {
e.printStackTrace();
@@ -130,5 +134,4 @@ public class ZookeeperUtils implements Watcher {
closeConn();
}
}
-
}
diff --git a/src/main/java/log4j.properties b/src/main/java/log4j.properties
deleted file mode 100644
index c355401..0000000
--- a/src/main/java/log4j.properties
+++ /dev/null
@@ -1,23 +0,0 @@
-#Log4j
-log4j.rootLogger=info,console,file
-# 鎺у埗鍙版棩蹇楄缃
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.Threshold=info
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
-
-# 鏂囦欢鏃ュ織璁剧疆
-log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.file.Threshold=error
-log4j.appender.file.encoding=UTF-8
-log4j.appender.file.Append=true
-#璺緞璇风敤鐩稿璺緞锛屽仛濂界浉鍏虫祴璇曡緭鍑哄埌搴旂敤鐩笅
-log4j.appender.file.file=galaxy-name.log
-log4j.appender.file.DatePattern='.'yyyy-MM-dd
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
-log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
-#MyBatis 閰嶇疆锛宑om.nis.web.dao鏄痬ybatis鎺ュ彛鎵鍦ㄥ寘
-log4j.logger.com.nis.web.dao=debug
-#bonecp鏁版嵁婧愰厤缃
-log4j.category.com.jolbox=debug,console
\ No newline at end of file
diff --git a/src/main/main.iml b/src/main/main.iml
new file mode 100644
index 0000000..5fdae01
--- /dev/null
+++ b/src/main/main.iml
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/test/java/cn/ac/iie/test/DistributedLock.java b/src/test/java/cn/ac/iie/test/DistributedLock.java
deleted file mode 100644
index 030bf4a..0000000
--- a/src/test/java/cn/ac/iie/test/DistributedLock.java
+++ /dev/null
@@ -1,200 +0,0 @@
-package cn.ac.iie.test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.Lock;
-
-import cn.ac.iie.common.FlowWriteConfig;
-import cn.ac.iie.utils.zookeeper.ZookeeperUtils;
-import org.apache.log4j.Logger;
-import org.apache.zookeeper.*;
-import org.apache.zookeeper.data.Stat;
-
-public class DistributedLock implements Lock, Watcher {
- private static Logger logger = Logger.getLogger(DistributedLock.class);
-
- private ZooKeeper zk = null;
- // 鏍硅妭鐐
- private String ROOT_LOCK = "/locks";
- // 绔炰簤鐨勮祫婧
- private String lockName;
- // 绛夊緟鐨勫墠涓涓攣
- private String WAIT_LOCK;
- // 褰撳墠閿
- private String CURRENT_LOCK;
- // 璁℃暟鍣
- private CountDownLatch countDownLatch;
- private int sessionTimeout = 30000;
- private List exceptionList = new ArrayList();
-
- /**
- * 閰嶇疆鍒嗗竷寮忛攣
- *
- * @param config 杩炴帴鐨剈rl
- * @param lockName 绔炰簤璧勬簮
- */
- public DistributedLock(String config, String lockName) {
- this.lockName = lockName;
- try {
- // 杩炴帴zookeeper
- zk = new ZooKeeper(config, sessionTimeout, this);
- Stat stat = zk.exists(ROOT_LOCK, false);
- if (stat == null) {
- // 濡傛灉鏍硅妭鐐逛笉瀛樺湪锛屽垯鍒涘缓鏍硅妭鐐
- zk.create(ROOT_LOCK, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
- }
- } catch (IOException | InterruptedException | KeeperException e) {
- logger.error("Node already exists!");
- }
- }
-
- // 鑺傜偣鐩戣鍣
- public void process(WatchedEvent event) {
- if (this.countDownLatch != null) {
- this.countDownLatch.countDown();
- }
- }
-
- public void lock() {
- if (exceptionList.size() > 0) {
- throw new LockException(exceptionList.get(0));
- }
- try {
- if (this.tryLock()) {
-// System.out.println(Thread.currentThread().getName() + " " + lockName + "鑾峰緱浜嗛攣");
-// ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
-// zookeeperUtils.modifyNode("/Snowflake/" + FlowWriteConfig.KAFKA_TOPIC);
- } else {
- // 绛夊緟閿
- waitForLock(WAIT_LOCK, sessionTimeout);
- }
- } catch (InterruptedException | KeeperException e) {
- e.printStackTrace();
- }
- }
-
- public boolean tryLock() {
- try {
- String splitStr = "_lock_";
- if (lockName.contains(splitStr)) {
- throw new LockException("閿佸悕鏈夎");
- }
- // 鍒涘缓涓存椂鏈夊簭鑺傜偣
- CURRENT_LOCK = zk.create(ROOT_LOCK + "/" + lockName + splitStr, new byte[0],
- ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
-// System.out.println(CURRENT_LOCK + " 宸茬粡鍒涘缓");
- // 鍙栨墍鏈夊瓙鑺傜偣
- List subNodes = zk.getChildren(ROOT_LOCK, false);
- // 鍙栧嚭鎵鏈塴ockName鐨勯攣
- List lockObjects = new ArrayList();
- for (String node : subNodes) {
- String _node = node.split(splitStr)[0];
- if (_node.equals(lockName)) {
- lockObjects.add(node);
- }
- }
- Collections.sort(lockObjects);
-// System.out.println(Thread.currentThread().getName() + " 鐨勯攣鏄 " + CURRENT_LOCK);
- // 鑻ュ綋鍓嶈妭鐐逛负鏈灏忚妭鐐癸紝鍒欒幏鍙栭攣鎴愬姛
- if (CURRENT_LOCK.equals(ROOT_LOCK + "/" + lockObjects.get(0))) {
- return true;
- }
- // 鑻ヤ笉鏄渶灏忚妭鐐癸紝鍒欐壘鍒拌嚜宸辩殑鍓嶄竴涓妭鐐
- String prevNode = CURRENT_LOCK.substring(CURRENT_LOCK.lastIndexOf("/") + 1);
- WAIT_LOCK = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
- } catch (InterruptedException | KeeperException e) {
- e.printStackTrace();
- }
- return false;
- }
-
-
- @Override
- public boolean tryLock(long timeout, TimeUnit unit) {
- try {
- if (this.tryLock()) {
- return true;
- }
- return waitForLock(WAIT_LOCK, timeout);
- } catch (Exception e) {
- e.printStackTrace();
- }
- return false;
- }
-
- // 绛夊緟閿
- private boolean waitForLock(String prev, long waitTime) throws KeeperException, InterruptedException {
- Stat stat = zk.exists(ROOT_LOCK + "/" + prev, true);
-
- if (stat != null) {
-// System.out.println(Thread.currentThread().getName() + "绛夊緟閿 " + ROOT_LOCK + "/" + prev);
- this.countDownLatch = new CountDownLatch(1);
- // 璁℃暟绛夊緟锛岃嫢绛夊埌鍓嶄竴涓妭鐐规秷澶憋紝鍒檖recess涓繘琛宑ountDown锛屽仠姝㈢瓑寰咃紝鑾峰彇閿
- this.countDownLatch.await(waitTime, TimeUnit.MILLISECONDS);
- this.countDownLatch = null;
-// System.out.println(Thread.currentThread().getName() + " 绛夊埌浜嗛攣");
- }
- return true;
- }
-
- public void unlock() {
- try {
-// System.out.println("閲婃斁閿 " + CURRENT_LOCK);
- zk.delete(CURRENT_LOCK, -1);
- CURRENT_LOCK = null;
- zk.close();
- } catch (InterruptedException | KeeperException e) {
- e.printStackTrace();
- }
- }
-
- public Condition newCondition() {
- return null;
- }
-
- public void lockInterruptibly() throws InterruptedException {
- this.lock();
- }
-
-
- public class LockException extends RuntimeException {
- private static final long serialVersionUID = 1L;
-
- public LockException(String e) {
- super(e);
- }
-
- public LockException(Exception e) {
- super(e);
- }
- }
-
- public static void main(String[] args) {
- ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
-
- Runnable runnable = new Runnable() {
- public void run() {
- DistributedLock lock = null;
- try {
- lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
- lock.lock();
- zookeeperUtils.modifyNode("/Snowflake/" + FlowWriteConfig.KAFKA_TOPIC);
- } finally {
- if (lock != null) {
- lock.unlock();
- }
- }
- }
- };
-
- for (int i = 0; i < 10; i++) {
- Thread t = new Thread(runnable);
- t.start();
- }
- }
-}
\ No newline at end of file
diff --git a/src/test/java/cn/ac/iie/test/DomainTest.java b/src/test/java/cn/ac/iie/test/DomainTest.java
new file mode 100644
index 0000000..b4584e2
--- /dev/null
+++ b/src/test/java/cn/ac/iie/test/DomainTest.java
@@ -0,0 +1,19 @@
+package cn.ac.iie.test;
+
+import com.zdjizhi.utils.FormatUtils;
+import org.junit.Test;
+
+/**
+ * @author qidaijie
+ * @Package com.zdjizhi.flume
+ * @Description:
+ * @date 2020/11/2212:06
+ */
+public class DomainTest {
+
+ @Test
+ public void getDomainTest() {
+ String url = "array808.prod.do.dsp.mp.microsoft.com";
+ System.out.println(FormatUtils.getTopPrivateDomain(url));
+ }
+}
diff --git a/src/test/java/cn/ac/iie/test/DomainUtils.java b/src/test/java/cn/ac/iie/test/DomainUtils.java
deleted file mode 100644
index e7bdf78..0000000
--- a/src/test/java/cn/ac/iie/test/DomainUtils.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package cn.ac.iie.test;
-
-import com.zdjizhi.utils.StringUtil;
-
-import javax.xml.bind.SchemaOutputResolver;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class DomainUtils {
-
- private static Pattern pattern = Pattern.compile("[^\\\\.]+(\\.com\\.cn|\\.net\\.cn|\\.org\\.cn|\\.gov\\.cn|\\.com|\\.net|\\.cn|\\.org|\\.cc|\\.me|\\.tel|\\.mobi|\\.asia|\\.biz|\\.info|\\.name|\\.tv|\\.hk|\\.鍏徃|\\.涓浗|\\.缃戠粶)");
-
- public static void main(String[] args) {
- System.out.println(getTopDomain("agoo-report.m.taobao.com"));
-
- }
-
- private static String getTopDomain(String url) {
-// try {
- //鑾峰彇鍊艰浆鎹负灏忓啓
-// String host = new URL(url).getHost().toLowerCase();//news.hexun.com
-// Pattern pattern = Pattern.compile("[^\\\\.]+(\\.com\\.cn|\\.net\\.cn|\\.org\\.cn|\\.gov\\.cn|\\.com|\\.net|\\.cn|\\.org|\\.cc|\\.me|\\.tel|\\.mobi|\\.asia|\\.biz|\\.info|\\.name|\\.tv|\\.hk|\\.鍏徃|\\.涓浗|\\.缃戠粶)");
- Matcher matcher = pattern.matcher(url);
- if (matcher.find()){
- return matcher.group();
- }
-// } catch (MalformedURLException e) {
-// e.printStackTrace();
-// }
- return null;
- }
-
-}
diff --git a/src/test/java/cn/ac/iie/test/SnowflakeIdWorker.java b/src/test/java/cn/ac/iie/test/SnowflakeIdWorker.java
deleted file mode 100644
index 470284a..0000000
--- a/src/test/java/cn/ac/iie/test/SnowflakeIdWorker.java
+++ /dev/null
@@ -1,182 +0,0 @@
-package cn.ac.iie.test;
-
-/**
- * Twitter_Snowflake
- * SnowFlake鐨勭粨鏋勫涓(姣忛儴鍒嗙敤-鍒嗗紑):
- * 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000
- * 1浣嶆爣璇嗭紝鐢变簬long鍩烘湰绫诲瀷鍦↗ava涓槸甯︾鍙风殑锛屾渶楂樹綅鏄鍙蜂綅锛屾鏁版槸0锛岃礋鏁版槸1锛屾墍浠d涓鑸槸姝f暟锛屾渶楂樹綅鏄0
- * 41浣嶆椂闂存埅(姣绾)锛屾敞鎰忥紝41浣嶆椂闂存埅涓嶆槸瀛樺偍褰撳墠鏃堕棿鐨勬椂闂存埅锛岃屾槸瀛樺偍鏃堕棿鎴殑宸硷紙褰撳墠鏃堕棿鎴 - 寮濮嬫椂闂存埅)
- * 寰楀埌鐨勫硷級锛岃繖閲岀殑鐨勫紑濮嬫椂闂存埅锛屼竴鑸槸鎴戜滑鐨刬d鐢熸垚鍣ㄥ紑濮嬩娇鐢ㄧ殑鏃堕棿锛岀敱鎴戜滑绋嬪簭鏉ユ寚瀹氱殑锛堝涓嬩笅闈㈢▼搴廔dWorker绫荤殑startTime灞炴э級銆41浣嶇殑鏃堕棿鎴紝鍙互浣跨敤69骞达紝骞碩 = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69
- * 10浣嶇殑鏁版嵁鏈哄櫒浣嶏紝鍙互閮ㄧ讲鍦1024涓妭鐐癸紝鍖呮嫭5浣峝atacenterId鍜5浣峸orkerId
- * 12浣嶅簭鍒楋紝姣鍐呯殑璁℃暟锛12浣嶇殑璁℃暟椤哄簭鍙锋敮鎸佹瘡涓妭鐐规瘡姣(鍚屼竴鏈哄櫒锛屽悓涓鏃堕棿鎴)浜х敓4096涓狪D搴忓彿
- * 鍔犺捣鏉ュ垰濂64浣嶏紝涓轰竴涓狶ong鍨嬨
- * SnowFlake鐨勪紭鐐规槸锛屾暣浣撲笂鎸夌収鏃堕棿鑷鎺掑簭锛屽苟涓旀暣涓垎甯冨紡绯荤粺鍐呬笉浼氫骇鐢烮D纰版挒(鐢辨暟鎹腑蹇僆D鍜屾満鍣↖D浣滃尯鍒)锛屽苟涓旀晥鐜囪緝楂橈紝缁忔祴璇曪紝SnowFlake姣忕鑳藉浜х敓26涓嘔D宸﹀彸銆
- */
-public class SnowflakeIdWorker {
-// ==============================Fields===========================================
- /**
- * 寮濮嬫椂闂存埅 (2015-01-01)
- */
- private final long twepoch = timeGen();
-
- /**
- * 鏈哄櫒id鎵鍗犵殑浣嶆暟
- */
- private final long workerIdBits = 5L;
-
- /**
- * 鏁版嵁鏍囪瘑id鎵鍗犵殑浣嶆暟
- */
- private final long datacenterIdBits = 5L;
-
- /**
- * 鏀寔鐨勬渶澶ф満鍣╥d锛岀粨鏋滄槸31 (杩欎釜绉讳綅绠楁硶鍙互寰堝揩鐨勮绠楀嚭鍑犱綅浜岃繘鍒舵暟鎵鑳借〃绀虹殑鏈澶у崄杩涘埗鏁)
- */
- private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
-
- /**
- * 鏀寔鐨勬渶澶ф暟鎹爣璇唅d锛岀粨鏋滄槸31
- */
- private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
-
- /**
- * 搴忓垪鍦╥d涓崰鐨勪綅鏁
- */
- private final long sequenceBits = 12L;
-
- /**
- * 鏈哄櫒ID鍚戝乏绉12浣
- */
- private final long workerIdShift = sequenceBits;
-
- /**
- * 鏁版嵁鏍囪瘑id鍚戝乏绉17浣(12+5)
- */
- private final long datacenterIdShift = sequenceBits + workerIdBits;
-
- /**
- * 鏃堕棿鎴悜宸︾Щ22浣(5+5+12)
- */
- private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
-
- /**
- * 鐢熸垚搴忓垪鐨勬帺鐮侊紝杩欓噷涓4095 (0b111111111111=0xfff=4095)
- */
- private final long sequenceMask = -1L ^ (-1L << sequenceBits);
-
- /**
- * 宸ヤ綔鏈哄櫒ID(0~31)
- */
- private long workerId;
-
- /**
- * 鏁版嵁涓績ID(0~31)
- */
- private long datacenterId;
-
- /**
- * 姣鍐呭簭鍒(0~4095)
- */
- private long sequence = 0L;
-
- /**
- * 涓婃鐢熸垚ID鐨勬椂闂存埅
- */
- private long lastTimestamp = -1L;
-
- //==============================Constructors=====================================
-
- /**
- * 鏋勯犲嚱鏁
- *
- * @param workerId 宸ヤ綔ID (0~31)
- * @param datacenterId 鏁版嵁涓績ID (0~31)
- */
- public SnowflakeIdWorker(long workerId, long datacenterId) {
- if (workerId > maxWorkerId || workerId < 0) {
- throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
- }
- if (datacenterId > maxDatacenterId || datacenterId < 0) {
- throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
- }
- this.workerId = workerId;
- this.datacenterId = datacenterId;
- }
-
- // ==============================Methods==========================================
-
- /**
- * 鑾峰緱涓嬩竴涓狪D (璇ユ柟娉曟槸绾跨▼瀹夊叏鐨)
- *
- * @return SnowflakeId
- */
- public synchronized long nextId() {
- long timestamp = timeGen();
-
- //濡傛灉褰撳墠鏃堕棿灏忎簬涓婁竴娆D鐢熸垚鐨勬椂闂存埑锛岃鏄庣郴缁熸椂閽熷洖閫杩囪繖涓椂鍊欏簲褰撴姏鍑哄紓甯
- if (timestamp < lastTimestamp) {
- throw new RuntimeException(
- String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
- }
-
- //濡傛灉鏄悓涓鏃堕棿鐢熸垚鐨勶紝鍒欒繘琛屾绉掑唴搴忓垪
- if (lastTimestamp == timestamp) {
- sequence = (sequence + 1) & sequenceMask;
- //姣鍐呭簭鍒楁孩鍑
- if (sequence == 0) {
- //闃诲鍒颁笅涓涓绉,鑾峰緱鏂扮殑鏃堕棿鎴
- timestamp = tilNextMillis(lastTimestamp);
- }
- }
- //鏃堕棿鎴虫敼鍙橈紝姣鍐呭簭鍒楅噸缃
- else {
- sequence = 0L;
- }
-
- //涓婃鐢熸垚ID鐨勬椂闂存埅
- lastTimestamp = timestamp;
-
- //绉讳綅骞堕氳繃鎴栬繍绠楁嫾鍒颁竴璧风粍鎴64浣嶇殑ID
- return ((timestamp - twepoch) << timestampLeftShift)
- | (datacenterId << datacenterIdShift)
- | (workerId << workerIdShift)
- | sequence;
- }
-
- /**
- * 闃诲鍒颁笅涓涓绉掞紝鐩村埌鑾峰緱鏂扮殑鏃堕棿鎴
- *
- * @param lastTimestamp 涓婃鐢熸垚ID鐨勬椂闂存埅
- * @return 褰撳墠鏃堕棿鎴
- */
- protected long tilNextMillis(long lastTimestamp) {
- long timestamp = timeGen();
- while (timestamp <= lastTimestamp) {
- timestamp = timeGen();
- }
- return timestamp;
- }
-
- /**
- * 杩斿洖浠ユ绉掍负鍗曚綅鐨勫綋鍓嶆椂闂
- *
- * @return 褰撳墠鏃堕棿(姣)
- */
- protected long timeGen() {
- return System.currentTimeMillis();
- }
-
- //==============================Test=============================================
-
- /**
- * 娴嬭瘯
- */
- public static void main(String[] args) {
- SnowflakeIdWorker idWorker = new SnowflakeIdWorker(0, 0);
- for (int i = 0; i < 1000; i++) {
- long id = idWorker.nextId();
-// System.out.println(Long.toBinaryString(id));
- System.out.println(id);
- }
- }
-}
\ No newline at end of file
diff --git a/src/test/java/cn/ac/iie/test/TestThread.java b/src/test/java/cn/ac/iie/test/TestThread.java
deleted file mode 100644
index 5938b8a..0000000
--- a/src/test/java/cn/ac/iie/test/TestThread.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package cn.ac.iie.test;
-
-
-import cn.ac.iie.utils.system.SnowflakeId;
-import cn.ac.iie.utils.zookeeper.ZookeeperUtils;
-
-class RunnableDemo implements Runnable {
- private Thread t;
-
- private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
-
- static {
- zookeeperUtils.connectZookeeper("192.168.40.207:2181");
- }
-
- @Override
- public void run() {
- zookeeperUtils.modifyNode("/testNode/UID-TEST");
- System.out.println(zookeeperUtils.getNodeDate("/testNode/UID-TEST"));
-// zookeeperUtils.closeConn();
-
- }
-
- public void start() {
- if (t == null) {
- t = new Thread(this);
- t.start();
- }
- }
-}
-
-public class TestThread {
- public static void main(String[] args) {
- RunnableDemo R1 = new RunnableDemo();
- RunnableDemo R2 = new RunnableDemo();
-// RunnableDemo R3 = new RunnableDemo();
-// RunnableDemo R4 = new RunnableDemo();
- R1.start();
- try {
- Thread.sleep(10000);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- R2.start();
-// R3.start();
-// R4.start();
-
- }
-}
diff --git a/src/test/java/cn/ac/iie/test/URLUtil.java b/src/test/java/cn/ac/iie/test/URLUtil.java
deleted file mode 100644
index 1b1e590..0000000
--- a/src/test/java/cn/ac/iie/test/URLUtil.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package cn.ac.iie.test;
-
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.regex.Pattern;
-
-public class URLUtil {
-
- private final static Set PublicSuffixSet = new HashSet(
- Arrays.asList(new String(
- "com|org|net|gov|edu|co|tv|mobi|info|asia|xxx|onion|cn|com.cn|edu.cn|gov.cn|net.cn|org.cn|jp|kr|tw|com.hk|hk|com.hk|org.hk|se|com.se|org.se")
- .split("\\|")));
-
- private static Pattern IP_PATTERN = Pattern.compile("(\\d{1,3}\\.){3}(\\d{1,3})");
-
- /**
- * 鑾峰彇url鐨勯《绾у煙鍚
- *
- * @param url
- * @return
- */
- public static String getDomainName(URL url) {
- String host = url.getHost();
- if (host.endsWith(".")) {
- host = host.substring(0, host.length() - 1);
- }
- if (IP_PATTERN.matcher(host).matches()) {
- return host;
- }
-
- int index = 0;
- String candidate = host;
- for (; index >= 0; ) {
- index = candidate.indexOf('.');
- String subCandidate = candidate.substring(index + 1);
- if (PublicSuffixSet.contains(subCandidate)) {
- return candidate;
- }
- candidate = subCandidate;
- }
- return candidate;
- }
-
- /**
- * 鑾峰彇url鐨勯《绾у煙鍚
- *
- * @param url
- * @return
- * @throws MalformedURLException
- */
- public static String getDomainName(String url) throws MalformedURLException {
- return getDomainName(new URL(url));
- }
-
- /**
- * 鍒ゆ柇涓や釜url椤剁骇鍩熷悕鏄惁鐩哥瓑
- *
- * @param url1
- * @param url2
- * @return
- */
- public static boolean isSameDomainName(URL url1, URL url2) {
- return getDomainName(url1).equalsIgnoreCase(getDomainName(url2));
- }
-
- /**
- * 鍒ゆ柇涓や釜url椤剁骇鍩熷悕鏄惁鐩哥瓑
- *
- * @param url1
- * @param url2
- * @return
- * @throws MalformedURLException
- */
- public static boolean isSameDomainName(String url1, String url2)
- throws MalformedURLException {
- return isSameDomainName(new URL(url1), new URL(url2));
- }
-
- public static void main(String[] args) throws Exception {
-// String urlStr = "http://news.hexun.com/2017-09-23/190978248.html";
- String urlStr = "array703-prod.do.dsp.mp.microsoft.com";
- System.out.println(getDomainName(urlStr.replace("\uFEFF", "")));
- System.out.println(getDomainName(new URL(urlStr.replace("\uFEFF", ""))));
-
- }
-
-}
diff --git a/src/test/java/cn/ac/iie/test/ZookeeperTest.java b/src/test/java/cn/ac/iie/test/ZookeeperTest.java
deleted file mode 100644
index 2cb6c9f..0000000
--- a/src/test/java/cn/ac/iie/test/ZookeeperTest.java
+++ /dev/null
@@ -1,126 +0,0 @@
-package cn.ac.iie.test;
-
-import org.apache.log4j.Logger;
-import org.apache.zookeeper.*;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Stat;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-
-public class ZookeeperTest implements Watcher {
- private static Logger logger = Logger.getLogger(ZookeeperTest.class);
- private static ZooKeeper zookeeper;
-
- private static final int SESSION_TIME_OUT = 2000;
-
-// private static Stat stat = new Stat();
-
- private CountDownLatch countDownLatch = new CountDownLatch(1);
-
- public void process(WatchedEvent event) {
- if (event.getState() == Watcher.Event.KeeperState.SyncConnected) {
- countDownLatch.countDown();
- }
- }
-
- /**
- * 杩炴帴zookeeper
- *
- * @param host 鍦板潃
- */
- private void connectZookeeper(String host) {
- try {
- zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
- countDownLatch.await();
- } catch (IOException | InterruptedException e) {
- e.printStackTrace();
- }
- }
-
-
- /**
- * @param path 璺緞
- * @return 瀛愯妭鐐
- */
- private List getChildren(String path) {
- try {
- return zookeeper.getChildren(path, false);
- } catch (KeeperException | InterruptedException e) {
- e.printStackTrace();
- return null;
- }
- }
-
- /**
- * @param path 鑺傜偣鍒涘缓鐨勮矾寰
- * @param date 鑺傜偣鎵瀛樺偍鐨勬暟鎹殑byte[]
- * @param acls 鎺у埗鏉冮檺绛栫暐
- */
- private void createNode(String path, byte[] date, List acls) {
- try {
- Stat exists = zookeeper.exists(path, true);
- if (exists == null) {
- zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
- } else {
- logger.warn("Node already exists!,Don't need to create");
- }
- } catch (KeeperException | InterruptedException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * 淇敼鑺傜偣淇℃伅
- *
- * @param path 鑺傜偣璺緞
- * @param date 淇敼鐨勬暟鎹
- */
- private void modifyNode(String path, byte[] date) {
- try {
- Stat stat = zookeeper.exists(path, true);
- if (stat != null) {
- zookeeper.setData(path, date, stat.getVersion());
- } else {
- logger.error("Node does not exist!,Can't modify");
- }
- } catch (KeeperException | InterruptedException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * 鑾峰彇鑺傜偣鍐呭
- *
- * @param path 鑺傜偣璺緞
- * @return 鍐呭/寮傚父null
- */
- private String getNodeDate(String path) {
- String result = null;
- Stat stat = new Stat();
- try {
- byte[] resByte = zookeeper.getData(path, true, stat);
- result = new String(resByte);
- } catch (KeeperException | InterruptedException e) {
- logger.error("Get node information exception");
- e.printStackTrace();
- }
- return result;
- }
-
-
- public static void main(String[] args) {
- ZookeeperTest zookeeperTest = new ZookeeperTest();
- try {
- zookeeperTest.connectZookeeper("192.168.40.119:2181,192.168.40.122:2181,192.168.40.123:2181");
-// zookeeperTest.createNode("/Snowflake", "".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE);
-// System.out.println(zookeeperTest.getNodeDate("/testNode/UID-TEST"));
- zookeeperTest.modifyNode("/Snowflake/SESSION-TEST-LOG", "0".getBytes());
-// System.out.println(zookeeperTest.getNodeDate("/testNode/UID-TEST"));
-
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-}
diff --git a/src/test/java/cn/ac/iie/test/a.xml b/src/test/java/cn/ac/iie/test/a.xml
deleted file mode 100644
index 5e857b3..0000000
--- a/src/test/java/cn/ac/iie/test/a.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
- 1
-
- false
-
- {ip}
- 9001
- default
- {rootpassword}
-
-
diff --git a/src/test/java/cn/ac/iie/test/influxQueryTest.java b/src/test/java/cn/ac/iie/test/influxQueryTest.java
deleted file mode 100644
index 7bf5497..0000000
--- a/src/test/java/cn/ac/iie/test/influxQueryTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package cn.ac.iie.test;
-
-import org.influxdb.InfluxDB;
-import org.influxdb.InfluxDBFactory;
-import org.influxdb.dto.Pong;
-import org.influxdb.dto.Query;
-import org.influxdb.dto.QueryResult;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-public class influxQueryTest {
- private static InfluxDB client;
-
- public static void main(String[] args) {
-
- Query query = new Query("select * from test", "deltest");
- QueryResult a = getClient().query(query);
-
-// List lists = new ArrayList();
- for (QueryResult.Result result : a.getResults()) {
- List series = result.getSeries();
- for (QueryResult.Series serie : series) {
- List> values = serie.getValues();//瀛楁瀛楅泦鍚
- List colums = serie.getColumns();//瀛楁鍚
- System.out.println("colums:" + colums);
- for (List