diff --git a/.idea/compiler.xml b/.idea/compiler.xml
index 5cc2379..07e44d7 100644
--- a/.idea/compiler.xml
+++ b/.idea/compiler.xml
diff --git a/.idea/modules.xml b/.idea/modules.xml
index ab6cd18..477fae9 100644
--- a/.idea/modules.xml
+++ b/.idea/modules.xml
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
index 3faf420..0dc48fe 100644
--- a/.idea/workspace.xml
+++ b/.idea/workspace.xml
diff --git a/log-completion-doublewrite.iml b/log-completion-doublewrite.iml
new file mode 100644
index 0000000..a785e47
--- /dev/null
+++ b/log-completion-doublewrite.iml
diff --git a/pom.xml b/pom.xml
index e9fbf1c..077ee70 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
  <groupId>com.zdjizhi</groupId>
  <artifactId>log-completion-doublewrite</artifactId>
-  <version>1015</version>
+  <version>1214</version>
  <name>log-completion-doublewrite</name>
  <url>http://www.example.com</url>
@@ -37,8 +37,8 @@
2.7.1
1.0.0
2.2.3
-
- compile
+ provided
+
diff --git a/properties/default_config.properties b/properties/default_config.properties
index 94c70a5..aeb0309 100644
--- a/properties/default_config.properties
+++ b/properties/default_config.properties
@@ -1,33 +1,4 @@
-#Number of retries for the producer
-retries=0
-
-#Maximum time a batch may wait after creation before it must be sent, whether or not it is full
-linger.ms=10
-
-#If no response arrives before this timeout, the client resends the request when necessary
-request.timeout.ms=30000
-
-#The producer sends records in batches; batch size in bytes, default: 16384
-batch.size=262144
-
-#Size of the buffer the producer uses to cache messages
-#64M
-#buffer.memory=67108864
-#128M
-buffer.memory=134217728
-
-#Maximum size of a single request sent to the Kafka broker, default 1048576
-#5M
-#max.request.size=5242880
-#10M
-max.request.size=10485760
-
-#Kafka SASL username
-kafka.user=admin
-
-#Kafka SASL and SSL password
-kafka.pin=galaxy2019
-
+#====================Kafka Consumer====================#
#kafka source connection timeout
session.timeout.ms=60000
@@ -36,15 +7,48 @@ max.poll.records=3000
#kafka source poll bytes
max.partition.fetch.bytes=31457280
+#====================Kafka Producer====================#
+#Number of retries for the producer
+retries=0
-#hbase table name
-hbase.table.name=subscriber_info
+#Maximum time a batch may wait after creation before it must be sent, whether or not it is full
+linger.ms=10
-#Default charset for mail decoding
-mail.default.charset=UTF-8
+#If no response arrives before this timeout, the client resends the request when necessary
+request.timeout.ms=30000
+#The producer sends records in batches; batch size in bytes, default: 16384
+batch.size=262144
+
+#Size of the buffer the producer uses to cache messages
+#128M
+buffer.memory=134217728
+
+#Maximum size of a single request sent to the Kafka broker, default 1048576
+#10M
+max.request.size=10485760
+#====================Kafka Default====================#
#kafka source protocol; SSL or SASL
kafka.source.protocol=SASL
#kafka sink protocol; SSL or SASL
-kafka.sink.protocol=
\ No newline at end of file
+kafka.sink.protocol=
+
+#Kafka SASL username
+kafka.user=admin
+
+#Kafka SASL and SSL password
+kafka.pin=galaxy2019
+#====================Topology Default====================#
+
+#hbase table name
+hbase.table.name=tsg_galaxy:relation_framedip_account
+
+#Default charset for mail decoding
+mail.default.charset=UTF-8
+
+#0 = no validation, 1 = strict type checking, 2 = loose type checking
+log.transform.type=2
+
+#Maximum time between two output flushes (milliseconds)
+buffer.timeout=5000
\ No newline at end of file
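
The producer keys above are the stock Kafka client settings, so they map one-for-one onto ProducerConfig. A minimal sketch of the assumed wiring (not code from this repo; the broker address is the output.kafka.servers value from service_flow_config.properties):

    import java.util.Properties;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class ProducerPropsSketch {
        public static KafkaProducer<String, String> build() {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.3.60.3:9092"); // output.kafka.servers
            props.put(ProducerConfig.RETRIES_CONFIG, "0");                        // retries
            props.put(ProducerConfig.LINGER_MS_CONFIG, "10");                     // linger.ms
            props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "30000");         // request.timeout.ms
            props.put(ProducerConfig.BATCH_SIZE_CONFIG, "262144");                // batch.size
            props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "134217728");          // buffer.memory, 128M
            props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "10485760");        // max.request.size, 10M
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            return new KafkaProducer<>(props);
        }
    }
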
diff --git a/properties/file_type.properties b/properties/file_type.properties
new file mode 100644
index 0000000..8ffc908
--- /dev/null
+++ b/properties/file_type.properties
@@ -0,0 +1,5 @@
+txt
+html
+eml
+jpg
+png
\ No newline at end of file
diff --git a/properties/service_flow_config.properties b/properties/service_flow_config.properties
index bb7a28b..03e3b33 100644
--- a/properties/service_flow_config.properties
+++ b/properties/service_flow_config.properties
@@ -4,7 +4,7 @@
input.kafka.servers=10.3.60.3:9094
#Output Kafka address
-output.kafka.servers=10.3.45.126:6667,10.3.45.127:6667,10.3.45.128:6667
+output.kafka.servers=10.3.60.3:9092
#ZooKeeper address, used to configure log_id
zookeeper.servers=10.3.60.3:2181
@@ -12,33 +12,31 @@ zookeeper.servers=10.3.60.3:2181
#HBase ZooKeeper address, used to connect to HBase
hbase.zookeeper.servers=10.3.60.3:2181
-#OOS address
-oos.servers=10.3.45.124:8057
-
#--------------------------------HTTP / location library------------------------------#
#Location library path
-#tools.library=/opt/dat/
tools.library=D:/dingweiku/dat/
#Gateway schema endpoint
-schema.http=http://10.3.60.3:9999/metadata/schema/v1/fields/proxy_event
+schema.http=http://10.3.60.3:9999/metadata/schema/v1/fields/security_event
#Gateway APP_ID lookup endpoint
app.id.http=http://10.3.60.3:9999/open-api/appDicList
#--------------------------------Kafka consumer group info------------------------------#
-#Kafka input data topic
-#input.kafka.topic=SESSION-RECORD
-input.kafka.topic=PROXY-EVENT
-
+#OOS address
+oos.servers=10.3.45.124:8057
+#Input Kafka topic
+input.kafka.topic=test11
#File source data topic
-output.kafka.topic=TRAFFIC-FILE-METADATA
+output.kafka.topic=test-file-data
+
+percent.kafka.topic=test
+
-percent.kafka.topic=PROXY-EVENT
#Consumer group for the input topic; stores this consumer's offsets (conventionally named after the topology) so the next run does not re-read data
-group.id=session-record-log-20211018-A
+group.id=flink-test-1
#Producer compression mode: none or snappy
producer.kafka.compression.type=none
@@ -49,13 +47,16 @@ producer.ack=1
#--------------------------------Topology configuration------------------------------#
#Consumer parallelism
-consumer.parallelism=1
+source.parallelism=10
#Transform function parallelism
-transform.parallelism=1
+transform.parallelism=10
+
+#Kafka producer parallelism
+sink.parallelism=10
#Data center id, valid range (0-63)
-data.center.id.num=0
+data.center.id.num=7
#HBase cache refresh interval; 0 disables cache updates
hbase.tick.tuple.freq.secs=180
@@ -69,4 +70,4 @@ app.tick.tuple.freq.secs=0
mail.default.charset=UTF-8
#0 = output logs unmodified without completion, 1 = complete logs
-log.need.complete=1
+log.need.complete=1
\ No newline at end of file
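
source.parallelism, transform.parallelism and sink.parallelism correspond to per-operator setParallelism() calls in the Flink job. A rough sketch of how they are presumably applied (Consumer, Producer and the function classes are this repo's own; the pipeline mirrors the deleted LogFlowWriteTopology further down):

    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import com.zdjizhi.utils.functions.FilterNullFunction;
    import com.zdjizhi.utils.functions.MapCompletedFunction;
    import com.zdjizhi.utils.kafka.Consumer;
    import com.zdjizhi.utils.kafka.Producer;

    public class ParallelismSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            env.addSource(Consumer.getKafkaConsumer()).setParallelism(10)   // source.parallelism
               .map(new MapCompletedFunction()).setParallelism(10)          // transform.parallelism
               .filter(new FilterNullFunction()).setParallelism(10)
               .addSink(Producer.getKafkaProducer()).setParallelism(10);    // sink.parallelism
            env.execute("log-completion-doublewrite");
        }
    }
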
diff --git a/src/main/java/com/zdjizhi/bean/FileMeta.java b/src/main/java/com/zdjizhi/bean/FileMeta.java
deleted file mode 100644
index e24e0b4..0000000
--- a/src/main/java/com/zdjizhi/bean/FileMeta.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package com.zdjizhi.bean;
-
-import com.alibaba.fastjson.JSONArray;
-
-public class FileMeta {
- private long common_log_id;
- protected int common_recv_time;
- private String common_schema_type;
- private JSONArray sourceList;
- private int processing_time;
-
- public long getCommon_log_id() {
- return common_log_id;
- }
-
- public void setCommon_log_id(long common_log_id) {
- this.common_log_id = common_log_id;
- }
-
- public int getCommon_recv_time() {
- return common_recv_time;
- }
-
- public void setCommon_recv_time(int common_recv_time) {
- this.common_recv_time = common_recv_time;
- }
-
- public String getCommon_schema_type() {
- return common_schema_type;
- }
-
- public void setCommon_schema_type(String common_schema_type) {
- this.common_schema_type = common_schema_type;
- }
-
- public JSONArray getSourceList() {
- return sourceList;
- }
-
- public void setSourceList(JSONArray sourceList) {
- this.sourceList = sourceList;
- }
-
- public int getProcessing_time() {
- return processing_time;
- }
-
- public void setProcessing_time(int processing_time) {
- this.processing_time = processing_time;
- }
-}
diff --git a/src/main/java/com/zdjizhi/bean/SourceList.java b/src/main/java/com/zdjizhi/bean/SourceList.java
deleted file mode 100644
index 8fba85d..0000000
--- a/src/main/java/com/zdjizhi/bean/SourceList.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.zdjizhi.bean;
-
-public class SourceList {
- private String destination_oss_path;
- private String source_oss_path;
-
- public String getDestination_oss_path() {
- return destination_oss_path;
- }
-
- public void setDestination_oss_path(String destination_oss_path) {
- this.destination_oss_path = destination_oss_path;
- }
-
- public String getSource_oss_path() {
- return source_oss_path;
- }
-
- public void setSource_oss_path(String source_oss_path) {
- this.source_oss_path = source_oss_path;
- }
-}
diff --git a/src/main/java/com/zdjizhi/common/FlowWriteConfig.java b/src/main/java/com/zdjizhi/common/FlowWriteConfig.java
deleted file mode 100644
index f821dc2..0000000
--- a/src/main/java/com/zdjizhi/common/FlowWriteConfig.java
+++ /dev/null
@@ -1,83 +0,0 @@
-package com.zdjizhi.common;
-
-
-import com.zdjizhi.utils.system.FlowWriteConfigurations;
-
-/**
- * @author Administrator
- */
-public class FlowWriteConfig {
-
- public static final int IF_PARAM_LENGTH = 3;
- public static final String VISIBILITY = "disabled";
- public static final String FORMAT_SPLITTER = ",";
- public static final String IS_JSON_KEY_TAG = "$.";
- public static final String IF_CONDITION_SPLITTER = "=";
- public static final String MODEL = "remote";
- public static final String PROTOCOL_SPLITTER = "\\.";
-
- /**
- * System config
- */
- public static final Integer CONSUMER_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "consumer.parallelism");
- public static final Integer TRANSFORM_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "transform.parallelism");
- public static final Integer HBASE_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "hbase.tick.tuple.freq.secs");
- public static final Integer APP_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "app.tick.tuple.freq.secs");
- public static final Integer DATA_CENTER_ID_NUM = FlowWriteConfigurations.getIntProperty(0, "data.center.id.num");
- public static final Integer LOG_NEED_COMPLETE = FlowWriteConfigurations.getIntProperty(0, "log.need.complete");
- public static final String MAIL_DEFAULT_CHARSET = FlowWriteConfigurations.getStringProperty(0, "mail.default.charset");
- public static final String HBASE_TABLE_NAME = FlowWriteConfigurations.getStringProperty(1, "hbase.table.name");
-
- /**
- * kafka source config
- */
- public static final String SESSION_TIMEOUT_MS = FlowWriteConfigurations.getStringProperty(1, "session.timeout.ms");
- public static final String MAX_POLL_RECORDS = FlowWriteConfigurations.getStringProperty(1, "max.poll.records");
- public static final String MAX_PARTITION_FETCH_BYTES = FlowWriteConfigurations.getStringProperty(1, "max.partition.fetch.bytes");
-
-
- /**
- * kafka sink config
- */
- public static final String INPUT_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0, "input.kafka.servers");
- public static final String OUTPUT_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0, "output.kafka.servers");
- public static final String ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0, "zookeeper.servers");
- public static final String HBASE_ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0, "hbase.zookeeper.servers");
- public static final String GROUP_ID = FlowWriteConfigurations.getStringProperty(0, "group.id");
- public static final String OUTPUT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "output.kafka.topic");
- public static final String INPUT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "input.kafka.topic");
- public static final String PRODUCER_ACK = FlowWriteConfigurations.getStringProperty(0, "producer.ack");
- public static final String TOOLS_LIBRARY = FlowWriteConfigurations.getStringProperty(0, "tools.library");
- public static final String PRODUCER_KAFKA_COMPRESSION_TYPE = FlowWriteConfigurations.getStringProperty(0, "producer.kafka.compression.type");
- public static final String KAFKA_SOURCE_PROTOCOL = FlowWriteConfigurations.getStringProperty(1, "kafka.source.protocol");
- public static final String KAFKA_SINK_PROTOCOL = FlowWriteConfigurations.getStringProperty(1, "kafka.sink.protocol");
- public static final String KAFKA_USER = FlowWriteConfigurations.getStringProperty(1, "kafka.user");
- public static final String KAFKA_PIN = FlowWriteConfigurations.getStringProperty(1, "kafka.pin");
-
-
- public static final String PERCENT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "percent.kafka.topic");
-
- /**
- * connection kafka
- */
- public static final String RETRIES = FlowWriteConfigurations.getStringProperty(1, "retries");
- public static final String LINGER_MS = FlowWriteConfigurations.getStringProperty(1, "linger.ms");
- public static final Integer REQUEST_TIMEOUT_MS = FlowWriteConfigurations.getIntProperty(1, "request.timeout.ms");
- public static final Integer BATCH_SIZE = FlowWriteConfigurations.getIntProperty(1, "batch.size");
- public static final Integer BUFFER_MEMORY = FlowWriteConfigurations.getIntProperty(1, "buffer.memory");
- public static final Integer MAX_REQUEST_SIZE = FlowWriteConfigurations.getIntProperty(1, "max.request.size");
-
- /**
- * http
- */
- public static final String SCHEMA_HTTP = FlowWriteConfigurations.getStringProperty(0, "schema.http");
- public static final String APP_ID_HTTP = FlowWriteConfigurations.getStringProperty(0, "app.id.http");
-
-
- /**
- * oos
- */
- public static final String OOS_SERVERS = FlowWriteConfigurations.getStringProperty(0, "oos.servers");
-
-
-}
\ No newline at end of file
diff --git a/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java b/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java
deleted file mode 100644
index 0a27325..0000000
--- a/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package com.zdjizhi.topology;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.functions.DealFileProcessFunction;
-import com.zdjizhi.utils.functions.FilterNullFunction;
-import com.zdjizhi.utils.functions.MapCompletedFunction;
-import com.zdjizhi.utils.kafka.Consumer;
-import com.zdjizhi.utils.kafka.Producer;
-import org.apache.flink.streaming.api.datastream.DataStream;
-import org.apache.flink.streaming.api.datastream.DataStreamSource;
-import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
-import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.topology
- * @Description:
- * @date 2021/5/20 16:42
- */
-public class LogFlowWriteTopology {
- private static final Log logger = LogFactory.get();
-
- public static void main(String[] args) {
- final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
-
-// Enable checkpointing; interval sets the checkpoint trigger interval (milliseconds)
-// environment.enableCheckpointing(5000);
-//
- DataStreamSource streamSource = environment.addSource(Consumer.getKafkaConsumer())
- .setParallelism(FlowWriteConfig.CONSUMER_PARALLELISM);
-// DataStreamSource streamSource = environment.readTextFile("D:\\flinkdata\\security.log", "utf-8");
-
- if (FlowWriteConfig.LOG_NEED_COMPLETE == 1) {
- //Process raw logs: completion, transformation, etc.
- DataStream cleaningLog = streamSource.map(new MapCompletedFunction()).name("TransFormLogs")
- .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
- //Filter out empty records so they are not sent to Kafka
- DataStream result = cleaningLog.filter(new FilterNullFunction()).name("FilterAbnormalData")
- .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-
- //Handle records carrying unstructured (file) data
- SingleOutputStreamOperator process = result.process(new DealFileProcessFunction());
-
- //Send file metadata to TRAFFIC-FILE-METADATA
- process.getSideOutput(DealFileProcessFunction.metaToKafa).addSink(Producer.getKafkaProducer()).name("toTrafficFileMeta")
- .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-
-
-// //Send the completed data to the Percent Kafka cluster
- process.addSink(Producer.getPercentKafkaProducer()).name("toPercentKafka")
- .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-
- } else {
- //Filter out empty records so they are not sent to Kafka
- DataStream result = streamSource.filter(new FilterNullFunction()).name("FilterOriginalData")
- .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
- //Send data to Kafka
- result.addSink(Producer.getPercentKafkaProducer()).name("LogSinkKafka")
- .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
- }
-
- try {
- environment.execute(args[0]);
- } catch (Exception e) {
- logger.error("This Flink task start ERROR! Exception information is :" + e);
- e.printStackTrace();
- }
-
- }
-
-
-}
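
The commented-out enableCheckpointing(5000) above is the only fault-tolerance hook in the deleted topology. If it were re-enabled, a typical setup would look like this (a sketch only, using the 5000 ms interval from the comment; not code from this repo):

    import org.apache.flink.streaming.api.CheckpointingMode;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.enableCheckpointing(5000);  // trigger a checkpoint every 5 s
    env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
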
diff --git a/src/main/java/com/zdjizhi/utils/app/AppUtils.java b/src/main/java/com/zdjizhi/utils/app/AppUtils.java
deleted file mode 100644
index 0caeb25..0000000
--- a/src/main/java/com/zdjizhi/utils/app/AppUtils.java
+++ /dev/null
@@ -1,123 +0,0 @@
-package com.zdjizhi.utils.app;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson.JSONArray;
-import com.alibaba.fastjson.JSONObject;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.http.HttpClientUtil;
-
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-/**
- * AppId utility class
- *
- * @author qidaijie
- */
-
-public class AppUtils {
- private static final Log logger = LogFactory.get();
- private static Map appIdMap = new ConcurrentHashMap<>(128);
- private static AppUtils appUtils;
-
- private static void getAppInstance() {
- appUtils = new AppUtils();
- }
-
-
- /**
- * Constructor (new)
- */
- private AppUtils() {
- //Schedule periodic refresh
- updateAppIdCache();
- }
-
- /**
- * Refresh state
- */
- private static void change() {
- if (appUtils == null) {
- getAppInstance();
- }
- timestampsFilter();
- }
-
-
- /**
- * Fetch changed entries
- */
- private static void timestampsFilter() {
- try {
- Long begin = System.currentTimeMillis();
- String schema = HttpClientUtil.requestByGetMethod(FlowWriteConfig.APP_ID_HTTP);
- if (StringUtil.isNotBlank(schema)) {
- String data = JSONObject.parseObject(schema).getString("data");
- JSONArray objects = JSONArray.parseArray(data);
- for (Object object : objects) {
- JSONArray jsonArray = JSONArray.parseArray(object.toString());
- int key = jsonArray.getInteger(0);
- String value = jsonArray.getString(1);
- if (appIdMap.containsKey(key)) {
- if (!value.equals(appIdMap.get(key))) {
- appIdMap.put(key, value);
- }
- } else {
- appIdMap.put(key, value);
- }
- }
- logger.warn("Updating the correspondence takes time:" + (begin - System.currentTimeMillis()));
- logger.warn("Pull the length of the interface data:[" + objects.size() + "]");
- }
- } catch (RuntimeException e) {
- logger.error("Update cache app-id failed, exception:" + e);
- }
- }
-
-
- /**
- * Refresh timer: runs at a fixed interval to update the app-id cache
- */
- private void updateAppIdCache() {
- ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
- executorService.scheduleAtFixedRate(new Runnable() {
- @Override
- public void run() {
- try {
- if (FlowWriteConfig.APP_TICK_TUPLE_FREQ_SECS != 0) {
- change();
- }
- } catch (RuntimeException e) {
- logger.error("AppUtils update AppCache is error===>{" + e + "}<===");
- }
- }
- }, 1, FlowWriteConfig.APP_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
- }
-
-
- /**
- * Get appName
- *
- * @param appId app_id
- * @return appName
- */
- public static String getAppName(int appId) {
-
- if (appUtils == null) {
- getAppInstance();
- }
-
- if (appIdMap.containsKey(appId)) {
- return appIdMap.get(appId);
- } else {
- logger.warn("AppMap get appName is null, ID is :" + appId);
- return "";
- }
- }
-
-}
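
For reference, timestampsFilter() above implies that app.id.http returns a JSON object whose "data" field is a stringified array of [id, name] pairs. A hypothetical payload and the parse it drives (shape inferred from the parser, not from any gateway documentation; the ids and names are invented):

    import com.alibaba.fastjson.JSONArray;
    import com.alibaba.fastjson.JSONObject;

    public class AppIdPayloadSketch {
        public static void main(String[] args) {
            // Hypothetical response body matching what timestampsFilter() expects
            String schema = "{\"data\": \"[[1001, \\\"wechat\\\"], [1002, \\\"qq\\\"]]\"}";
            String data = JSONObject.parseObject(schema).getString("data");
            for (Object object : JSONArray.parseArray(data)) {
                JSONArray pair = JSONArray.parseArray(object.toString());
                // pair.getInteger(0) is the app id, pair.getString(1) the app name
                System.out.println(pair.getInteger(0) + " -> " + pair.getString(1)); // 1001 -> wechat
            }
        }
    }
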
diff --git a/src/main/java/com/zdjizhi/utils/exception/FlowWriteException.java b/src/main/java/com/zdjizhi/utils/exception/FlowWriteException.java
deleted file mode 100644
index 67c88f0..0000000
--- a/src/main/java/com/zdjizhi/utils/exception/FlowWriteException.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.zdjizhi.utils.exception;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.storm.utils.execption
- * @Description:
- * @date 2021/3/25 9:42
- */
-public class FlowWriteException extends RuntimeException {
-
- public FlowWriteException() {
- }
-
- public FlowWriteException(String message) {
- super(message);
- }
-
-}
diff --git a/src/main/java/com/zdjizhi/utils/functions/DealFileProcessFunction.java b/src/main/java/com/zdjizhi/utils/functions/DealFileProcessFunction.java
deleted file mode 100644
index 20cb1bb..0000000
--- a/src/main/java/com/zdjizhi/utils/functions/DealFileProcessFunction.java
+++ /dev/null
@@ -1,123 +0,0 @@
-package com.zdjizhi.utils.functions;
-
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson.JSONArray;
-import com.alibaba.fastjson.JSONObject;
-import com.zdjizhi.bean.FileMeta;
-
-
-import com.zdjizhi.bean.SourceList;
-import com.zdjizhi.utils.JsonMapper;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.general.FileEdit;
-import com.zdjizhi.utils.json.JsonTypeUtils;
-import org.apache.flink.streaming.api.functions.ProcessFunction;
-import org.apache.flink.util.Collector;
-import org.apache.flink.util.OutputTag;
-
-import java.util.Map;
-
-
-/**
- * @author wangchengcheng
- * @Package com.zdjizhi.utils.functions
- * @Description:
- * @date 2021/10/14
- */
-public class DealFileProcessFunction extends ProcessFunction {
- private static final Log logger = LogFactory.get();
-
- private Map jsonMap = null;
- private String rpUrlValue;
- private String rqUrlValue;
- private String emailUrlValue;
-
- private long cfgId = 0; //= common_policy_id;
-
- private String sIp = null; // = common_client_ip;
- private int sPort = 0;// = common_client_port;
- private String dIp = null;//= common_server_ip;
- private int dPort = 0;// = common_server_port;
- private long foundTime = 0;// = common_recv_time;
- private String account = null;
- private String domain = null;
- private String schemaType = null;
-
-
- //Tag marking the side-output stream
- public static OutputTag metaToKafa = new OutputTag("metaToKafka") {};
-
- @SuppressWarnings("unchecked")
- @Override
- public void processElement(String message, Context context, Collector collector) throws Exception {
- try {
- if (StringUtil.isNotBlank(message)) {
- Map map = (Map) JsonMapper.fromJsonString(message, Map.class);
- jsonMap = JsonTypeUtils.typeTransform(map);
- rpUrlValue = (String) jsonMap.get("http_response_body");
- rqUrlValue = (String) jsonMap.get("http_request_body");
- emailUrlValue = (String) jsonMap.get("mail_eml_file");
-
- if (StringUtil.isNotBlank(rpUrlValue) || StringUtil.isNotBlank(rqUrlValue) || StringUtil.isNotBlank(emailUrlValue)) {
- cfgId = (long) jsonMap.get("common_policy_id");
- sIp = (String) jsonMap.get("common_client_ip");
- sPort = (int) jsonMap.get("common_client_port");
- dIp = (String) jsonMap.get("common_server_ip");
- dPort = (int) jsonMap.get("common_server_port");
- foundTime = (long) jsonMap.get("common_recv_time");
- schemaType = (String) jsonMap.get("common_schema_type");
-
- if (StringUtil.isNotBlank((String) jsonMap.get("http_domain"))) {
- domain = jsonMap.get("http_domain").toString();
- } else {
- domain = "NA";
- }
- if (StringUtil.isNotBlank((String) jsonMap.get("common_subscribe_id"))) {
- account = jsonMap.get("common_subscribe_id").toString();
- } else {
- account = "NA";
- }
-
- FileMeta fileMeta = new FileMeta();
- JSONArray jsonarray = new JSONArray();
- if (StringUtil.isNotBlank(rqUrlValue)) {
- jsonMap.put("http_request_body", FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rqUrlValue, schemaType, "_1"));
- SourceList request = new SourceList();
- request.setSource_oss_path(rqUrlValue);
- request.setDestination_oss_path(FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rqUrlValue, schemaType, "_1"));
- jsonarray.add(request);
- }
- if (StringUtil.isNotBlank(rpUrlValue)) {
- jsonMap.put("http_response_body", FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rpUrlValue, schemaType, "_2"));
- SourceList response = new SourceList();
- response.setSource_oss_path(rpUrlValue);
- response.setDestination_oss_path(FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rpUrlValue, schemaType, "_2"));
- jsonarray.add(response);
- }
- if (StringUtil.isNotBlank(emailUrlValue)) {
- jsonMap.put("mail_eml_file", FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, emailUrlValue, schemaType, "_9"));
- SourceList emailFile = new SourceList();
- emailFile.setSource_oss_path(emailUrlValue);
- emailFile.setDestination_oss_path(FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, emailUrlValue, schemaType, "_9"));
- jsonarray.add(emailFile);
- }
- fileMeta.setSourceList(jsonarray);
- fileMeta.setCommon_log_id((long) jsonMap.get("common_log_id"));
- fileMeta.setCommon_recv_time(Integer.parseInt(jsonMap.get("common_recv_time").toString()));
- fileMeta.setCommon_schema_type((String) jsonMap.get("common_schema_type"));
- fileMeta.setProcessing_time((int) (System.currentTimeMillis() / 1000));
-
- context.output(metaToKafa, JSONObject.toJSONString(fileMeta));
- }
- collector.collect(JsonMapper.toJsonString(jsonMap));
- } else {
- collector.collect(message);
- }
- }catch (RuntimeException e) {
- logger.error("处理带有非结构结构化字段的日志出错:" + e + "\n" + message);
-
- }
- }
- }
diff --git a/src/main/java/com/zdjizhi/utils/functions/FilterNullFunction.java b/src/main/java/com/zdjizhi/utils/functions/FilterNullFunction.java
deleted file mode 100644
index de507ad..0000000
--- a/src/main/java/com/zdjizhi/utils/functions/FilterNullFunction.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package com.zdjizhi.utils.functions;
-
-import com.zdjizhi.utils.StringUtil;
-import org.apache.flink.api.common.functions.FilterFunction;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.utils.functions
- * @Description:
- * @date 2021/5/27 15:01
- */
-public class FilterNullFunction implements FilterFunction {
- @Override
- public boolean filter(String message) {
- return StringUtil.isNotBlank(message);
- }
-}
diff --git a/src/main/java/com/zdjizhi/utils/functions/MapCompletedFunction.java b/src/main/java/com/zdjizhi/utils/functions/MapCompletedFunction.java
deleted file mode 100644
index 5618159..0000000
--- a/src/main/java/com/zdjizhi/utils/functions/MapCompletedFunction.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.zdjizhi.utils.functions;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.utils.general.TransFormTypeMap;
-import org.apache.flink.api.common.functions.MapFunction;
-
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.utils.functions
- * @Description:
- * @date 2021/5/27 15:01
- */
-public class MapCompletedFunction implements MapFunction {
- private static final Log logger = LogFactory.get();
-
- @Override
- @SuppressWarnings("unchecked")
- public String map(String logs) {
- try {
- return TransFormTypeMap.dealCommonMessage(logs);
- } catch (RuntimeException e) {
- logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + logs);
- return "";
- }
- }
-}
diff --git a/src/main/java/com/zdjizhi/utils/general/FileEdit.java b/src/main/java/com/zdjizhi/utils/general/FileEdit.java
deleted file mode 100644
index 091f368..0000000
--- a/src/main/java/com/zdjizhi/utils/general/FileEdit.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package com.zdjizhi.utils.general;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.ordinary.MD5Utils;
-
-
-/**
- * Utility for manipulating file-path fields
- */
-public class FileEdit {
-
- private static final Log logger = LogFactory.get();
-
-
- public static String dealFileUrlToPercent(long cfgId,String sIp,int sPort,String dIp,int dPort,long foundTime,String account,String domain, String urlValue,String schemaType,String fileSuffix) throws Exception {
- String fileType = null;
- if (schemaType.equals("HTTP")){
- fileType = "html";
- }
- if (schemaType.equals("MAIL")){
- fileType = "eml";
- }
- return "http://"+ FlowWriteConfig.OOS_SERVERS+"/upload_v2"+"/"+cfgId+"/"+fileType+"/"+sIp+"/"+sPort+"/"+dIp+"/"+dPort+"/"+foundTime+"/"+account+"/"+domain+"/"+getFileName(urlValue,fileSuffix);
- }
-
- public static String getFileType(String url){
- String[] split = url.split("\\.");
- return split[split.length-1];
- }
-
- public static String getFileName(String url,String fileSuffix) throws Exception {
- String[] arr = url.split("/");
- String filename = arr[arr.length-1].substring(0,arr[arr.length-1].lastIndexOf("_"));
- String prefix = MD5Utils.md5Encode(filename);
-// String suffix = arr[arr.length-1].substring(arr[arr.length-1].lastIndexOf("_"),arr[arr.length-1].lastIndexOf("."));
- return prefix+fileSuffix;
- }
-}
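
For clarity, a hypothetical call (all argument values invented) showing the URL shape dealFileUrlToPercent() builds; the host comes from oos.servers in service_flow_config.properties:

    String url = FileEdit.dealFileUrlToPercent(
            12L, "10.0.0.1", 1234, "10.0.0.2", 80,
            1634902912L, "NA", "example.com",
            "/bucket/path/report_0.html", "HTTP", "_2");
    // url == "http://10.3.45.124:8057/upload_v2/12/html/10.0.0.1/1234/10.0.0.2/80/"
    //        + "1634902912/NA/example.com/" + md5("report") + "_2"
    // getFileName() strips the last "_..." suffix from "report_0.html" and hashes "report".
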
diff --git a/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java b/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java
deleted file mode 100644
index 168fec2..0000000
--- a/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java
+++ /dev/null
@@ -1,213 +0,0 @@
-package com.zdjizhi.utils.general;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.zookeeper.DistributedLock;
-import com.zdjizhi.utils.zookeeper.ZookeeperUtils;
-
-/**
- * Snowflake algorithm
- *
- * @author qidaijie
- */
-public class SnowflakeId {
- private static final Log logger = LogFactory.get();
-
- /**
- * 64 bits in total; the first bit is the sign bit, always 0.
- * Timestamp: 39 bits (~17 years); dataCenterId (one per environment/task): 5 bits (0-31);
- * workerId (one per process): 8 bits (0-255); sequence: 11 bits (2047/ms)
- *
- * sequences per ms = (-1L ^ (-1L << 11))
- * max years = (1L << 39) / (1000L * 60 * 60 * 24 * 365)
- */
- /**
- * Epoch start (2020-11-14 00:00:00), max ~17 years
- */
- private final long twepoch = 1605283200000L;
-
- /**
- * Number of bits allocated to the worker id
- */
- private final long workerIdBits = 8L;
-
- /**
- * Number of bits allocated to the data-center id
- */
- private final long dataCenterIdBits = 5L;
-
- /**
- * Maximum supported worker id: 255 (this shift trick quickly computes the largest decimal value an n-bit binary number can represent)
- * M << n = M * 2^n
- */
- private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
-
- /**
- * Maximum supported data-center id: 31
- */
- private final long maxDataCenterId = -1L ^ (-1L << dataCenterIdBits);
-
- /**
- * Number of bits allocated to the sequence
- */
- private final long sequenceBits = 11L;
-
- /**
- * Worker ID shifted left by 11 bits (sequenceBits)
- */
- private final long workerIdShift = sequenceBits;
-
- /**
- * Data-center id shifted left by 19 bits (11+8)
- */
- private final long dataCenterIdShift = sequenceBits + workerIdBits;
-
- /**
- * Timestamp shifted left by 24 bits (11+8+5)
- */
- private final long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits;
-
- /**
- * Mask for the sequence, 2047 here
- */
- private final long sequenceMask = -1L ^ (-1L << sequenceBits);
-
- /**
- * Worker machine ID (0-255)
- */
- private long workerId;
-
- /**
- * Data-center ID (0-31)
- */
- private long dataCenterId;
-
- /**
- * Per-millisecond sequence (0-2047)
- */
- private long sequence = 0L;
-
- /**
- * Timestamp of the last generated ID
- */
- private long lastTimestamp = -1L;
-
-
- /**
- * Maximum tolerated clock rollback: 10 s
- */
- private static final long rollBackTime = 10000L;
-
-
- private static SnowflakeId idWorker;
-
- private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
-
- static {
- idWorker = new SnowflakeId(FlowWriteConfig.ZOOKEEPER_SERVERS, FlowWriteConfig.DATA_CENTER_ID_NUM);
- }
-
- //==============================Constructors=====================================
-
- /**
- * Constructor
- */
- private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
- DistributedLock lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
- try {
- lock.lock();
- int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
- if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
- throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
- }
- if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
- throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than ", maxDataCenterId));
- }
- this.workerId = tmpWorkerId;
- this.dataCenterId = dataCenterIdNum;
- } catch (RuntimeException e) {
- logger.error("This is not usual error!!!===>>>" + e + "<<<===");
- }finally {
- lock.unlock();
- }
- }
-
- // ==============================Methods==========================================
-
- /**
- * Get the next ID (this method is thread-safe)
- *
- * @return SnowflakeId
- */
- private synchronized long nextId() {
- long timestamp = timeGen();
- //Tolerate limited clock rollback: if the clock moved back by less than rollBackTime, wait until it catches up
- if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
- timestamp = tilNextMillis(lastTimestamp);
- }
- //If the current time is still earlier than the last ID's timestamp, the clock rolled back too far; throw
- if (timestamp < lastTimestamp) {
- throw new RuntimeException(
- String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
- }
-
- //Generated within the same millisecond: advance the in-millisecond sequence
- if (lastTimestamp == timestamp) {
- sequence = (sequence + 1) & sequenceMask;
- //Sequence overflow within this millisecond
- if (sequence == 0) {
- //Block until the next millisecond to get a fresh timestamp
- timestamp = tilNextMillis(lastTimestamp);
- }
- }
- //Timestamp changed: reset the in-millisecond sequence
- else {
- sequence = 0L;
- }
-
- //Record the timestamp of this ID
- lastTimestamp = timestamp;
-
- //Shift the parts and OR them together into a 64-bit ID
- return ((timestamp - twepoch) << timestampLeftShift)
- | (dataCenterId << dataCenterIdShift)
- | (workerId << workerIdShift)
- | sequence;
- }
-
- /**
- * Block until the next millisecond, returning a fresh timestamp
- *
- * @param lastTimestamp timestamp of the last generated ID
- * @return current timestamp
- */
- protected long tilNextMillis(long lastTimestamp) {
- long timestamp = timeGen();
- while (timestamp <= lastTimestamp) {
- timestamp = timeGen();
- }
- return timestamp;
- }
-
- /**
- * Return the current time in milliseconds
- *
- * @return current time (milliseconds)
- */
- protected long timeGen() {
- return System.currentTimeMillis();
- }
-
-
- /**
- * Static ID factory
- *
- * @return
- */
- public static Long generateId() {
- return idWorker.nextId();
- }
-
-
-}
\ No newline at end of file
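
Given workerIdBits=8, dataCenterIdBits=5 and sequenceBits=11 above, the shifts work out to 11, 19 and 24, so a generated id decomposes as follows (a quick sanity-check sketch, not code from this repo):

    long id = SnowflakeId.generateId();
    long sequence     = id & 2047L;                   // low 11 bits
    long workerId     = (id >>> 11) & 255L;           // next 8 bits
    long dataCenterId = (id >>> 19) & 31L;            // next 5 bits
    long timestampMs  = (id >>> 24) + 1605283200000L; // 39-bit millis offset from twepoch
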
diff --git a/src/main/java/com/zdjizhi/utils/general/TransFormMap.java b/src/main/java/com/zdjizhi/utils/general/TransFormMap.java
deleted file mode 100644
index 549d1d4..0000000
--- a/src/main/java/com/zdjizhi/utils/general/TransFormMap.java
+++ /dev/null
@@ -1,146 +0,0 @@
-package com.zdjizhi.utils.general;
-
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.JsonMapper;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.json.JsonParseUtil;
-import com.zdjizhi.utils.json.JsonTypeUtils;
-
-import java.util.ArrayList;
-import java.util.Map;
-
-
-/**
- * Description: transformation and completion utility
- *
- * @author qidaijie
- */
-public class TransFormMap {
- private static final Log logger = LogFactory.get();
-
- /**
- * Job list:
- * each element is a 4-tuple of strings (field carrying the format tag, field to complete, function to apply, extra parameter), e.g.:
- * (mail_subject mail_subject decode_of_base64 mail_subject_charset)
- */
- private static ArrayList jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
-
- /**
- * Parse a log and complete it
- *
- * @param message raw log from the Kafka topic
- * @return completed log
- */
- @SuppressWarnings("unchecked")
- public static String dealCommonMessage(String message) {
- try {
- if (StringUtil.isNotBlank(message)) {
-
- Map jsonMap = (Map) JsonMapper.fromJsonString(message, Map.class);
- for (String[] strings : jobList) {
- //value of the source field
- Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
- //key of the field to complete
- String appendToKeyName = strings[1];
- //current value of the field to complete
- Object appendTo = JsonParseUtil.getValue(jsonMap, appendToKeyName);
- //name of the function to apply
- String function = strings[2];
- //extra parameter value
- String param = strings[3];
- functionSet(function, jsonMap, appendToKeyName, appendTo, logValue, param);
- }
- return JsonMapper.toJsonString(jsonMap);
- } else {
- return "";
- }
- } catch (RuntimeException e) {
- logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + message);
- return "";
- }
- }
-
-
- /**
- * Set of functions applied to fields according to the schema description
- *
- * @param function name of the function to apply
- * @param jsonMap parsed raw-log map
- * @param appendToKeyName key of the field to complete
- * @param appendTo current value of the field to complete
- * @param logValue value of the source field
- * @param param extra parameter value
- */
- private static void functionSet(String function, Map jsonMap, String appendToKeyName, Object appendTo, Object logValue, String param) {
- switch (function) {
- case "current_timestamp":
- if (!(appendTo instanceof Long)) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getCurrentTime());
- }
- break;
- case "snowflake_id":
- JsonParseUtil.setValue(jsonMap, appendToKeyName, SnowflakeId.generateId());
- break;
- case "geo_ip_detail":
- if (logValue != null && appendTo == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpDetail(logValue.toString()));
- }
- break;
- case "geo_asn":
- if (logValue != null && appendTo == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoAsn(logValue.toString()));
- }
- break;
- case "geo_ip_country":
- if (logValue != null && appendTo == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpCountry(logValue.toString()));
- }
- break;
- case "set_value":
- if (param != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, param);
- }
- break;
- case "get_value":
- if (logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, logValue);
- }
- break;
- case "if":
- if (param != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.condition(jsonMap, param));
- }
- break;
- case "sub_domain":
- if (appendTo == null && logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getTopDomain(logValue.toString()));
- }
- break;
- case "radius_match":
- if (logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.radiusMatch(logValue.toString()));
- }
- break;
- case "app_match":
- if (logValue != null && appendTo == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.appMatch(logValue.toString()));
- }
- break;
- case "decode_of_base64":
- if (logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
- }
- break;
- case "flattenSpec":
- if (logValue != null && param != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.flattenSpec(logValue.toString(), param));
- }
- break;
- default:
- }
- }
-
-}
\ No newline at end of file
diff --git a/src/main/java/com/zdjizhi/utils/general/TransFormObject.java b/src/main/java/com/zdjizhi/utils/general/TransFormObject.java
deleted file mode 100644
index 26795b0..0000000
--- a/src/main/java/com/zdjizhi/utils/general/TransFormObject.java
+++ /dev/null
@@ -1,153 +0,0 @@
-package com.zdjizhi.utils.general;
-
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.JsonMapper;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.json.JsonParseUtil;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-
-
-/**
- * Description: transformation and completion utility
- *
- * @author qidaijie
- */
-public class TransFormObject {
- private static final Log logger = LogFactory.get();
-
- /**
- * Map used to build the reflective class in memory
- */
- private static HashMap map = JsonParseUtil.getMapFromHttp(FlowWriteConfig.SCHEMA_HTTP);
-
- /**
- * Reflected into a class instance
- */
- private static Object mapObject = JsonParseUtil.generateObject(map);
-
- /**
- * Job list:
- * each element is a 4-tuple of strings (field carrying the format tag, field to complete, function to apply, extra parameter), e.g.:
- * (mail_subject mail_subject decode_of_base64 mail_subject_charset)
- */
- private static ArrayList jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
-
- /**
- * Parse a log and complete it
- *
- * @param message raw log from the Kafka topic
- * @return completed log
- */
- public static String dealCommonMessage(String message) {
- try {
- if (StringUtil.isNotBlank(message)) {
- Object object = JsonMapper.fromJsonString(message, mapObject.getClass());
- for (String[] strings : jobList) {
- //value of the source field
- Object name = JsonParseUtil.getValue(object, strings[0]);
- //key of the field to complete
- String appendToKeyName = strings[1];
- //current value of the field to complete
- Object appendTo = JsonParseUtil.getValue(object, appendToKeyName);
- //name of the function to apply
- String function = strings[2];
- //extra parameter value
- String param = strings[3];
- functionSet(function, object, appendToKeyName, appendTo, name, param);
- }
- return JsonMapper.toJsonString(object);
- } else {
- return "";
- }
- } catch (RuntimeException e) {
- logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + message);
- return "";
- }
- }
-
-
- /**
- * Set of functions applied to fields according to the schema description
- *
- * @param function name of the function to apply
- * @param object dynamic POJO object
- * @param appendToKeyName key of the field to complete
- * @param appendTo current value of the field to complete
- * @param name value of the source field
- * @param param extra parameter value
- */
- private static void functionSet(String function, Object object, String appendToKeyName, Object appendTo, Object name, String param) {
- switch (function) {
- case "current_timestamp":
- if (!(appendTo instanceof Long)) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getCurrentTime());
- }
- break;
- case "snowflake_id":
- JsonParseUtil.setValue(object, appendToKeyName, SnowflakeId.generateId());
- break;
- case "geo_ip_detail":
- if (name != null && appendTo == null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getGeoIpDetail(name.toString()));
- }
- break;
- case "geo_asn":
- if (name != null && appendTo == null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getGeoAsn(name.toString()));
- }
- break;
- case "geo_ip_country":
- if (name != null && appendTo == null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getGeoIpCountry(name.toString()));
- }
- break;
- case "set_value":
- if (name != null && param != null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.setValue(param));
- }
- break;
- case "get_value":
- if (name != null) {
- JsonParseUtil.setValue(object, appendToKeyName, name);
- }
- break;
- case "if":
- if (param != null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.condition(object, param));
- }
- break;
- case "sub_domain":
- if (appendTo == null && name != null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getTopDomain(name.toString()));
- }
- break;
- case "radius_match":
- if (name != null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.radiusMatch(name.toString()));
- }
- break;
- case "app_match":
- if (name != null && appendTo == null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.appMatch(name.toString()));
- }
- break;
- case "decode_of_base64":
- if (name != null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.decodeBase64(name.toString(), TransFunction.isJsonValue(object, param)));
- }
- break;
- case "flattenSpec":
- if (name != null && param != null) {
- JsonParseUtil.setValue(object, appendToKeyName, TransFunction.flattenSpec(name.toString(), param));
- }
- break;
- default:
- }
- }
-
-}
\ No newline at end of file
diff --git a/src/main/java/com/zdjizhi/utils/general/TransFormTypeMap.java b/src/main/java/com/zdjizhi/utils/general/TransFormTypeMap.java
deleted file mode 100644
index 4423f51..0000000
--- a/src/main/java/com/zdjizhi/utils/general/TransFormTypeMap.java
+++ /dev/null
@@ -1,146 +0,0 @@
-package com.zdjizhi.utils.general;
-
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.JsonMapper;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.json.JsonParseUtil;
-import com.zdjizhi.utils.json.JsonTypeUtils;
-
-import java.util.ArrayList;
-import java.util.Map;
-
-
-/**
- * Description: transformation and completion utility
- *
- * @author qidaijie
- */
-public class TransFormTypeMap {
- private static final Log logger = LogFactory.get();
-
- /**
- * Job list:
- * each element is a 4-tuple of strings (field carrying the format tag, field to complete, function to apply, extra parameter), e.g.:
- * (mail_subject mail_subject decode_of_base64 mail_subject_charset)
- */
- private static ArrayList jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
-
- /**
- * Parse a log and complete it
- *
- * @param message raw log from the Kafka topic
- * @return completed log
- */
- @SuppressWarnings("unchecked")
- public static String dealCommonMessage(String message) {
- try {
- if (StringUtil.isNotBlank(message)) {
- Map map = (Map) JsonMapper.fromJsonString(message, Map.class);
- Map jsonMap = JsonTypeUtils.typeTransform(map);
- for (String[] strings : jobList) {
- //value of the source field
- Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
- //key of the field to complete
- String appendToKeyName = strings[1];
- //current value of the field to complete
- Object appendToKeyValue = JsonParseUtil.getValue(jsonMap, appendToKeyName);
- //name of the function to apply
- String function = strings[2];
- //extra parameter value
- String param = strings[3];
- functionSet(function, jsonMap, appendToKeyName, appendToKeyValue, logValue, param);
- }
- return JsonMapper.toJsonString(jsonMap);
- } else {
- return "";
- }
- } catch (RuntimeException e) {
- logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + message);
- return "";
- }
- }
-
-
- /**
- * Set of functions applied to fields according to the schema description
- *
- * @param function name of the function to apply
- * @param jsonMap parsed raw-log map
- * @param appendToKeyName key of the field to complete
- * @param appendToKeyValue current value of the field to complete
- * @param logValue value of the source field
- * @param param extra parameter value
- */
- private static void functionSet(String function, Map jsonMap, String appendToKeyName, Object appendToKeyValue, Object logValue, String param) {
- switch (function) {
- case "current_timestamp":
- if (!(appendToKeyValue instanceof Long)) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getCurrentTime());
- }
- break;
- case "snowflake_id":
- JsonParseUtil.setValue(jsonMap, appendToKeyName, SnowflakeId.generateId());
- break;
- case "geo_ip_detail":
- if (logValue != null && appendToKeyValue == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpDetail(logValue.toString()));
- }
- break;
- case "geo_asn":
- if (logValue != null && appendToKeyValue == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoAsn(logValue.toString()));
- }
- break;
- case "geo_ip_country":
- if (logValue != null && appendToKeyValue == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpCountry(logValue.toString()));
- }
- break;
- case "set_value":
- if (param != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, param);
- }
- break;
- case "get_value":
- if (logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, logValue);
- }
- break;
- case "if":
- if (param != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.condition(jsonMap, param));
- }
- break;
- case "sub_domain":
- if (appendToKeyValue == null && logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getTopDomain(logValue.toString()));
- }
- break;
- case "radius_match":
- if (logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.radiusMatch(logValue.toString()));
- }
- break;
- case "app_match":
- if (logValue != null && appendToKeyValue == null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.appMatch(logValue.toString()));
- }
- break;
- case "decode_of_base64":
- if (logValue != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
- }
- break;
- case "flattenSpec":
- if (logValue != null && param != null) {
- JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.flattenSpec(logValue.toString(), param));
- }
- break;
- default:
- }
- }
-
-}
\ No newline at end of file
diff --git a/src/main/java/com/zdjizhi/utils/general/TransFunction.java b/src/main/java/com/zdjizhi/utils/general/TransFunction.java
deleted file mode 100644
index 7dc806e..0000000
--- a/src/main/java/com/zdjizhi/utils/general/TransFunction.java
+++ /dev/null
@@ -1,316 +0,0 @@
-package com.zdjizhi.utils.general;
-
-import cn.hutool.core.codec.Base64;
-import cn.hutool.core.text.StrSpliter;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.jayway.jsonpath.InvalidPathException;
-import com.jayway.jsonpath.JsonPath;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.FormatUtils;
-import com.zdjizhi.utils.IpLookup;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.app.AppUtils;
-import com.zdjizhi.utils.hbase.HBaseUtils;
-import com.zdjizhi.utils.json.JsonParseUtil;
-import com.zdjizhi.utils.json.TypeUtils;
-
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * @author qidaijie
- */
-class TransFunction {
-
- private static final Log logger = LogFactory.get();
-
- private static final Pattern PATTERN = Pattern.compile("[0-9]*");
-
- /**
- * IP location library utility
- */
- private static IpLookup ipLookup = new IpLookup.Builder(false)
- .loadDataFileV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_v4.mmdb")
- .loadDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_v6.mmdb")
- .loadDataFilePrivateV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_private_v4.mmdb")
- .loadDataFilePrivateV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_private_v6.mmdb")
- .loadAsnDataFile(FlowWriteConfig.TOOLS_LIBRARY + "asn_v4.mmdb")
- .loadAsnDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "asn_v6.mmdb")
- .build();
-
- /**
- * Generate the current timestamp (seconds)
- */
- static long getCurrentTime() {
-
- return System.currentTimeMillis() / 1000;
- }
-
- /**
- * Look up location info for a client IP
- *
- * @param ip client IP
- * @return detailed IP location info
- */
- static String getGeoIpDetail(String ip) {
-
- return ipLookup.cityLookupDetail(ip);
-
- }
-
- /**
- * Look up ASN info for an IP
- *
- * @param ip client/server IP
- * @return ASN
- */
- static String getGeoAsn(String ip) {
-
- return ipLookup.asnLookup(ip);
- }
-
- /**
- * Look up country info for an IP
- *
- * @param ip server IP
- * @return country
- */
- static String getGeoIpCountry(String ip) {
-
- return ipLookup.countryLookup(ip);
- }
-
-
- /**
- * Complete the radius account via HBase
- *
- * @param ip client IP
- * @return account
- */
- static String radiusMatch(String ip) {
- String account = HBaseUtils.getAccount(ip.trim());
-// if (StringUtil.isBlank(account)) {
-// logger.warn("HashMap get account is null, Ip is :" + ip);
-// }
- return account;
- }
-
- /**
- * Complete appName from the cached appId mapping
- *
- * @param appIds list of app ids
- * @return appName
- */
- static String appMatch(String appIds) {
- try {
- String appId = StrSpliter.split(appIds, FlowWriteConfig.FORMAT_SPLITTER, true, true).get(0);
- return AppUtils.getAppName(Integer.parseInt(appId));
- } catch (NumberFormatException | ClassCastException exception) {
- logger.error("APP ID列表分割转换异常,异常APP ID列表:" + appIds);
- return "";
- }
- }
-
- /**
- * Extract the top-level domain
- *
- * @param domain original domain
- * @return top-level domain
- */
- static String getTopDomain(String domain) {
- try {
- return FormatUtils.getTopPrivateDomain(domain);
- } catch (StringIndexOutOfBoundsException outException) {
- logger.error("解析顶级域名异常,异常域名:" + domain);
- return "";
- }
- }
-
- /**
- * Decode base64 using the given charset
- *
- * @param message base64
- * @param charset charset name
- * @return decoded string
- */
- static String decodeBase64(String message, Object charset) {
- String result = "";
- try {
- if (StringUtil.isNotBlank(message)) {
- if (charset == null) {
- result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
- } else {
- result = Base64.decodeStr(message, charset.toString());
- }
- }
- } catch (RuntimeException rune) {
- logger.error("解析 Base64 异常,异常信息:" + rune);
- }
- return result;
- }
-
- /**
- * Parse JSON with a JsonPath expression
- *
- * @param message json
- * @param expr JsonPath expression
- * @return parse result
- */
- static String flattenSpec(String message, String expr) {
- String flattenResult = "";
- try {
- if (StringUtil.isNotBlank(expr)) {
- ArrayList read = JsonPath.parse(message).read(expr);
- flattenResult = read.get(0);
- }
- } catch (ClassCastException | InvalidPathException e) {
- logger.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + e);
- }
- return flattenResult;
- }
-
-
- /**
- * If the parameter names a log field, return its value; otherwise return the literal string
- *
- * @param object in-memory entity
- * @param param field name or plain string
- * @return JSON.Value or String
- */
- static Object isJsonValue(Object object, String param) {
- if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
- return JsonParseUtil.getValue(object, param.substring(2));
- } else {
- return param;
- }
- }
-
- /**
- * If the parameter names a log field, return its value; otherwise return the literal string
- *
- * @param jsonMap parsed log map
- * @param param field name or plain string
- * @return JSON.Value or String
- */
- static Object isJsonValue(Map jsonMap, String param) {
- if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
- return JsonParseUtil.getValue(jsonMap, param.substring(2));
- } else {
- return param;
- }
- }
-
- /**
- * IF-function implementation: builds a ternary expression from the log; numeric results are converted to long.
- *
- * @param object in-memory entity
- * @param ifParam field name or plain string
- * @return resultA or resultB or null
- */
- static Object condition(Object object, String ifParam) {
- Object result = null;
- try {
- String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
- if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
- String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
- Object direction = isJsonValue(object, norms[0]);
- Object resultA = isJsonValue(object, split[1]);
- Object resultB = isJsonValue(object, split[2]);
- if (direction instanceof Number) {
-// result = (Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB;
- result = TypeUtils.castToIfFunction((Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB);
- } else if (direction instanceof String) {
- result = TypeUtils.castToIfFunction(direction.equals(norms[1]) ? resultA : resultB);
-// result = direction.equals(norms[1]) ? resultA : resultB;
- }
- }
- } catch (RuntimeException e) {
- logger.error("IF 函数执行异常,异常信息:" + e);
- }
- return result;
- }
-
- /**
- * IF-function implementation: builds a ternary expression from the log; numeric results are converted to long.
- *
- * @param jsonMap parsed log map
- * @param ifParam field name or plain string
- * @return resultA or resultB or null
- */
- static Object condition(Map jsonMap, String ifParam) {
- Object result = null;
- try {
- String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
- if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
- String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
- Object direction = isJsonValue(jsonMap, norms[0]);
- Object resultA = isJsonValue(jsonMap, split[1]);
- Object resultB = isJsonValue(jsonMap, split[2]);
- if (direction instanceof Number) {
- result = (Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB;
-// result = TypeUtils.castToIfFunction((Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB);
- } else if (direction instanceof String) {
-// result = TypeUtils.castToIfFunction(direction.equals(norms[1]) ? resultA : resultB);
- result = direction.equals(norms[1]) ? resultA : resultB;
- }
- }
- } catch (RuntimeException e) {
- logger.error("IF 函数执行异常,异常信息:" + e);
- }
- return result;
- }
-
-// /**
-// * IF-function implementation: builds a ternary expression from the log; numeric results are converted to long.
-// *
-// * @param jsonMap raw log
-// * @param ifParam field name or plain string
-// * @return resultA or resultB or null
-// */
-// static Object condition(Map jsonMap, String ifParam) {
-// try {
-// String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
-// String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
-// String direction = isJsonValue(jsonMap, norms[0]);
-// if (StringUtil.isNotBlank(direction)) {
-// if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
-// String resultA = isJsonValue(jsonMap, split[1]);
-// String resultB = isJsonValue(jsonMap, split[2]);
-// String result = (Integer.parseInt(direction) == Integer.parseInt(norms[1])) ? resultA : resultB;
-// Matcher isNum = PATTERN.matcher(result);
-// if (isNum.matches()) {
-// return Long.parseLong(result);
-// } else {
-// return result;
-// }
-// }
-// }
-// } catch (RuntimeException e) {
-// logger.error("IF 函数执行异常,异常信息:" + e);
-// }
-// return null;
-// }
-
- /**
- * Set a fixed value; numeric strings are returned as long
- *
- * @param param default value
- * @return a number or a string
- */
- static Object setValue(String param) {
- try {
- Matcher isNum = PATTERN.matcher(param);
- if (isNum.matches()) {
- return Long.parseLong(param);
- } else {
- return param;
- }
- } catch (RuntimeException e) {
- logger.error("SetValue 函数异常,异常信息:" + e);
- }
- return null;
- }
-}
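
The "if" parameter grammar above is easy to misread, so here is a worked example with a hypothetical rule: ifParam is "<field>=<value>,<resultA>,<resultB>", split first on "," (FORMAT_SPLITTER) and then on "=" (IF_CONDITION_SPLITTER), with "$." (IS_JSON_KEY_TAG) marking a field reference. The field names and values below are invented; condition() is package-private, so this would run from within the same package:

    import java.util.HashMap;
    import java.util.Map;

    Map<String, Object> jsonMap = new HashMap<>();
    jsonMap.put("common_direction", 1);
    jsonMap.put("common_client_ip", "10.0.0.1");
    jsonMap.put("common_server_ip", "10.0.0.2");
    // common_direction == 1, so resultA ($.common_client_ip) wins and "10.0.0.1" is returned
    Object picked = TransFunction.condition(jsonMap,
            "$.common_direction=1,$.common_client_ip,$.common_server_ip");
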
diff --git a/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java b/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java
deleted file mode 100644
index 710e4b9..0000000
--- a/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java
+++ /dev/null
@@ -1,201 +0,0 @@
-package com.zdjizhi.utils.hbase;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
-/**
- * HBase 工具类
- *
- * @author qidaijie
- */
-
-public class HBaseUtils {
- private static final Log logger = LogFactory.get();
- private static Map subIdMap = new ConcurrentHashMap<>(83334);
- private static Connection connection;
- private static Long time;
-
- private static String zookeeperIp;
- private static String hBaseTable;
-
- private static HBaseUtils hBaseUtils;
-
- private static void getInstance() {
- hBaseUtils = new HBaseUtils();
- }
-
-
- /**
- * Constructor: reads config, opens the connection, loads the full cache, and schedules periodic refreshes
- */
- private HBaseUtils() {
- zookeeperIp = FlowWriteConfig.HBASE_ZOOKEEPER_SERVERS;
- hBaseTable = FlowWriteConfig.HBASE_TABLE_NAME;
- //get the connection
- getConnection();
- //pull the full data set
- getAll();
- //schedule periodic refreshes
- updateCache();
- }
-
- private static void getConnection() {
- try {
- // HBase configuration
- Configuration configuration = HBaseConfiguration.create();
- // set the ZooKeeper quorum
- configuration.set("hbase.zookeeper.quorum", zookeeperIp);
- configuration.set("hbase.client.retries.number", "3");
- configuration.set("hbase.bulkload.retries.number", "3");
- configuration.set("zookeeper.recovery.retry", "3");
- connection = ConnectionFactory.createConnection(configuration);
- time = System.currentTimeMillis();
- logger.warn("HBaseUtils get HBase connection,now to getAll().");
- } catch (IOException ioe) {
- logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
- } catch (RuntimeException e) {
- logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
- }
- }
-
- /**
- * Incremental refresh: pull rows changed since the last update
- */
- private static void change() {
- if (hBaseUtils == null) {
- getInstance();
- }
- long nowTime = System.currentTimeMillis();
- timestampsFilter(time - 1000, nowTime + 500);
- }
-
-
- /**
- * Fetch rows changed within the given time range
- *
- * @param startTime range start (ms)
- * @param endTime range end (ms)
- */
- private static void timestampsFilter(Long startTime, Long endTime) {
- Long begin = System.currentTimeMillis();
- Table table = null;
- ResultScanner scanner = null;
- Scan scan2 = new Scan();
- try {
- table = connection.getTable(TableName.valueOf("sub:" + hBaseTable));
- scan2.setTimeRange(startTime, endTime);
- scanner = table.getScanner(scan2);
- for (Result result : scanner) {
- Cell[] cells = result.rawCells();
- for (Cell cell : cells) {
- String key = Bytes.toString(CellUtil.cloneRow(cell)).trim();
- String value = Bytes.toString(CellUtil.cloneValue(cell)).trim();
- if (subIdMap.containsKey(key)) {
- if (!value.equals(subIdMap.get(key))) {
- subIdMap.put(key, value);
- }
- } else {
- subIdMap.put(key, value);
- }
- }
- }
- Long end = System.currentTimeMillis();
- logger.warn("HBaseUtils Now subIdMap.keySet().size() is: " + subIdMap.keySet().size());
- logger.warn("HBaseUtils Update cache timeConsuming is: " + (end - begin) + ",BeginTime: " + startTime + ",EndTime: " + endTime);
- time = endTime;
- } catch (IOException ioe) {
- logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
- } catch (RuntimeException e) {
- logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
- } finally {
- if (scanner != null) {
- scanner.close();
- }
- if (table != null) {
- try {
- table.close();
- } catch (IOException e) {
- logger.error("HBase Table Close ERROR! Exception message is:" + e);
- }
- }
- }
- }
-
- /**
- * Load all key/value pairs
- */
- private static void getAll() {
- long begin = System.currentTimeMillis();
- try {
- Table table = connection.getTable(TableName.valueOf("sub:" + hBaseTable));
- Scan scan2 = new Scan();
- ResultScanner scanner = table.getScanner(scan2);
- for (Result result : scanner) {
- Cell[] cells = result.rawCells();
- for (Cell cell : cells) {
- subIdMap.put(Bytes.toString(CellUtil.cloneRow(cell)), Bytes.toString(CellUtil.cloneValue(cell)));
- }
- }
- logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size(): " + subIdMap.size());
- logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size() timeConsuming is: " + (System.currentTimeMillis() - begin));
- scanner.close();
- } catch (IOException ioe) {
- logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
- } catch (RuntimeException e) {
- logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
- }
- }
-
- /**
- * Refresh timer: re-pulls the cache at a fixed interval
- */
- private void updateCache() {
- ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
- executorService.scheduleAtFixedRate(new Runnable() {
- @Override
- public void run() {
- try {
- if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
- change();
- }
- } catch (RuntimeException e) {
- logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
- }
- }
- }, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
- }
-
-
- /**
- * Look up the account for a client IP
- *
- * @param clientIp client_ip
- * @return account
- */
- public static String getAccount(String clientIp) {
-
- if (hBaseUtils == null) {
- getInstance();
- }
- return subIdMap.get(clientIp);
-
- }
-
-}
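
Note: a minimal sketch of how callers used this deleted cache; the first call lazily builds the connection and the full cache, later calls are plain map lookups (the IP below is illustrative):

    String account = HBaseUtils.getAccount("192.168.1.10"); // null when the IP is unknown
    if (account != null) {
        // enrich the log record with the subscriber account
    }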
diff --git a/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java b/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java
deleted file mode 100644
index 1adb1d1..0000000
--- a/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package com.zdjizhi.utils.http;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import org.apache.commons.io.IOUtils;
-import org.apache.http.HttpEntity;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-
-/**
- * Utility for fetching the gateway schema
- *
- * @author qidaijie
- */
-public class HttpClientUtil {
- private static final Log logger = LogFactory.get();
-
- /**
- * GET the schema from the gateway; only the first line of the response body is read
- *
- * @param http gateway URL
- * @return schema
- */
- public static String requestByGetMethod(String http) {
- CloseableHttpClient httpClient = HttpClients.createDefault();
- StringBuilder entityStringBuilder;
-
- HttpGet get = new HttpGet(http);
- BufferedReader bufferedReader = null;
- CloseableHttpResponse httpResponse = null;
- try {
- httpResponse = httpClient.execute(get);
- HttpEntity entity = httpResponse.getEntity();
- entityStringBuilder = new StringBuilder();
- if (null != entity) {
- bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
- int intC;
- while ((intC = bufferedReader.read()) != -1) {
- char c = (char) intC;
- if (c == '\n') {
- break;
- }
- entityStringBuilder.append(c);
- }
-
- return entityStringBuilder.toString();
- }
- } catch (IOException e) {
- logger.error("Get Schema from Query engine ERROR! Exception message is:" + e);
- } finally {
- if (httpClient != null) {
- try {
- httpClient.close();
- } catch (IOException e) {
- logger.error("Close HTTP Client ERROR! Exception messgae is:" + e);
- }
- }
- if (httpResponse != null) {
- try {
- httpResponse.close();
- } catch (IOException e) {
- logger.error("Close httpResponse ERROR! Exception messgae is:" + e);
- }
- }
- if (bufferedReader != null) {
- IOUtils.closeQuietly(bufferedReader);
- }
- }
- return "";
- }
-}
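
Note: the reader above stops at the first '\n', so multi-line responses are truncated to line one. A sketch of the call-site pattern (the URL is a placeholder):

    String schema = HttpClientUtil.requestByGetMethod("http://gateway:9999/schema"); // "" on failure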
diff --git a/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java b/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java
deleted file mode 100644
index bdcc43d..0000000
--- a/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java
+++ /dev/null
@@ -1,283 +0,0 @@
-package com.zdjizhi.utils.json;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson.JSON;
-import com.alibaba.fastjson.JSONArray;
-import com.alibaba.fastjson.JSONObject;
-import com.jayway.jsonpath.JsonPath;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.http.HttpClientUtil;
-import net.sf.cglib.beans.BeanGenerator;
-import net.sf.cglib.beans.BeanMap;
-
-import java.util.*;
-
-/**
- * JSON parsing utilities built on FastJSON
- *
- * @author qidaijie
- */
-public class JsonParseUtil {
-
- private static final Log logger = LogFactory.get();
-
- private static ArrayList dropList = new ArrayList<>();
-
- /**
- * Map a schema type string to a Java class
- *
- * @param type type name
- * @return corresponding class
- */
-
- private static Class getClassName(String type) {
- Class clazz;
-
- switch (type) {
- case "int":
- clazz = Integer.class;
- break;
- case "string":
- clazz = String.class;
- break;
- case "long":
- clazz = long.class;
- break;
- case "array":
- clazz = List.class;
- break;
- case "double":
- clazz = double.class;
- break;
- case "float":
- clazz = float.class;
- break;
- case "char":
- clazz = char.class;
- break;
- case "byte":
- clazz = byte.class;
- break;
- case "boolean":
- clazz = boolean.class;
- break;
- case "short":
- clazz = short.class;
- break;
- default:
- clazz = String.class;
- }
- return clazz;
- }
-
- /**
- * Read a property value from a bean
- *
- * @param obj bean instance
- * @param property key
- * @return property value
- */
- public static Object getValue(Object obj, String property) {
- try {
- BeanMap beanMap = BeanMap.create(obj);
- return beanMap.get(property);
- } catch (RuntimeException e) {
- logger.error("获取json-value异常,异常key:" + property + "异常信息为:" + e);
- return null;
- }
- }
-
- /**
- * Read a property value from a raw log map
- *
- * @param jsonMap raw log
- * @param property key
- * @return property value
- */
- public static Object getValue(Map jsonMap, String property) {
- try {
- return jsonMap.getOrDefault(property, null);
- } catch (RuntimeException e) {
- logger.error("获取json-value异常,异常key:" + property + "异常信息为:" + e);
- return null;
- }
- }
-
- /**
- * Update a property value
- *
- * @param jsonMap raw log json map
- * @param property key to update
- * @param value new value
- */
- public static void setValue(Map jsonMap, String property, Object value) {
- try {
- jsonMap.put(property, value);
- } catch (RuntimeException e) {
- logger.error("赋予实体类错误类型数据", e);
- }
- }
-
- /**
- * Update a property value on a bean
- *
- * @param obj bean instance
- * @param property key to update
- * @param value new value
- */
- public static void setValue(Object obj, String property, Object value) {
- try {
- BeanMap beanMap = BeanMap.create(obj);
- beanMap.put(property, value);
- } catch (ClassCastException e) {
- logger.error("赋予实体类错误类型数据", e);
- }
- }
-
- /**
- * Generate an object via reflection
- *
- * @param properties map describing the generated bean's fields
- * @return generated Object
- */
- public static Object generateObject(Map properties) {
- BeanGenerator generator = new BeanGenerator();
- Set keySet = properties.keySet();
- for (Object aKeySet : keySet) {
- String key = (String) aKeySet;
- generator.addProperty(key, (Class) properties.get(key));
- }
- return generator.create();
- }
-
- /**
- * Fetch the gateway schema from the given URL and build the map used to reflectively generate a schema-shaped object
- *
- * @param http gateway schema URL
- * @return map used to reflectively generate the schema object
- */
- public static HashMap getMapFromHttp(String http) {
- HashMap map = new HashMap<>(16);
-
- String schema = HttpClientUtil.requestByGetMethod(http);
- Object data = JSON.parseObject(schema).get("data");
-
- //extract fields as an array; each element holds a name, doc and type
- JSONObject schemaJson = JSON.parseObject(data.toString());
- JSONArray fields = (JSONArray) schemaJson.get("fields");
-
- for (Object field : fields) {
- String filedStr = field.toString();
- if (checkKeepField(filedStr)) {
- String name = JsonPath.read(filedStr, "$.name").toString();
- String type = JsonPath.read(filedStr, "$.type").toString();
- if (type.contains("{")) {
- type = JsonPath.read(filedStr, "$.type.type").toString();
- }
- //assemble the map used to generate the entity class
- map.put(name, getClassName(type));
- } else {
- dropList.add(filedStr);
- }
- }
- return map;
- }
-
- /**
- * Decide whether a field should be kept
- *
- * @param message single field json
- * @return true or false
- */
- private static boolean checkKeepField(String message) {
- boolean isKeepField = true;
- boolean isHiveDoc = JSON.parseObject(message).containsKey("doc");
- if (isHiveDoc) {
- boolean isHiveVi = JsonPath.read(message, "$.doc").toString().contains("visibility");
- if (isHiveVi) {
- String visibility = JsonPath.read(message, "$.doc.visibility").toString();
- if (FlowWriteConfig.VISIBILITY.equals(visibility)) {
- isKeepField = false;
- }
- }
- }
- return isKeepField;
- }
-
- static void dropJsonField(Map jsonMap) {
- for (String field : dropList) {
- jsonMap.remove(field);
- }
- }
-
- /**
- * Fetch the schema from the given URL and return a task list (source field, target field, function, param)
- *
- * @param http gateway URL
- * @return task list
- */
- public static ArrayList getJobListFromHttp(String http) {
- ArrayList list = new ArrayList<>();
-
- String schema = HttpClientUtil.requestByGetMethod(http);
- //parse the data element
- Object data = JSON.parseObject(schema).get("data");
-
- //extract fields as an array; each element holds a name, doc and type
- JSONObject schemaJson = JSON.parseObject(data.toString());
- JSONArray fields = (JSONArray) schemaJson.get("fields");
-
- for (Object field : fields) {
-
- if (JSON.parseObject(field.toString()).containsKey("doc")) {
- Object doc = JSON.parseObject(field.toString()).get("doc");
-
- if (JSON.parseObject(doc.toString()).containsKey("format")) {
- String name = JSON.parseObject(field.toString()).get("name").toString();
- Object format = JSON.parseObject(doc.toString()).get("format");
- JSONObject formatObject = JSON.parseObject(format.toString());
-
- String functions = formatObject.get("functions").toString();
- String appendTo = null;
- String params = null;
-
- if (formatObject.containsKey("appendTo")) {
- appendTo = formatObject.get("appendTo").toString();
- }
-
- if (formatObject.containsKey("param")) {
- params = formatObject.get("param").toString();
- }
-
-
- if (StringUtil.isNotBlank(appendTo) && StringUtil.isBlank(params)) {
- String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
- String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
-
- for (int i = 0; i < functionArray.length; i++) {
- list.add(new String[]{name, appendToArray[i], functionArray[i], null});
- }
-
- } else if (StringUtil.isNotBlank(appendTo) && StringUtil.isNotBlank(params)) {
- String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
- String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
- String[] paramArray = params.split(FlowWriteConfig.FORMAT_SPLITTER);
-
- for (int i = 0; i < functionArray.length; i++) {
- list.add(new String[]{name, appendToArray[i], functionArray[i], paramArray[i]});
-
- }
- } else {
- list.add(new String[]{name, name, functions, params});
- }
-
- }
- }
-
- }
- return list;
- }
-
-}
\ No newline at end of file
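
Note: a sketch of what getJobListFromHttp emitted per schema field, for reviewers tracing the removal. The field name and param below are made up; only name, doc.format.functions, doc.format.appendTo and doc.format.param were read:

    // field: {"name":"client_region","doc":{"format":{"functions":"IF","appendTo":"client_region","param":"dir==1,a,b"}}}
    String[] job = {"client_region", "client_region", "IF", "dir==1,a,b"};
    // layout: {source field, target field, function name, function parameter}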
diff --git a/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java b/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java
deleted file mode 100644
index 0b6bc1e..0000000
--- a/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java
+++ /dev/null
@@ -1,187 +0,0 @@
-package com.zdjizhi.utils.json;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.JsonMapper;
-import com.zdjizhi.utils.exception.FlowWriteException;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author qidaijie
- * @Package PACKAGE_NAME
- * @Description:
- * @date 2021/7/12 17:34
- */
-public class JsonTypeUtils {
- private static final Log logger = LogFactory.get();
- /**
- * Schema map (field name -> class), loaded once from the gateway
- */
- private static HashMap map = JsonParseUtil.getMapFromHttp(FlowWriteConfig.SCHEMA_HTTP);
-
- /**
- * Type conversion: cast each raw log value to the type declared in the schema
- *
- * @param jsonMap raw log map
- */
- public static Map typeTransform(Map jsonMap) throws RuntimeException {
- JsonParseUtil.dropJsonField(jsonMap);
- HashMap tmpMap = new HashMap<>(192);
- for (String key : jsonMap.keySet()) {
- if (map.containsKey(key)) {
- String simpleName = map.get(key).getSimpleName();
- switch (simpleName) {
- case "String":
- tmpMap.put(key, checkString(jsonMap.get(key)));
- break;
- case "Integer":
- tmpMap.put(key, getIntValue(jsonMap.get(key)));
- break;
- case "long":
- tmpMap.put(key, checkLongValue(jsonMap.get(key)));
- break;
- case "List":
- tmpMap.put(key, checkArray(jsonMap.get(key)));
- break;
- case "Map":
- tmpMap.put(key, checkObject(jsonMap.get(key)));
- break;
- case "double":
- tmpMap.put(key, checkDouble(jsonMap.get(key)));
- break;
- default:
- tmpMap.put(key, checkString(jsonMap.get(key)));
- }
- }
- }
- return tmpMap;
- }
-
- /**
- * String check/convert
- *
- * @param value json value
- * @return String value
- */
- private static String checkString(Object value) {
- if (value == null) {
- return null;
- }
-
- if (value instanceof Map){
- return JsonMapper.toJsonString(value);
- }
-
- if (value instanceof List){
- return JsonMapper.toJsonString(value);
- }
-
- return value.toString();
- }
-
- /**
- * Object (map) check/convert
- *
- * @param value json value
- * @return Map value
- */
- private static Map checkObject(Object value) {
- if (value == null) {
- return null;
- }
-
- if (value instanceof Map) {
- return (Map) value;
- }
-
- throw new FlowWriteException("can not cast to map, value : " + value);
- }
-
- /**
- * Array (list) check/convert
- *
- * @param value json value
- * @return List value
- */
- private static List checkArray(Object value) {
- if (value == null) {
- return null;
- }
-
- if (value instanceof List) {
- return (List) value;
- }
-
- throw new FlowWriteException("can not cast to List, value : " + value);
- }
-
- private static Long checkLong(Object value) {
- if (value == null) {
- return null;
- }
-
- return TypeUtils.castToLong(value);
- }
-
- /**
- * long check/convert; returns the primitive default when the value is null
- *
- * @param value json value
- * @return Long value
- */
- private static long checkLongValue(Object value) {
- Long longVal = TypeUtils.castToLong(value);
- if (longVal == null) {
- return 0L;
- }
-
- return longVal;
- }
-
- /**
- * Double check/convert
- *
- * @param value json value
- * @return Double value
- */
- private static Double checkDouble(Object value) {
- if (value == null) {
- return null;
- }
-
- return TypeUtils.castToDouble(value);
- }
-
-
- private static Integer checkInt(Object value) {
- if (value == null) {
- return null;
- }
-
- return TypeUtils.castToInt(value);
- }
-
-
- /**
- * int check/convert; returns the primitive default when the value is null
- *
- * @param value json value
- * @return int value
- */
- private static int getIntValue(Object value) {
-
- Integer intVal = TypeUtils.castToInt(value);
- if (intVal == null) {
- return 0;
- }
-
- return intVal;
- }
-
-}
diff --git a/src/main/java/com/zdjizhi/utils/json/TypeUtils.java b/src/main/java/com/zdjizhi/utils/json/TypeUtils.java
deleted file mode 100644
index b13627f..0000000
--- a/src/main/java/com/zdjizhi/utils/json/TypeUtils.java
+++ /dev/null
@@ -1,171 +0,0 @@
-package com.zdjizhi.utils.json;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.exception.FlowWriteException;
-
-/**
- * @author qidaijie
- * @Package PACKAGE_NAME
- * @Description:
- * @date 2021/7/12 18:20
- */
-public class TypeUtils {
- private static final Log logger = LogFactory.get();
-
- /**
- * Cast an IF-function result to a plain String/Integer/Long (or 0/1 for Boolean)
- *
- * @param value json value
- * @return cast value or null
- */
- public static Object castToIfFunction(Object value) {
- if (value == null) {
- return null;
- }
-
- if (value instanceof String) {
- return value.toString();
- }
-
- if (value instanceof Integer) {
- return ((Number) value).intValue();
- }
-
- if (value instanceof Long) {
- return ((Number) value).longValue();
- }
-
- if (value instanceof Boolean) {
- return (Boolean) value ? 1 : 0;
- }
-
- throw new FlowWriteException("can not cast to int, value : " + value);
- }
-
- /**
- * Integer check/convert
- *
- * @param value json value
- * @return Integer value or null
- */
- static Integer castToInt(Object value) {
- if (value == null) {
- return null;
- }
-
- if (value instanceof Integer) {
- return (Integer) value;
- }
-
- if (value instanceof Number) {
- return ((Number) value).intValue();
- }
-
- if (value instanceof String) {
- String strVal = (String) value;
- if (StringUtil.isBlank(strVal)) {
- return null;
- }
-
- //convert values like "10,20" to 10
- if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
- strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
- }
-
- try {
- return Integer.parseInt(strVal);
- } catch (NumberFormatException ex) {
- logger.error("String change Integer Error,The error Str is:" + strVal);
- }
- }
-
- if (value instanceof Boolean) {
- return (Boolean) value ? 1 : 0;
- }
-
- throw new FlowWriteException("can not cast to int, value : " + value);
- }
-
- /**
- * Double check/convert
- *
- * @param value json value
- * @return double value or null
- */
- static Double castToDouble(Object value) {
-
- if (value instanceof Number) {
- return ((Number) value).doubleValue();
- }
-
- if (value instanceof String) {
- String strVal = (String) value;
-
- if (StringUtil.isBlank(strVal)) {
- return null;
- }
-
- //convert values like "10,20" to 10
- if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
- strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
- }
-
- try {
- return Double.parseDouble(strVal);
- } catch (NumberFormatException ex) {
- logger.error("String change Double Error,The error Str is:" + strVal);
- }
- }
-
- throw new FlowWriteException("can not cast to double, value : " + value);
- }
-
- /**
- * Long check/convert
- *
- * @param value json value
- * @return (Long)value or null
- */
- static Long castToLong(Object value) {
- if (value == null) {
- return null;
- }
-
- if (value instanceof Number) {
- return ((Number) value).longValue();
- }
-
- if (value instanceof String) {
- String strVal = (String) value;
-
- if (StringUtil.isBlank(strVal)) {
- return null;
- }
-
- //convert values like "10,20" to 10
- if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
- strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
- }
-
- try {
- return Long.parseLong(strVal);
- } catch (NumberFormatException ex) {
- logger.error("String change Long Error,The error Str is:" + strVal);
- }
- }
-
- throw new FlowWriteException("can not cast to long, value : " + value);
- }
-
-}
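
Note: the comma-truncation in the cast helpers above deserves a worked example; a minimal sketch assuming FORMAT_SPLITTER is ",":

    public class CastSketch {
        public static void main(String[] args) {
            // mirrors the String branch of castToLong
            String strVal = "10,20";
            if (strVal.contains(",")) {
                strVal = strVal.split(",")[0]; // "10,20" -> "10"
            }
            System.out.println(Long.parseLong(strVal)); // 10
            // mirrors the Number branch
            Number n = 3.7d;
            System.out.println(n.longValue()); // 3
        }
    }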
diff --git a/src/main/java/com/zdjizhi/utils/kafka/CertUtils.java b/src/main/java/com/zdjizhi/utils/kafka/CertUtils.java
deleted file mode 100644
index b09eedb..0000000
--- a/src/main/java/com/zdjizhi/utils/kafka/CertUtils.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.zdjizhi.utils.kafka;
-
-import com.zdjizhi.common.FlowWriteConfig;
-import org.apache.kafka.common.config.SslConfigs;
-
-import java.util.Properties;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.utils.kafka
- * @Description:
- * @date 2021/9/6 10:37
- */
-class CertUtils {
- static void chooseCert(String type, Properties properties) {
- switch (type) {
- case "SSL":
- properties.put("security.protocol", "SSL");
- properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
- properties.put("ssl.keystore.location", FlowWriteConfig.TOOLS_LIBRARY + "keystore.jks");
- properties.put("ssl.keystore.password", FlowWriteConfig.KAFKA_PIN);
- properties.put("ssl.truststore.location", FlowWriteConfig.TOOLS_LIBRARY + "truststore.jks");
- properties.put("ssl.truststore.password", FlowWriteConfig.KAFKA_PIN);
- properties.put("ssl.key.password", FlowWriteConfig.KAFKA_PIN);
- break;
- case "SASL":
- properties.put("security.protocol", "SASL_PLAINTEXT");
- properties.put("sasl.mechanism", "PLAIN");
- properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username="
- + FlowWriteConfig.KAFKA_USER + " password=" + FlowWriteConfig.KAFKA_PIN + ";");
- break;
- default:
- }
-
- }
-}
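
Note: PlainLoginModule requires its username/password values to be double-quoted; with the defaults in default_config.properties the JAAS line expands to:

    sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="galaxy2019";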
diff --git a/src/main/java/com/zdjizhi/utils/kafka/Consumer.java b/src/main/java/com/zdjizhi/utils/kafka/Consumer.java
deleted file mode 100644
index 1036fe9..0000000
--- a/src/main/java/com/zdjizhi/utils/kafka/Consumer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.zdjizhi.utils.kafka;
-
-import com.zdjizhi.common.FlowWriteConfig;
-import org.apache.flink.api.common.serialization.SimpleStringSchema;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.common.config.SslConfigs;
-
-
-import java.util.Properties;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.utils.kafka
- * @Description:
- * @date 2021/6/8 13:54
- */
-public class Consumer {
- private static Properties createConsumerConfig() {
- Properties properties = new Properties();
- properties.put("bootstrap.servers", FlowWriteConfig.INPUT_KAFKA_SERVERS);
- properties.put("group.id", FlowWriteConfig.GROUP_ID);
- properties.put("session.timeout.ms", "60000");
- properties.put("max.poll.records", "3000");
- properties.put("max.partition.fetch.bytes", "31457280");
- properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
- properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
-
- CertUtils.chooseCert(FlowWriteConfig.KAFKA_SOURCE_PROTOCOL,properties);
-
- return properties;
- }
-
- public static FlinkKafkaConsumer getKafkaConsumer() {
- FlinkKafkaConsumer kafkaConsumer = new FlinkKafkaConsumer<>(FlowWriteConfig.INPUT_KAFKA_TOPIC,
- new SimpleStringSchema(), createConsumerConfig());
-
- kafkaConsumer.setCommitOffsetsOnCheckpoints(false);
- kafkaConsumer.setStartFromGroupOffsets();
-
- return kafkaConsumer;
- }
-}
diff --git a/src/main/java/com/zdjizhi/utils/kafka/Producer.java b/src/main/java/com/zdjizhi/utils/kafka/Producer.java
deleted file mode 100644
index 58b3e7a..0000000
--- a/src/main/java/com/zdjizhi/utils/kafka/Producer.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package com.zdjizhi.utils.kafka;
-
-import com.zdjizhi.common.FlowWriteConfig;
-import org.apache.flink.api.common.serialization.SimpleStringSchema;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.common.config.SslConfigs;
-
-import java.util.Optional;
-import java.util.Properties;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.utils.kafka
- * @Description:
- * @date 2021/6/8 14:04
- */
-public class Producer {
-
- private static Properties createPercentProducerConfig() {
- Properties properties = new Properties();
- properties.put("bootstrap.servers", FlowWriteConfig.OUTPUT_KAFKA_SERVERS);
- properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
- properties.put("retries", FlowWriteConfig.RETRIES);
- properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
- properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
- properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
- properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
- properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
- properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
-
- CertUtils.chooseCert(FlowWriteConfig.KAFKA_SINK_PROTOCOL, properties);
-
- return properties;
- }
-
- private static Properties createProducerConfig() {
- Properties properties = new Properties();
- properties.put("bootstrap.servers", FlowWriteConfig.INPUT_KAFKA_SERVERS);
- properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
- properties.put("retries", FlowWriteConfig.RETRIES);
- properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
- properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
- properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
- properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
- properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
- properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
-
- CertUtils.chooseCert(FlowWriteConfig.KAFKA_SOURCE_PROTOCOL,properties);
-
- return properties;
- }
-
- public static FlinkKafkaProducer getPercentKafkaProducer() {
- FlinkKafkaProducer kafkaProducer = new FlinkKafkaProducer(
- FlowWriteConfig.PERCENT_KAFKA_TOPIC,
- new SimpleStringSchema(),
- createPercentProducerConfig(), Optional.empty());
-
- kafkaProducer.setLogFailuresOnly(false);
-
-// kafkaProducer.setWriteTimestampToKafka(true);
-
- return kafkaProducer;
- }
-
-
- public static FlinkKafkaProducer getKafkaProducer() {
- FlinkKafkaProducer kafkaProducer = new FlinkKafkaProducer(
- FlowWriteConfig.OUTPUT_KAFKA_TOPIC,
- new SimpleStringSchema(),
- createProducerConfig(), Optional.empty());
-
- kafkaProducer.setLogFailuresOnly(false);
-
-// kafkaProducer.setWriteTimestampToKafka(true);
-
- return kafkaProducer;
- }
-}
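
Note: neither the topology class nor its wiring appears in this diff, but the deleted consumer/producer pair was presumably used roughly like this (sketch; the job name and map step are placeholders):

    // import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.addSource(Consumer.getKafkaConsumer())      // FlinkKafkaConsumer with SimpleStringSchema
       .map(log -> log /* completion logic here */)
       .addSink(Producer.getKafkaProducer());       // FlinkKafkaProducer with SimpleStringSchema
    env.execute("log-completion-doublewrite");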
diff --git a/src/main/java/com/zdjizhi/utils/ordinary/MD5Utils.java b/src/main/java/com/zdjizhi/utils/ordinary/MD5Utils.java
deleted file mode 100644
index aa55951..0000000
--- a/src/main/java/com/zdjizhi/utils/ordinary/MD5Utils.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.zdjizhi.utils.ordinary;
-
-import org.apache.log4j.Logger;
-
-import java.security.MessageDigest;
-
-/**
- * MD5 conversion utility
- *
- * @author Administrator
- * @create 2018-08-13 15:11
- */
-public class MD5Utils {
- private static Logger logger = Logger.getLogger(MD5Utils.class);
-
- public static String md5Encode(String msg) throws Exception {
- try {
- byte[] msgBytes = msg.getBytes("utf-8");
- /*
- * Obtain a MessageDigest instance for the MD5 algorithm
- */
- MessageDigest md5 = MessageDigest.getInstance("MD5");
- /*
- * Update the digest with the given bytes
- */
- md5.update(msgBytes);
- /*
- * Finish the hash computation and obtain the digest
- */
- byte[] digest = md5.digest();
- /*
- * The two calls above are equivalent to
- * byte[] digest = md5.digest(msgBytes);
- */
- return byteArr2hexString(digest);
- } catch (Exception e) {
- logger.error("Error in conversion MD5! " + msg);
-// e.printStackTrace();
- return "";
- }
- }
-
- /**
- * Convert a byte array to its hex string form
- *
- * @param bys byte array
- * @return hex string
- */
- public static String byteArr2hexString(byte[] bys) {
- StringBuffer hexVal = new StringBuffer();
- int val = 0;
- for (byte by : bys) {
- //convert byte to int; negative bytes must be masked with 0xff
- val = ((int) by) & 0xff;
- if (val < 16) {
- hexVal.append("0");
- }
- hexVal.append(Integer.toHexString(val));
- }
-
- return hexVal.toString();
-
- }
-}
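
Note: a quick sanity check for the helper above; the expected output is the well-known MD5 of "hello":

    System.out.println(MD5Utils.md5Encode("hello"));
    // 5d41402abc4b2a76b9719d911017c592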
diff --git a/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java b/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java
deleted file mode 100644
index 08fa29b..0000000
--- a/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.zdjizhi.utils.system;
-
-import com.zdjizhi.utils.StringUtil;
-
-import java.io.IOException;
-import java.util.Locale;
-import java.util.Properties;
-
-
-/**
- * @author Administrator
- */
-
-public final class FlowWriteConfigurations {
-
- private static Properties propKafka = new Properties();
- private static Properties propService = new Properties();
-
-
- public static String getStringProperty(Integer type, String key) {
- if (type == 0) {
- return propService.getProperty(key);
- } else if (type == 1) {
- return propKafka.getProperty(key);
- } else {
- return null;
- }
-
- }
-
- public static Integer getIntProperty(Integer type, String key) {
- if (type == 0) {
- return Integer.parseInt(propService.getProperty(key));
- } else if (type == 1) {
- return Integer.parseInt(propKafka.getProperty(key));
- } else {
- return null;
- }
- }
-
- public static Long getLongProperty(Integer type, String key) {
- if (type == 0) {
- return Long.parseLong(propService.getProperty(key));
- } else if (type == 1) {
- return Long.parseLong(propKafka.getProperty(key));
- } else {
- return null;
- }
- }
-
- public static Boolean getBooleanProperty(Integer type, String key) {
- if (type == 0) {
- return StringUtil.equals(propService.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
- } else if (type == 1) {
- return StringUtil.equals(propKafka.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
- } else {
- return null;
- }
- }
-
- static {
- try {
- propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
- propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("default_config.properties"));
- } catch (IOException | RuntimeException e) {
- propKafka = null;
- propService = null;
- }
- }
-}
diff --git a/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java b/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java
deleted file mode 100644
index 2afab03..0000000
--- a/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java
+++ /dev/null
@@ -1,190 +0,0 @@
-package com.zdjizhi.utils.zookeeper;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import org.apache.zookeeper.*;
-import org.apache.zookeeper.data.Stat;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.Condition;
-import java.util.concurrent.locks.Lock;
-
-/**
- * @author qidaijie
- */
-public class DistributedLock implements Lock, Watcher {
- private static final Log logger = LogFactory.get();
-
- private ZooKeeper zk = null;
- /**
- * Root node
- */
- private final String ROOT_LOCK = "/locks";
- /**
- * The contended resource
- */
- private String lockName;
- /**
- * The previous lock node to wait on
- */
- private String waitLock;
- /**
- * The current lock node
- */
- private String currentLock;
- /**
- * Latch used to wait for the previous node to vanish
- */
- private CountDownLatch countDownLatch;
-
- private int sessionTimeout = 2000;
-
- private List exceptionList = new ArrayList();
-
- /**
- * Configure the distributed lock
- *
- * @param config ZooKeeper connection string
- * @param lockName contended resource name
- */
- public DistributedLock(String config, String lockName) {
- this.lockName = lockName;
- try {
- // connect to ZooKeeper
- zk = new ZooKeeper(config, sessionTimeout, this);
- Stat stat = zk.exists(ROOT_LOCK, false);
- if (stat == null) {
- // create the root node if it does not exist
- zk.create(ROOT_LOCK, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
- }
- } catch (IOException | InterruptedException | KeeperException e) {
- logger.error("Node already exists!");
- }
- }
-
- // node watcher
- @Override
- public void process(WatchedEvent event) {
- if (this.countDownLatch != null) {
- this.countDownLatch.countDown();
- }
- }
-
- @Override
- public void lock() {
- if (exceptionList.size() > 0) {
- throw new LockException(exceptionList.get(0));
- }
- try {
- if (this.tryLock()) {
- logger.info(Thread.currentThread().getName() + " " + lockName + " acquired the lock");
- } else {
- // wait for the lock
- waitForLock(waitLock, sessionTimeout);
- }
- } catch (InterruptedException | KeeperException e) {
- logger.error("获取锁异常" + e);
- }
- }
-
- @Override
- public boolean tryLock() {
- try {
- String splitStr = "_lock_";
- if (lockName.contains(splitStr)) {
- throw new LockException("锁名有误");
- }
- // create an ephemeral sequential node
- currentLock = zk.create(ROOT_LOCK + "/" + lockName + splitStr, new byte[0],
- ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
- // list all children
- List subNodes = zk.getChildren(ROOT_LOCK, false);
- // collect the lock nodes for this lockName
- List lockObjects = new ArrayList();
- for (String node : subNodes) {
- String tmpNode = node.split(splitStr)[0];
- if (tmpNode.equals(lockName)) {
- lockObjects.add(node);
- }
- }
- Collections.sort(lockObjects);
- // if the current node is the smallest, the lock is acquired
- if (currentLock.equals(ROOT_LOCK + "/" + lockObjects.get(0))) {
- return true;
- }
- // otherwise find the node immediately before ours
- String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
- waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
- } catch (InterruptedException | KeeperException e) {
- logger.error("Error during lock acquisition: " + e);
- }
- return false;
- }
-
-
- @Override
- public boolean tryLock(long timeout, TimeUnit unit) {
- try {
- if (this.tryLock()) {
- return true;
- }
- return waitForLock(waitLock, timeout);
- } catch (KeeperException | InterruptedException | RuntimeException e) {
- logger.error("判断是否锁定异常" + e);
- }
- return false;
- }
-
- // wait for the lock
- private boolean waitForLock(String prev, long waitTime) throws KeeperException, InterruptedException {
- Stat stat = zk.exists(ROOT_LOCK + "/" + prev, true);
-
- if (stat != null) {
- this.countDownLatch = new CountDownLatch(1);
- // wait on the latch; when the previous node disappears, process() counts it down, the wait ends, and the lock is taken
- this.countDownLatch.await(waitTime, TimeUnit.MILLISECONDS);
- this.countDownLatch = null;
- }
- return true;
- }
-
- @Override
- public void unlock() {
- try {
- zk.delete(currentLock, -1);
- currentLock = null;
- zk.close();
- } catch (InterruptedException | KeeperException e) {
- logger.error("关闭锁异常" + e);
- }
- }
-
- @Override
- public Condition newCondition() {
- return null;
- }
-
- @Override
- public void lockInterruptibly() throws InterruptedException {
- this.lock();
- }
-
-
- public class LockException extends RuntimeException {
- private static final long serialVersionUID = 1L;
-
- public LockException(String e) {
- super(e);
- }
-
- public LockException(Exception e) {
- super(e);
- }
- }
-
-}
\ No newline at end of file
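
Note: typical use of the removed lock, as a sketch (the connection string and resource name are placeholders):

    DistributedLock lock = new DistributedLock("zk1:2181", "worker-id");
    lock.lock();
    try {
        // critical section, e.g. claiming a snowflake worker id
    } finally {
        lock.unlock(); // also closes the ZooKeeper session
    }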
diff --git a/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java b/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java
deleted file mode 100644
index 9efbd46..0000000
--- a/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java
+++ /dev/null
@@ -1,140 +0,0 @@
-package com.zdjizhi.utils.zookeeper;
-
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import org.apache.zookeeper.*;
-import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Stat;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-
-/**
- * @author qidaijie
- * @Package cn.ac.iie.utils.zookeeper
- * @Description:
- * @date 2020/11/14 11:28
- */
-public class ZookeeperUtils implements Watcher {
- private static final Log logger = LogFactory.get();
- private static final int ID_MAX = 255;
-
- private ZooKeeper zookeeper;
-
- private static final int SESSION_TIME_OUT = 20000;
-
- private CountDownLatch countDownLatch = new CountDownLatch(1);
-
- @Override
- public void process(WatchedEvent event) {
- if (event.getState() == Event.KeeperState.SyncConnected) {
- countDownLatch.countDown();
- }
- }
-
-
- /**
- * Increment the worker-id counter stored at the node
- *
- * @param path node path
- */
- public int modifyNode(String path, String zookeeperIp) {
- createNode(path, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, zookeeperIp);
- int workerId = 0;
- try {
- connectZookeeper(zookeeperIp);
- Stat stat = zookeeper.exists(path, true);
- workerId = Integer.parseInt(getNodeDate(path));
- if (workerId > ID_MAX) {
- workerId = 0;
- zookeeper.setData(path, "1".getBytes(), stat.getVersion());
- } else {
- String result = String.valueOf(workerId + 1);
- if (stat != null) {
- zookeeper.setData(path, result.getBytes(), stat.getVersion());
- } else {
- logger.error("Node does not exist!,Can't modify");
- }
- }
- } catch (KeeperException | InterruptedException e) {
- logger.error("modify error Can't modify," + e);
- } finally {
- closeConn();
- }
- logger.warn("workerID is:" + workerId);
- return workerId;
- }
-
- /**
- * Connect to ZooKeeper
- *
- * @param host address
- */
- public void connectZookeeper(String host) {
- try {
- zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
- countDownLatch.await();
- } catch (IOException | InterruptedException e) {
- logger.error("Connection to the Zookeeper Exception! message:" + e);
- }
- }
-
- /**
- * Close the connection
- */
- public void closeConn() {
- try {
- if (zookeeper != null) {
- zookeeper.close();
- }
- } catch (InterruptedException e) {
- logger.error("Close the Zookeeper connection Exception! message:" + e);
- }
- }
-
- /**
- * Read node data
- *
- * @param path node path
- * @return data, or null on error
- */
- public String getNodeDate(String path) {
- String result = null;
- Stat stat = new Stat();
- try {
- byte[] resByte = zookeeper.getData(path, true, stat);
-
- result = StrUtil.str(resByte, "UTF-8");
- } catch (KeeperException | InterruptedException e) {
- logger.error("Get node information exception" + e);
- }
- return result;
- }
-
- /**
- * @param path path of the node to create
- * @param date node payload bytes
- * @param acls ACL policy
- */
- public void createNode(String path, byte[] date, List acls, String zookeeperIp) {
- try {
- connectZookeeper(zookeeperIp);
- Stat exists = zookeeper.exists(path, true);
- if (exists == null) {
- Stat existsSnowflakeld = zookeeper.exists("/Snowflake", true);
- if (existsSnowflakeld == null) {
- zookeeper.create("/Snowflake", null, acls, CreateMode.PERSISTENT);
- }
- zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
- } else {
- logger.warn("Node already exists ! Don't need to create");
- }
- } catch (KeeperException | InterruptedException e) {
- logger.error(e);
- } finally {
- closeConn();
- }
- }
-}
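
Note: a sketch of how the counter node hands out snowflake worker ids (the path below is illustrative):

    ZookeeperUtils zk = new ZookeeperUtils();
    int workerId = zk.modifyNode("/Snowflake/log-completion", "zk1:2181");
    // workerId cycles through 0..255 (ID_MAX); each call reads the current value and stores value+1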
diff --git a/src/main/log4j.properties b/src/main/log4j.properties
deleted file mode 100644
index 9d91936..0000000
--- a/src/main/log4j.properties
+++ /dev/null
@@ -1,25 +0,0 @@
-#Log4j
-log4j.rootLogger=info,console,file
-# Console appender settings
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.Threshold=info
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
-
-# File appender settings
-log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.file.Threshold=info
-log4j.appender.file.encoding=UTF-8
-log4j.appender.file.Append=true
-# Use a relative path; verify in testing that output lands under the application directory
-log4j.appender.file.file=${nis.root}/log/galaxy-name.log
-log4j.appender.file.DatePattern='.'yyyy-MM-dd
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
-log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
-# MyBatis config; com.nis.web.dao is the package containing the MyBatis mapper interfaces
-log4j.logger.com.nis.web.dao=debug
-# BoneCP datasource config
-log4j.category.com.jolbox=debug,console
-
-
diff --git a/src/main/logback.xml b/src/main/logback.xml
deleted file mode 100644
index a508b6b..0000000
--- a/src/main/logback.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- ${LOG_PATTERN}
-
-
-
-
-
-
- ${LOG_FILE_PATH}
-
- 30
-
-
- 20MB
-
-
-
-
- ${LOG_PATTERN}
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/src/test/java/com/zdjizhi/KafkaTest.java b/src/test/java/com/zdjizhi/KafkaTest.java
deleted file mode 100644
index 4b034a3..0000000
--- a/src/test/java/com/zdjizhi/KafkaTest.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package com.zdjizhi;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import org.apache.kafka.clients.producer.*;
-import org.apache.kafka.common.config.SslConfigs;
-
-import java.util.Properties;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi
- * @Description:
- * @date 2021/8/2 17:39
- */
-public class KafkaTest {
- private static final Log logger = LogFactory.get();
-
- public static void main(String[] args) {
- Properties properties = new Properties();
- properties.put("bootstrap.servers", "192.168.44.12:9091");
- properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
- properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
- properties.put("acks", "1");
-// properties.put("retries", DefaultProConfig.RETRIES);
-// properties.put("linger.ms", DefaultProConfig.LINGER_MS);
-// properties.put("request.timeout.ms", DefaultProConfig.REQUEST_TIMEOUT_MS);
-// properties.put("batch.size", DefaultProConfig.BATCH_SIZE);
-// properties.put("buffer.memory", DefaultProConfig.BUFFER_MEMORY);
-// properties.put("max.request.size", DefaultProConfig.MAX_REQUEST_SIZE);
-
- properties.put("security.protocol", "SSL");
-// properties.put("ssl.keystore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\kafka\\client.keystore.jks");
- properties.put("ssl.keystore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\keystore.jks");
- properties.put("ssl.keystore.password", "galaxy2019");
-// properties.put("ssl.truststore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\kafka\\client.truststore.jks");
- properties.put("ssl.truststore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\truststore.jks");
- properties.put("ssl.truststore.password", "galaxy2019");
- properties.put("ssl.key.password", "galaxy2019");
- properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
-
- Producer<String, String> producer = new KafkaProducer<>(properties);
-
- producer.send(new ProducerRecord<>("test", "hello!"), new Callback() {
- @Override
- public void onCompletion(RecordMetadata metadata, Exception exception) {
- if (exception != null) {
- logger.error("写入test出现异常", exception);
- }
- }
- });
-
- producer.close();
- }
-}
diff --git a/src/test/java/com/zdjizhi/LocationTest.java b/src/test/java/com/zdjizhi/LocationTest.java
deleted file mode 100644
index e7b2d15..0000000
--- a/src/test/java/com/zdjizhi/LocationTest.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.zdjizhi;
-
-import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.IpLookup;
-import org.junit.Test;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi
- * @Description:
- * @date 2021/8/18 11:34
- */
-public class LocationTest {
- private static IpLookup ipLookup = new IpLookup.Builder(false)
- .loadDataFileV4("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_v4.mmdb")
- .loadDataFileV6("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_v6.mmdb")
- .loadDataFilePrivateV4("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_private_v4.mmdb")
- .loadDataFilePrivateV6("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_private_v6.mmdb")
- .build();
-
- @Test
- public void IpLocationTest() {
- System.out.println(ipLookup.cityLookupDetail("24.241.112.0"));
- System.out.println(ipLookup.cityLookupDetail("1.1.1.1"));
- System.out.println(ipLookup.cityLookupDetail("192.168.50.58"));
- System.out.println(ipLookup.cityLookupDetail("2600:1700:9010::"));
- }
-}
diff --git a/src/test/java/com/zdjizhi/TestTime.java b/src/test/java/com/zdjizhi/TestTime.java
deleted file mode 100644
index 0f026c3..0000000
--- a/src/test/java/com/zdjizhi/TestTime.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package com.zdjizhi;
-
-import com.zdjizhi.utils.StringUtil;
-
-public class TestTime {
- public static void main(String[] args) {
- String s = null;
- String rrr = rrr(s);
- System.out.println(rrr);
- }
-
- public static String rrr(String url){
- if (StringUtil.isBlank(url)) {
- return "it is empty";
- } else {
- return "it is not empty";
- }
- }
-}
diff --git a/target/classes/com/zdjizhi/bean/FileMeta.class b/target/classes/com/zdjizhi/bean/FileMeta.class
deleted file mode 100644
index a392040..0000000
Binary files a/target/classes/com/zdjizhi/bean/FileMeta.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/bean/SourceList.class b/target/classes/com/zdjizhi/bean/SourceList.class
deleted file mode 100644
index ceb238c..0000000
Binary files a/target/classes/com/zdjizhi/bean/SourceList.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/common/FlowWriteConfig.class b/target/classes/com/zdjizhi/common/FlowWriteConfig.class
deleted file mode 100644
index eb00296..0000000
Binary files a/target/classes/com/zdjizhi/common/FlowWriteConfig.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/topology/LogFlowWriteTopology.class b/target/classes/com/zdjizhi/topology/LogFlowWriteTopology.class
deleted file mode 100644
index d330b63..0000000
Binary files a/target/classes/com/zdjizhi/topology/LogFlowWriteTopology.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/app/AppUtils$1.class b/target/classes/com/zdjizhi/utils/app/AppUtils$1.class
deleted file mode 100644
index c2518be..0000000
Binary files a/target/classes/com/zdjizhi/utils/app/AppUtils$1.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/app/AppUtils.class b/target/classes/com/zdjizhi/utils/app/AppUtils.class
deleted file mode 100644
index 3c6329f..0000000
Binary files a/target/classes/com/zdjizhi/utils/app/AppUtils.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/exception/FlowWriteException.class b/target/classes/com/zdjizhi/utils/exception/FlowWriteException.class
deleted file mode 100644
index d921793..0000000
Binary files a/target/classes/com/zdjizhi/utils/exception/FlowWriteException.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction$1.class b/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction$1.class
deleted file mode 100644
index bcc63fa..0000000
Binary files a/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction$1.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction.class b/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction.class
deleted file mode 100644
index e3fa790..0000000
Binary files a/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/functions/FilterNullFunction.class b/target/classes/com/zdjizhi/utils/functions/FilterNullFunction.class
deleted file mode 100644
index 24e4d91..0000000
Binary files a/target/classes/com/zdjizhi/utils/functions/FilterNullFunction.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/functions/MapCompletedFunction.class b/target/classes/com/zdjizhi/utils/functions/MapCompletedFunction.class
deleted file mode 100644
index 3380c97..0000000
Binary files a/target/classes/com/zdjizhi/utils/functions/MapCompletedFunction.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/general/FileEdit.class b/target/classes/com/zdjizhi/utils/general/FileEdit.class
deleted file mode 100644
index 56f046b..0000000
Binary files a/target/classes/com/zdjizhi/utils/general/FileEdit.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/general/SnowflakeId.class b/target/classes/com/zdjizhi/utils/general/SnowflakeId.class
deleted file mode 100644
index 6f66064..0000000
Binary files a/target/classes/com/zdjizhi/utils/general/SnowflakeId.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/general/TransFormMap.class b/target/classes/com/zdjizhi/utils/general/TransFormMap.class
deleted file mode 100644
index be05a4f..0000000
Binary files a/target/classes/com/zdjizhi/utils/general/TransFormMap.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/general/TransFormObject.class b/target/classes/com/zdjizhi/utils/general/TransFormObject.class
deleted file mode 100644
index dd96b16..0000000
Binary files a/target/classes/com/zdjizhi/utils/general/TransFormObject.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/general/TransFormTypeMap.class b/target/classes/com/zdjizhi/utils/general/TransFormTypeMap.class
deleted file mode 100644
index 3c0989a..0000000
Binary files a/target/classes/com/zdjizhi/utils/general/TransFormTypeMap.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/general/TransFunction.class b/target/classes/com/zdjizhi/utils/general/TransFunction.class
deleted file mode 100644
index cfa0b6e..0000000
Binary files a/target/classes/com/zdjizhi/utils/general/TransFunction.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/hbase/HBaseUtils$1.class b/target/classes/com/zdjizhi/utils/hbase/HBaseUtils$1.class
deleted file mode 100644
index 314f309..0000000
Binary files a/target/classes/com/zdjizhi/utils/hbase/HBaseUtils$1.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/hbase/HBaseUtils.class b/target/classes/com/zdjizhi/utils/hbase/HBaseUtils.class
deleted file mode 100644
index 87afd87..0000000
Binary files a/target/classes/com/zdjizhi/utils/hbase/HBaseUtils.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/http/HttpClientUtil.class b/target/classes/com/zdjizhi/utils/http/HttpClientUtil.class
deleted file mode 100644
index 903c4f8..0000000
Binary files a/target/classes/com/zdjizhi/utils/http/HttpClientUtil.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/json/JsonParseUtil.class b/target/classes/com/zdjizhi/utils/json/JsonParseUtil.class
deleted file mode 100644
index 7254b7a..0000000
Binary files a/target/classes/com/zdjizhi/utils/json/JsonParseUtil.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/json/JsonTypeUtils.class b/target/classes/com/zdjizhi/utils/json/JsonTypeUtils.class
deleted file mode 100644
index a543fe3..0000000
Binary files a/target/classes/com/zdjizhi/utils/json/JsonTypeUtils.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/json/TypeUtils.class b/target/classes/com/zdjizhi/utils/json/TypeUtils.class
deleted file mode 100644
index a5059fd..0000000
Binary files a/target/classes/com/zdjizhi/utils/json/TypeUtils.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/kafka/CertUtils.class b/target/classes/com/zdjizhi/utils/kafka/CertUtils.class
deleted file mode 100644
index 112eb6f..0000000
Binary files a/target/classes/com/zdjizhi/utils/kafka/CertUtils.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/kafka/Consumer.class b/target/classes/com/zdjizhi/utils/kafka/Consumer.class
deleted file mode 100644
index e43f50b..0000000
Binary files a/target/classes/com/zdjizhi/utils/kafka/Consumer.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/kafka/Producer.class b/target/classes/com/zdjizhi/utils/kafka/Producer.class
deleted file mode 100644
index fc282e3..0000000
Binary files a/target/classes/com/zdjizhi/utils/kafka/Producer.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/ordinary/MD5Utils.class b/target/classes/com/zdjizhi/utils/ordinary/MD5Utils.class
deleted file mode 100644
index 939b5d0..0000000
Binary files a/target/classes/com/zdjizhi/utils/ordinary/MD5Utils.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/system/FlowWriteConfigurations.class b/target/classes/com/zdjizhi/utils/system/FlowWriteConfigurations.class
deleted file mode 100644
index bdd4ab6..0000000
Binary files a/target/classes/com/zdjizhi/utils/system/FlowWriteConfigurations.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock$LockException.class b/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock$LockException.class
deleted file mode 100644
index c9e6e98..0000000
Binary files a/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock$LockException.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock.class b/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock.class
deleted file mode 100644
index 06610f2..0000000
Binary files a/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock.class and /dev/null differ
diff --git a/target/classes/com/zdjizhi/utils/zookeeper/ZookeeperUtils.class b/target/classes/com/zdjizhi/utils/zookeeper/ZookeeperUtils.class
deleted file mode 100644
index aadc30e..0000000
Binary files a/target/classes/com/zdjizhi/utils/zookeeper/ZookeeperUtils.class and /dev/null differ
diff --git a/target/classes/default_config.properties b/target/classes/default_config.properties
deleted file mode 100644
index 94c70a5..0000000
--- a/target/classes/default_config.properties
+++ /dev/null
@@ -1,50 +0,0 @@
-#number of producer retries
-retries=0
-
-#maximum time a batch may wait after creation before being sent, full or not
-linger.ms=10
-
-#if no response arrives before the timeout, the client will resend the request when necessary
-request.timeout.ms=30000
-
-#the producer sends records in batches; batch size in bytes (default: 16384)
-batch.size=262144
-
-#total memory available to the producer for buffering records
-#64M
-#buffer.memory=67108864
-#128M
-buffer.memory=134217728
-
-#maximum size of a single request sent to Kafka (default: 1048576)
-#5M
-#max.request.size=5242880
-#10M
-max.request.size=10485760
-
-#kafka SASL auth username
-kafka.user=admin
-
-#kafka SASL/SSL auth password
-kafka.pin=galaxy2019
-
-#kafka source connection timeout
-session.timeout.ms=60000
-
-#kafka source poll
-max.poll.records=3000
-
-#kafka source poll bytes
-max.partition.fetch.bytes=31457280
-
-#hbase table name
-hbase.table.name=subscriber_info
-
-#default mail charset
-mail.default.charset=UTF-8
-
-#kafka source protocol; SSL or SASL
-kafka.source.protocol=SASL
-
-#kafka sink protocol; SSL or SASL
-kafka.sink.protocol=
\ No newline at end of file
diff --git a/target/classes/service_flow_config.properties b/target/classes/service_flow_config.properties
deleted file mode 100644
index bb7a28b..0000000
--- a/target/classes/service_flow_config.properties
+++ /dev/null
@@ -1,72 +0,0 @@
-#--------------------------------Address configuration------------------------------#
-
-#management (input) Kafka servers
-input.kafka.servers=10.3.60.3:9094
-
-#output Kafka servers
-output.kafka.servers=10.3.45.126:6667,10.3.45.127:6667,10.3.45.128:6667
-
-#ZooKeeper address, used to configure log_id
-zookeeper.servers=10.3.60.3:2181
-
-#HBase ZooKeeper address, used to connect to HBase
-hbase.zookeeper.servers=10.3.60.3:2181
-
-#OOS address
-oos.servers=10.3.45.124:8057
-
-#--------------------------------HTTP / IP location library------------------------------#
-#IP location library path
-#tools.library=/opt/dat/
-tools.library=D:/dingweiku/dat/
-
-#gateway schema endpoint
-schema.http=http://10.3.60.3:9999/metadata/schema/v1/fields/proxy_event
-
-#gateway APP_ID lookup endpoint
-app.id.http=http://10.3.60.3:9999/open-api/appDicList
-
-#--------------------------------Kafka consumer group------------------------------#
-
-#Kafka input topic
-#input.kafka.topic=SESSION-RECORD
-input.kafka.topic=PROXY-EVENT
-
-#file metadata output topic
-output.kafka.topic=TRAFFIC-FILE-METADATA
-
-percent.kafka.topic=PROXY-EVENT
-
-#consumer group id; stores this consumer's offsets (name it after the topology) so the next read resumes without re-reading data
-group.id=session-record-log-20211018-A
-
-#producer compression: none or snappy
-producer.kafka.compression.type=none
-
-#producer ack
-producer.ack=1
-
-#--------------------------------Topology configuration------------------------------#
-
-#consumer parallelism
-consumer.parallelism=1
-
-#transform function parallelism
-transform.parallelism=1
-
-#data center id, range 0-63
-data.center.id.num=0
-
-#HBase cache refresh interval in seconds; 0 disables cache refresh
-hbase.tick.tuple.freq.secs=180
-
-#app_id cache refresh interval in seconds; 0 disables cache refresh
-app.tick.tuple.freq.secs=0
-
-#--------------------------------Defaults------------------------------#
-
-#default mail charset
-mail.default.charset=UTF-8
-
-#0 = output logs as-is without completion, 1 = complete (enrich) the logs
-log.need.complete=1
diff --git a/target/test-classes/com/zdjizhi/KafkaTest$1.class b/target/test-classes/com/zdjizhi/KafkaTest$1.class
deleted file mode 100644
index 94894f7..0000000
Binary files a/target/test-classes/com/zdjizhi/KafkaTest$1.class and /dev/null differ
diff --git a/target/test-classes/com/zdjizhi/KafkaTest.class b/target/test-classes/com/zdjizhi/KafkaTest.class
deleted file mode 100644
index 00972d6..0000000
Binary files a/target/test-classes/com/zdjizhi/KafkaTest.class and /dev/null differ
diff --git a/target/test-classes/com/zdjizhi/LocationTest.class b/target/test-classes/com/zdjizhi/LocationTest.class
deleted file mode 100644
index 8478cf7..0000000
Binary files a/target/test-classes/com/zdjizhi/LocationTest.class and /dev/null differ
diff --git a/target/test-classes/com/zdjizhi/TestTime.class b/target/test-classes/com/zdjizhi/TestTime.class
deleted file mode 100644
index 918dd86..0000000
Binary files a/target/test-classes/com/zdjizhi/TestTime.class and /dev/null differ