1. Adapt to TSG 23.07 and later: add data-transfer statistics metrics and push them to Pushgateway. (GAL-409)
2. The URL parameter domain is now taken from the common_server_domain field instead of the http_domain field. (GAL-410)
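Both changes are visible in the diffs below: the new SendCountProcess operator aggregates Tuple5 counters and pushes them to a Prometheus Pushgateway, and DealFileProcessFunction now reads the domain from common_server_domain. For orientation, the push pattern reduces to a few calls against the simpleclient_pushgateway 0.9.0 dependency this commit adds to pom.xml. A minimal standalone sketch — the gauge value, gateway address, and job name here are placeholders, not values taken from this commit:

import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Gauge;
import io.prometheus.client.exporter.PushGateway;

public class PushSketch {
    // A gauge registered against the default registry, mirroring SendCountProcess below.
    static final Gauge recordCount = Gauge.build()
            .name("recordCount").labelNames("ServerName", "Duration")
            .help("Input log volume per statistics window").register();

    public static void main(String[] args) throws Exception {
        recordCount.labels("recordCount", "5").set(42);
        // Push every collector in the default registry under one job name.
        // The address mirrors prometheus.pushgateway.address in the config diff below.
        PushGateway pg = new PushGateway("192.168.44.12:9091");
        pg.push(CollectorRegistry.defaultRegistry, "example-job");
    }
}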
pom.xml (50 lines changed)
@@ -5,10 +5,10 @@
     <modelVersion>4.0.0</modelVersion>
 
     <groupId>com.zdjizhi</groupId>
-    <artifactId>log-stream-doublewrite</artifactId>
-    <version>22.04-v3</version>
+    <artifactId>log-completion-schema</artifactId>
+    <version>230907</version>
 
-    <name>log-stream-doublewrite</name>
+    <name>log-completion-schema</name>
     <url>http://www.example.com</url>
 
     <repositories>
@@ -22,10 +22,10 @@
             <id>maven-ali</id>
             <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
             <releases>
-                <!--<enabled>true</enabled>-->
+                <enabled>true</enabled>
             </releases>
             <snapshots>
-                <!--<enabled>true</enabled>-->
+                <enabled>true</enabled>
                 <checksumPolicy>fail</checksumPolicy>
             </snapshots>
         </repository>
@@ -33,13 +33,17 @@
 
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <galaxy.tools.version>1.2</galaxy.tools.version>
+        <zookeeper.version>3.4.10</zookeeper.version>
         <flink.version>1.13.1</flink.version>
         <hadoop.version>2.7.1</hadoop.version>
         <kafka.version>1.0.0</kafka.version>
         <hbase.version>2.2.3</hbase.version>
         <nacos.version>1.2.0</nacos.version>
-        <zdjz.tools.version>1.0.8</zdjz.tools.version>
+        <fastjson.version>2.0.40</fastjson.version>
+        <hutool.version>5.7.17</hutool.version>
         <scope.type>provided</scope.type>
+        <!--<scope.type>compile</scope.type>-->
     </properties>
 
     <build>
@@ -114,25 +118,15 @@
     <dependencies>
 
         <dependency>
-            <groupId>com.zdjizhi</groupId>
+            <groupId>com.geedgenetworks</groupId>
             <artifactId>galaxy</artifactId>
-            <version>${zdjz.tools.version}</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-log4j12</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>log4j-over-slf4j</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
+            <version>${galaxy.tools.version}</version>
         </dependency>
 
         <dependency>
             <groupId>com.alibaba</groupId>
             <artifactId>fastjson</artifactId>
-            <version>1.2.70</version>
+            <version>${fastjson.version}</version>
         </dependency>
 
         <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-core -->
@@ -143,7 +137,6 @@
             <scope>${scope.type}</scope>
         </dependency>
 
-
         <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-streaming-java -->
         <dependency>
             <groupId>org.apache.flink</groupId>
@@ -180,7 +173,7 @@
         <dependency>
             <groupId>org.apache.zookeeper</groupId>
             <artifactId>zookeeper</artifactId>
-            <version>3.4.10</version>
+            <version>${zookeeper.version}</version>
             <exclusions>
                 <exclusion>
                     <artifactId>slf4j-log4j12</artifactId>
@@ -263,16 +256,10 @@
             <version>4.5.2</version>
         </dependency>
 
-        <dependency>
-            <groupId>com.jayway.jsonpath</groupId>
-            <artifactId>json-path</artifactId>
-            <version>2.4.0</version>
-        </dependency>
-
         <dependency>
             <groupId>cn.hutool</groupId>
             <artifactId>hutool-all</artifactId>
-            <version>5.7.17</version>
+            <version>${hutool.version}</version>
         </dependency>
 
         <dependency>
@@ -296,6 +283,13 @@
             <version>1.9.3</version>
         </dependency>
 
+
+        <dependency>
+            <groupId>io.prometheus</groupId>
+            <artifactId>simpleclient_pushgateway</artifactId>
+            <version>0.9.0</version>
+        </dependency>
+
     </dependencies>
 </project>
 
src/main/resources/default_config.properties (file name inferred from the resource loads in FlowWriteConfig below; the page header for this file was lost)
@@ -18,48 +18,86 @@ linger.ms=10
 request.timeout.ms=30000
 
 #producer都是按照batch进行发送的,批次大小,默认:16384
-batch.size=262144
+batch.size=1048576
 
 #Producer端用于缓存消息的缓冲区大小
 #128M
 buffer.memory=134217728
 
-#这个参数决定了每次发送给Kafka服务器请求的最大大小,默认1048576
-#10M
+#这个参数决定了每次发送给Kafka服务器请求的最大大小
+#default: 10485760 = 10M
 max.request.size=10485760
 
+#producer ack
+producer.ack=1
 #====================kafka default====================#
-#kafka SASL验证用户名-加密
+#kafka SASL/SSL username (encryption)
 kafka.user=nsyGpHKGFA4KW0zro9MDdw==
 
-#kafka SASL及SSL验证密码-加密
+#kafka SASL/SSL pin (encryption)
 kafka.pin=6MleDyA3Z73HSaXiKsDJ2k7Ys8YWLhEJ
 
-#生产者ack
-producer.ack=1
 #====================nacos default====================#
-#nacos username
-nacos.username=nacos
+#nacos username (encryption)
+nacos.username=kANxu/Zi5rBnZVxa5zAjrQ==
 
-#nacos password
-nacos.pin=nacos
+#nacos pin (encryption)
+nacos.pin=YPIBDIXjJUtVBjjk2op0Dg==
 
+#nacos connection timeout default 60s
+nacos.connection.timeout=60000
+
+#nacos table schema use group
+nacos.schema.group=Galaxy
+
+#nacos public use group
+nacos.public.group=DEFAULT_GROUP
+
+#public namespace名称
+nacos.public.namespace=
+
+#knowledgebase data id名称
+nacos.knowledgebase.data.id=knowledge_base.json
+
+#================= HTTP 配置 ====================#
+#max connection
+http.pool.max.connection=20
+
+#one route max connection
+http.pool.max.per.route=4
+
+#connect timeout(ms)
+http.pool.connect.timeout=30000
+
+#request timeout(ms)
+http.pool.request.timeout=90000
+
+#response timeout(ms)
+http.socket.timeout=90000
+
+#====================HBase Default conf====================#
+hbase.rpc.timeout=60000
+
-#nacos group
-nacos.group=Galaxy
 #====================Topology Default====================#
-#hbase table name
-hbase.table.name=tsg_galaxy:relation_framedip_account
+#hbase radius relation table name
+hbase.radius.table.name=tsg_galaxy:relation_framedip_account
 
-#邮件默认编码
-mail.default.charset=UTF-8
+#hbase gtpc relation table name
+hbase.gtpc.table.name=tsg_galaxy:relation_user_teid
 
-#0不做任何校验,1弱类型校验
+#0 no-operation parse JSON directly.
+#1 Check fields type with schema,Do some type conversion.
 log.transform.type=1
 
-#两个输出之间的最大时间(单位milliseconds)
-buffer.timeout=5000
+#Maximum time between two outputs(milliseconds)
+buffer.timeout=-1
-#====================临时配置-待删除====================#
-#网关APP_ID 获取接口
-app.id.http=http://192.168.44.67:9999/open-api/appDicList
-
-#app_id 更新时间,如填写0则不更新缓存
-app.tick.tuple.freq.secs=0
+#The gtpc data scan max rows,0 = no limit.
+hbase.gtpc.scan.max.rows=100000
+
+#The radius data scan max rows,0 = no limit.
+hbase.radius.scan.max.rows=100000
+
+#Whether vsys_id is used as the relationship key between gtpc and radius.
+#vsys or global
+data.relationship.model=vsys
src/main/resources/service_flow_config.properties (file name inferred from the resource loads in FlowWriteConfig below; the page header for this file was lost)
@@ -4,64 +4,50 @@ source.kafka.servers=192.168.44.12:9094
 
 #百分点输出kafka地址
 percent.sink.kafka.servers=192.168.44.12:9094
 
 #文件源数据topic输出kafka地址
-file.data.sink.kafka.servers=192.168.44.12:9094
+sink.file.data.kafka.servers=192.168.44.12:9094
 
-#zookeeper 地址 用于配置log_id
+#用于分配log_id、连接hbase的zookeeper地址
 zookeeper.servers=192.168.44.12:2181
 
-#hbase zookeeper地址 用于连接HBase
-hbase.zookeeper.servers=192.168.44.12:2181
+#hdfs地址用于获取定位库
+hdfs.servers=192.168.40.151:9000,192.168.40.152:9000
 
 #--------------------------------HTTP/定位库------------------------------#
-#定位库地址
-tools.library=C:\\workspace\\dat\\
+#工具库地址,存放秘钥文件等。
+tools.library=D:\\workerspace\\dat\\
 
 #--------------------------------nacos配置------------------------------#
 #nacos 地址
-nacos.server=192.168.44.12:8848
+nacos.server=192.168.44.67:8848
 
-#nacos namespace
-nacos.schema.namespace=P19
+#schema namespace名称
+nacos.schema.namespace=f507879a-8b1b-4330-913e-83d4fcdc14bb
 
-#nacos topology_common_config.properties namespace
-nacos.common.namespace=P19
+#schema data id名称
+nacos.schema.data.id=session_record.json
 
-#nacos data id
-nacos.data.id=security_event.json
-
-#------------------------------------OOS配置------------------------------------#
-#oos地址
-oos.servers=10.3.45.100:8057
-
 #--------------------------------Kafka消费/生产配置------------------------------#
 
 #kafka 接收数据topic
 source.kafka.topic=test
 
-#百分点对应的topic
-percent.kafka.topic=PERCENT-RECORD
-
-#文件源数据topic
-file.data.kafka.topic=test-file-data
-
+sink.percent.kafka.topic=PERCENT-RECORD
+sink.file.data.kafka.topic=test-file-data
 #读取topic,存储该spout id的消费offset信息,可通过该拓扑命名;具体存储offset的位置,确定下次读取不重复的数据;
-group.id=flinktest-1
+group.id=session-record-log-20211114-test-2
 
 #--------------------------------topology配置------------------------------#
 
 #consumer 并行度
 source.parallelism=1
 
 #转换函数并行度
 transform.parallelism=1
 
-#percent producer 并行度
-percent.sink.parallelism=1
+deal.file.parallelism=1
+sink.file.data.parallelism=1
+sink.percent.parallelism=1
 
-#filedata producer 并行度
-file.data.sink.parallelism=1
 #数据中心,取值范围(0-31)
 data.center.id.num=0
 
@@ -69,9 +55,13 @@ data.center.id.num=0
 hbase.tick.tuple.freq.secs=180
 
 #--------------------------------默认值配置------------------------------#
 
-#0不需要补全原样输出日志,1需要补全
-log.need.complete=1
-
 #生产者压缩模式 none or snappy
-producer.kafka.compression.type=none
+producer.kafka.compression.type=snappy
 
+#------------------------------------OOS配置------------------------------------#
+#oos地址
+oos.servers=10.3.45.100:8057
+#prometheus-httpserver
+prometheus.pushgateway.address=192.168.44.12:9091
+pushgateway.statistics.time=300
+deal.file.statistics.time=60
src/main/java/com/zdjizhi/common/CommonConfig.java (new file, 81 lines)
@@ -0,0 +1,81 @@
+package com.zdjizhi.common;
+
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.nacos.api.config.ConfigService;
+import com.alibaba.nacos.api.config.listener.Listener;
+import com.alibaba.nacos.api.exception.NacosException;
+import com.geedgenetworks.utils.StringUtil;
+import com.zdjizhi.tools.connections.nacos.NacosConnection;
+import com.zdjizhi.tools.general.ConfigurationsUtils;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Properties;
+import java.util.concurrent.Executor;
+
+/**
+ * @author qidaijie
+ * @Package com.zdjizhi.common
+ * @Description:
+ * @date 2023/7/2811:00
+ */
+public class CommonConfig {
+    private static final Log logger = LogFactory.get();
+
+    private static final String dataId = "olap_platform.properties";
+
+    private static final Properties configProperties;
+
+    public static String KNOWLEDGEBASE_TYPE_LIST;
+    public static String KNOWLEDGEBASE_NAME_LIST;
+    public static Boolean SCHEMA_UPDATE_ENABLED;
+
+    static {
+        configProperties = new Properties();
+        NacosConnection nacosConnection = new NacosConnection();
+        ConfigService configService = nacosConnection.getPublicService();
+        try {
+            String group = FlowWriteConfig.NACOS_PUBLIC_GROUP;
+            String config = configService.getConfigAndSignListener(dataId, group, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT, new Listener() {
+                @Override
+                public Executor getExecutor() {
+                    return null;
+                }
+
+                @Override
+                public void receiveConfigInfo(String configInfo) {
+                    if (StringUtil.isNotBlank(configInfo)) {
+                        logger.info("Reload the common config when it changes.");
+                        clearProperties();
+                        updateConfigProperties(configInfo);
+                    }
+                }
+            });
+
+            if (StringUtil.isNotBlank(config)) {
+                updateConfigProperties(config);
+            }
+
+        } catch (NacosException e) {
+            logger.error("Get common config from Nacos error,The exception message is :{}" + e);
+        }
+    }
+
+    private static void updateConfigProperties(String config) {
+        try {
+            configProperties.load(new StringReader(config));
+            KNOWLEDGEBASE_TYPE_LIST = ConfigurationsUtils.getStringProperty(configProperties, "etl.knowledgebase.type.list");
+            KNOWLEDGEBASE_NAME_LIST = ConfigurationsUtils.getStringProperty(configProperties, "etl.knowledgebase.name.list");
+            SCHEMA_UPDATE_ENABLED = ConfigurationsUtils.getBooleanProperty(configProperties, "etl.schema.update.enabled");
+        } catch (IOException e) {
+            logger.error("IOException:{}", e);
+        }
+    }
+
+    private static void clearProperties() {
+        configProperties.clear();
+    }
+
+}
src/main/java/com/zdjizhi/common/FlowWriteConfig.java
@@ -1,134 +1,164 @@
 package com.zdjizhi.common;
 
 
-import com.zdjizhi.utils.system.FlowWriteConfigurations;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.zdjizhi.tools.general.ConfigurationsUtils;
 import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
 /**
  * @author Administrator
  */
 public class FlowWriteConfig {
+    private static final Log logger = LogFactory.get();
+
+    private static final StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
+    private static final Properties propDefault = new Properties();
+    private static final Properties propService = new Properties();
+    private static Properties propfiletype = new Properties();
+    private static Map<String, String> fileTypeMap;
 
-    private static StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
 
     static {
         encryptor.setPassword("galaxy");
+        try {
+            propService.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
+            propDefault.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("default_config.properties"));
+            propfiletype.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("file_type.properties"));
+            fileTypeMap = new HashMap<String, String>((Map) propfiletype);
+        } catch (IOException | RuntimeException e) {
+            logger.error("Loading the configuration file Failed!,The error message is:{}" + e);
+            System.exit(0);
+        }
+    }
+
+    public static boolean judgeFileType(String filetype){
+        return fileTypeMap.containsKey(filetype);
     }
 
-    public static final int IF_PARAM_LENGTH = 3;
     /**
-     * 有此标识的字段为失效字段,不计入最终日志字段
+     * 默认的文件系统标识
      */
-    public static final String VISIBILITY = "disabled";
+    public static final String FILE_SYSTEM_TYPE = "hdfs";
 
     /**
      * 默认的切分符号
      */
     public static final String FORMAT_SPLITTER = ",";
-    /**
-     * 标识字段为日志字段还是schema指定字段
-     */
-    public static final String IS_JSON_KEY_TAG = "$.";
-    /**
-     * if函数连接分隔符
-     */
-    public static final String IF_CONDITION_SPLITTER = "=";
-    /**
-     * 默认的字符串解析编码
-     */
-    public static final String ENCODING = "UTF8";
+    public static final String GTPC_FAMILY_NAME = "gtp";
+    public static final String RADIUS_FAMILY_NAME = "radius";
+    public static final String DEFAULT_RELATIONSHIP_MODULE = "vsys";
 
     /**
      * Nacos
     */
-    public static final String NACOS_SERVER = FlowWriteConfigurations.getStringProperty(0, "nacos.server");
-    public static final String NACOS_SCHEMA_NAMESPACE = FlowWriteConfigurations.getStringProperty(0, "nacos.schema.namespace");
-    public static final String NACOS_COMMON_NAMESPACE = FlowWriteConfigurations.getStringProperty(0, "nacos.common.namespace");
-    public static final String NACOS_DATA_ID = FlowWriteConfigurations.getStringProperty(0, "nacos.data.id");
-    public static final String NACOS_PIN = FlowWriteConfigurations.getStringProperty(1, "nacos.pin");
-    public static final String NACOS_GROUP = FlowWriteConfigurations.getStringProperty(1, "nacos.group");
-    public static final String NACOS_USERNAME = FlowWriteConfigurations.getStringProperty(1, "nacos.username");
+    public static final String NACOS_SERVER = ConfigurationsUtils.getStringProperty(propService, "nacos.server");
+    public static final String NACOS_USERNAME = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "nacos.username"));
+    public static final String NACOS_PIN = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "nacos.pin"));
+    public static final Integer NACOS_CONNECTION_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "nacos.connection.timeout");
+
+    public static final String NACOS_SCHEMA_NAMESPACE = ConfigurationsUtils.getStringProperty(propService, "nacos.schema.namespace");
+    public static final String NACOS_SCHEMA_DATA_ID = ConfigurationsUtils.getStringProperty(propService, "nacos.schema.data.id");
+    public static final String NACOS_SCHEMA_GROUP = ConfigurationsUtils.getStringProperty(propDefault, "nacos.schema.group");
+
+    public static final String NACOS_PUBLIC_NAMESPACE = ConfigurationsUtils.getStringProperty(propDefault, "nacos.public.namespace");
+    public static final String NACOS_PUBLIC_GROUP = ConfigurationsUtils.getStringProperty(propDefault, "nacos.public.group");
+    public static final String NACOS_KNOWLEDGEBASE_DATA_ID = ConfigurationsUtils.getStringProperty(propDefault, "nacos.knowledgebase.data.id");
 
     /**
      * System config
     */
-    public static final Integer SOURCE_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "source.parallelism");
-    public static final Integer PERCENT_SINK_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "percent.sink.parallelism");
-    public static final Integer FILE_DATA_SINK_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "file.data.sink.parallelism");
-    public static final Integer TRANSFORM_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "transform.parallelism");
-    public static final Integer DATA_CENTER_ID_NUM = FlowWriteConfigurations.getIntProperty(0, "data.center.id.num");
-    public static final Integer LOG_NEED_COMPLETE = FlowWriteConfigurations.getIntProperty(0, "log.need.complete");
-    public static final String MAIL_DEFAULT_CHARSET = FlowWriteConfigurations.getStringProperty(1, "mail.default.charset");
-    public static final Integer LOG_TRANSFORM_TYPE = FlowWriteConfigurations.getIntProperty(1, "log.transform.type");
-    public static final Integer BUFFER_TIMEOUT = FlowWriteConfigurations.getIntProperty(1, "buffer.timeout");
+    public static final Integer SOURCE_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "source.parallelism");
+    public static final Integer TRANSFORM_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "transform.parallelism");
+    public static final Integer DATA_CENTER_ID_NUM = ConfigurationsUtils.getIntProperty(propService, "data.center.id.num");
+    public static final Integer LOG_TRANSFORM_TYPE = ConfigurationsUtils.getIntProperty(propDefault, "log.transform.type");
+    public static final String DATA_RELATIONSHIP_MODEL = ConfigurationsUtils.getStringProperty(propDefault, "data.relationship.model");
+    public static final Integer BUFFER_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "buffer.timeout");
+    public static final Integer DEAL_FILE_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "deal.file.parallelism");
+    public static final Integer SINK_FILE_DATA_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "sink.file.data.parallelism");
+    public static final Integer SINK_PERCENT_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "sink.percent.parallelism");
 
     /**
     * HBase
    */
-    public static final Integer HBASE_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "hbase.tick.tuple.freq.secs");
-    public static final String HBASE_TABLE_NAME = FlowWriteConfigurations.getStringProperty(1, "hbase.table.name");
+    public static final Integer HBASE_TICK_TUPLE_FREQ_SECS = ConfigurationsUtils.getIntProperty(propService, "hbase.tick.tuple.freq.secs");
+    public static final Integer HBASE_GTPC_SCAN_MAX_ROWS = ConfigurationsUtils.getIntProperty(propDefault, "hbase.gtpc.scan.max.rows");
+    public static final Integer HBASE_RADIUS_SCAN_MAX_ROWS = ConfigurationsUtils.getIntProperty(propDefault, "hbase.radius.scan.max.rows");
+    public static final String HBASE_RADIUS_TABLE_NAME = ConfigurationsUtils.getStringProperty(propDefault, "hbase.radius.table.name");
+    public static final String HBASE_GTPC_TABLE_NAME = ConfigurationsUtils.getStringProperty(propDefault, "hbase.gtpc.table.name");
+    public static final String HBASE_RPC_TIMEOUT = ConfigurationsUtils.getStringProperty(propDefault, "hbase.rpc.timeout");
+
+    /**
+     * HDFS
+     */
+    public static final String HDFS_SERVERS = ConfigurationsUtils.getStringProperty(propService, "hdfs.servers");
+
+    /**
+     * HTTP
+     */
+    public static final Integer HTTP_POOL_MAX_CONNECTION = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.max.connection");
+    public static final Integer HTTP_POOL_MAX_PER_ROUTE = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.max.per.route");
+    public static final Integer HTTP_POOL_REQUEST_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.request.timeout");
+    public static final Integer HTTP_POOL_CONNECT_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.connect.timeout");
+    public static final Integer HTTP_SOCKET_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "http.socket.timeout");
 
     /**
     * kafka common
    */
-    public static final String KAFKA_SASL_JAAS_USER = encryptor.decrypt(FlowWriteConfigurations.getStringProperty(1, "kafka.user"));
-    public static final String KAFKA_SASL_JAAS_PIN = encryptor.decrypt(FlowWriteConfigurations.getStringProperty(1, "kafka.pin"));
-
-    public static final String PERCENT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "percent.kafka.topic");
+    public static final String KAFKA_SASL_JAAS_USER = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "kafka.user"));
+    public static final String KAFKA_SASL_JAAS_PIN = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "kafka.pin"));
 
     /**
     * kafka source config
    */
-    public static final String SOURCE_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "source.kafka.topic");
-    public static final String GROUP_ID = FlowWriteConfigurations.getStringProperty(0, "group.id");
-    public static final String SESSION_TIMEOUT_MS = FlowWriteConfigurations.getStringProperty(1, "session.timeout.ms");
-    public static final String MAX_POLL_RECORDS = FlowWriteConfigurations.getStringProperty(1, "max.poll.records");
-    public static final String MAX_PARTITION_FETCH_BYTES = FlowWriteConfigurations.getStringProperty(1, "max.partition.fetch.bytes");
+    public static final String SOURCE_KAFKA_TOPIC = ConfigurationsUtils.getStringProperty(propService, "source.kafka.topic");
+    public static final String GROUP_ID = ConfigurationsUtils.getStringProperty(propService, "group.id");
+    public static final String SESSION_TIMEOUT_MS = ConfigurationsUtils.getStringProperty(propDefault, "session.timeout.ms");
+    public static final String MAX_POLL_RECORDS = ConfigurationsUtils.getStringProperty(propDefault, "max.poll.records");
+    public static final String MAX_PARTITION_FETCH_BYTES = ConfigurationsUtils.getStringProperty(propDefault, "max.partition.fetch.bytes");
 
     /**
     * kafka sink config
    */
-    public static final String FILE_DATA_SINK_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "file.data.kafka.topic");
-    public static final String PRODUCER_ACK = FlowWriteConfigurations.getStringProperty(1, "producer.ack");
-    public static final String PRODUCER_KAFKA_COMPRESSION_TYPE = FlowWriteConfigurations.getStringProperty(0, "producer.kafka.compression.type");
-    public static final String OUTPUT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "output.kafka.topic");
+    public static final String PRODUCER_ACK = ConfigurationsUtils.getStringProperty(propDefault, "producer.ack");
+    public static final String PRODUCER_KAFKA_COMPRESSION_TYPE = ConfigurationsUtils.getStringProperty(propService, "producer.kafka.compression.type");
+    public static final String PERCENT_SINK_KAFKA_SERVERS = ConfigurationsUtils.getStringProperty(propService,"percent.sink.kafka.servers");
+    public static final String SINK_FILE_DATA_KAFKA_SERVERS = ConfigurationsUtils.getStringProperty(propService,"sink.file.data.kafka.servers");
+    public static final String SINK_PERCENT_KAFKA_TOPIC = ConfigurationsUtils.getStringProperty(propService, "sink.percent.kafka.topic");
+    public static final String SINK_FILE_DATA_SINK_KAFKA_TOPIC = ConfigurationsUtils.getStringProperty(propService, "sink.file.data.kafka.topic");
 
     /**
     * connection kafka
    */
-    public static final String RETRIES = FlowWriteConfigurations.getStringProperty(1, "retries");
-    public static final String LINGER_MS = FlowWriteConfigurations.getStringProperty(1, "linger.ms");
-    public static final Integer REQUEST_TIMEOUT_MS = FlowWriteConfigurations.getIntProperty(1, "request.timeout.ms");
-    public static final Integer BATCH_SIZE = FlowWriteConfigurations.getIntProperty(1, "batch.size");
-    public static final Integer BUFFER_MEMORY = FlowWriteConfigurations.getIntProperty(1, "buffer.memory");
-    public static final Integer MAX_REQUEST_SIZE = FlowWriteConfigurations.getIntProperty(1, "max.request.size");
+    public static final String RETRIES = ConfigurationsUtils.getStringProperty(propDefault, "retries");
+    public static final String LINGER_MS = ConfigurationsUtils.getStringProperty(propDefault, "linger.ms");
+    public static final Integer REQUEST_TIMEOUT_MS = ConfigurationsUtils.getIntProperty(propDefault, "request.timeout.ms");
+    public static final Integer BATCH_SIZE = ConfigurationsUtils.getIntProperty(propDefault, "batch.size");
+    public static final Integer BUFFER_MEMORY = ConfigurationsUtils.getIntProperty(propDefault, "buffer.memory");
+    public static final Integer MAX_REQUEST_SIZE = ConfigurationsUtils.getIntProperty(propDefault, "max.request.size");
 
-    /**
-     * http
-     */
-    public static final String APP_ID_HTTP = FlowWriteConfigurations.getStringProperty(1, "app.id.http");
-    public static final Integer APP_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(1, "app.tick.tuple.freq.secs");
-
     /**
     * common config
    */
-    /**
-     * public static final String SOURCE_KAFKA_SERVERS = NacosConfig.getStringProperty("source.kafka.servers");
-     * public static final String SINK_KAFKA_SERVERS = NacosConfig.getStringProperty("sink.kafka.servers");
-     * public static final String ZOOKEEPER_SERVERS = NacosConfig.getStringProperty("zookeeper.servers");
-     * public static final String TOOLS_LIBRARY = NacosConfig.getStringProperty("tools.library");
-     * public static final String HBASE_ZOOKEEPER_SERVERS = NacosConfig.getStringProperty("hbase.zookeeper.servers");
-     */
-    public static final String SOURCE_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0,"source.kafka.servers");
-    public static final String PERCENT_SINK_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0,"percent.sink.kafka.servers");
-    public static final String ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0,"zookeeper.servers");
-    public static final String TOOLS_LIBRARY = FlowWriteConfigurations.getStringProperty(0,"tools.library");
-    public static final String HBASE_ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0,"hbase.zookeeper.servers");
-
-    public static final String FILE_DATA_SINK_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0,"file.data.sink.kafka.servers");
+    public static final String SOURCE_KAFKA_SERVERS = ConfigurationsUtils.getStringProperty(propService, "source.kafka.servers");;
+    public static final String ZOOKEEPER_SERVERS = ConfigurationsUtils.getStringProperty(propService, "zookeeper.servers");
+    public static final String TOOLS_LIBRARY = ConfigurationsUtils.getStringProperty(propService, "tools.library");
 
     /**
-     * oos
+     * OOS config
      */
-    public static final String OOS_SERVERS = FlowWriteConfigurations.getStringProperty(0, "oos.servers");
+    public static final String OOS_SERVERS = ConfigurationsUtils.getStringProperty(propService, "oos.servers");
+
+    public static final String PROMETHEUS_PUSHGATEWAY_ADDRESS = ConfigurationsUtils.getStringProperty(propService, "prometheus.pushgateway.address");
+
+    public static final Integer PUSHGATEWAY_STATISTICS_TIME = ConfigurationsUtils.getIntProperty(propService, "pushgateway.statistics.time");
+    public static final Integer DEAL_FILE_STATISTICS_TIME = ConfigurationsUtils.getIntProperty(propService, "deal.file.statistics.time");
+
 }
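The encrypted values in the properties diffs above (kafka.user, kafka.pin, nacos.username, nacos.pin) are consumed through the jasypt encryptor that FlowWriteConfig seeds with setPassword("galaxy"). A minimal sketch of the round trip for producing such a value — the plaintext here is a placeholder, not a real credential from this repository:

import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;

public class EncryptSketch {
    public static void main(String[] args) {
        StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
        encryptor.setPassword("galaxy");                   // same key FlowWriteConfig uses
        String cipher = encryptor.encrypt("example-user"); // value to place in the .properties file
        String plain = encryptor.decrypt(cipher);          // what FlowWriteConfig does at startup
        System.out.println(cipher + " -> " + plain);
    }
}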
src/main/java/com/zdjizhi/common/NacosConfig.java (deleted, 107 lines)
@@ -1,107 +0,0 @@
-package com.zdjizhi.common;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.nacos.api.NacosFactory;
-import com.alibaba.nacos.api.PropertyKeyConst;
-import com.alibaba.nacos.api.config.ConfigService;
-import com.alibaba.nacos.api.exception.NacosException;
-import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.system.FlowWriteConfigurations;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.Properties;
-
-/**
- * @author qidaijie
- * @Package com.zdjizhi.common
- * @Description:
- * @date 2022/3/189:36
- */
-@Deprecated
-public class NacosConfig {
-    private static final Log logger = LogFactory.get();
-    private static Properties propCommon = new Properties();
-    private static Properties propNacos = new Properties();
-
-    private static NacosConfig nacosConfig;
-
-    private static void getInstance() {
-        nacosConfig = new NacosConfig();
-    }
-
-    /**
-     * 构造函数-新
-     */
-    private NacosConfig() {
-        //获取连接
-        getConnection();
-    }
-
-    /**
-     * 初始化Nacos配置列表
-     */
-    private static void getConnection() {
-        try {
-            propNacos.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
-            propNacos.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_COMMON_NAMESPACE);
-            propNacos.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
-            propNacos.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
-            ConfigService configService = NacosFactory.createConfigService(propNacos);
-            String commonConfig = configService.getConfig("etl_connection_config.properties", FlowWriteConfig.NACOS_GROUP, 5000);
-            if (StringUtil.isNotBlank(commonConfig)) {
-                propCommon.load(new StringReader(commonConfig));
-            }
-        } catch (NacosException | IOException e) {
-            logger.error("Get topology run configuration error,The exception message is :" + e.getMessage());
-        }
-    }
-
-    /**
-     * 获取String类型配置
-     *
-     * @param key config key
-     * @return value
-     */
-    public static String getStringProperty(String key) {
-
-        if (nacosConfig == null) {
-            getInstance();
-        }
-        return propCommon.getProperty(key);
-
-    }
-
-    /**
-     * 获取Integer类型配置
-     *
-     * @param key config key
-     * @return value
-     */
-    public static Integer getIntegerProperty(String key) {
-        if (nacosConfig == null) {
-            getInstance();
-        }
-
-        return Integer.parseInt(propCommon.getProperty(key));
-
-    }
-
-
-    /**
-     * 获取Long类型配置
-     *
-     * @param key config key
-     * @return value
-     */
-    public static Long getLongProperty(String key) {
-        if (nacosConfig == null) {
-            getInstance();
-        }
-
-        return Long.parseLong(propCommon.getProperty(key));
-
-    }
-
-}
src/main/java/com/zdjizhi/common/pojo/FileMeta.java (moved from com/zdjizhi/bean/)
@@ -1,6 +1,7 @@
-package com.zdjizhi.bean;
+package com.zdjizhi.common.pojo;
 
-import com.alibaba.fastjson.JSONArray;
+
+import com.alibaba.fastjson2.JSONArray;
 
 public class FileMeta {
     private long common_log_id;
src/main/java/com/zdjizhi/common/pojo/KnowlegeBaseMeta.java (new file, 75 lines)
@@ -0,0 +1,75 @@
+package com.zdjizhi.common.pojo;
+
+/**
+ * @author qidaijie
+ * @Package com.zdjizhi.common.pojo
+ * @Description:
+ * @date 2023/5/2011:18
+ */
+public class KnowlegeBaseMeta {
+    private String id;
+    private String name;
+    private String sha256;
+    private String format;
+    private String path;
+
+    public KnowlegeBaseMeta(String id, String name, String sha256, String format, String path) {
+        this.id = id;
+        this.name = name;
+        this.sha256 = sha256;
+        this.format = format;
+        this.path = path;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getSha256() {
+        return sha256;
+    }
+
+    public void setSha256(String sha256) {
+        this.sha256 = sha256;
+    }
+
+    public String getFormat() {
+        return format;
+    }
+
+    public void setFormat(String format) {
+        this.format = format;
+    }
+
+    public String getPath() {
+        return path;
+    }
+
+    public void setPath(String path) {
+        this.path = path;
+    }
+
+    @Override
+    public String toString() {
+        return "KnowlegeBaseMeta{" +
+                "id='" + id + '\'' +
+                ", name='" + name + '\'' +
+                ", sha256='" + sha256 + '\'' +
+                ", format='" + format + '\'' +
+                ", path='" + path + '\'' +
+                '}';
+    }
+}
src/main/java/com/zdjizhi/common/pojo/SourceList.java (moved from com/zdjizhi/bean/)
@@ -1,4 +1,4 @@
-package com.zdjizhi.bean;
+package com.zdjizhi.common.pojo;
 
 public class SourceList {
     private String destination_oss_path;
src/main/java/com/zdjizhi/operator/count/SendCountProcess.java (new file, 97 lines)
@@ -0,0 +1,97 @@
+package com.zdjizhi.operator.count;
+
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.zdjizhi.common.FlowWriteConfig;
+import io.prometheus.client.CollectorRegistry;
+import io.prometheus.client.Gauge;
+import io.prometheus.client.exporter.PushGateway;
+import org.apache.flink.api.java.tuple.Tuple5;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.ProcessFunction;
+import org.apache.flink.util.Collector;
+
+import java.io.IOException;
+import java.util.Timer;
+import java.util.TimerTask;
+
+public class SendCountProcess extends ProcessFunction<Tuple5<Long, Long, Long, Long, Long>, String> {
+    private static final Log logger = LogFactory.get();
+    private long recordCount = 0L;
+    private long failedCount = 0L;
+    private long httpRequestCount = 0L;
+    private long httpResponseCount = 0L;
+    private long mailEmlCount = 0L;
+
+
+    static final Gauge recordCountsGauge = Gauge.build()
+            .name("recordCount").labelNames("ServerName", "Duration").help("The general intput log volume, the unit is slip").register();
+
+    static final Gauge failedCountGauge = Gauge.build()
+            .name("failedCount").labelNames("ServerName", "Duration").help("The general intput log volume, the unit is slip").register();
+
+    static final Gauge httpRequestCountGauge = Gauge.build()
+            .name("httpRequestCount").labelNames("ServerName", "Duration").help("The general intput log volume, the unit is slip").register();
+
+    static final Gauge httpResponseCountGauge = Gauge.build()
+            .name("httpResponseCount").labelNames("ServerName", "Duration").help("The general intput log volume, the unit is slip").register();
+    static final Gauge mailEmlCountCountGauge = Gauge.build()
+            .name("mailEmlCount").labelNames("ServerName", "Duration").help("The general intput log volume, the unit is slip").register();
+
+    @Override
+    public void open(Configuration parameters) {
+
+        Timer timer = new Timer();
+        //分钟级指标
+        timer.schedule(new TimerTask() {
+            @Override
+            public void run() {
+
+                if (recordCount > 0 || failedCount > 0 || httpRequestCount > 0 || httpResponseCount > 0 || mailEmlCount > 0) {
+                    recordCountsGauge.labels("recordCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(recordCount);
+                    failedCountGauge.labels("failedCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(failedCount);
+                    httpRequestCountGauge.labels("httpRequestCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(httpRequestCount);
+                    httpResponseCountGauge.labels("httpResponseCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(httpResponseCount);
+                    mailEmlCountCountGauge.labels("mailEmlCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(mailEmlCount);
+                    try {
+                        //将指标推送至pushgateway
+                        push();
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                    recordCount = 0;
+                    failedCount = 0;
+                    httpRequestCount = 0;
+                    httpResponseCount = 0;
+                    mailEmlCount = 0;
+
+                }
+            }
+        }, 0, FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME * 1000);
+
+    }
+
+    @Override
+    public void processElement(Tuple5<Long, Long, Long, Long, Long> value, Context ctx, Collector<String> out) {
+        try {
+            recordCount = recordCount + value.f0;
+            failedCount = failedCount + value.f1;
+            httpRequestCount = httpRequestCount + value.f2;
+            httpResponseCount = httpResponseCount + value.f3;
+            mailEmlCount = mailEmlCount + value.f4;
+
+        } catch (Exception e) {
+            logger.error("统计指标处理失败,原因为" + e);
+        }
+    }
+
+    public void push() throws IOException {
+        CollectorRegistry registry = CollectorRegistry.defaultRegistry;
+        PushGateway pg = new PushGateway(FlowWriteConfig.PROMETHEUS_PUSHGATEWAY_ADDRESS);
+        pg.push(registry, FlowWriteConfig.SOURCE_KAFKA_TOPIC);
+    }
+}
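SendCountProcess consumes the Tuple5 counters that DealFileProcessFunction (further down) emits on its dealFileMetircTag side output. The job-graph wiring itself is not part of this commit's visible diff; a plausible sketch, with the stream variable names assumed:

// Assumed wiring: mainStream is the output of DealFileProcessFunction.
SingleOutputStreamOperator<String> mainStream = fileDataStream
        .process(new DealFileProcessFunction())
        .setParallelism(FlowWriteConfig.DEAL_FILE_PARALLELISM);

// The side output carries Tuple5<record, failed, httpRequest, httpResponse, mailEml> counts.
mainStream.getSideOutput(DealFileProcessFunction.dealFileMetircTag)
        .process(new SendCountProcess())
        .name("pushgateway-metrics");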
src/main/java/com/zdjizhi/operator/filter/FilterNull.java (new file, 15 lines)
@@ -0,0 +1,15 @@
+package com.zdjizhi.operator.filter;
+
+import com.geedgenetworks.utils.StringUtil;
+import org.apache.flink.api.common.functions.FilterFunction;
+
+/**
+ * @author qidaijie
+ * @version 2021/5/27 15:01
+ */
+public class FilterNull implements FilterFunction<String> {
+    @Override
+    public boolean filter(String message) {
+        return StringUtil.isNotBlank(message);
+    }
+}
src/main/java/com/zdjizhi/operator/map/MapCompleted.java (new file, 45 lines)
@@ -0,0 +1,45 @@
+package com.zdjizhi.operator.map;
+
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSONObject;
+import com.geedgenetworks.utils.IpLookupV2;
+import com.zdjizhi.tools.general.IpLookupUtils;
+import com.zdjizhi.tools.transform.TransForm;
+import com.zdjizhi.tools.json.MetaUtil;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.ProcessFunction;
+import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
+import org.apache.flink.util.Collector;
+
+import java.util.Map;
+
+
+/**
+ * @author qidaijie
+ * @version 2021/5/27 15:01
+ */
+public class MapCompleted extends ProcessFunction<String,com.alibaba.fastjson2.JSONObject> {
+    private static final Log logger = LogFactory.get();
+
+    @Override
+    public void open(Configuration parameters) throws Exception {
+        super.open(parameters);
+    }
+
+
+    @Override
+    public void processElement(String message, ProcessFunction<String, com.alibaba.fastjson2.JSONObject>.Context ctx, Collector<com.alibaba.fastjson2.JSONObject> out) throws Exception {
+        try {
+            JSONObject jsonObject = JSONObject.parseObject(message);
+            jsonObject.put("common_ingestion_time", ctx.timestamp() / 1000);
+
+            MetaUtil.dropJsonField(jsonObject);
+            TransForm.transformLog(jsonObject);
+            out.collect(jsonObject);
+        } catch (RuntimeException e) {
+            logger.error("TransForm log failed ( The field type is not verified ),The exception is :{}\n The error Message is:{}", e, message);
+        }
+    }
+}
src/main/java/com/zdjizhi/operator/map/TypeMapCompleted.java (new file, 40 lines)
@@ -0,0 +1,40 @@
+package com.zdjizhi.operator.map;
+
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSONObject;
+import com.zdjizhi.tools.json.MetaUtil;
+import com.zdjizhi.tools.transform.TransForm;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.ProcessFunction;
+import org.apache.flink.util.Collector;
+
+
+/**
+ * @author qidaijie
+ * @version 2021/5/27 15:01
+ */
+public class TypeMapCompleted extends ProcessFunction<String, JSONObject> {
+    private static final Log logger = LogFactory.get();
+
+    @Override
+    public void open(Configuration parameters) throws Exception {
+        super.open(parameters);
+    }
+
+    @Override
+    public void processElement(String message, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) throws Exception {
+        try {
+            JSONObject jsonObject = JSONObject.parseObject(message);
+            jsonObject.put("common_ingestion_time", ctx.timestamp() / 1000);
+
+            TransForm.transformLog(jsonObject);
+            MetaUtil.typeTransform(jsonObject);
+            out.collect(jsonObject);
+        } catch (RuntimeException e) {
+            logger.error("TransForm logs failed( The field type is verified ),The exception is :{}\n The error Message is:{}", e, message);
+        }
+    }
+
+}
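MapCompleted and TypeMapCompleted implement the two modes that default_config.properties documents for log.transform.type (0: parse JSON directly; 1: check field types against the schema). Which class maps to which value is not shown in this diff; a sketch of how a job might select between them, with the surrounding stream variable assumed:

// Assumed selection between the two completion operators; the mapping of
// LOG_TRANSFORM_TYPE values to classes is an assumption, not confirmed by the diff.
SingleOutputStreamOperator<JSONObject> completed =
        FlowWriteConfig.LOG_TRANSFORM_TYPE == 1
                ? source.process(new TypeMapCompleted())  // type-checked transform
                : source.process(new MapCompleted());     // parse and transform directly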
@@ -1,32 +1,37 @@
|
|||||||
package com.zdjizhi.utils.functions;
|
package com.zdjizhi.operator.process;
|
||||||
|
|
||||||
|
|
||||||
import cn.hutool.log.Log;
|
import cn.hutool.log.Log;
|
||||||
import cn.hutool.log.LogFactory;
|
import cn.hutool.log.LogFactory;
|
||||||
import com.alibaba.fastjson.JSONArray;
|
import com.alibaba.fastjson2.JSONArray;
|
||||||
import com.alibaba.fastjson.JSONObject;
|
import com.alibaba.fastjson2.JSONObject;
|
||||||
import com.zdjizhi.bean.FileMeta;
|
import com.geedgenetworks.utils.StringUtil;
|
||||||
import com.zdjizhi.bean.SourceList;
|
import com.zdjizhi.common.FlowWriteConfig;
|
||||||
import com.zdjizhi.utils.JsonMapper;
|
import com.zdjizhi.common.pojo.FileMeta;
|
||||||
import com.zdjizhi.utils.StringUtil;
|
import com.zdjizhi.common.pojo.SourceList;
|
||||||
import com.zdjizhi.utils.general.FileEdit;
|
import com.zdjizhi.tools.general.FileEdit;
|
||||||
|
import org.apache.flink.api.java.tuple.Tuple5;
|
||||||
|
import org.apache.flink.configuration.Configuration;
|
||||||
import org.apache.flink.streaming.api.functions.ProcessFunction;
|
import org.apache.flink.streaming.api.functions.ProcessFunction;
|
||||||
import org.apache.flink.util.Collector;
|
import org.apache.flink.util.Collector;
|
||||||
import org.apache.flink.util.OutputTag;
|
import org.apache.flink.util.OutputTag;
|
||||||
|
|
||||||
import java.util.Map;
|
import java.util.Timer;
|
||||||
|
import java.util.TimerTask;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @author wangchengcheng
|
* @author wangchengcheng
|
||||||
* @Package com.zdjizhi.utils.functions
|
* @Package com.zdjizhi.utils.functions
|
||||||
* @Description:
|
* @Description:
|
||||||
* @date 2021/10/14
|
* @date 2023/0928
|
||||||
*/
|
*/
|
||||||
public class DealFileProcessFunction extends ProcessFunction<Map<String, Object>, String> {
|
public class DealFileProcessFunction extends ProcessFunction<JSONObject, String> {
|
||||||
private static final Log logger = LogFactory.get();
|
private static final Log logger = LogFactory.get();
|
||||||
|
|
||||||
|
public static final OutputTag<Tuple5<Long, Long, Long, Long, Long>> dealFileMetircTag = new OutputTag<Tuple5<Long, Long, Long, Long, Long>>("DealFileMetircTag") {
|
||||||
|
};
|
||||||
|
|
||||||
private String rpUrlValue;
|
private String rpUrlValue;
|
||||||
private String rqUrlValue;
|
private String rqUrlValue;
|
||||||
private String emailUrlValue;
|
private String emailUrlValue;
|
||||||
@@ -42,71 +47,108 @@ public class DealFileProcessFunction extends ProcessFunction<Map<String, Object>
     private String domain = null;
     private String schemaType = null;
+    private long recordCount = 0L;
+    private long failedCount = 0L;
+    private long httpRequestCount = 0L;
+    private long httpResponseCount = 0L;
+    private long mailEmlCount = 0L;
+    private boolean metricSendFlag = true;

     // Tag that marks the side output stream
     public static OutputTag<String> metaToKafa = new OutputTag<String>("metaToKafka") {
     };

+    @Override
+    public void open(Configuration parameters) throws Exception {
+        super.open(parameters);
+        Timer timer = new Timer();
+        // Register the timer
+        timer.schedule(new TimerTask() {
+            @Override
+            public void run() {
+                if (!metricSendFlag) {
+                    metricSendFlag = true;
+                }
+            }
+        }, 0, FlowWriteConfig.DEAL_FILE_STATISTICS_TIME * 1000);
+    }

     @SuppressWarnings("unchecked")
     @Override
-    public void processElement(Map<String, Object> message, Context context, Collector<String> collector) throws Exception {
+    public void processElement(JSONObject message, Context context, Collector<String> collector) throws Exception {
         try {
+            // Periodically push the metrics downstream
+            if (metricSendFlag) {
+                metricSendFlag = false;
+                if (recordCount > 0 || failedCount > 0 || httpRequestCount > 0 || httpResponseCount > 0 || mailEmlCount > 0) {
+                    context.output(dealFileMetircTag, Tuple5.of(recordCount, failedCount, httpRequestCount, httpResponseCount, mailEmlCount));
+                    recordCount = 0L;
+                    failedCount = 0L;
+                    httpRequestCount = 0;
+                    httpResponseCount = 0;
+                    mailEmlCount = 0L;
+                }
+            }
+            recordCount++;
             if (message.size() > 0) {
                 rpUrlValue = (String) message.get("http_response_body");
                 rqUrlValue = (String) message.get("http_request_body");
                 emailUrlValue = (String) message.get("mail_eml_file");

                 if (StringUtil.isNotBlank(rpUrlValue) || StringUtil.isNotBlank(rqUrlValue) || StringUtil.isNotBlank(emailUrlValue)) {
-                    cfgId = (long) message.getOrDefault("common_policy_id",0L);
+                    cfgId = (long) message.getOrDefault("common_policy_id", 0L);
                     sIp = (String) message.get("common_client_ip");
                     sPort = (int) message.get("common_client_port");
                     dIp = (String) message.get("common_server_ip");
                     dPort = (int) message.get("common_server_port");
                     foundTime = (long) message.get("common_recv_time");
                     schemaType = (String) message.get("common_schema_type");
-                    domain = (String)message.getOrDefault("http_domain","");
+                    domain = (String) message.getOrDefault("common_server_domain", "");
-                    account = (String)message.getOrDefault("common_subscribe_id","");
+                    account = (String) message.getOrDefault("common_subscribe_id", "");

                     FileMeta fileMeta = new FileMeta();
                     JSONArray jsonarray = new JSONArray();
                     if (StringUtil.isNotBlank(rqUrlValue)) {
-                        System.out.println(rqUrlValue);
-                        String fileId = FileEdit.getFileId(rqUrlValue,"_1");
+                        String fileId = FileEdit.getFileId(rqUrlValue, "_1");
                         message.put("http_request_body", FileEdit.getFileDownloadUrl(fileId));
                         SourceList request = new SourceList();
                         request.setSource_oss_path(rqUrlValue);
                         request.setDestination_oss_path(FileEdit.getFileUploadUrl(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rqUrlValue, schemaType, fileId));
                         jsonarray.add(request);
+                        httpRequestCount++;
                     }
                     if (StringUtil.isNotBlank(rpUrlValue)) {
-                        String fileId = FileEdit.getFileId(rpUrlValue,"_2");
+                        String fileId = FileEdit.getFileId(rpUrlValue, "_2");
                         message.put("http_response_body", FileEdit.getFileDownloadUrl(fileId));
                         SourceList response = new SourceList();
                         response.setSource_oss_path(rpUrlValue);
                         response.setDestination_oss_path(FileEdit.getFileUploadUrl(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rpUrlValue, schemaType, fileId));
                         jsonarray.add(response);
+                        httpResponseCount++;
                     }
                     if (StringUtil.isNotBlank(emailUrlValue)) {
-                        String fileId = FileEdit.getFileId(emailUrlValue,"_9");
+                        String fileId = FileEdit.getFileId(emailUrlValue, "_9");
                         message.put("mail_eml_file", FileEdit.getFileDownloadUrl(fileId));
                         SourceList emailFile = new SourceList();
                         emailFile.setSource_oss_path(emailUrlValue);
                         emailFile.setDestination_oss_path(FileEdit.getFileUploadUrl(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, emailUrlValue, schemaType, fileId));
                         jsonarray.add(emailFile);
+                        mailEmlCount++;
                     }
                     fileMeta.setSource_list(jsonarray);
                     fileMeta.setCommon_log_id((long) message.get("common_log_id"));
                     fileMeta.setCommon_recv_time(Integer.parseInt(message.get("common_recv_time").toString()));
                     fileMeta.setCommon_schema_type((String) message.get("common_schema_type"));
                     fileMeta.setProcessing_time((int) (System.currentTimeMillis() / 1000));

                     context.output(metaToKafa, JSONObject.toJSONString(fileMeta));
                 }
-                collector.collect(JsonMapper.toJsonString(message));
+                collector.collect(JSONObject.toJSONString(message));
             }
         } catch (RuntimeException e) {
             logger.error("Error while processing a log with unstructured fields: " + e + "\n" + message);
+            failedCount++;
         }
     }
 }
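For reference, a minimal sketch of how the new Tuple5 side output could be drained and pushed to a Pushgateway (the GAL-409 statistics path). This is not part of the commit: the sink class, gauge names, job label, and the Prometheus simpleclient dependency are assumptions.

import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Gauge;
import io.prometheus.client.exporter.PushGateway;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

public class DealFileMetricSink {
    public static void attach(SingleOutputStreamOperator<String> mainStream, final String gatewayAddress) {
        mainStream.getSideOutput(DealFileProcessFunction.dealFileMetircTag)
                .addSink(new SinkFunction<Tuple5<Long, Long, Long, Long, Long>>() {
                    @Override
                    public void invoke(Tuple5<Long, Long, Long, Long, Long> t, Context ctx) throws Exception {
                        // One gauge per counter carried in the Tuple5
                        CollectorRegistry registry = new CollectorRegistry();
                        Gauge.build().name("deal_file_record_count").help("records seen").register(registry).set(t.f0);
                        Gauge.build().name("deal_file_failed_count").help("records that failed").register(registry).set(t.f1);
                        Gauge.build().name("deal_file_http_request_count").help("http request bodies").register(registry).set(t.f2);
                        Gauge.build().name("deal_file_http_response_count").help("http response bodies").register(registry).set(t.f3);
                        Gauge.build().name("deal_file_mail_eml_count").help("mail eml files").register(registry).set(t.f4);
                        // Push one snapshot per emitted tuple; job name is an assumption
                        new PushGateway(gatewayAddress).pushAdd(registry, "log_completion_schema");
                    }
                });
    }
}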
@@ -0,0 +1,130 @@
package com.zdjizhi.tools.connections.hadoop;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

/**
 * @author qidaijie
 * @version 2022/11/2 17:57
 */
public class HadoopUtils {

    private static final Log logger = LogFactory.get();

    private static HadoopUtils hadoopUtils;

    private static FileSystem fileSystem;

    private static void getInstance() {
        hadoopUtils = new HadoopUtils();
    }

    /**
     * Constructor
     */
    private HadoopUtils() {
        // Acquire the connection
        getConnection();
    }

    private static void getConnection() {
        Configuration configuration = new Configuration();
        try {
            // Specify the user
            System.setProperty("HADOOP_USER_NAME", "etl");
            // HDFS settings
            configuration.set("fs.defaultFS", "hdfs://ns1");
            configuration.set("hadoop.proxyuser.root.hosts", "*");
            configuration.set("hadoop.proxyuser.root.groups", "*");
            configuration.set("dfs.nameservices", "ns1");
            configuration.set("dfs.ha.namenodes.ns1", "nn1,nn2");
            String[] servers = StringUtil.split(FlowWriteConfig.HDFS_SERVERS, FlowWriteConfig.FORMAT_SPLITTER);
            configuration.set("dfs.namenode.rpc-address.ns1.nn1", servers[0]);
            configuration.set("dfs.namenode.rpc-address.ns1.nn2", servers[1]);
            configuration.set("dfs.client.failover.proxy.provider.ns1", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
            // Create the FileSystem used to talk to HDFS
            fileSystem = FileSystem.get(configuration);
        } catch (IOException | RuntimeException e) {
            logger.error("Failed to create HDFS connection. message is: " + e.getMessage());
            e.printStackTrace();
        }
    }

//    /**
//     * Create the HDFS connection
//     */
//    static {
//        if (FlowWriteConfig.FILE_SYSTEM_TYPE.equals(FlowWriteConfig.KNOWLEDGEBASE_FILE_STORAGE_TYPE)) {
//            Configuration configuration = new Configuration();
//            try {
//                // Specify the user
//                System.setProperty("HADOOP_USER_NAME", "etl");
//                // HDFS settings
//                configuration.set("fs.defaultFS", "hdfs://ns1");
//                configuration.set("hadoop.proxyuser.root.hosts", "*");
//                configuration.set("hadoop.proxyuser.root.groups", "*");
//                configuration.set("dfs.nameservices", "ns1");
//                configuration.set("dfs.ha.namenodes.ns1", "nn1,nn2");
//                String[] servers = StringUtil.split(FlowWriteConfig.HDFS_SERVERS, FlowWriteConfig.FORMAT_SPLITTER);
//                configuration.set("dfs.namenode.rpc-address.ns1.nn1", servers[0]);
//                configuration.set("dfs.namenode.rpc-address.ns1.nn2", servers[1]);
//                configuration.set("dfs.client.failover.proxy.provider.ns1", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
//                // Create the FileSystem used to talk to HDFS
//                fileSystem = FileSystem.get(configuration);
//            } catch (IOException | RuntimeException e) {
//                logger.error("Failed to create HDFS connection. message is: " + e.getMessage());
//                e.printStackTrace();
//            }
//        }
//    }

    /**
     * Download a file from HDFS
     *
     * @param filePath file path
     * @return the file bytes
     */
    public static byte[] downloadFileByBytes(String filePath) {
        if (hadoopUtils == null) {
            getInstance();
        }

        try (FSDataInputStream open = fileSystem.open(new Path(filePath))) {
            byte[] bytes = new byte[open.available()];
            open.read(0, bytes, 0, open.available());
            return bytes;
        } catch (IOException e) {
            logger.error("An I/O exception occurred while downloading a file from HDFS. Message is :" + e.getMessage());
        }
        return null;
    }

    /**
     * Upload a file to HDFS
     *
     * @param filePath file path
     * @param bytes    file bytes
     */
    public static void uploadFileByBytes(String filePath, byte[] bytes) {
        if (hadoopUtils == null) {
            getInstance();
        }
        try (FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path(filePath), true)) {
            fsDataOutputStream.write(bytes);
//            fsDataOutputStream.flush();
        } catch (RuntimeException e) {
            logger.error("Uploading a file to HDFS failed. Message is :" + e.getMessage());
        } catch (IOException e) {
            logger.error("An I/O exception occurred while uploading a file to HDFS. Message is :" + e.getMessage());
        }
    }
}
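A minimal usage sketch (not part of the commit); the paths and payload are made up. Note that downloadFileByBytes sizes its buffer with FSDataInputStream.available(), which is only an estimate, so very large files may not round-trip completely.

import java.nio.charset.StandardCharsets;

public class HadoopUtilsExample {
    public static void main(String[] args) {
        byte[] payload = "hello hdfs".getBytes(StandardCharsets.UTF_8);
        // create(path, true) overwrites the target file if it already exists
        HadoopUtils.uploadFileByBytes("/tmp/example/hello.txt", payload);
        byte[] roundTrip = HadoopUtils.downloadFileByBytes("/tmp/example/hello.txt");
        System.out.println(roundTrip == null
                ? "download failed"
                : new String(roundTrip, StandardCharsets.UTF_8));
    }
}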
@@ -0,0 +1,192 @@
package com.zdjizhi.tools.connections.hbase;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
 * @author qidaijie
 * @version 2022/7/15 10:12
 */
class GtpCRelation {

    private static final Log logger = LogFactory.get();

    /**
     * Fetch the full set of GTP-C relationships
     */
    static void getAllGtpCRelation(Connection connection, Map<String, HashMap<String, Object>> gtpcMap) {
        long begin = System.currentTimeMillis();
        ResultScanner scanner = null;
        try {
            Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
            Scan scan = new Scan();
            if (FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS > 0) {
                scan.setLimit(FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS);
            }
            scanner = table.getScanner(scan);
            for (Result result : scanner) {
                int acctStatusType = GtpCRelation.getMsgType(result);
                if (acctStatusType == 1) {
                    String upLinkTeid = HBaseUtils.getTeid(result, "uplink_teid");
                    String downLinkTeid = HBaseUtils.getTeid(result, "downlink_teid");
                    String phoneNumber = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
                    String imsi = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
                    String imei = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
                    Long lastUpdateTime = HBaseUtils.getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");

                    HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);

                    if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                        String vsysId = HBaseUtils.getVsysId(result).trim();
                        updateCache(gtpcMap, upLinkTeid + vsysId, buildUserData, lastUpdateTime);
                        updateCache(gtpcMap, downLinkTeid + vsysId, buildUserData, lastUpdateTime);
                    } else {
                        updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
                        updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
                    }
                }
            }
            logger.warn("The number of GTP-C relationships obtained : " + gtpcMap.size());
            logger.warn("The time spent to obtain GTP-C relationships : " + (System.currentTimeMillis() - begin) + "ms");
        } catch (IOException | RuntimeException e) {
            logger.error("Fetching the USER-TEID relationships from HBase failed! message is :" + e);
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }
    }


    /**
     * Incrementally update the GTP-C relationships
     *
     * @param connection HBase connection
     * @param gtpcMap    GTP-C relationship cache
     * @param startTime  start time
     * @param endTime    end time
     */
    static void upgradeGtpCRelation(Connection connection, Map<String, HashMap<String, Object>> gtpcMap, Long startTime, Long endTime) {
        Long begin = System.currentTimeMillis();
        Table table = null;
        ResultScanner scanner = null;
        Scan scan = new Scan();
        try {
            table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
            scan.setTimeRange(startTime, endTime);
            if (FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS > 0) {
                scan.setLimit(FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS);
            }
            scanner = table.getScanner(scan);
            for (Result result : scanner) {
                int acctStatusType = GtpCRelation.getMsgType(result);
                String upLinkTeid = HBaseUtils.getTeid(result, "uplink_teid");
                String downLinkTeid = HBaseUtils.getTeid(result, "downlink_teid");
                if (acctStatusType == 1) {
                    String phoneNumber = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
                    String imsi = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
                    String imei = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
                    Long lastUpdateTime = HBaseUtils.getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");

                    HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);

                    if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                        String vsysId = HBaseUtils.getVsysId(result).trim();
                        updateCache(gtpcMap, upLinkTeid + vsysId, buildUserData, lastUpdateTime);
                        updateCache(gtpcMap, downLinkTeid + vsysId, buildUserData, lastUpdateTime);
                    } else {
                        updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
                        updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
                    }

                } else if (acctStatusType == 2) {
                    if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                        String vsysId = HBaseUtils.getVsysId(result).trim();
                        gtpcMap.remove(upLinkTeid + vsysId);
                        gtpcMap.remove(downLinkTeid + vsysId);
                    } else {
                        gtpcMap.remove(upLinkTeid);
                        gtpcMap.remove(downLinkTeid);
                    }
                }
            }
            Long end = System.currentTimeMillis();
            logger.warn("The current number of GTP-C relationships is: " + gtpcMap.keySet().size());
            logger.warn("The time used to update the GTP-C relationships is: " + (end - begin) + "ms");
        } catch (IOException | RuntimeException e) {
            logger.error("GTP-C relationship update exception, the content is:" + e);
        } finally {
            if (scanner != null) {
                scanner.close();
            }
            if (table != null) {
                try {
                    table.close();
                } catch (IOException e) {
                    logger.error("HBase Table Close ERROR! Exception message is:" + e);
                }
            }
        }
    }

    /**
     * Get the user's current online/offline status
     *
     * @param result row fetched from HBase
     * @return status: 1 = online, 2 = offline
     */
    private static int getMsgType(Result result) {
        boolean hasType = result.containsColumn(Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME), Bytes.toBytes("msg_type"));
        if (hasType) {
            return Bytes.toInt(result.getValue(Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME), Bytes.toBytes("msg_type")));
        } else {
            return 0;
        }
    }

    /**
     * Build the user info map
     *
     * @param phoneNumber phone number
     * @param imsi        subscriber identity
     * @param imei        device identity
     * @return user info
     */
    private static HashMap<String, Object> buildUserData(String phoneNumber, String imsi, String imei, Long lastUpdateTime) {
        HashMap<String, Object> tmpMap = new HashMap<>(4);
        tmpMap.put("common_phone_number", phoneNumber);
        tmpMap.put("common_imsi", imsi);
        tmpMap.put("common_imei", imei);
        tmpMap.put("last_update_time", lastUpdateTime);
        return tmpMap;
    }

    /**
     * Compare the timestamp of the newly fetched data against the cached entry;
     * only if the new data is more recent is the cache updated.
     *
     * @param gtpcMap        cache map
     * @param key            uplink/downlink TEID
     * @param userData       user info fetched from HBase
     * @param lastUpdateTime last update time of this user info
     */
    private static void updateCache(Map<String, HashMap<String, Object>> gtpcMap, String key, HashMap<String, Object> userData, Long lastUpdateTime) {
        if (StringUtil.isNotBlank(key)) {
            if (gtpcMap.containsKey(key)) {
                Long oldUpdateTime = Long.parseLong(gtpcMap.get(key).get("last_update_time").toString());
                if (lastUpdateTime > oldUpdateTime) {
                    gtpcMap.put(key, userData);
                }
            } else {
                gtpcMap.put(key, userData);
            }
        }
    }
}
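A standalone sketch (not part of the commit) of the last_update_time guard that updateCache implements; keys and values are made up.

import java.util.HashMap;
import java.util.Map;

public class UpdateCacheDemo {
    public static void main(String[] args) {
        Map<String, HashMap<String, Object>> cache = new HashMap<>();

        HashMap<String, Object> older = new HashMap<>(4);
        older.put("common_imsi", "46000xxxxxxxxx");
        older.put("last_update_time", 100L);

        HashMap<String, Object> newer = new HashMap<>(4);
        newer.put("common_imsi", "46000yyyyyyyyy");
        newer.put("last_update_time", 200L);

        put(cache, "teid-1", newer, 200L);
        put(cache, "teid-1", older, 100L); // ignored: 100 < 200

        System.out.println(cache.get("teid-1").get("common_imsi")); // prints 46000yyyyyyyyy
    }

    // Same rule as GtpCRelation.updateCache: keep the entry with the newest timestamp
    static void put(Map<String, HashMap<String, Object>> cache, String key, HashMap<String, Object> data, Long ts) {
        HashMap<String, Object> old = cache.get(key);
        if (old == null || ts > Long.parseLong(old.get("last_update_time").toString())) {
            cache.put(key, data);
        }
    }
}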
@@ -0,0 +1,216 @@
package com.zdjizhi.tools.connections.hbase;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * HBase utility class
 *
 * @author qidaijie
 */

public class HBaseUtils {
    private static final Log logger = LogFactory.get();
    private static Map<String, String> radiusMap = new ConcurrentHashMap<>(16);
    private static Map<String, HashMap<String, Object>> gtpcMap = new ConcurrentHashMap<>(16);
    private static Connection connection;
    private static Long time;

    private static HBaseUtils hBaseUtils;

    private static void getInstance() {
        hBaseUtils = new HBaseUtils();
    }


    /**
     * Constructor (new)
     */
    private HBaseUtils() {
        // Acquire the connection
        getConnection();
        // Pull the full data sets
        RadiusRelation.getAllRadiusRelation(connection, radiusMap);
        GtpCRelation.getAllGtpCRelation(connection, gtpcMap);
        // Refresh periodically
        updateCache();
    }

    private static void getConnection() {
        try {
            Configuration configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", FlowWriteConfig.ZOOKEEPER_SERVERS);
            configuration.set("hbase.client.retries.number", "1");
            configuration.set("hbase.client.pause", "100");
            configuration.set("hbase.rpc.timeout", FlowWriteConfig.HBASE_RPC_TIMEOUT);
            configuration.set("zookeeper.recovery.retry", "1");
            configuration.set("zookeeper.recovery.retry.intervalmill", "200");
            connection = ConnectionFactory.createConnection(configuration);
            time = System.currentTimeMillis();
            logger.warn("HBaseUtils got an HBase connection, now calling getAll().");
        } catch (IOException ioe) {
            logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
        } catch (RuntimeException e) {
            logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
        }
    }

    /**
     * Get a String value from HBase
     *
     * @param result     result set
     * @param familyName column family name
     * @param columnName column name
     * @return the value
     */
    static String getString(Result result, String familyName, String columnName) {
        byte[] familyBytes = Bytes.toBytes(familyName);
        byte[] columnBytes = Bytes.toBytes(columnName);
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            String data = Bytes.toString(result.getValue(familyBytes, columnBytes)).trim();
            if (StringUtil.isNotBlank(data)) {
                return data;
            }
        }

        return "";
    }

    /**
     * Get a Long value from HBase
     *
     * @param result     result set
     * @param familyName column family name
     * @param columnName column name
     * @return the value
     */
    static Long getLong(Result result, String familyName, String columnName) {
        byte[] familyBytes = Bytes.toBytes(familyName);
        byte[] columnBytes = Bytes.toBytes(columnName);
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            return Bytes.toLong(result.getValue(familyBytes, columnBytes));
        }
        return 0L;
    }

    /**
     * Get a TEID value from HBase
     *
     * @param result     result set
     * @param columnName column name
     * @return the value
     */
    static String getTeid(Result result, String columnName) {
        byte[] familyBytes = Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME);
        byte[] columnBytes = Bytes.toBytes(columnName);
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            String data = String.valueOf(Bytes.toLong(result.getValue(familyBytes, columnBytes))).trim();
            if (StringUtil.isNotBlank(data)) {
                return data;
            }
        }
        return "0";
    }


    /**
     * Get the vsys id from HBase
     *
     * @param result result set
     * @return the value
     */
    static String getVsysId(Result result) {
        byte[] familyBytes = Bytes.toBytes("common");
        byte[] columnBytes = Bytes.toBytes("vsys_id");
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            String data = String.valueOf(Bytes.toInt(result.getValue(familyBytes, columnBytes))).trim();
            if (StringUtil.isNotBlank(data)) {
                return data;
            }
        }
        return "1";
    }

    /**
     * Refresh the caches
     */
    private static void change() {
        if (hBaseUtils == null) {
            getInstance();
        }
        long nowTime = System.currentTimeMillis();
        RadiusRelation.upgradeRadiusRelation(connection, radiusMap, time - 1000, nowTime + 500);
        GtpCRelation.upgradeGtpCRelation(connection, gtpcMap, time - 1000, nowTime + 500);
        time = nowTime;
    }

    /**
     * Scheduled refresh: update the relationship caches at a fixed interval
     */
    private void updateCache() {
        ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
        executorService.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
                        change();
                    }
                } catch (RuntimeException e) {
                    logger.error("HBaseUtils failed to update the HBase cache===>{" + e + "}<===");
                }
            }
        }, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
    }

    /**
     * Get the RADIUS account
     *
     * @param clientIp client_ip
     * @return account
     */
    public static String getAccount(String clientIp) {
        if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
            if (hBaseUtils == null) {
                getInstance();
            }
            return radiusMap.getOrDefault(clientIp, "");
        }
        return "";
    }


    /**
     * Get the GTP-C user info
     *
     * @param teid TEID
     * @return user info map
     */
    public static HashMap<String, Object> getGtpData(String teid) {
        if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
            if (hBaseUtils == null) {
                getInstance();
            }
            return gtpcMap.get(teid);
        }
        return null;
    }
}
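A sketch (not part of the commit) of how the two caches might be consulted when enriching a record; the record layout and the common_teid field name are assumptions.

import java.util.HashMap;

public class EnrichmentExample {
    public static void enrich(HashMap<String, Object> record) {
        // RADIUS: framed_ip -> account
        String clientIp = (String) record.get("common_client_ip");
        String account = HBaseUtils.getAccount(clientIp);
        if (!account.isEmpty()) {
            record.put("common_subscribe_id", account);
        }

        // GTP-C: TEID -> phone number / IMSI / IMEI ("common_teid" is a hypothetical field)
        String teid = (String) record.get("common_teid");
        HashMap<String, Object> user = HBaseUtils.getGtpData(teid);
        if (user != null) {
            record.put("common_phone_number", user.get("common_phone_number"));
            record.put("common_imsi", user.get("common_imsi"));
            record.put("common_imei", user.get("common_imei"));
        }
    }
}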
@@ -0,0 +1,130 @@
package com.zdjizhi.tools.connections.hbase;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.Map;

/**
 * @author qidaijie
 * @version 2022/7/15 10:12
 */
class RadiusRelation {
    private static final Log logger = LogFactory.get();

    /**
     * Fetch the full set of RADIUS relationships
     */
    static void getAllRadiusRelation(Connection connection, Map<String, String> radiusMap) {
        long begin = System.currentTimeMillis();
        ResultScanner scanner = null;
        try {
            Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_RADIUS_TABLE_NAME));
            Scan scan = new Scan();
            if (FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS > 0) {
                scan.setLimit(FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS);
            }
            scanner = table.getScanner(scan);
            for (Result result : scanner) {
                int acctStatusType = RadiusRelation.getAcctStatusType(result);
                String framedIp = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "framed_ip").trim();
                String account = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "account").trim();
                if (acctStatusType == 1) {
                    if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                        String vsysId = HBaseUtils.getVsysId(result).trim();
                        radiusMap.put(framedIp + vsysId, account);
                    } else {
                        radiusMap.put(framedIp, account);
                    }
                }
            }
            logger.warn("The number of RADIUS relationships obtained : " + radiusMap.size());
            logger.warn("The time spent to obtain RADIUS relationships : " + (System.currentTimeMillis() - begin) + "ms");
        } catch (IOException | RuntimeException e) {
            logger.error("Fetching the framedIP-account relationships from HBase failed! message is :" + e);
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }
    }

    /**
     * Incrementally update the RADIUS relationships
     *
     * @param connection HBase connection
     * @param radiusMap  RADIUS relationship cache
     * @param startTime  start time
     * @param endTime    end time
     */
    static void upgradeRadiusRelation(Connection connection, Map<String, String> radiusMap, Long startTime, Long endTime) {
        Long begin = System.currentTimeMillis();
        Table table = null;
        ResultScanner scanner = null;
        Scan scan = new Scan();
        try {
            table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_RADIUS_TABLE_NAME));
            scan.setTimeRange(startTime, endTime);
            if (FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS > 0) {
                scan.setLimit(FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS);
            }
            scanner = table.getScanner(scan);
            for (Result result : scanner) {
                int acctStatusType = RadiusRelation.getAcctStatusType(result);
                String framedIp = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "framed_ip").trim();
                String account = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "account").trim();
                if (acctStatusType == 1) {
                    if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                        String vsysId = HBaseUtils.getVsysId(result).trim();
                        radiusMap.put(framedIp + vsysId, account);
                    } else {
                        radiusMap.put(framedIp, account);
                    }
                } else if (acctStatusType == 2) {
                    if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                        String vsysId = HBaseUtils.getVsysId(result).trim();
                        radiusMap.remove(framedIp + vsysId);
                    } else {
                        radiusMap.remove(framedIp);
                    }
                }
            }
            Long end = System.currentTimeMillis();
            logger.warn("The current number of RADIUS relationships is: " + radiusMap.keySet().size());
            logger.warn("The time used to update the RADIUS relationship is: " + (end - begin) + "ms");
        } catch (IOException | RuntimeException e) {
            logger.error("RADIUS relationship update exception, the content is:" + e);
        } finally {
            if (scanner != null) {
                scanner.close();
            }
            if (table != null) {
                try {
                    table.close();
                } catch (IOException e) {
                    logger.error("HBase Table Close ERROR! Exception message is:" + e);
                }
            }
        }
    }

    /**
     * Get the user's current online/offline status
     *
     * @param result row fetched from HBase
     * @return status: 1 = online, 2 = offline
     */
    private static int getAcctStatusType(Result result) {
        boolean hasType = result.containsColumn(Bytes.toBytes(FlowWriteConfig.RADIUS_FAMILY_NAME), Bytes.toBytes("acct_status_type"));
        if (hasType) {
            return Bytes.toInt(result.getValue(Bytes.toBytes(FlowWriteConfig.RADIUS_FAMILY_NAME), Bytes.toBytes("acct_status_type")));
        } else {
            return 1;
        }
    }
}
@@ -0,0 +1,257 @@
package com.zdjizhi.tools.connections.http;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.exception.FlowWriteException;
import com.geedgenetworks.utils.StringUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.*;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.*;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;

import javax.net.ssl.*;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.net.*;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;

public class HttpClientService {

    private static final Log log = LogFactory.get();

    /**
     * Before using SSL, override the verification methods to disable certificate checks.
     * Create the ConnectionManager and add the connection settings.
     *
     * @return a connection manager whose clients support HTTPS
     */
    private PoolingHttpClientConnectionManager getSslClientManager() {
        try {
            // Override the verification methods to skip SSL certificate checks
            X509TrustManager trustManager = new X509TrustManager() {
                @Override
                public X509Certificate[] getAcceptedIssuers() {
                    return null;
                }

                @Override
                public void checkClientTrusted(X509Certificate[] xcs, String str) {
                }

                @Override
                public void checkServerTrusted(X509Certificate[] xcs, String str) {
                }
            };
            SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
            ctx.init(null, new TrustManager[]{trustManager}, null);
            SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
            Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
                    .register("http", PlainConnectionSocketFactory.INSTANCE)
                    .register("https", socketFactory).build();
            // Create the ConnectionManager and add the connection settings
            PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
            // Maximum total connections
            connManager.setMaxTotal(FlowWriteConfig.HTTP_POOL_MAX_CONNECTION);
            // Maximum connections per route
            connManager.setDefaultMaxPerRoute(FlowWriteConfig.HTTP_POOL_MAX_PER_ROUTE);
            return connManager;
        } catch (KeyManagementException | NoSuchAlgorithmException e) {
            throw new FlowWriteException(e.getMessage());
        }
    }

    /**
     * Get an HTTP client
     *
     * @param socketTimeOut socket (response) timeout
     * @return the HTTP client
     */
    private CloseableHttpClient getHttpClient(int socketTimeOut) {
        // Build the request configuration
        RequestConfig requestConfig = RequestConfig.custom()
                // Timeout for leasing a connection from the pool
                .setConnectionRequestTimeout(FlowWriteConfig.HTTP_POOL_REQUEST_TIMEOUT)
                // Connect timeout
                .setConnectTimeout(FlowWriteConfig.HTTP_POOL_CONNECT_TIMEOUT)
                // Socket (response) timeout
                .setSocketTimeout(socketTimeOut)
                .build();

        /**
         * Retry handler, added as a safeguard in case timeouts do not take effect.
         * Returning false disables the retry;
         * the cases below decide whether a retry makes sense.
         */
        HttpRequestRetryHandler retry = (exception, executionCount, context) -> {
            if (executionCount >= 3) {// Give up after three retries
                return false;
            }
            if (exception instanceof NoHttpResponseException) {// Retry if the server dropped the connection
                return true;
            }
            if (exception instanceof SSLHandshakeException) {// Do not retry SSL handshake failures
                return false;
            }
            if (exception instanceof SocketTimeoutException) {
                if (exception.getMessage().contains("Read timed out")) {
                    return false;
                }
            }
            if (exception instanceof UnknownHostException) {// Target host unreachable
                return false;
            }
            if (exception instanceof ConnectTimeoutException) {// Connection refused or timed out
                return false;
            }
            if (exception instanceof SSLException) {// SSL error
                return false;
            }
            if (exception instanceof InterruptedIOException) {// Timeout
                return true;
            }


            HttpClientContext clientContext = HttpClientContext.adapt(context);
            HttpRequest request = clientContext.getRequest();
            // Retry if the request is idempotent
            if (!(request instanceof HttpEntityEnclosingRequest)) {
                return true;
            }
            return false;
        };


        ConnectionKeepAliveStrategy myStrategy = (response, context) -> {
            HeaderElementIterator it = new BasicHeaderElementIterator
                    (response.headerIterator(HTTP.CONN_KEEP_ALIVE));
            while (it.hasNext()) {
                HeaderElement he = it.nextElement();
                String param = he.getName();
                String value = he.getValue();
                if (value != null && param.equalsIgnoreCase("timeout")) {
                    return Long.parseLong(value) * 1000;
                }
            }
            return 60 * 1000;// Default to 60s when the server does not negotiate a keep-alive
        };

        // Build the HttpClient
        return HttpClients.custom()
                // Apply the request timeouts
                .setDefaultRequestConfig(requestConfig)
                // Apply the retry handler
                .setRetryHandler(retry)
                .setKeepAliveStrategy(myStrategy)
                // Set the pooling connection manager
                .setConnectionManager(getSslClientManager())
                .build();
    }

    public InputStream httpGetInputStream(String url, int socketTimeout, Header... headers) {
        InputStream result = null;
        // Get the client
        CloseableHttpClient httpClient = getHttpClient(socketTimeout);
        // Build the GET request
        HttpGet httpGet = new HttpGet(url);
        if (StringUtil.isNotEmpty(headers)) {
            for (Header h : headers) {
                httpGet.addHeader(h);
            }
        }
        CloseableHttpResponse response = null;

        try {
            // Execute the request
            response = httpClient.execute(httpGet);
            // Read the response entity
            result = IOUtils.toBufferedInputStream(response.getEntity().getContent());
            // Consume the response
            EntityUtils.consume(response.getEntity());
        } catch (ClientProtocolException e) {
            log.error("current file: {},Protocol error:{}", url, e.getMessage());
        } catch (ParseException e) {
            log.error("current file: {}, Parser error:{}", url, e.getMessage());
        } catch (IOException e) {
            log.error("current file: {},IO error:{}", url, e.getMessage());
        } finally {
            if (null != response) {
                try {
                    EntityUtils.consume(response.getEntity());
                    response.close();
                } catch (IOException e) {
                    log.error("Release Connection error:{}", e.getMessage());
                }
            }
            return result;
        }
    }


    public byte[] httpGetByte(String url, int socketTimeout, Header... headers) {
        byte[] result = null;
        // Get the client
        CloseableHttpClient httpClient = getHttpClient(socketTimeout);
        // Build the GET request
        HttpGet httpGet = new HttpGet(url);
        if (StringUtil.isNotEmpty(headers)) {
            for (Header h : headers) {
                httpGet.addHeader(h);
            }
        }
        CloseableHttpResponse response = null;

        try {
            // Execute the request
            response = httpClient.execute(httpGet);
            // Read the response entity
            result = IOUtils.toByteArray(response.getEntity().getContent());
            // Consume the response
            EntityUtils.consume(response.getEntity());
        } catch (ClientProtocolException e) {
            log.error("current file: {},Protocol error:{}", url, e.getMessage());
        } catch (ParseException e) {
            log.error("current file: {}, Parser error:{}", url, e.getMessage());
        } catch (IOException e) {
            log.error("current file: {},IO error:{}", url, e.getMessage());
        } finally {
            if (null != response) {
                try {
                    EntityUtils.consume(response.getEntity());
                    response.close();
                } catch (IOException e) {
                    log.error("Release Connection error:{}", e.getMessage());
                }
            }
            return result;
        }
    }
}
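A usage sketch (not part of the commit): fetch a file over HTTPS with a 30-second socket timeout; the URL and token are placeholders.

import org.apache.http.Header;
import org.apache.http.message.BasicHeader;

public class HttpClientServiceExample {
    public static void main(String[] args) {
        HttpClientService client = new HttpClientService();
        // Optional request header; the token value is hypothetical
        Header auth = new BasicHeader("Authorization", "Bearer <token>");
        byte[] body = client.httpGetByte("https://example.com/kb/ip_v4_built_in.mmdb", 30_000, auth);
        System.out.println(body == null ? "request failed" : body.length + " bytes");
    }
}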
@@ -1,4 +1,4 @@
-package com.zdjizhi.utils.kafka;
+package com.zdjizhi.tools.connections.kafka;

 import com.zdjizhi.common.FlowWriteConfig;
 import org.apache.kafka.common.config.SslConfigs;
@@ -7,9 +7,7 @@ import java.util.Properties;

 /**
  * @author qidaijie
- * @Package com.zdjizhi.utils.kafka
- * @Description:
- * @date 2021/9/6 10:37
+ * @version 2021/9/6 10:37
  */
 class CertUtils {
     /**
@@ -1,25 +1,15 @@
-package com.zdjizhi.utils.kafka;
+package com.zdjizhi.tools.connections.kafka;

 import com.zdjizhi.common.FlowWriteConfig;
 import org.apache.flink.api.common.serialization.SimpleStringSchema;
-import org.apache.flink.api.common.serialization.TypeInformationSerializationSchema;
-import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
-import org.apache.flink.api.common.typeutils.base.StringSerializer;
-import org.apache.flink.connector.kafka.source.KafkaSource;
-import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
-import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
 import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;

-import java.util.Map;
 import java.util.Properties;

 /**
  * @author qidaijie
- * @Package com.zdjizhi.utils.kafka
- * @Description:
- * @date 2021/6/8 13:54
+ * @version 2021/6/8 13:54
  */
 public class KafkaConsumer {
     private static Properties createConsumerConfig() {
@@ -35,25 +25,6 @@ public class KafkaConsumer {
         return properties;
     }

-    /**
-     * Custom Kafka deserialization that adds the Kafka timestamp to each record.
-     *
-     * @return kafka logs -> map
-     */
-    @SuppressWarnings("unchecked")
-    public static FlinkKafkaConsumer<Map<String, Object>> myDeserializationConsumer() {
-        FlinkKafkaConsumer<Map<String, Object>> kafkaConsumer = new FlinkKafkaConsumer<>(FlowWriteConfig.SOURCE_KAFKA_TOPIC,
-                new TimestampDeserializationSchema(), createConsumerConfig());
-
-        // Commit offsets to Kafka as checkpoints complete
-        kafkaConsumer.setCommitOffsetsOnCheckpoints(true);
-
-        // Start from the consumer group's current offsets
-        kafkaConsumer.setStartFromGroupOffsets();
-
-        return kafkaConsumer;
-    }
-
     /**
      * Stock Kafka deserialization
      *
@@ -0,0 +1,58 @@
package com.zdjizhi.tools.connections.kafka;

import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Optional;
import java.util.Properties;

/**
 * @author qidaijie
 * @version 2021/6/8 14:04
 */
public class KafkaProducer {

    private static Properties createProducerConfig(String kafkaServers) {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", kafkaServers);
        properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
        properties.put("retries", FlowWriteConfig.RETRIES);
        properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
        properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
        properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
        properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
        properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
        properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);

        CertUtils.chooseCert(kafkaServers, properties);

        return properties;
    }

    public static FlinkKafkaProducer<String> getPercentKafkaProducer() {
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
                FlowWriteConfig.SINK_PERCENT_KAFKA_TOPIC,
                new SimpleStringSchema(),
                createProducerConfig(FlowWriteConfig.PERCENT_SINK_KAFKA_SERVERS), Optional.empty());

        // Let the producer log failures instead of catching and rethrowing them
        kafkaProducer.setLogFailuresOnly(true);

        return kafkaProducer;
    }


    public static FlinkKafkaProducer<String> getTrafficFileMetaKafkaProducer() {
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
                FlowWriteConfig.SINK_FILE_DATA_SINK_KAFKA_TOPIC,
                new SimpleStringSchema(),
                createProducerConfig(FlowWriteConfig.SINK_FILE_DATA_KAFKA_SERVERS), Optional.empty());

        // Let the producer log failures instead of catching and rethrowing them
        kafkaProducer.setLogFailuresOnly(true);

        return kafkaProducer;
    }
}
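A sketch (not part of the commit) of wiring the file-meta producer as a Flink sink; the stream variable is assumed to carry the JSON emitted on the metaToKafa side output.

import org.apache.flink.streaming.api.datastream.DataStream;

public class SinkWiringExample {
    public static void wire(DataStream<String> fileMetaJson) {
        // Serialized FileMeta records go to the file-data topic
        fileMetaJson.addSink(KafkaProducer.getTrafficFileMetaKafkaProducer())
                .name("traffic-file-meta-sink");
    }
}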
@@ -0,0 +1,53 @@
package com.zdjizhi.tools.connections.nacos;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;

import java.util.Properties;

/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.connections.nacos
 * @Description:
 * @date 2023/7/27 14:49
 */
public class NacosConnection {
    private static final Log logger = LogFactory.get();

    private ConfigService configService;


    public ConfigService getSchemaService() {
        Properties properties = new Properties();
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
        properties.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_SCHEMA_NAMESPACE);
        properties.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
        properties.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
        try {
            configService = NacosFactory.createConfigService(properties);
        } catch (NacosException e) {
            logger.error("NacosException:{}", e);
        }
        return configService;
    }


    public ConfigService getPublicService() {
        Properties properties = new Properties();
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
        properties.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_PUBLIC_NAMESPACE);
        properties.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
        properties.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
        try {
            configService = NacosFactory.createConfigService(properties);
        } catch (NacosException e) {
            logger.error("NacosException:{}", e);
        }
        return configService;
    }
}
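For reference, a minimal sketch (not part of the commit) of a one-shot config read through getPublicService(), reusing the same data id, group, and timeout constants the IpLookupUtils listener uses.

import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;

public class NacosConnectionExample {
    public static void main(String[] args) throws NacosException {
        ConfigService service = new NacosConnection().getPublicService();
        // Blocking read of the knowledge-base config
        String config = service.getConfig(FlowWriteConfig.NACOS_KNOWLEDGEBASE_DATA_ID,
                FlowWriteConfig.NACOS_PUBLIC_GROUP,
                FlowWriteConfig.NACOS_CONNECTION_TIMEOUT);
        System.out.println(config);
    }
}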
@@ -1,4 +1,4 @@
-package com.zdjizhi.utils.zookeeper;
+package com.zdjizhi.tools.connections.zookeeper;

 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
@@ -1,4 +1,4 @@
-package com.zdjizhi.utils.zookeeper;
+package com.zdjizhi.tools.connections.zookeeper;

 import cn.hutool.core.util.StrUtil;
 import cn.hutool.log.Log;
@@ -13,9 +13,7 @@ import java.util.concurrent.CountDownLatch;

 /**
  * @author qidaijie
- * @Package cn.ac.iie.utils.zookeeper
- * @Description:
- * @date 2020/11/14 11:28
+ * @version 2020/11/14 11:28
  */
 public class ZookeeperUtils implements Watcher {
     private static final Log logger = LogFactory.get();
@@ -1,10 +1,8 @@
-package com.zdjizhi.utils.exception;
+package com.zdjizhi.tools.exception;

 /**
  * @author qidaijie
- * @Package com.zdjizhi.storm.utils.execption
- * @Description:
- * @date 2021/3/25 9:42
+ * @version 2021/3/25 9:42
  */
 public class FlowWriteException extends RuntimeException {

@@ -0,0 +1,41 @@
package com.zdjizhi.tools.general;

import java.util.Properties;

/**
 * @author qidaijie
 * @Package com.zdjizhi.common
 * @Description:
 * @date 2023/7/27 14:22
 */
public class ConfigurationsUtils {

    public static String getStringProperty(Properties properties, String key) {
        if (!properties.isEmpty() && properties.containsKey(key)) {
            return properties.getProperty(key).trim();
        }
        return "";
    }

    public static Integer getIntProperty(Properties properties, String key) {
        if (!properties.isEmpty() && properties.containsKey(key)) {
            return Integer.parseInt(properties.getProperty(key).trim());
        }
        return 0;
    }

    public static Long getLongProperty(Properties properties, String key) {
        if (!properties.isEmpty() && properties.containsKey(key)) {
            return Long.parseLong(properties.getProperty(key).trim());
        }
        return 0L;
    }

    public static Boolean getBooleanProperty(Properties properties, String key) {
        if (!properties.isEmpty() && properties.containsKey(key)) {
            return Boolean.parseBoolean(properties.getProperty(key).trim());
        }
        return false;
    }

}
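A small usage sketch (not part of the commit); the property keys are made up. Values are trimmed, and missing keys fall back to "", 0, 0L, or false.

import java.util.Properties;

public class ConfigurationsUtilsExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("hbase.tick.tuple.freq.secs", " 60 ");
        props.setProperty("metric.enabled", "true");

        System.out.println(ConfigurationsUtils.getIntProperty(props, "hbase.tick.tuple.freq.secs")); // 60
        System.out.println(ConfigurationsUtils.getBooleanProperty(props, "metric.enabled"));         // true
        System.out.println(ConfigurationsUtils.getStringProperty(props, "missing.key"));             // ""
        System.out.println(ConfigurationsUtils.getLongProperty(props, "missing.key"));               // 0
    }
}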
@@ -1,9 +1,9 @@
-package com.zdjizhi.utils.general;
+package com.zdjizhi.tools.general;

 import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.ordinary.MD5Utils;
+import com.zdjizhi.tools.ordinary.MD5Utils;

-import static com.zdjizhi.utils.system.FlowWriteConfigurations.judgeFileType;
+import static com.zdjizhi.common.FlowWriteConfig.judgeFileType;


 /**
src/main/java/com/zdjizhi/tools/general/IpLookupUtils.java (new file, 199 lines)
@@ -0,0 +1,199 @@
package com.zdjizhi.tools.general;

import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.*;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.geedgenetworks.utils.IpLookupV2;
import com.geedgenetworks.utils.StringUtil;
import com.google.common.base.Joiner;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.pojo.KnowlegeBaseMeta;
import com.zdjizhi.tools.connections.http.HttpClientService;
import com.zdjizhi.tools.connections.nacos.NacosConnection;

import java.io.ByteArrayInputStream;
import java.util.HashMap;
import java.util.concurrent.Executor;

/**
 * @author qidaijie
 * @version 2022/11/16 15:23
 */
public class IpLookupUtils {
    private static final Log logger = LogFactory.get();
    private static final String ipv4BuiltInName = "ip_v4_built_in.mmdb";
    private static final String ipv6BuiltInName = "ip_v6_built_in.mmdb";
    private static final String ipv4UserDefinedName = "ip_v4_user_defined.mmdb";
    private static final String ipv6UserDefinedName = "ip_v6_user_defined.mmdb";
    private static final String asnV4Name = "asn_v4.mmdb";
    private static final String asnV6Name = "asn_v6.mmdb";

    /**
     * IP geolocation database
     */
    private static IpLookupV2 ipLookup;

    /**
     * Default separator for knowledge-base file names
     */
    private static final String LOCATION_SEPARATOR = ".";

    /**
     * Maximum number of retries
     */
    private static final int TRY_TIMES = 5;

    /**
     * HTTP connections
     */
    private static final HttpClientService httpClientService;

    /**
     * Metadata cache for the knowledge-base files
     */
    private static final HashMap<String, KnowlegeBaseMeta> knowledgeMetaCache = new HashMap<>(16);

    static {
        JSONPath jsonPath = JSONPath.of(getFilterParameter());
        httpClientService = new HttpClientService();

        NacosConnection nacosConnection = new NacosConnection();
        ConfigService schemaService = nacosConnection.getPublicService();
        try {
            String configInfo = schemaService.getConfigAndSignListener(FlowWriteConfig.NACOS_KNOWLEDGEBASE_DATA_ID, FlowWriteConfig.NACOS_PUBLIC_GROUP, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT, new Listener() {
                @Override
                public Executor getExecutor() {
                    return null;
                }

                @Override
                public void receiveConfigInfo(String configInfo) {
                    if (StringUtil.isNotBlank(configInfo)) {
                        updateIpLookup(jsonPath, configInfo);
                    }
                }
            });

            if (StringUtil.isNotBlank(configInfo)) {
                updateIpLookup(jsonPath, configInfo);
            }
        } catch (NacosException e) {
            logger.error("Failed to get the schema config from Nacos. The exception message is: " + e.getMessage());
        }
    }

    private static void updateIpLookup(JSONPath jsonPath, String configInfo) {
        String extract = jsonPath.extract(JSONReader.of(configInfo)).toString();
        if (StringUtil.isNotBlank(extract)) {
            JSONArray jsonArray = JSON.parseArray(extract);
            if (jsonArray.size() > 0) {
                for (int i = 0; i < jsonArray.size(); i++) {
                    KnowlegeBaseMeta knowlegeBaseMeta = JSONObject.parseObject(jsonArray.getString(i), KnowlegeBaseMeta.class);
                    String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(knowlegeBaseMeta.getName(), knowlegeBaseMeta.getFormat());
                    knowledgeMetaCache.put(fileName, knowlegeBaseMeta);
                }
                reloadIpLookup();
            }
        }
    }

    /**
     * Rebuild the IpLookup instance from freshly downloaded knowledge-base files
     */
    public static void reloadIpLookup() {
        int retryNum = 0;
        IpLookupV2.Builder builder = new IpLookupV2.Builder(false);
        for (String fileName : knowledgeMetaCache.keySet()) {
            KnowlegeBaseMeta knowlegeBaseMeta = knowledgeMetaCache.get(fileName);
            String metaSha256 = knowlegeBaseMeta.getSha256();
            do {
                byte[] httpGetByte = httpClientService.httpGetByte(knowlegeBaseMeta.getPath(), FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
                if (httpGetByte.length > 0) {
                    String downloadFileSha256 = DigestUtil.sha256Hex(httpGetByte);
                    if (metaSha256.equals(downloadFileSha256)) {
                        ByteArrayInputStream inputStream = new ByteArrayInputStream(httpGetByte);
|
||||||
|
switch (fileName) {
|
||||||
|
case ipv4BuiltInName:
|
||||||
|
builder.loadDataFileV4(inputStream);
|
||||||
|
break;
|
||||||
|
case ipv6BuiltInName:
|
||||||
|
builder.loadDataFileV6(inputStream);
|
||||||
|
break;
|
||||||
|
case ipv4UserDefinedName:
|
||||||
|
builder.loadDataFilePrivateV4(inputStream);
|
||||||
|
break;
|
||||||
|
case ipv6UserDefinedName:
|
||||||
|
builder.loadDataFilePrivateV6(inputStream);
|
||||||
|
break;
|
||||||
|
case asnV4Name:
|
||||||
|
builder.loadAsnDataFileV4(inputStream);
|
||||||
|
break;
|
||||||
|
case asnV6Name:
|
||||||
|
builder.loadAsnDataFileV6(inputStream);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
retryNum = TRY_TIMES;
|
||||||
|
} else {
|
||||||
|
logger.error("通过HOS下载{}的sha256为:{} ,Nacos内记录为:{} ,sha256不相等 开始第{}次重试下载文件", fileName, downloadFileSha256, metaSha256, retryNum);
|
||||||
|
retryNum++;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.error("通过HOS下载{}的流为空 ,开始第{}次重试下载文件", fileName, retryNum);
|
||||||
|
retryNum++;
|
||||||
|
}
|
||||||
|
} while (retryNum < TRY_TIMES);
|
||||||
|
}
|
||||||
|
ipLookup = builder.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* 根据配置组合生成知识库元数据过滤参数
|
||||||
|
*
|
||||||
|
* @return 过滤参数
|
||||||
|
*/
|
||||||
|
private static String getFilterParameter() {
|
||||||
|
String[] typeList = CommonConfig.KNOWLEDGEBASE_TYPE_LIST.split(",");
|
||||||
|
String[] nameList = CommonConfig.KNOWLEDGEBASE_NAME_LIST.split(",");
|
||||||
|
String expr = "[?(@.version=='latest')]";
|
||||||
|
|
||||||
|
if (typeList.length > 1) {
|
||||||
|
StringBuilder typeBuilder = new StringBuilder();
|
||||||
|
typeBuilder.append("[?(@.type in (");
|
||||||
|
for (int i = 0; i < typeList.length; i++) {
|
||||||
|
if (i == typeList.length - 1) {
|
||||||
|
typeBuilder.append("'").append(typeList[i]).append("'))]");
|
||||||
|
} else {
|
||||||
|
typeBuilder.append("'").append(typeList[i]).append("',");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
expr = expr + typeBuilder;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nameList.length > 1) {
|
||||||
|
StringBuilder nameBuilder = new StringBuilder();
|
||||||
|
nameBuilder.append("[?(@.name in (");
|
||||||
|
for (int i = 0; i < nameList.length; i++) {
|
||||||
|
if (i == nameList.length - 1) {
|
||||||
|
nameBuilder.append("'").append(nameList[i]).append("'))]");
|
||||||
|
} else {
|
||||||
|
nameBuilder.append("'").append(nameList[i]).append("',");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
expr = expr + nameBuilder;
|
||||||
|
}
|
||||||
|
|
||||||
|
return expr;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static IpLookupV2 getIpLookup() {
|
||||||
|
return ipLookup;
|
||||||
|
}
|
||||||
|
}
|
||||||
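For orientation, a minimal sketch of how a caller resolves geo information once the MMDB files have been loaded from the Nacos metadata; the lookup methods are the ones this commit calls elsewhere, the IP literal is arbitrary:

import com.geedgenetworks.utils.IpLookupV2;
import com.zdjizhi.tools.general.IpLookupUtils;

public class IpLookupDemo {
    public static void main(String[] args) {
        // IpLookupUtils loads and reloads the MMDB files in its static initializer.
        IpLookupV2 lookup = IpLookupUtils.getIpLookup();
        if (lookup != null) {
            System.out.println(lookup.countryLookup("1.2.3.4"));     // country
            System.out.println(lookup.cityLookupDetail("1.2.3.4"));  // detailed location
            System.out.println(lookup.asnLookup("1.2.3.4"));         // ASN
        }
    }
}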
@@ -1,10 +1,10 @@
-package com.zdjizhi.utils.general;
+package com.zdjizhi.tools.general;

 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
 import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.zookeeper.DistributedLock;
-import com.zdjizhi.utils.zookeeper.ZookeeperUtils;
+import com.zdjizhi.tools.connections.zookeeper.DistributedLock;
+import com.zdjizhi.tools.connections.zookeeper.ZookeeperUtils;

 /**
  * Snowflake ID algorithm
@@ -25,7 +25,7 @@ public class SnowflakeId {
     /**
      * Epoch start timestamp (2020-11-14 00:00:00), usable for at most ~17 years
      */
-    private final long twepoch = 1605283200000L;
+    private final long twepoch = 1693274481297L;

     /**
      * Number of bits allocated to the machine ID
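Note that the context comment above still cites the old epoch date (2020-11-14) while the commit moves twepoch to 1693274481297, which corresponds to 2023-08-29 UTC. A quick sanity check of the "~17 years" capacity note, under the assumption that the generator keeps roughly 39 bits for the millisecond timestamp (the bit layout itself is not shown in this hunk; 2^39 ms is about 17.4 years):

public class EpochCheck {
    public static void main(String[] args) {
        long twepoch = 1693274481297L;  // new epoch introduced by this commit
        // Assumption: ~39 timestamp bits, matching the "max 17 years" note.
        double years = Math.pow(2, 39) / 1000 / 3600 / 24 / 365.25;
        System.out.printf("epoch %tF, usable for ~%.1f years%n", new java.util.Date(twepoch), years);
    }
}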
114 src/main/java/com/zdjizhi/tools/json/DataTypeCheck.java Normal file
@@ -0,0 +1,114 @@
package com.zdjizhi.tools.json;

import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.exception.FlowWriteException;

/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.json
 * @Description:
 * @date 2023/5/20 14:58
 */
class DataTypeCheck {

    /**
     * Convert raw-log values whose types differ from the field type specified by the schema.
     *
     * @param jsonObject raw log JSON
     * @param fieldName  name of the field whose type needs converting
     * @param fieldValue value to convert
     * @param fieldType  type specified by the schema
     * @throws RuntimeException thrown when the conversion fails
     */
    static void typeConverter(JSONObject jsonObject, String fieldName, Object fieldValue, String fieldType) throws RuntimeException {
        switch (fieldType) {
            case "String":
                jsonObject.put(fieldName, fieldValue.toString());
                break;
            case "Integer":
                jsonObject.put(fieldName, castToInt(fieldValue));
                break;
            case "long":
                jsonObject.put(fieldName, castToLong(fieldValue));
                break;
            case "List":
                jsonObject.put(fieldName, JSONArray.parseArray(fieldValue.toString()));
                break;
            case "Map":
                jsonObject.put(fieldName, JSONObject.parseObject(fieldValue.toString()));
                break;
            case "double":
                jsonObject.put(fieldName, castToDouble(fieldValue));
                break;
            default:

        }
    }

    /**
     * Cast helper for Integer.
     *
     * @param value json value
     * @return Integer value or null
     */
    private static Integer castToInt(Object value) {
        if (value instanceof Integer) {
            return (Integer) value;
        }

        if (value instanceof Number) {
            return ((Number) value).intValue();
        }

        if (value instanceof String) {
            String strVal = value.toString();
            return Integer.parseInt(strVal);
        }

        throw new FlowWriteException("can not cast to int, value : " + value);
    }

    /**
     * Cast helper for Long.
     *
     * @param value json value
     * @return Long value or null
     */
    private static Long castToLong(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof Number) {
            return ((Number) value).longValue();
        }

        if (value instanceof String) {
            String strVal = (String) value;
            return Long.parseLong(strVal);
        }

        throw new FlowWriteException("can not cast to long, value : " + value);
    }

    /**
     * Cast helper for Double.
     *
     * @param value json value
     * @return Double value or null
     */
    private static Double castToDouble(Object value) {
        if (value instanceof Number) {
            return ((Number) value).doubleValue();
        }

        if (value instanceof String) {
            String strVal = (String) value;
            return Double.parseDouble(strVal);
        }

        throw new FlowWriteException("can not cast to double, value : " + value);
    }
}
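A quick sketch of the casting behavior. Since DataTypeCheck and typeConverter are package-private, the demo is assumed to live in com.zdjizhi.tools.json alongside it; the field name is invented:

import com.alibaba.fastjson2.JSONObject;

// Assumed to sit in com.zdjizhi.tools.json next to DataTypeCheck.
public class DataTypeCheckDemo {
    public static void main(String[] args) {
        JSONObject log = new JSONObject();
        log.put("common_sessions", "42");  // schema says long, the log delivers a string

        DataTypeCheck.typeConverter(log, "common_sessions", log.get("common_sessions"), "long");
        System.out.println(log.get("common_sessions") instanceof Long);  // true

        // Non-convertible input raises FlowWriteException, which MetaUtil.typeTransform logs and skips.
    }
}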
45 src/main/java/com/zdjizhi/tools/json/JsonPathUtil.java Normal file
@@ -0,0 +1,45 @@
package com.zdjizhi.tools.json;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONPath;
import com.alibaba.fastjson2.JSONReader;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.json
 * @Description:
 * @date 2023/5/19 17:51
 */
public class JsonPathUtil {
    private static final Log logger = LogFactory.get();

    private static final Map<String, JSONPath> jsonPathMap = new ConcurrentHashMap<>(16);

    /**
     * Parse data with JSONPath according to the expression.
     *
     * @param message json
     * @param expr    the JSONPath expression
     * @return the extraction result
     */
    public static Object analysis(String message, String expr) {
        Object flattenResult = "";
        try {
            JSONReader parser = JSONReader.of(message);
            // Compiled JSONPath objects are cached per expression.
            JSONPath jsonPath = jsonPathMap.computeIfAbsent(expr, JSONPath::of);
            flattenResult = jsonPath.extract(parser);
        } catch (ClassCastException | ArrayIndexOutOfBoundsException e) {
            logger.error("Label resolution failed or the [expr] expression is invalid: " + e.getMessage());
        }
        return flattenResult;
    }
}
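A minimal usage sketch; the JSON payload and path are invented for illustration:

import com.zdjizhi.tools.json.JsonPathUtil;

public class JsonPathDemo {
    public static void main(String[] args) {
        String json = "{\"tunnels\":[{\"teid\":123},{\"teid\":456}]}";
        // The compiled JSONPath for this expression is cached after the first call.
        Object first = JsonPathUtil.analysis(json, "$.tunnels[0].teid");
        System.out.println(first);  // 123
    }
}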
301 src/main/java/com/zdjizhi/tools/json/MetaUtil.java Normal file
@@ -0,0 +1,301 @@
package com.zdjizhi.tools.json;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.nacos.NacosConnection;

import java.util.*;
import java.util.concurrent.Executor;

/**
 * Utility class that parses JSON with FastJSON.
 *
 * @author qidaijie
 */
public class MetaUtil {
    private static final Log logger = LogFactory.get();

    /**
     * Fields carrying this flag are disabled and excluded from the final log.
     */
    private static final String VISIBILITY = "disabled";

    /**
     * List of fields to drop.
     */
    private static final ArrayList<String> dropList = new ArrayList<>();

    /**
     * Valid fields and their types as specified by the schema.
     */
    private static HashMap<String, Class<?>> schemaFieldsTypeMap;

    /**
     * Fields that carry a default value.
     */
    private static final HashMap<String, Object> defaultFieldsMap = new HashMap<>(16);

    /**
     * Job list. Each element is a four-string array (field carrying the format flag,
     * field to complete, function to apply, parameters used), e.g.:
     * (mail_subject, mail_subject, decode_of_base64, mail_subject_charset)
     */
    private static ArrayList<String[]> jobList;

    static {
        NacosConnection nacosConnection = new NacosConnection();
        ConfigService schemaService = nacosConnection.getSchemaService();
        try {
            String schema = schemaService.getConfigAndSignListener(FlowWriteConfig.NACOS_SCHEMA_DATA_ID, FlowWriteConfig.NACOS_SCHEMA_GROUP, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT, new Listener() {
                @Override
                public Executor getExecutor() {
                    return null;
                }

                @Override
                public void receiveConfigInfo(String configInfo) {
                    if (StringUtil.isNotBlank(configInfo)) {
                        if (CommonConfig.SCHEMA_UPDATE_ENABLED) {
                            logger.info("Reload the " + FlowWriteConfig.NACOS_SCHEMA_DATA_ID + " schema when it changes.");
                            clearCache();
                            schemaFieldsTypeMap = getFieldsFromSchema(configInfo);
                            jobList = getJobListFromHttp(configInfo);
                        } else {
                            logger.info("The schema changed, but dynamic update is disabled; no operation performed!");
                        }
                    }
                }
            });

            if (StringUtil.isNotBlank(schema)) {
                schemaFieldsTypeMap = getFieldsFromSchema(schema);
                jobList = getJobListFromHttp(schema);
            }
        } catch (NacosException e) {
            logger.error("Get schema config from Nacos error, the exception message is: " + e.getMessage());
        }
    }

    /**
     * Pattern match: given a type string, return the corresponding Class.
     *
     * @param type type name from the schema
     * @return the matching Class
     */
    private static Class<?> getClassName(String type) {
        Class<?> clazz;

        switch (type) {
            case "int":
                clazz = Integer.class;
                break;
            case "string":
                clazz = String.class;
                break;
            case "long":
                clazz = long.class;
                break;
            case "array":
                clazz = List.class;
                break;
            case "double":
                clazz = double.class;
                break;
            case "float":
                clazz = float.class;
                break;
            case "char":
                clazz = char.class;
                break;
            case "byte":
                clazz = byte.class;
                break;
            case "boolean":
                clazz = boolean.class;
                break;
            case "short":
                clazz = short.class;
                break;
            default:
                clazz = String.class;
        }
        return clazz;
    }

    /**
     * Type conversion.
     *
     * @param jsonObject raw log JSON
     */
    public static void typeTransform(JSONObject jsonObject) throws RuntimeException {
        dropJsonField(jsonObject);
        setFieldDefault(jsonObject);
        for (Map.Entry<String, Object> entry : jsonObject.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            if (value == null) {
                // null values cannot be type-checked; leave them as-is.
                continue;
            }
            if (schemaFieldsTypeMap.containsKey(key)) {
                try {
                    Class<?> schemaFieldClass = schemaFieldsTypeMap.get(key);
                    if (schemaFieldClass != value.getClass()) {
                        String simpleName = schemaFieldClass.getSimpleName();
                        DataTypeCheck.typeConverter(jsonObject, key, value, simpleName);
                    }
                } catch (RuntimeException e) {
                    logger.error(e, "The {} field type conversion is abnormal!", key);
                }
            }
        }
    }

    /**
     * Remove the invalid fields specified in the schema.
     *
     * @param jsonObject raw log
     */
    public static void dropJsonField(JSONObject jsonObject) {
        for (String field : dropList) {
            jsonObject.remove(field);
        }
    }

    /**
     * Assign the default values specified in the schema.
     *
     * @param jsonObject raw log
     */
    private static void setFieldDefault(JSONObject jsonObject) {
        if (!defaultFieldsMap.isEmpty()) {
            for (String fieldName : defaultFieldsMap.keySet()) {
                Object logValue = jsonObject.get(fieldName);
                if (logValue == null) {
                    jsonObject.put(fieldName, defaultFieldsMap.get(fieldName));
                }
            }
        }
    }


    /**
     * Obtain the required fields and their types from the schema.
     *
     * @return a map used to build schema-typed objects via reflection
     */
    private static HashMap<String, Class<?>> getFieldsFromSchema(String schema) {
        HashMap<String, Class<?>> map = new HashMap<>(256);

        // Get "fields" as an array; each element is a name/doc/type triple.
        JSONObject schemaJson = JSONObject.parseObject(schema);
        JSONArray fields = schemaJson.getJSONArray("fields");

        for (Object field : fields) {
            String fieldStr = field.toString();
            JSONObject fieldJson = JSONObject.parseObject(fieldStr);
            String name = fieldJson.getString("name");
            if (checkKeepField(fieldStr)) {
                String type = fieldJson.getString("type");
                if (type.contains("{")) {
                    JSONObject types = JSONObject.parseObject(type);
                    type = types.getString("type");
                }

                if (fieldJson.containsKey("default")) {
                    defaultFieldsMap.put(name, fieldJson.get("default"));
                }
                // assemble the map used to generate the entity class
                map.put(name, getClassName(type));
            } else {
                dropList.add(name);
            }
        }
        return map;
    }

    /**
     * Check whether a field should be kept.
     *
     * @param message a single field JSON
     * @return true or false
     */
    private static boolean checkKeepField(String message) {
        boolean isKeepField = true;
        JSONObject fieldJson = JSONObject.parseObject(message);
        boolean hasDoc = fieldJson.containsKey("doc");
        if (hasDoc) {
            JSONObject doc = JSONObject.parseObject(fieldJson.getString("doc"));
            if (doc.containsKey("visibility")) {
                String visibility = doc.getString("visibility");
                if (VISIBILITY.equals(visibility)) {
                    isKeepField = false;
                }
            }
        }
        return isKeepField;
    }

    /**
     * Parse the schema and return a job list (source field, appendTo field, function, params).
     *
     * @param schema log schema
     * @return the job list
     */
    private static ArrayList<String[]> getJobListFromHttp(String schema) {
        ArrayList<String[]> list = new ArrayList<>();

        JSONObject schemaJson = JSONObject.parseObject(schema);
        JSONArray fields = schemaJson.getJSONArray("fields");
        for (Object field : fields) {
            JSONObject fieldJson = JSONObject.parseObject(field.toString());
            boolean hasDoc = fieldJson.containsKey("doc");
            if (hasDoc) {
                JSONObject docJson = JSONObject.parseObject(fieldJson.getString("doc"));
                boolean hasFormat = docJson.containsKey("format");
                if (hasFormat) {
                    String name = fieldJson.getString("name");
                    JSONArray formatList = docJson.getJSONArray("format");
                    for (Object format : formatList) {
                        JSONObject formatJson = JSONObject.parseObject(format.toString());
                        String function = formatJson.getString("function");
                        String appendTo;
                        String params = null;

                        if (formatJson.containsKey("appendTo")) {
                            appendTo = formatJson.getString("appendTo");
                        } else {
                            appendTo = name;
                        }
                        if (formatJson.containsKey("param")) {
                            params = formatJson.getString("param");
                        }
                        list.add(new String[]{name, appendTo, function, params});
                    }
                }
            }
        }
        return list;
    }

    public static ArrayList<String[]> getJobList() {
        return jobList;
    }

    /**
     * Clear the caches when the configuration changes so they are rebuilt.
     */
    private static void clearCache() {
        jobList.clear();
        schemaFieldsTypeMap.clear();
        dropList.clear();
        defaultFieldsMap.clear();
    }

}
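To make the job-list structure concrete, a small sketch that parses a hypothetical schema field carrying a doc.format entry, the shape getJobListFromHttp consumes; the field and charset names are invented:

import com.alibaba.fastjson2.JSONObject;

public class SchemaJobDemo {
    public static void main(String[] args) {
        // Hypothetical schema field carrying a "format" instruction in its doc.
        String field = "{\"name\":\"mail_subject\",\"type\":\"string\",\"doc\":{\"format\":[{\"function\":\"decode_of_base64\",\"param\":\"$.mail_subject_charset\"}]}}";
        JSONObject fieldJson = JSONObject.parseObject(field);
        JSONObject doc = JSONObject.parseObject(fieldJson.getString("doc"));
        JSONObject fmt = doc.getJSONArray("format").getJSONObject(0);
        // getJobListFromHttp turns this into {name, appendTo (defaults to name), function, param}:
        System.out.println(fieldJson.getString("name") + " / " + fmt.getString("function") + " / " + fmt.getString("param"));
    }
}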
@@ -1,4 +1,4 @@
-package com.zdjizhi.utils.ordinary;
+package com.zdjizhi.tools.ordinary;

 import org.apache.log4j.Logger;

115 src/main/java/com/zdjizhi/tools/transform/TransForm.java Normal file
@@ -0,0 +1,115 @@
package com.zdjizhi.tools.transform;

import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.json.MetaUtil;
import com.zdjizhi.tools.transform.impl.TransformFunctionImpl;


/**
 * Description: transformation/completion utility.
 *
 * @author qidaijie
 */
public class TransForm {
    private static final TransformFunctionImpl transformFunction = new TransformFunctionImpl();

    public static void transformLog(JSONObject jsonObject) {
        for (String[] strings : MetaUtil.getJobList()) {
            // value of this log field
            Object logValue = jsonObject.get(strings[0]);
            // key of the log field the result maps to
            String appendToKey = strings[1];
            // field selecting the operation function
            String function = strings[2];
            // extra parameter value
            String param = strings[3];

            // original value of the target log field
            Object appendToValue = jsonObject.get(appendToKey);

            functionSet(function, jsonObject, appendToKey, appendToValue, logValue, param);
        }
    }


    /**
     * The set of functions that operate on fields according to the schema description.
     *
     * @param function      field selecting the operation function
     * @param jsonObject    parsed raw log
     * @param appendToKey   key of the field to complete
     * @param appendToValue current value of the field to complete
     * @param logValue      value of the source field
     * @param param         extra parameter value
     */
    private static void functionSet(String function, JSONObject jsonObject, String appendToKey, Object appendToValue, Object logValue, String param) {

        switch (function) {
            case "current_timestamp":
                if (!(appendToValue instanceof Long)) {
                    jsonObject.put(appendToKey, transformFunction.getCurrentTime());
                }
                break;
            case "snowflake_id":
                jsonObject.put(appendToKey, transformFunction.getSnowflakeId());
                break;
            case "geo_ip_detail":
                if (logValue != null && appendToValue == null) {
                    jsonObject.put(appendToKey, transformFunction.getGeoIpDetail(logValue.toString()));
                }
                break;
            case "geo_asn":
                if (logValue != null && appendToValue == null) {
                    jsonObject.put(appendToKey, transformFunction.getGeoAsn(logValue.toString()));
                }
                break;
            case "geo_ip_country":
                if (logValue != null && appendToValue == null) {
                    jsonObject.put(appendToKey, transformFunction.getGeoIpCountry(logValue.toString()));
                }
                break;
            case "flattenSpec":
                if (logValue != null && param != null) {
                    jsonObject.put(appendToKey, transformFunction.flattenSpec(logValue.toString(), param));
                }
                break;
            case "if":
                if (param != null) {
                    jsonObject.put(appendToKey, transformFunction.condition(jsonObject, param));
                }
                break;
            case "decode_of_base64":
                if (logValue != null) {
                    jsonObject.put(appendToKey, transformFunction.decodeBase64(jsonObject, logValue.toString(), param));
                }
                break;
            case "sub_domain":
                if (appendToValue == null && logValue != null) {
                    jsonObject.put(appendToKey, transformFunction.getTopDomain(logValue.toString()));
                }
                break;
            case "radius_match":
                if (logValue != null) {
                    jsonObject.put(appendToKey, transformFunction.radiusMatch(jsonObject, logValue.toString()));
                }
                break;
            case "gtpc_match":
                if (logValue != null) {
                    transformFunction.gtpcMatch(jsonObject, logValue.toString(), appendToKey, param);
                }
                break;
            case "set_value":
                if (param != null) {
                    transformFunction.setValue(jsonObject, appendToKey, param);
                }
                break;
            case "get_value":
                if (logValue != null) {
                    transformFunction.getValue(jsonObject, appendToKey, logValue);
                }
                break;
            default:
        }
    }

}
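A sketch of the per-job dispatch performed above, limited to functions that need no HBase or Nacos connectivity so it can run standalone; the field names are hypothetical and this is not part of the commit:

import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.transform.impl.TransformFunctionImpl;

public class TransFormDemo {
    public static void main(String[] args) {
        TransformFunctionImpl fn = new TransformFunctionImpl();
        JSONObject log = JSONObject.parseObject("{\"http_domain\":\"a.b.example.com\"}");

        // job {"common_log_time", "common_log_time", "current_timestamp", null}
        log.put("common_log_time", fn.getCurrentTime());
        // job {"http_domain", "common_server_domain", "sub_domain", null}
        log.put("common_server_domain", fn.getTopDomain(log.getString("http_domain")));
        // job {"-", "data_source", "set_value", "tsg"}
        fn.setValue(log, "data_source", "tsg");

        System.out.println(log.toJSONString());
    }
}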
@@ -0,0 +1,40 @@
package com.zdjizhi.tools.transform;

import com.alibaba.fastjson2.JSONObject;


/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.transform
 * @Description:
 * @date 2023/5/20 10:11
 */
public interface TransformFunction {

    long getCurrentTime();

    long getSnowflakeId();

    String getGeoIpDetail(String ip);

    String getGeoIpCountry(String ip);

    String getGeoAsn(String ip);

    String radiusMatch(JSONObject jsonObject, String ip);

    void gtpcMatch(JSONObject jsonObject, String logValue, String appendToKey, String param);

    String getTopDomain(String domain);

    String decodeBase64(JSONObject jsonObject, String message, String param);

    Object flattenSpec(String message, String expr);

    Object condition(JSONObject jsonObject, String ifParam);

    void setValue(JSONObject jsonObject, String appendToKey, String param);

    void getValue(JSONObject jsonObject, String appendToKey, Object logValue);
}
@@ -0,0 +1,307 @@
package com.zdjizhi.tools.transform.impl;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONObject;
import com.geedgenetworks.utils.FormatUtils;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.general.IpLookupUtils;
import com.zdjizhi.tools.general.SnowflakeId;
import com.zdjizhi.tools.transform.TransformFunction;
import com.zdjizhi.tools.connections.hbase.HBaseUtils;
import com.zdjizhi.tools.json.JsonPathUtil;

import java.io.UnsupportedEncodingException;
import java.util.Base64;
import java.util.HashMap;

/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.transform.impl
 * @Description:
 * @date 2023/5/20 10:12
 */
public class TransformFunctionImpl implements TransformFunction {
    private static final Log logger = LogFactory.get();
    private static final int IF_PARAM_LENGTH = 3;
    /**
     * Equality separator used in IF-function conditions.
     */
    private static final String IF_CONDITION_SPLITTER = "=";

    private static final String SEPARATOR = "!=";

    /**
     * Tag marking a parameter as a log-field reference rather than a literal.
     */
    private static final String IS_JSON_KEY_TAG = "$.";

    /**
     * Produce the current timestamp (in seconds).
     */
    @Override
    public long getCurrentTime() {
        return System.currentTimeMillis() / 1000;
    }

    @Override
    public long getSnowflakeId() {
        return SnowflakeId.generateId();
    }

    /**
     * Look up the location info for a client IP.
     *
     * @param ip client IP
     * @return detailed IP location info
     */
    @Override
    public String getGeoIpDetail(String ip) {
        String detail = "";
        try {
            detail = IpLookupUtils.getIpLookup().cityLookupDetail(ip);
        } catch (NullPointerException npe) {
            logger.error("The IP Location MMDB file is not loaded or IP is null! " + npe);
        } catch (RuntimeException e) {
            logger.error("Get clientIP location error! " + e.getMessage());
        }
        return detail;
    }

    /**
     * Look up the country for an IP.
     *
     * @param ip server IP
     * @return country
     */
    @Override
    public String getGeoIpCountry(String ip) {
        String country = "";
        try {
            country = IpLookupUtils.getIpLookup().countryLookup(ip);
        } catch (NullPointerException npe) {
            logger.error("The IP Location MMDB file is not loaded or IP is null! " + npe);
        } catch (RuntimeException e) {
            logger.error("Get ServerIP location error! " + e.getMessage());
        }
        return country;
    }

    /**
     * Look up the ASN for an IP.
     *
     * @param ip client/server IP
     * @return ASN
     */
    @Override
    public String getGeoAsn(String ip) {
        String asn = "";
        try {
            asn = IpLookupUtils.getIpLookup().asnLookup(ip);
        } catch (NullPointerException npe) {
            logger.error("The ASN MMDB file is not loaded or IP is null! " + npe);
        } catch (RuntimeException e) {
            logger.error("Get IP ASN error! " + e.getMessage());
        }
        return asn;
    }

    /**
     * Complete the RADIUS account via HBase.
     *
     * @param ip client IP
     * @return account
     */
    @Override
    public String radiusMatch(JSONObject jsonObject, String ip) {
        if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
            int vsysId = jsonObject.getIntValue("common_vsys_id", 1);
            return HBaseUtils.getAccount(ip + vsysId);
        } else {
            return HBaseUtils.getAccount(ip);
        }
    }


    /**
     * Complete GTP-C info via HBase: parse the tunnels info, preferring gtp_uplink_teid
     * and falling back to gtp_downlink_teid.
     * <p>
     * "common_tunnels":[{"tunnels_schema_type":"GTP","gtp_endpoint_a2b_teid":235261261,"gtp_endpoint_b2a_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152}]
     *
     * @param jsonObject  raw log JSON
     * @param logValue    uplink TEID
     * @param appendToKey key(s) of the log fields the result maps to
     * @param param       JSONPath expressions used to locate the GTP info in the JSON array
     */
    @Override
    public void gtpcMatch(JSONObject jsonObject, String logValue, String appendToKey, String param) {
        try {
            String teid = null;
            String[] exprs = param.split(FlowWriteConfig.FORMAT_SPLITTER);
            for (String expr : exprs) {
                Object result = JsonPathUtil.analysis(logValue, expr);
                if (result != null) {
                    teid = result.toString();
                    break;
                }
            }

            if (teid != null) {
                if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                    int vsysId = jsonObject.getIntValue("common_vsys_id", 1);
                    teid = teid + vsysId;
                }
                String[] appendToKeys = appendToKey.split(FlowWriteConfig.FORMAT_SPLITTER);
                HashMap<String, Object> userData = HBaseUtils.getGtpData(teid);
                if (userData != null) {
                    for (String key : appendToKeys) {
                        Object userValue = userData.get(key);
                        // guard against missing keys to avoid a NullPointerException
                        if (userValue != null) {
                            jsonObject.put(key, userValue.toString());
                        }
                    }
                } else {
                    logger.warn("The user whose TEID is " + teid + " was not matched!");
                }
            }
        } catch (RuntimeException re) {
            logger.error(re, "An exception occurred in TEID type conversion or parsing of user information!");
        }
    }

    /**
     * Parse the top-level (registrable) domain.
     *
     * @param domain original domain
     * @return top-level domain
     */
    @Override
    public String getTopDomain(String domain) {
        String topDomain = "";
        try {
            topDomain = FormatUtils.getTopPrivateDomain(domain);
        } catch (StringIndexOutOfBoundsException outException) {
            logger.error("Parse top-level domain exception, offending domain name:" + domain);
        }
        return topDomain;
    }

    /**
     * Decode Base64 with the given character encoding.
     *
     * @param jsonObject raw log JSON
     * @param message    Base64 text
     * @param param      parameter used to resolve the charset
     * @return decoded string
     */
    @Override
    public String decodeBase64(JSONObject jsonObject, String message, String param) {
        String decodeResult = "";
        Object charset = isJsonValue(jsonObject, param);
        try {
            if (StringUtil.isNotBlank(message)) {
                byte[] base64decodedBytes = Base64.getDecoder().decode(message);
                if (charset == null) {
                    decodeResult = new String(base64decodedBytes);
                } else {
                    decodeResult = new String(base64decodedBytes, charset.toString());
                }
            }
        } catch (RuntimeException e) {
            logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
        } catch (UnsupportedEncodingException e) {
            logger.error("The Character Encoding [" + charset + "] is not supported. Exception information:" + e.getMessage());
        }
        return decodeResult;
    }

    /**
     * Parse JSON according to the expression.
     *
     * @param message json
     * @param expr    the JSONPath expression
     * @return the extraction result
     */
    @Override
    public Object flattenSpec(String message, String expr) {
        return JsonPathUtil.analysis(message, expr);
    }

    /**
     * IF-function implementation: parses the parameter and evaluates a ternary
     * expression against the log; numeric operands are parsed as numbers before comparison.
     *
     * @param jsonObject deserialized raw log
     * @param ifParam    field reference or literal string
     * @return resultA or resultB or null
     */
    @Override
    public Object condition(JSONObject jsonObject, String ifParam) {
        Object result = null;
        try {
            String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
            if (split.length == IF_PARAM_LENGTH) {
                String expression = split[0];
                Object resultA = isJsonValue(jsonObject, split[1]);
                Object resultB = isJsonValue(jsonObject, split[2]);
                if (expression.contains(SEPARATOR)) {
                    String[] regexp = expression.split(SEPARATOR);
                    Object direction = isJsonValue(jsonObject, regexp[0]);
                    if (direction instanceof Number) {
                        result = Integer.parseInt(direction.toString()) != Integer.parseInt(regexp[1]) ? resultA : resultB;
                    } else if (direction instanceof String) {
                        // mirror the numeric branch: resultA when the values differ
                        result = !direction.equals(regexp[1]) ? resultA : resultB;
                    }
                } else {
                    String[] regexp = expression.split(IF_CONDITION_SPLITTER);
                    Object direction = isJsonValue(jsonObject, regexp[0]);
                    if (direction instanceof Number) {
                        result = Integer.parseInt(direction.toString()) == Integer.parseInt(regexp[1]) ? resultA : resultB;
                    } else if (direction instanceof String) {
                        result = direction.equals(regexp[1]) ? resultA : resultB;
                    }
                }
            }
        } catch (RuntimeException e) {
            logger.error("IF function execution exception, exception information:" + e.getMessage());
        }
        return result;
    }

    /**
     * Assign a specified literal value to a key in the JSON.
     *
     * @param jsonObject  raw log JSON
     * @param appendToKey key to assign
     * @param param       parameter (the literal value)
     */
    @Override
    public void setValue(JSONObject jsonObject, String appendToKey, String param) {
        jsonObject.put(appendToKey, param);
    }

    /**
     * Copy the value of field A in the JSON to field B.
     *
     * @param jsonObject  raw log JSON
     * @param appendToKey key to assign
     * @param logValue    the value fetched
     */
    @Override
    public void getValue(JSONObject jsonObject, String appendToKey, Object logValue) {
        jsonObject.put(appendToKey, logValue);
    }

    /**
     * If the parameter is a log-field reference (prefixed with "$."), return the
     * corresponding value; otherwise return the literal string.
     *
     * @param jsonObject deserialized raw log
     * @param param      field reference or literal string
     * @return JSON value or the literal string
     */
    private static Object isJsonValue(JSONObject jsonObject, String param) {
        // startsWith rather than contains: only a leading "$." marks a field reference.
        if (param.startsWith(IS_JSON_KEY_TAG)) {
            return jsonObject.get(param.substring(2));
        } else {
            return param;
        }
    }
}
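A sketch of the IF-function parameter format handled by condition() above, assuming FlowWriteConfig.FORMAT_SPLITTER is a comma (the splitter value is configuration not shown in this diff) and with invented field names:

import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.transform.impl.TransformFunctionImpl;

public class ConditionDemo {
    public static void main(String[] args) {
        TransformFunctionImpl fn = new TransformFunctionImpl();
        JSONObject log = JSONObject.parseObject(
                "{\"common_direction\":1,\"client_ip\":\"10.0.0.1\",\"server_ip\":\"10.0.0.2\"}");

        // Reads as: common_direction == 1 ? client_ip : server_ip
        Object picked = fn.condition(log, "$.common_direction=1,$.client_ip,$.server_ip");
        System.out.println(picked);  // 10.0.0.1
    }
}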
@@ -2,66 +2,47 @@ package com.zdjizhi.topology;

 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSONObject;
 import com.zdjizhi.common.FlowWriteConfig;
-import com.zdjizhi.utils.functions.DealFileProcessFunction;
-import com.zdjizhi.utils.functions.FilterNullFunction;
-import com.zdjizhi.utils.functions.MapCompletedFunction;
-import com.zdjizhi.utils.functions.TypeMapCompletedFunction;
-import com.zdjizhi.utils.kafka.KafkaConsumer;
-import com.zdjizhi.utils.kafka.KafkaProducer;
-import org.apache.flink.streaming.api.datastream.DataStream;
+import com.zdjizhi.operator.count.SendCountProcess;
+import com.zdjizhi.operator.map.MapCompleted;
+import com.zdjizhi.operator.map.TypeMapCompleted;
+import com.zdjizhi.operator.process.DealFileProcessFunction;
+import com.zdjizhi.tools.connections.kafka.KafkaConsumer;
+import com.zdjizhi.tools.connections.kafka.KafkaProducer;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

-import java.util.Map;
-
-/**
- * @author 王成成
- * @Package com.zdjizhi.topology
- * @Description:
- * @date 2022.06.01
- */
 public class LogFlowWriteTopology {
     private static final Log logger = LogFactory.get();

     public static void main(String[] args) {

         final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();

         // maximum time between two outputs (in milliseconds)
         environment.setBufferTimeout(FlowWriteConfig.BUFFER_TIMEOUT);

-        if (FlowWriteConfig.LOG_NEED_COMPLETE == 1) {
-            SingleOutputStreamOperator<Map<String, Object>> streamSource = environment.addSource(KafkaConsumer.myDeserializationConsumer())
-                    .setParallelism(FlowWriteConfig.SOURCE_PARALLELISM).name(FlowWriteConfig.SOURCE_KAFKA_TOPIC);
-            DataStream<Map<String, Object>> cleaningLog;
-            switch (FlowWriteConfig.LOG_TRANSFORM_TYPE) {
-                case 0:
-                    // process/complete/transform raw logs; no type validation on log fields.
-                    cleaningLog = streamSource.map(new MapCompletedFunction()).name("MapCompletedFunction")
-                            .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-                    break;
-                case 1:
-                    // process/complete/transform raw logs; weak type validation, values can be coerced per schema.
-                    cleaningLog = streamSource.map(new TypeMapCompletedFunction()).name("TypeMapCompletedFunction")
-                            .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-                    break;
-                default:
-                    // process/complete/transform raw logs; no type validation on log fields.
-                    cleaningLog = streamSource.map(new MapCompletedFunction()).name("MapCompletedFunction")
-                            .setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-            }
-            // handle data carrying unstructured logs
-            SingleOutputStreamOperator<String> process = cleaningLog.process(new DealFileProcessFunction());
-            SingleOutputStreamOperator<String> resultFileMetaData = process.getSideOutput(DealFileProcessFunction.metaToKafa).filter(new FilterNullFunction()).name("FilterAbnormalTrafficFileMetaData").setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-            SingleOutputStreamOperator<String> result = process.filter(new FilterNullFunction()).name("FilterAbnormalData").setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
-            // send file metadata to TRAFFIC-FILE-METADATA
-            resultFileMetaData.addSink(KafkaProducer.getTrafficFileMetaKafkaProducer()).name("toTrafficFileMeta")
-                    .setParallelism(FlowWriteConfig.FILE_DATA_SINK_PARALLELISM);
-            // send completed data to the Percent Kafka
-            result.addSink(KafkaProducer.getPercentKafkaProducer()).name("toPercentKafka")
-                    .setParallelism(FlowWriteConfig.PERCENT_SINK_PARALLELISM);
-        }
+        SingleOutputStreamOperator<JSONObject> completedStream;
+        if (FlowWriteConfig.LOG_TRANSFORM_TYPE == 0) { // no type validation on log fields.
+            completedStream = environment.addSource(KafkaConsumer.flinkConsumer()).name(FlowWriteConfig.SOURCE_KAFKA_TOPIC).setParallelism(FlowWriteConfig.SOURCE_PARALLELISM)
+                    .process(new MapCompleted()).name("MapCompletedFunction").setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
+        } else { // weak type validation; values can be coerced to the schema types.
+            completedStream = environment.addSource(KafkaConsumer.flinkConsumer()).name(FlowWriteConfig.SOURCE_KAFKA_TOPIC).setParallelism(FlowWriteConfig.SOURCE_PARALLELISM)
+                    .process(new TypeMapCompleted()).name("TypeMapCompletedFunction").setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
+        }
+        // handle data carrying unstructured file fields
+        SingleOutputStreamOperator<String> dealFileProcessFunction = completedStream.process(new DealFileProcessFunction()).name("DealFileProcessFunction").uid("DealFile-ProcessFunction").setParallelism(FlowWriteConfig.DEAL_FILE_PARALLELISM);
+        // send completed data to the Percent Kafka
+        dealFileProcessFunction.addSink(KafkaProducer.getPercentKafkaProducer()).name("ToPercentKafka").uid("To-Percent-Kafka").setParallelism(FlowWriteConfig.SINK_PERCENT_PARALLELISM);
+        // send file metadata to TRAFFIC-FILE-METADATA
+        dealFileProcessFunction.getSideOutput(DealFileProcessFunction.metaToKafa).addSink(KafkaProducer.getTrafficFileMetaKafkaProducer()).name("toTrafficFileMeta").uid("to-Traffic-FileMeta").setParallelism(FlowWriteConfig.SINK_FILE_DATA_PARALLELISM);
+        dealFileProcessFunction.getSideOutput(DealFileProcessFunction.dealFileMetircTag).process(new SendCountProcess()).name("SendCountProcess").uid("Send-Count-Process").setParallelism(1);
         try {
             environment.execute(args[0]);
         } catch (Exception e) {
@@ -71,4 +52,5 @@ public class LogFlowWriteTopology {

     }

 }
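The topology wires two side outputs of DealFileProcessFunction (metaToKafa and dealFileMetircTag) whose declarations are not part of this diff. For orientation, a minimal sketch of how such tags are typically declared and fed in a Flink 1.13 ProcessFunction; the tag ids and payloads here are invented:

import com.alibaba.fastjson2.JSONObject;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

// Hypothetical shape of DealFileProcessFunction's side-output tags.
public class SideOutputSketch extends ProcessFunction<JSONObject, String> {
    // Anonymous subclasses so Flink can capture the element type.
    public static final OutputTag<String> metaToKafa = new OutputTag<String>("traffic-file-metadata") {};
    public static final OutputTag<String> dealFileMetircTag = new OutputTag<String>("deal-file-metric") {};

    @Override
    public void processElement(JSONObject log, Context ctx, Collector<String> out) {
        out.collect(log.toJSONString());           // main stream -> Percent Kafka sink
        ctx.output(metaToKafa, "file-meta-json");  // side stream -> TRAFFIC-FILE-METADATA sink
        ctx.output(dealFileMetircTag, "count");    // side stream -> SendCountProcess
    }
}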
@@ -1,124 +0,0 @@
package com.zdjizhi.utils.app;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.http.HttpClientUtil;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * AppId utility class.
 *
 * @author qidaijie
 */

@Deprecated
public class AppUtils {
    private static final Log logger = LogFactory.get();
    private static Map<Integer, String> appIdMap = new ConcurrentHashMap<>(128);
    private static AppUtils appUtils;

    private static void getAppInstance() {
        appUtils = new AppUtils();
    }


    /**
     * Constructor (new).
     */
    private AppUtils() {
        // scheduled update
        updateAppIdCache();
    }

    /**
     * Refresh the cached mapping.
     */
    private static void change() {
        if (appUtils == null) {
            getAppInstance();
        }
        timestampsFilter();
    }


    /**
     * Fetch the changed content.
     */
    private static void timestampsFilter() {
        try {
            Long begin = System.currentTimeMillis();
            String schema = HttpClientUtil.requestByGetMethod(FlowWriteConfig.APP_ID_HTTP);
            if (StringUtil.isNotBlank(schema)) {
                String data = JSONObject.parseObject(schema).getString("data");
                JSONArray objects = JSONArray.parseArray(data);
                for (Object object : objects) {
                    JSONArray jsonArray = JSONArray.parseArray(object.toString());
                    int key = jsonArray.getInteger(0);
                    String value = jsonArray.getString(1);
                    if (appIdMap.containsKey(key)) {
                        if (!value.equals(appIdMap.get(key))) {
                            appIdMap.put(key, value);
                        }
                    } else {
                        appIdMap.put(key, value);
                    }
                }
                logger.warn("Updating the correspondence takes time:" + (begin - System.currentTimeMillis()));
                logger.warn("Pull the length of the interface data:[" + objects.size() + "]");
            }
        } catch (RuntimeException e) {
            logger.error("Update cache app-id failed, exception:" + e);
        }
    }


    /**
     * Verification timer: runs periodically, e.g. to fetch a new Cookie.
     */
    private void updateAppIdCache() {
        ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
        executorService.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    if (FlowWriteConfig.APP_TICK_TUPLE_FREQ_SECS != 0) {
                        change();
                    }
                } catch (RuntimeException e) {
                    logger.error("AppUtils update AppCache is error===>{" + e + "}<===");
                }
            }
        }, 1, FlowWriteConfig.APP_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
    }


    /**
     * Get the appName.
     *
     * @param appId app_id
     * @return account
     */
    public static String getAppName(int appId) {

        if (appUtils == null) {
            getAppInstance();
        }

        if (appIdMap.containsKey(appId)) {
            return appIdMap.get(appId);
        } else {
            logger.warn("AppMap get appName is null, ID is :" + appId);
            return "";
        }
    }

}
@@ -1,17 +0,0 @@
package com.zdjizhi.utils.functions;

import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.functions.FilterFunction;

/**
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions
 * @Description:
 * @date 2021/5/27 15:01
 */
public class FilterNullFunction implements FilterFunction<String> {
    @Override
    public boolean filter(String message) {
        return StringUtil.isNotBlank(message);
    }
}
@@ -1,23 +0,0 @@
package com.zdjizhi.utils.functions;

import com.zdjizhi.utils.general.TransFormMap;
import org.apache.flink.api.common.functions.MapFunction;

import java.util.Map;

/**
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions
 * @Description:
 * @date 2021/5/27 15:01
 */
public class MapCompletedFunction implements MapFunction<Map<String, Object>, Map<String, Object>> {

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> map(Map<String, Object> logs) {
        return TransFormMap.dealCommonMessage(logs);
    }
}
@@ -1,23 +0,0 @@
package com.zdjizhi.utils.functions;

import com.zdjizhi.utils.general.TransFormTypeMap;
import org.apache.flink.api.common.functions.MapFunction;

import java.util.Map;

/**
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions
 * @Description:
 * @date 2021/5/27 15:01
 */
public class TypeMapCompletedFunction implements MapFunction<Map<String, Object>, Map<String, Object>> {

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> map(Map<String, Object> logs) {
        return TransFormTypeMap.dealCommonMessage(logs);
    }
}
@@ -1,180 +0,0 @@
package com.zdjizhi.utils.general;

/**
 * CityHash64 hashing of the logid.
 * Deferred from the release plan - TSG 22.01.
 *
 * @author qidaijie
 */
@Deprecated
public class CityHash {

    private static final long k0 = 0xc3a5c85c97cb3127L;
    private static final long k1 = 0xb492b66fbe98f273L;
    private static final long k2 = 0x9ae16a3b2f90404fL;
    private static final long k3 = 0xc949d7c7509e6557L;
    private static final long k5 = 0x9ddfea08eb382d69L;

    private CityHash() {}

    public static long CityHash64(byte[] s, int index, int len) {
        if (len <= 16) {
            return HashLen0to16(s, index, len);
        } else if (len > 16 && len <= 32) {
            return HashLen17to32(s, index, len);
        } else if (len > 32 && len <= 64) {
            return HashLen33to64(s, index, len);
        } else {
            long x = Fetch64(s, index);
            long y = Fetch64(s, index + len - 16) ^ k1;
            long z = Fetch64(s, index + len - 56) ^ k0;
            // The tail reads must also be offset by index; without it any non-zero index reads the wrong bytes.
            long[] v = WeakHashLen32WithSeeds(s, index + len - 64, len, y);
            long[] w = WeakHashLen32WithSeeds(s, index + len - 32, len * k1, k0);
            z += ShiftMix(v[1]) * k1;
            x = Rotate(z + x, 39) * k1;
            y = Rotate(y, 33) * k1;

            len = (len - 1) & ~63;
            do {
                x = Rotate(x + y + v[0] + Fetch64(s, index + 16), 37) * k1;
                y = Rotate(y + v[1] + Fetch64(s, index + 48), 42) * k1;
                x ^= w[1];
                y ^= v[0];
                z = Rotate(z ^ w[0], 33);
                v = WeakHashLen32WithSeeds(s, index, v[1] * k1, x + w[0]);
                w = WeakHashLen32WithSeeds(s, index + 32, z + w[1], y);
                long t = z;
                z = x;
                x = t;
                index += 64;
                len -= 64;
            } while (len != 0);
            return HashLen16(HashLen16(v[0], w[0]) + ShiftMix(y) * k1 + z,
                    HashLen16(v[1], w[1]) + x);
        }
    }

    private static long HashLen0to16(byte[] s, int index, int len) {
        if (len > 8) {
            long a = Fetch64(s, index);
            long b = Fetch64(s, index + len - 8);
            return HashLen16(a, RotateByAtLeastOne(b + len, len)) ^ b;
        }
        if (len >= 4) {
            long a = Fetch32(s, index);
            return HashLen16(len + (a << 3), Fetch32(s, index + len - 4));
        }
        if (len > 0) {
            byte a = s[index];
            // Parenthesized: '+' binds tighter than '>>>' in Java, so the shift must be grouped.
            byte b = s[index + (len >>> 1)];
            byte c = s[index + len - 1];
            int y = (a) + (b << 8);
            int z = len + (c << 2);
            return ShiftMix(y * k2 ^ z * k3) * k2;
        }
        return k2;
    }

    private static long HashLen17to32(byte[] s, int index, int len) {
        long a = Fetch64(s, index) * k1;
        long b = Fetch64(s, index + 8);
        long c = Fetch64(s, index + len - 8) * k2;
        long d = Fetch64(s, index + len - 16) * k0;
        return HashLen16(Rotate(a - b, 43) + Rotate(c, 30) + d,
                a + Rotate(b ^ k3, 20) - c + len);
    }

    private static long HashLen33to64(byte[] s, int index, int len) {
        long z = Fetch64(s, index + 24);
        long a = Fetch64(s, index) + (len + Fetch64(s, index + len - 16)) * k0;
        long b = Rotate(a + z, 52);
        long c = Rotate(a, 37);
        a += Fetch64(s, index + 8);
        c += Rotate(a, 7);
        a += Fetch64(s, index + 16);
        long vf = a + z;
        long vs = b + Rotate(a, 31) + c;
        a = Fetch64(s, index + 16) + Fetch64(s, index + len - 32);
        z = Fetch64(s, index + len - 8);
        b = Rotate(a + z, 52);
        c = Rotate(a, 37);
        a += Fetch64(s, index + len - 24);
        c += Rotate(a, 7);
        a += Fetch64(s, index + len - 16);
        long wf = a + z;
        long ws = b + Rotate(a, 31) + c;
        long r = ShiftMix((vf + ws) * k2 + (wf + vs) * k0);
        return ShiftMix(r * k0 + vs) * k2;
    }

    private static long Fetch64(byte[] p, int index) {
        return toLongLE(p, index);
    }

    private static long Fetch32(byte[] p, int index) {
        return toIntLE(p, index);
    }

    private static long[] WeakHashLen32WithSeeds(
            long w, long x, long y, long z, long a, long b) {
        a += w;
        b = Rotate(b + a + z, 21);
        long c = a;
        a += x;
        a += y;
        b += Rotate(a, 44);
        return new long[]{a + z, b + c};
    }

    private static long[] WeakHashLen32WithSeeds(byte[] s, int index, long a, long b) {
        return WeakHashLen32WithSeeds(Fetch64(s, index),
                Fetch64(s, index + 8),
                Fetch64(s, index + 16),
                Fetch64(s, index + 24),
                a,
                b);
    }

    private static long toLongLE(byte[] b, int i) {
        return 0xffffffffffffffffL & (((long) b[i + 7] << 56) + ((long) (b[i + 6] & 255) << 48) + ((long) (b[i + 5] & 255) << 40) + ((long) (b[i + 4] & 255) << 32) + ((long) (b[i + 3] & 255) << 24) + ((b[i + 2] & 255) << 16) + ((b[i + 1] & 255) << 8) + (b[i] & 255));
    }

    private static long toIntLE(byte[] b, int i) {
        return 0xffffffffL & (((b[i + 3] & 255) << 24) + ((b[i + 2] & 255) << 16) + ((b[i + 1] & 255) << 8) + (b[i] & 255));
    }

    private static long RotateByAtLeastOne(long val, int shift) {
        return (val >>> shift) | (val << (64 - shift));
    }

    private static long ShiftMix(long val) {
        return val ^ (val >>> 47);
    }

    private static long Uint128Low64(long[] x) {
        return x[0];
    }

    private static long Rotate(long val, int shift) {
        return shift == 0 ? val : (val >>> shift) | (val << (64 - shift));
    }

    private static long Uint128High64(long[] x) {
        return x[1];
    }

    private static long Hash128to64(long[] x) {
        long a = (Uint128Low64(x) ^ Uint128High64(x)) * k5;
        a ^= (a >>> 47);
        long b = (Uint128High64(x) ^ a) * k5;
        b ^= (b >>> 47);
        b *= k5;
        return b;
    }

    private static long HashLen16(long u, long v) {
        return Hash128to64(new long[]{u, v});
    }

}
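For reference, this is how the (also deleted) getDecimalHash in TransFunction drives this class; a minimal sketch, with the sample logid being an arbitrary value:

byte[] data = String.valueOf(7021443219807341234L).getBytes(java.nio.charset.StandardCharsets.UTF_8);
long hash = CityHash.CityHash64(data, 0, data.length);
// Java longs are signed; the schema stores the hash as an unsigned decimal string.
String unsignedDecimal = Long.toUnsignedString(hash, 10);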
@@ -1,130 +0,0 @@
package com.zdjizhi.utils.general;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.json.JsonParseUtil;

import java.util.Map;

/**
 * Utility class that transforms and completes log fields.
 *
 * @author qidaijie
 */
public class TransFormMap {
    private static final Log logger = LogFactory.get();

    /**
     * Parse a log and complete its missing fields.
     *
     * @param jsonMap raw log consumed from the Kafka topic, already parsed into a map
     * @return the completed log
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> dealCommonMessage(Map<String, Object> jsonMap) {
        try {
            JsonParseUtil.dropJsonField(jsonMap);
            for (String[] strings : JsonParseUtil.getJobList()) {
                // value of the source field used by the function
                Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
                // key of the field to complete
                String appendToKeyName = strings[1];
                // current value of the field to complete
                Object appendTo = JsonParseUtil.getValue(jsonMap, appendToKeyName);
                // name of the completion function to apply
                String function = strings[2];
                // extra parameter value
                String param = strings[3];
                functionSet(function, jsonMap, appendToKeyName, appendTo, logValue, param);
            }
            return jsonMap;
        } catch (RuntimeException e) {
            logger.error("Transform logs failed, the exception is: " + e);
            return null;
        }
    }

    /**
     * Set of operations applied to fields according to the schema description.
     *
     * @param function        name of the completion function to apply
     * @param jsonMap         parsed raw log map
     * @param appendToKeyName key of the field to complete
     * @param appendTo        current value of the field to complete
     * @param logValue        value of the source field used by the function
     * @param param           extra parameter value
     */
    private static void functionSet(String function, Map<String, Object> jsonMap, String appendToKeyName, Object appendTo, Object logValue, String param) {
        switch (function) {
            case "current_timestamp":
                if (!(appendTo instanceof Long)) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getCurrentTime());
                }
                break;
            case "snowflake_id":
                JsonParseUtil.setValue(jsonMap, appendToKeyName, SnowflakeId.generateId());
                break;
            case "geo_ip_detail":
                if (logValue != null && appendTo == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpDetail(logValue.toString()));
                }
                break;
            case "geo_asn":
                if (logValue != null && appendTo == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoAsn(logValue.toString()));
                }
                break;
            case "geo_ip_country":
                if (logValue != null && appendTo == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpCountry(logValue.toString()));
                }
                break;
            case "set_value":
                if (param != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, param);
                }
                break;
            case "get_value":
                if (logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, logValue);
                }
                break;
            case "if":
                if (param != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.condition(jsonMap, param));
                }
                break;
            case "sub_domain":
                if (appendTo == null && logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getTopDomain(logValue.toString()));
                }
                break;
            case "radius_match":
                if (logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.radiusMatch(logValue.toString()));
                }
                break;
            case "decode_of_base64":
                if (logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
                }
                break;
            case "flattenSpec":
                if (logValue != null && param != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.flattenSpec(logValue.toString(), param));
                }
                break;
            case "app_match":
                if (logValue != null && appendTo == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.appMatch(logValue.toString()));
                }
                break;
            default:
        }
    }

}
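To make the four-element job tuples concrete, a sketch with illustrative field names:

import java.util.HashMap;
import java.util.Map;

class JobTupleSketch {
    static void demo() {
        // Hypothetical tuple from the schema: {source field, target field, function, extra param}
        String[] job = {"server_ip", "server_location", "geo_ip_detail", null};
        Map<String, Object> log = new HashMap<>();
        log.put("server_ip", "8.8.8.8");
        // logValue resolves to "8.8.8.8" and server_location is absent (appendTo == null),
        // so functionSet stores TransFunction.getGeoIpDetail("8.8.8.8") under "server_location".
    }
}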
@@ -1,132 +0,0 @@
package com.zdjizhi.utils.general;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.json.JsonParseUtil;

import java.util.Map;

/**
 * Utility class that transforms and completes log fields, normalizing value types first.
 *
 * @author qidaijie
 */
public class TransFormTypeMap {
    private static final Log logger = LogFactory.get();

    /**
     * Parse a log and complete its missing fields.
     *
     * @param message raw log from the Kafka topic
     * @return the completed log
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> dealCommonMessage(Map<String, Object> message) {
        try {
            Map<String, Object> jsonMap = JsonParseUtil.typeTransform(message);
            for (String[] strings : JsonParseUtil.getJobList()) {
                // value of the source field used by the function
                Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
                // key of the field to complete
                String appendToKeyName = strings[1];
                // current value of the field to complete
                Object appendToKeyValue = JsonParseUtil.getValue(jsonMap, appendToKeyName);
                // name of the completion function to apply
                String function = strings[2];
                // extra parameter value
                String param = strings[3];
                functionSet(function, jsonMap, appendToKeyName, appendToKeyValue, logValue, param);
            }
            return jsonMap;
        } catch (RuntimeException e) {
            logger.error("Transform logs failed, the exception is: " + e);
            return null;
        }
    }

    /**
     * Set of operations applied to fields according to the schema description.
     *
     * @param function         name of the completion function to apply
     * @param jsonMap          parsed raw log map
     * @param appendToKeyName  key of the field to complete
     * @param appendToKeyValue current value of the field to complete
     * @param logValue         value of the source field used by the function
     * @param param            extra parameter value
     */
    private static void functionSet(String function, Map<String, Object> jsonMap, String appendToKeyName, Object appendToKeyValue, Object logValue, String param) {
        switch (function) {
            case "current_timestamp":
                if (!(appendToKeyValue instanceof Long)) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getCurrentTime());
                }
                break;
            case "snowflake_id":
                JsonParseUtil.setValue(jsonMap, appendToKeyName, SnowflakeId.generateId());
                // Deferred from the release plan - TSG 22.01:
                // JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getDecimalHash(SnowflakeId.generateId()));
                break;
            case "geo_ip_detail":
                if (logValue != null && appendToKeyValue == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpDetail(logValue.toString()));
                }
                break;
            case "geo_asn":
                if (logValue != null && appendToKeyValue == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoAsn(logValue.toString()));
                }
                break;
            case "geo_ip_country":
                if (logValue != null && appendToKeyValue == null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpCountry(logValue.toString()));
                }
                break;
            case "set_value":
                if (param != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, param);
                }
                break;
            case "get_value":
                if (logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, logValue);
                }
                break;
            case "if":
                if (param != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.condition(jsonMap, param));
                }
                break;
            case "sub_domain":
                if (appendToKeyValue == null && logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getTopDomain(logValue.toString()));
                }
                break;
            case "radius_match":
                if (logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.radiusMatch(logValue.toString()));
                }
                break;
            case "decode_of_base64":
                if (logValue != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
                }
                break;
            case "flattenSpec":
                if (logValue != null && param != null) {
                    JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.flattenSpec(logValue.toString(), param));
                }
                break;
            case "app_match":
                if (logValue != null && appendToKeyValue == null) {
                    // JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.appMatch(logValue.toString()));
                }
                break;
            default:
        }
    }

}
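The only behavioral difference from TransFormMap above is the JsonParseUtil.typeTransform pass, which coerces every value to the Java type the schema declares before completion runs. A sketch of the effect, assuming the schema types bytes_sent as long and FORMAT_SPLITTER is a comma (both assumptions):

import java.util.HashMap;
import java.util.Map;

class TypeTransformSketch {
    static void demo() {
        Map<String, Object> raw = new HashMap<>();
        raw.put("bytes_sent", "10,20");  // arrives from upstream as a string
        // typeTransform routes the field through JsonTypeUtil.checkLongValue, which
        // (via TypeUtils.castToLong) keeps the first comma-separated token and yields 10L.
    }
}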
@@ -1,297 +0,0 @@
package com.zdjizhi.utils.general;

import cn.hutool.core.codec.Base64;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException;
import com.jayway.jsonpath.JsonPath;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.FormatUtils;
import com.zdjizhi.utils.IpLookupV2;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.app.AppUtils;
import com.zdjizhi.utils.hbase.HBaseUtils;
import com.zdjizhi.utils.json.JsonParseUtil;
import com.zdjizhi.utils.json.TypeUtils;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @author qidaijie
 */
class TransFunction {
    private static final Log logger = LogFactory.get();

    /**
     * Digits-only validation regex (at least one digit, so blank strings do not match).
     */
    private static final Pattern PATTERN = Pattern.compile("[0-9]+");

    /**
     * IP geolocation lookup helper.
     */
    private static IpLookupV2 ipLookup = new IpLookupV2.Builder(false)
            .loadDataFileV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_v4_built_in.mmdb")
            .loadDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_v6_built_in.mmdb")
            .loadDataFilePrivateV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_v4_user_defined.mmdb")
            .loadDataFilePrivateV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_v6_user_defined.mmdb")
            .loadAsnDataFile(FlowWriteConfig.TOOLS_LIBRARY + "asn_v4.mmdb")
            .loadAsnDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "asn_v6.mmdb")
            .build();

    /**
     * Current timestamp in seconds.
     */
    static long getCurrentTime() {
        return System.currentTimeMillis() / 1000;
    }

    /**
     * CityHash64 hash rendered as an unsigned decimal.
     * Deferred from the release plan - TSG 22.01.
     *
     * @param data raw value
     * @return hash result
     */
    @Deprecated
    static BigInteger getDecimalHash(long data) {
        byte[] dataBytes = String.valueOf(data).getBytes();
        long hashValue = CityHash.CityHash64(dataBytes, 0, dataBytes.length);
        String decimalValue = Long.toUnsignedString(hashValue, 10);
        return new BigInteger(decimalValue);
    }

    /**
     * Look up location details for a client IP.
     *
     * @param ip client IP
     * @return detailed location info for the IP
     */
    static String getGeoIpDetail(String ip) {
        return ipLookup.cityLookupDetail(ip);
    }

    /**
     * Look up the ASN for an IP.
     *
     * @param ip client/server IP
     * @return ASN
     */
    static String getGeoAsn(String ip) {
        return ipLookup.asnLookup(ip);
    }

    /**
     * Look up the country for an IP.
     *
     * @param ip server IP
     * @return country
     */
    static String getGeoIpCountry(String ip) {
        return ipLookup.countryLookup(ip);
    }

    /**
     * Complete the radius account via the HBase cache.
     *
     * @param ip client IP
     * @return account
     */
    static String radiusMatch(String ip) {
        return HBaseUtils.getAccount(ip.trim());
    }

    /**
     * Complete appName from the cached appId mapping.
     *
     * @param appIds app id list
     * @return appName
     */
    @Deprecated
    static String appMatch(String appIds) {
        try {
            String appId = StrUtil.split(appIds, FlowWriteConfig.FORMAT_SPLITTER, true, true).get(0);
            return AppUtils.getAppName(Integer.parseInt(appId));
        } catch (NumberFormatException | ClassCastException exception) {
            logger.error("Failed to split/convert the APP ID list, offending list: " + appIds);
            return "";
        }
    }

    /**
     * Extract the top-level domain.
     *
     * @param domain original domain
     * @return top-level domain
     */
    static String getTopDomain(String domain) {
        try {
            return FormatUtils.getTopPrivateDomain(domain);
        } catch (StringIndexOutOfBoundsException outException) {
            logger.error("Failed to extract the top-level domain, offending domain: " + domain);
            return "";
        }
    }

    /**
     * Decode Base64 with the given charset.
     *
     * @param message base64 text
     * @param charset charset
     * @return decoded string
     */
    static String decodeBase64(String message, Object charset) {
        String result = "";
        try {
            if (StringUtil.isNotBlank(message)) {
                if (charset == null) {
                    result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
                } else {
                    result = Base64.decodeStr(message, charset.toString());
                }
            }
        } catch (RuntimeException rune) {
            logger.error("Failed to decode Base64, exception: " + rune);
        }
        return result;
    }

    /**
     * Extract a value from JSON with a JsonPath expression.
     *
     * @param message json
     * @param expr    extraction expression
     * @return extraction result
     */
    static String flattenSpec(String message, String expr) {
        String flattenResult = "";
        try {
            if (StringUtil.isNotBlank(expr)) {
                ArrayList<String> read = JsonPath.parse(message).read(expr);
                if (read.size() >= 1) {
                    flattenResult = read.get(0);
                }
            }
        } catch (ClassCastException | InvalidPathException | ArrayIndexOutOfBoundsException e) {
            logger.error("Device tag extraction failed, bad expression [ " + expr + " ]: " + e);
        }
        return flattenResult;
    }

    /**
     * If the parameter names a log field, return that field's value; otherwise return the literal string.
     *
     * @param object in-memory entity
     * @param param  field name / plain string
     * @return JSON value or String
     */
    static Object isJsonValue(Object object, String param) {
        if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
            return JsonParseUtil.getValue(object, param.substring(2));
        } else {
            return param;
        }
    }

    /**
     * If the parameter names a log field, return that field's value; otherwise return the literal string.
     *
     * @param jsonMap parsed log map
     * @param param   field name / plain string
     * @return JSON value or String
     */
    static Object isJsonValue(Map<String, Object> jsonMap, String param) {
        if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
            return JsonParseUtil.getValue(jsonMap, param.substring(2));
        } else {
            return param;
        }
    }

    /**
     * IF-function implementation: builds a ternary expression from the log;
     * numeric results are converted to a numeric type before being returned.
     *
     * @param object  in-memory entity
     * @param ifParam field name / plain string
     * @return resultA or resultB or null
     */
    static Object condition(Object object, String ifParam) {
        Object result = null;
        try {
            String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
            if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
                String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
                Object direction = isJsonValue(object, norms[0]);
                Object resultA = isJsonValue(object, split[1]);
                Object resultB = isJsonValue(object, split[2]);
                if (direction instanceof Number) {
                    result = TypeUtils.castToIfFunction((Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB);
                } else if (direction instanceof String) {
                    result = TypeUtils.castToIfFunction(direction.equals(norms[1]) ? resultA : resultB);
                }
            }
        } catch (RuntimeException e) {
            logger.error("IF function failed, exception: " + e);
        }
        return result;
    }

    /**
     * IF-function implementation: builds a ternary expression from the log.
     *
     * @param jsonMap parsed log map
     * @param ifParam field name / plain string
     * @return resultA or resultB or null
     */
    static Object condition(Map<String, Object> jsonMap, String ifParam) {
        Object result = null;
        try {
            String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
            if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
                String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
                Object direction = isJsonValue(jsonMap, norms[0]);
                Object resultA = isJsonValue(jsonMap, split[1]);
                Object resultB = isJsonValue(jsonMap, split[2]);
                if (direction instanceof Number) {
                    result = (Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB;
                } else if (direction instanceof String) {
                    result = direction.equals(norms[1]) ? resultA : resultB;
                }
            }
        } catch (RuntimeException e) {
            logger.error("IF function failed, exception: " + e);
        }
        return result;
    }

    /**
     * Set a fixed value; digit-only strings are returned as long.
     *
     * @param param default value
     * @return a number or a string
     */
    static Object setValue(String param) {
        try {
            Matcher isNum = PATTERN.matcher(param);
            if (isNum.matches()) {
                return Long.parseLong(param);
            } else {
                return param;
            }
        } catch (RuntimeException e) {
            logger.error("SetValue function failed, exception: " + e);
        }
        return null;
    }
}
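A runnable mimic of how condition evaluates its parameter. The constants live in FlowWriteConfig, which is not part of this diff, so the values here are assumptions: FORMAT_SPLITTER = ",", IF_CONDITION_SPLITTER = "=", and IS_JSON_KEY_TAG = "$." (a two-character field prefix, which would explain why isJsonValue strips two characters):

import java.util.HashMap;
import java.util.Map;

class IfFunctionSketch {
    // Mirrors TransFunction.condition under the assumed splitter constants above.
    static Object condition(Map<String, Object> log, String ifParam) {
        String[] split = ifParam.split(",");
        String[] norms = split[0].split("=");
        Object direction = resolve(log, norms[0]);
        Object resultA = resolve(log, split[1]);
        Object resultB = resolve(log, split[2]);
        if (direction instanceof Number) {
            return Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1]) ? resultA : resultB;
        }
        return direction != null && direction.equals(norms[1]) ? resultA : resultB;
    }

    static Object resolve(Map<String, Object> log, String param) {
        return param.startsWith("$.") ? log.get(param.substring(2)) : param;
    }

    public static void main(String[] args) {
        Map<String, Object> log = new HashMap<>();
        log.put("dns_type", 1);
        log.put("answer_v4", "8.8.8.8");
        log.put("answer_v6", "2001:4860:4860::8888");
        System.out.println(condition(log, "$.dns_type=1,$.answer_v4,$.answer_v6")); // prints 8.8.8.8
    }
}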
@@ -1,208 +0,0 @@
package com.zdjizhi.utils.hbase;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * HBase utility class.
 *
 * @author qidaijie
 */
public class HBaseUtils {
    private static final Log logger = LogFactory.get();
    private static Map<String, String> subIdMap = new ConcurrentHashMap<>(16);
    private static Connection connection;
    private static Long time;

    private static HBaseUtils hBaseUtils;

    private static void getInstance() {
        hBaseUtils = new HBaseUtils();
    }

    /**
     * Constructor: connect, load the full cache, then refresh on a schedule.
     */
    private HBaseUtils() {
        // obtain the connection
        getConnection();
        // load the full mapping
        getAll();
        // refresh periodically
        updateCache();
    }

    private static void getConnection() {
        try {
            // HBase configuration
            Configuration configuration = HBaseConfiguration.create();
            // ZooKeeper quorum
            configuration.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_SERVERS);
            configuration.set("hbase.client.retries.number", "3");
            configuration.set("hbase.bulkload.retries.number", "3");
            configuration.set("zookeeper.recovery.retry", "3");
            connection = ConnectionFactory.createConnection(configuration);
            time = System.currentTimeMillis();
            logger.warn("HBaseUtils got the HBase connection, now running getAll().");
        } catch (IOException ioe) {
            logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
        } catch (RuntimeException e) {
            logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
        }
    }

    /**
     * Pull incremental changes since the last refresh.
     */
    private static void change() {
        if (hBaseUtils == null) {
            getInstance();
        }
        long nowTime = System.currentTimeMillis();
        timestampsFilter(time - 1000, nowTime + 500);
    }

    /**
     * Fetch rows changed inside the given time range.
     *
     * @param startTime range start
     * @param endTime   range end
     */
    private static void timestampsFilter(Long startTime, Long endTime) {
        Long begin = System.currentTimeMillis();
        Table table = null;
        ResultScanner scanner = null;
        Scan scan2 = new Scan();
        try {
            table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_TABLE_NAME));
            scan2.setTimeRange(startTime, endTime);
            scanner = table.getScanner(scan2);
            for (Result result : scanner) {
                int acctStatusType = getAcctStatusType(result);
                String framedIp = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("framed_ip"))).trim();
                String account = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("account"))).trim();
                if (acctStatusType == 1) {
                    if (subIdMap.containsKey(framedIp)) {
                        boolean same = account.equals(subIdMap.get(framedIp));
                        if (!same) {
                            subIdMap.put(framedIp, account);
                        }
                    } else {
                        subIdMap.put(framedIp, account);
                    }
                } else if (acctStatusType == 2) {
                    subIdMap.remove(framedIp);
                }
            }
            Long end = System.currentTimeMillis();
            logger.warn("HBaseUtils Now subIdMap.keySet().size() is: " + subIdMap.keySet().size());
            logger.warn("HBaseUtils Update cache timeConsuming is: " + (end - begin) + ",BeginTime: " + startTime + ",EndTime: " + endTime);
            time = endTime;
        } catch (IOException ioe) {
            logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
        } catch (RuntimeException e) {
            logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
        } finally {
            if (scanner != null) {
                scanner.close();
            }
            if (table != null) {
                try {
                    table.close();
                } catch (IOException e) {
                    logger.error("HBase Table Close ERROR! Exception message is:" + e);
                }
            }
        }
    }

    /**
     * Load every key/value pair into the cache.
     */
    private static void getAll() {
        long begin = System.currentTimeMillis();
        try {
            Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_TABLE_NAME));
            Scan scan2 = new Scan();
            ResultScanner scanner = table.getScanner(scan2);
            for (Result result : scanner) {
                int acctStatusType = getAcctStatusType(result);
                String framedIp = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("framed_ip")));
                String account = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("account")));
                if (acctStatusType == 1) {
                    subIdMap.put(framedIp, account);
                }
            }
            logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size(): " + subIdMap.size());
            logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size() timeConsuming is: " + (System.currentTimeMillis() - begin));
            scanner.close();
        } catch (IOException ioe) {
            logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
        } catch (RuntimeException e) {
            logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
        }
    }

    /**
     * Refresh timer: re-pulls changed rows at a fixed interval.
     */
    private void updateCache() {
        // ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1,
        //         new BasicThreadFactory.Builder().namingPattern("hbase-change-pool-%d").daemon(true).build());
        ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
        executorService.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                try {
                    if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
                        change();
                    }
                } catch (RuntimeException e) {
                    logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
                }
            }
        }, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
    }

    /**
     * Get the account for a client IP.
     *
     * @param clientIp client_ip
     * @return account
     */
    public static String getAccount(String clientIp) {
        if (hBaseUtils == null) {
            getInstance();
        }
        return subIdMap.get(clientIp);
    }

    private static int getAcctStatusType(Result result) {
        boolean hasType = result.containsColumn(Bytes.toBytes("radius"), Bytes.toBytes("acct_status_type"));
        if (hasType) {
            return Bytes.toInt(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("acct_status_type")));
        } else {
            return 1;
        }
    }

}
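A minimal usage sketch with an illustrative IP; the first call lazily builds the singleton, which opens the HBase connection and blocks while the full radius table is cached:

String account = HBaseUtils.getAccount("192.168.1.23");
if (account == null) {
    // no radius session is currently known for this client IP
}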
@@ -1,77 +0,0 @@
package com.zdjizhi.utils.http;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

/**
 * Utility class for fetching the gateway schema.
 *
 * @author qidaijie
 */
public class HttpClientUtil {
    private static final Log logger = LogFactory.get();

    /**
     * Request the schema from the gateway.
     *
     * @param http gateway url
     * @return schema
     */
    public static String requestByGetMethod(String http) {
        CloseableHttpClient httpClient = HttpClients.createDefault();
        StringBuilder entityStringBuilder;

        HttpGet get = new HttpGet(http);
        BufferedReader bufferedReader = null;
        CloseableHttpResponse httpResponse = null;
        try {
            httpResponse = httpClient.execute(get);
            HttpEntity entity = httpResponse.getEntity();
            entityStringBuilder = new StringBuilder();
            if (null != entity) {
                bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
                int intC;
                // read up to the first newline only
                while ((intC = bufferedReader.read()) != -1) {
                    char c = (char) intC;
                    if (c == '\n') {
                        break;
                    }
                    entityStringBuilder.append(c);
                }

                return entityStringBuilder.toString();
            }
        } catch (IOException e) {
            logger.error("Get Schema from Query engine ERROR! Exception message is:" + e);
        } finally {
            if (httpClient != null) {
                try {
                    httpClient.close();
                } catch (IOException e) {
                    logger.error("Close HTTP Client ERROR! Exception message is:" + e);
                }
            }
            if (httpResponse != null) {
                try {
                    httpResponse.close();
                } catch (IOException e) {
                    logger.error("Close httpResponse ERROR! Exception message is:" + e);
                }
            }
            if (bufferedReader != null) {
                IOUtils.closeQuietly(bufferedReader);
            }
        }
        return "";
    }
}
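The same fetch written with try-with-resources, a sketch of how the manual finally blocks above could be collapsed; EntityUtils and the class name are the only new pieces, and the stop-at-first-newline behavior is preserved:

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

class SchemaFetchSketch {
    static String fetchFirstLine(String url) {
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(new HttpGet(url))) {
            String body = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
            int nl = body.indexOf('\n');
            return nl >= 0 ? body.substring(0, nl) : body;  // the original stops at the first newline
        } catch (IOException e) {
            return "";
        }
    }
}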
@@ -1,372 +0,0 @@
package com.zdjizhi.utils.json;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.jayway.jsonpath.JsonPath;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import net.sf.cglib.beans.BeanMap;

import java.util.*;
import java.util.concurrent.Executor;

/**
 * JSON parsing utilities built on FastJson.
 *
 * @author qidaijie
 */
public class JsonParseUtil {
    private static final Log logger = LogFactory.get();
    private static Properties propNacos = new Properties();

    /**
     * Names of the fields to drop from every log.
     */
    private static ArrayList<String> dropList = new ArrayList<>();

    /**
     * Field-name-to-type map used to build the reflective schema class in memory.
     */
    private static HashMap<String, Class<?>> jsonFieldsMap;

    /**
     * Job list. Each element is a four-element string array
     * (source field flagged by "format", field to complete, function to apply, extra parameter), e.g.:
     * (mail_subject, mail_subject, decode_of_base64, mail_subject_charset)
     */
    private static ArrayList<String[]> jobList;

    static {
        propNacos.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
        propNacos.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_SCHEMA_NAMESPACE);
        propNacos.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
        propNacos.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
        try {
            ConfigService configService = NacosFactory.createConfigService(propNacos);
            String dataId = FlowWriteConfig.NACOS_DATA_ID;
            String group = FlowWriteConfig.NACOS_GROUP;
            String schema = configService.getConfig(dataId, group, 5000);
            if (StringUtil.isNotBlank(schema)) {
                jsonFieldsMap = getMapFromHttp(schema);
                jobList = getJobListFromHttp(schema);
            }
            configService.addListener(dataId, group, new Listener() {
                @Override
                public Executor getExecutor() {
                    return null;
                }

                @Override
                public void receiveConfigInfo(String configMsg) {
                    if (StringUtil.isNotBlank(configMsg)) {
                        clearCache();
                        jsonFieldsMap = getMapFromHttp(configMsg);
                        jobList = getJobListFromHttp(configMsg);
                    }
                }
            });
        } catch (NacosException e) {
            logger.error("Get Schema config from Nacos error, the exception message is: " + e.getMessage());
        }
    }

    /**
     * Pattern match: map a schema type name to a class.
     *
     * @param type type name
     * @return class type
     */
    private static Class<?> getClassName(String type) {
        Class<?> clazz;

        switch (type) {
            case "int":
                clazz = Integer.class;
                break;
            case "string":
                clazz = String.class;
                break;
            case "long":
                clazz = long.class;
                break;
            case "array":
                clazz = List.class;
                break;
            case "double":
                clazz = double.class;
                break;
            case "float":
                clazz = float.class;
                break;
            case "char":
                clazz = char.class;
                break;
            case "byte":
                clazz = byte.class;
                break;
            case "boolean":
                clazz = boolean.class;
                break;
            case "short":
                clazz = short.class;
                break;
            default:
                clazz = String.class;
        }
        return clazz;
    }

    /**
     * Read a property value from a bean.
     *
     * @param obj      bean
     * @param property key
     * @return property value
     */
    public static Object getValue(Object obj, String property) {
        try {
            BeanMap beanMap = BeanMap.create(obj);
            return beanMap.get(property);
        } catch (RuntimeException e) {
            logger.error("Failed to read json value, key: " + property + ", exception: " + e);
            return null;
        }
    }

    /**
     * Read a property value from a raw log map.
     *
     * @param jsonMap  raw log
     * @param property key
     * @return property value
     */
    public static Object getValue(Map<String, Object> jsonMap, String property) {
        try {
            return jsonMap.getOrDefault(property, null);
        } catch (RuntimeException e) {
            logger.error("Failed to read json value, key: " + property + ", exception: " + e);
            return null;
        }
    }

    /**
     * Update a property value on a raw log map.
     *
     * @param jsonMap  raw log json map
     * @param property key to update
     * @param value    new value
     */
    public static void setValue(Map<String, Object> jsonMap, String property, Object value) {
        try {
            jsonMap.put(property, value);
        } catch (RuntimeException e) {
            logger.error("Assigned a value of the wrong type to the entity", e);
        }
    }

    /**
     * Update a property value on a bean.
     *
     * @param obj      bean
     * @param property key to update
     * @param value    new value
     */
    public static void setValue(Object obj, String property, Object value) {
        try {
            BeanMap beanMap = BeanMap.create(obj);
            beanMap.put(property, value);
        } catch (ClassCastException e) {
            logger.error("Assigned a value of the wrong type to the entity", e);
        }
    }

    /**
     * Coerce each field of the raw log to the type the schema declares.
     *
     * @param jsonMap raw log map
     */
    public static Map<String, Object> typeTransform(Map<String, Object> jsonMap) throws RuntimeException {
        JsonParseUtil.dropJsonField(jsonMap);
        HashMap<String, Object> tmpMap = new HashMap<>(192);
        for (String key : jsonMap.keySet()) {
            if (jsonFieldsMap.containsKey(key)) {
                String simpleName = jsonFieldsMap.get(key).getSimpleName();
                switch (simpleName) {
                    case "String":
                        tmpMap.put(key, JsonTypeUtil.checkString(jsonMap.get(key)));
                        break;
                    case "Integer":
                        tmpMap.put(key, JsonTypeUtil.getIntValue(jsonMap.get(key)));
                        break;
                    case "long":
                        tmpMap.put(key, JsonTypeUtil.checkLongValue(jsonMap.get(key)));
                        break;
                    case "List":
                        tmpMap.put(key, JsonTypeUtil.checkArray(jsonMap.get(key)));
                        break;
                    case "Map":
                        tmpMap.put(key, JsonTypeUtil.checkObject(jsonMap.get(key)));
                        break;
                    case "double":
                        tmpMap.put(key, JsonTypeUtil.checkDouble(jsonMap.get(key)));
                        break;
                    default:
                        tmpMap.put(key, JsonTypeUtil.checkString(jsonMap.get(key)));
                }
            }
        }
        return tmpMap;
    }

    public static ArrayList<String[]> getJobList() {
        return jobList;
    }

    /**
     * Build the field-to-class map from the schema string; the map is used to
     * generate the schema-typed object via reflection.
     *
     * @param schema the schema JSON
     * @return map used to generate the schema-typed object via reflection
     */
    private static HashMap<String, Class<?>> getMapFromHttp(String schema) {
        HashMap<String, Class<?>> map = new HashMap<>(16);

        // Extract "fields" as an array whose elements each carry name, doc and type.
        JSONObject schemaJson = JSON.parseObject(schema);
        JSONArray fields = (JSONArray) schemaJson.get("fields");

        for (Object field : fields) {
            String filedStr = field.toString();
            if (checkKeepField(filedStr)) {
                String name = JsonPath.read(filedStr, "$.name").toString();
                String type = JsonPath.read(filedStr, "$.type").toString();
                if (type.contains("{")) {
                    type = JsonPath.read(filedStr, "$.type.type").toString();
                }
                // assemble the map used to generate the entity class
                map.put(name, getClassName(type));
            } else {
                // store the field name, not the whole field JSON, so dropJsonField can match map keys
                dropList.add(JsonPath.read(filedStr, "$.name").toString());
            }
        }
        return map;
    }

    /**
     * Decide whether a field should be kept.
     *
     * @param message a single field JSON
     * @return true or false
     */
    private static boolean checkKeepField(String message) {
        boolean isKeepField = true;
        boolean isHiveDoc = JSON.parseObject(message).containsKey("doc");
        if (isHiveDoc) {
            boolean isHiveVi = JsonPath.read(message, "$.doc").toString().contains("visibility");
            if (isHiveVi) {
                String visibility = JsonPath.read(message, "$.doc.visibility").toString();
                if (FlowWriteConfig.VISIBILITY.equals(visibility)) {
                    isKeepField = false;
                }
            }
        }
        return isKeepField;
    }

    /**
     * Remove the invalid fields the schema marks for dropping.
     *
     * @param jsonMap parsed log map
     */
    public static void dropJsonField(Map<String, Object> jsonMap) {
        for (String field : dropList) {
            jsonMap.remove(field);
        }
    }

    /**
     * Parse the schema into a job list; each entry is (source field, target field, function, param).
     *
     * @param schema log schema
     * @return job list
     */
    private static ArrayList<String[]> getJobListFromHttp(String schema) {
        ArrayList<String[]> list = new ArrayList<>();

        // Extract "fields" as an array whose elements each carry name, doc and type.
        JSONObject schemaJson = JSON.parseObject(schema);
        JSONArray fields = (JSONArray) schemaJson.get("fields");

        for (Object field : fields) {

            if (JSON.parseObject(field.toString()).containsKey("doc")) {
                Object doc = JSON.parseObject(field.toString()).get("doc");

                if (JSON.parseObject(doc.toString()).containsKey("format")) {
                    String name = JSON.parseObject(field.toString()).get("name").toString();
                    Object format = JSON.parseObject(doc.toString()).get("format");
                    JSONObject formatObject = JSON.parseObject(format.toString());

                    String functions = formatObject.get("functions").toString();
                    String appendTo = null;
                    String params = null;

                    if (formatObject.containsKey("appendTo")) {
                        appendTo = formatObject.get("appendTo").toString();
                    }

                    if (formatObject.containsKey("param")) {
                        params = formatObject.get("param").toString();
                    }

                    if (StringUtil.isNotBlank(appendTo) && StringUtil.isBlank(params)) {
                        String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
                        String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);

                        for (int i = 0; i < functionArray.length; i++) {
                            list.add(new String[]{name, appendToArray[i], functionArray[i], null});
                        }

                    } else if (StringUtil.isNotBlank(appendTo) && StringUtil.isNotBlank(params)) {
                        String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
                        String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
                        String[] paramArray = params.split(FlowWriteConfig.FORMAT_SPLITTER);

                        for (int i = 0; i < functionArray.length; i++) {
                            list.add(new String[]{name, appendToArray[i], functionArray[i], paramArray[i]});
                        }
                    } else {
                        list.add(new String[]{name, name, functions, params});
                    }

                }
            }

        }
        return list;
    }

    /**
     * Clear the caches so they can be rebuilt when the config changes.
     */
    private static void clearCache() {
        jobList.clear();
        jsonFieldsMap.clear();
        dropList.clear();
    }

}
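A hypothetical schema fragment and the job tuples getJobListFromHttp would derive from it, assuming FORMAT_SPLITTER is a comma; every field and function name here is illustrative, not taken from a real schema:

{
  "fields": [
    { "name": "mail_subject",
      "doc": { "format": { "functions": "decode_of_base64",
                           "param": "mail_subject_charset" } },
      "type": "string" },
    { "name": "client_ip",
      "doc": { "format": { "functions": "geo_ip_detail,geo_asn",
                           "appendTo": "client_location,client_asn" } },
      "type": "string" }
  ]
}

// Resulting tuples {source, target, function, param}:
//   {"mail_subject", "mail_subject", "decode_of_base64", "mail_subject_charset"}  (no appendTo branch)
//   {"client_ip", "client_location", "geo_ip_detail", null}
//   {"client_ip", "client_asn", "geo_asn", null}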
@@ -1,129 +0,0 @@
package com.zdjizhi.utils.json;

import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.exception.FlowWriteException;

import java.util.List;
import java.util.Map;

/**
 * @author qidaijie
 * @Package PACKAGE_NAME
 * @Description: Per-type check/convert helpers used by typeTransform.
 * @date 2021/7/12 17:34
 */
public class JsonTypeUtil {

    /**
     * String check/convert: maps and lists are serialized to JSON text.
     *
     * @param value json value
     * @return String value
     */
    static String checkString(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof Map) {
            return JsonMapper.toJsonString(value);
        }

        if (value instanceof List) {
            return JsonMapper.toJsonString(value);
        }

        return value.toString();
    }

    /**
     * Map check/convert.
     *
     * @param value json value
     * @return Map value
     */
    static Map checkObject(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof Map) {
            return (Map) value;
        }

        throw new FlowWriteException("can not cast to map, value : " + value);
    }

    /**
     * Array check/convert.
     *
     * @param value json value
     * @return List value
     */
    static List checkArray(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof List) {
            return (List) value;
        }

        throw new FlowWriteException("can not cast to List, value : " + value);
    }

    /**
     * long check/convert; returns the base value 0 when null.
     *
     * @param value json value
     * @return long value
     */
    static long checkLongValue(Object value) {
        Long longVal = TypeUtils.castToLong(value);

        if (longVal == null) {
            return 0L;
        }

        return longVal;
    }

    /**
     * Double check/convert.
     *
     * @param value json value
     * @return Double value
     */
    static Double checkDouble(Object value) {
        if (value == null) {
            return null;
        }

        return TypeUtils.castToDouble(value);
    }

    /**
     * int check/convert; returns the base value 0 when null.
     *
     * @param value json value
     * @return int value
     */
    static int getIntValue(Object value) {
        Integer intVal = TypeUtils.castToInt(value);
        if (intVal == null) {
            return 0;
        }
        return intVal;
    }

}
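A few concrete conversions implied by the checks above; the methods are package-private, so this sketch runs from within com.zdjizhi.utils.json, and the comma-splitting assumes FORMAT_SPLITTER is "," as the inline comments suggest:

String a = JsonTypeUtil.checkString(java.util.Collections.singletonMap("k", 1)); // "{\"k\":1}" via JsonMapper
long b = JsonTypeUtil.checkLongValue(null);     // 0L, the documented base value
long c = JsonTypeUtil.checkLongValue("10,20");  // 10L: TypeUtils keeps the first comma token
int d = JsonTypeUtil.getIntValue(true);         // 1: castToInt maps Boolean to 1/0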
@@ -1,171 +0,0 @@
package com.zdjizhi.utils.json;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.exception.FlowWriteException;

/**
 * @author qidaijie
 * @Package PACKAGE_NAME
 * @Description: Type-cast helpers shared by the json utilities.
 * @date 2021/7/12 18:20
 */
public class TypeUtils {
    private static final Log logger = LogFactory.get();

    /**
     * Normalize an IF-function result: strings stay strings, integral numbers keep
     * their int/long width, booleans become 1/0.
     *
     * @param value json value
     * @return normalized value or null
     */
    public static Object castToIfFunction(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof String) {
            return value.toString();
        }

        if (value instanceof Integer) {
            return ((Number) value).intValue();
        }

        if (value instanceof Long) {
            return ((Number) value).longValue();
        }

        // if (value instanceof Map) {
        //     return (Map) value;
        // }
        //
        // if (value instanceof List) {
        //     return Collections.singletonList(value.toString());
        // }

        if (value instanceof Boolean) {
            return (Boolean) value ? 1 : 0;
        }

        throw new FlowWriteException("can not cast to int, value : " + value);
    }

    /**
     * Integer cast.
     *
     * @param value json value
     * @return Integer value or null
     */
    static Integer castToInt(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof Integer) {
            return (Integer) value;
        }

        if (value instanceof Number) {
            return ((Number) value).intValue();
        }

        if (value instanceof String) {
            String strVal = (String) value;
            if (StringUtil.isBlank(strVal)) {
                return null;
            }

            // turn values like "10,20" into "10"
            if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
                strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
            }

            try {
                return Integer.parseInt(strVal);
            } catch (NumberFormatException ex) {
                logger.error("String change Integer Error,The error Str is:" + strVal);
            }
        }

        if (value instanceof Boolean) {
            return (Boolean) value ? 1 : 0;
        }

        throw new FlowWriteException("can not cast to int, value : " + value);
    }

    /**
     * Double cast.
     *
     * @param value json value
     * @return double value or null
     */
    static Double castToDouble(Object value) {
        if (value instanceof Number) {
            return ((Number) value).doubleValue();
        }

        if (value instanceof String) {
            String strVal = (String) value;

            if (StringUtil.isBlank(strVal)) {
                return null;
            }

            // turn values like "10,20" into "10"
            if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
                strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
            }

            try {
                return Double.parseDouble(strVal);
            } catch (NumberFormatException ex) {
                logger.error("String change Double Error,The error Str is:" + strVal);
            }
        }

        throw new FlowWriteException("can not cast to double, value : " + value);
    }

    /**
     * Long cast.
     *
     * @param value json value
     * @return (Long) value or null
     */
    static Long castToLong(Object value) {
        if (value == null) {
            return null;
        }

        if (value instanceof Number) {
            return ((Number) value).longValue();
        }

        if (value instanceof String) {
            String strVal = (String) value;

            if (StringUtil.isBlank(strVal)) {
                return null;
            }

            // turn values like "10,20" into "10"
            if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
                strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
            }

            try {
                return Long.parseLong(strVal);
            } catch (NumberFormatException ex) {
                logger.error("String change Long Error,The error Str is:" + strVal);
            }
        }

        throw new FlowWriteException("can not cast to long, value : " + value);
    }

}
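Edge cases implied by the branches above, again assuming FORMAT_SPLITTER is a comma; castToInt and castToLong are package-private, so this runs from the same package:

Integer i = TypeUtils.castToInt("10,20");  // 10: only the first comma-separated token is parsed
Long l = TypeUtils.castToLong("");         // null: blank strings short-circuit before parsing
// Note the asymmetry: castToDouble has no explicit null branch, so unlike the other
// two casts a null input falls through and throws FlowWriteException.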
@@ -1,82 +0,0 @@
package com.zdjizhi.utils.kafka;

import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Optional;
import java.util.Properties;

/**
 * @author qidaijie
 * @Package com.zdjizhi.utils.kafka
 * @Description:
 * @date 2021/6/8 14:04
 */
public class KafkaProducer {

    private static Properties createPercentProducerConfig() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", FlowWriteConfig.PERCENT_SINK_KAFKA_SERVERS);
        properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
        properties.put("retries", FlowWriteConfig.RETRIES);
        properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
        properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
        properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
        properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
        properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
        properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);

        CertUtils.chooseCert(FlowWriteConfig.PERCENT_SINK_KAFKA_SERVERS, properties);

        return properties;
    }

    private static Properties createTrafficFileMetaProducerConfig() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", FlowWriteConfig.FILE_DATA_SINK_KAFKA_SERVERS);
        properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
        properties.put("retries", FlowWriteConfig.RETRIES);
        properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
        properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
        properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
        properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
        properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
        properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);

        CertUtils.chooseCert(FlowWriteConfig.FILE_DATA_SINK_KAFKA_SERVERS, properties);

        return properties;
    }

    public static FlinkKafkaProducer<String> getPercentKafkaProducer() {
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
                FlowWriteConfig.PERCENT_KAFKA_TOPIC,
                new SimpleStringSchema(),
                createPercentProducerConfig(), Optional.empty());

        kafkaProducer.setLogFailuresOnly(false);
        // kafkaProducer.setWriteTimestampToKafka(true);

        return kafkaProducer;
    }

    public static FlinkKafkaProducer<String> getTrafficFileMetaKafkaProducer() {
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
                FlowWriteConfig.FILE_DATA_SINK_KAFKA_TOPIC,
                new SimpleStringSchema(),
                createTrafficFileMetaProducerConfig(), Optional.empty());

        kafkaProducer.setLogFailuresOnly(false);
        // kafkaProducer.setWriteTimestampToKafka(true);

        return kafkaProducer;
    }

}
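A minimal wiring sketch for these factories; the DataStream<String> sources are assumptions, not shown in this deleted file:

    // Hypothetical Flink job wiring; percentStream and fileMetaStream are assumed streams.
    // FlinkKafkaProducer<String> is a SinkFunction<String>, so it attaches directly.
    percentStream.addSink(KafkaProducer.getPercentKafkaProducer());
    fileMetaStream.addSink(KafkaProducer.getTrafficFileMetaKafkaProducer());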
@@ -1,48 +0,0 @@
package com.zdjizhi.utils.kafka;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;

import java.util.Map;

/**
 * @author qidaijie
 * @Package com.zdjizhi.utils.kafka
 * @Description:
 * @date 2022/3/8 9:42
 */
public class TimestampDeserializationSchema implements KafkaDeserializationSchema {
    private static final Log logger = LogFactory.get();

    @Override
    public TypeInformation getProducedType() {
        return TypeInformation.of(Map.class);
    }

    @Override
    public boolean isEndOfStream(Object nextElement) {
        return false;
    }

    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> deserialize(ConsumerRecord record) throws Exception {
        if (record != null) {
            try {
                // The Kafka record timestamp is in milliseconds; convert it to seconds
                long timestamp = record.timestamp() / 1000;
                String value = new String((byte[]) record.value(), FlowWriteConfig.ENCODING);
                Map<String, Object> json = (Map<String, Object>) JsonMapper.fromJsonString(value, Map.class);
                json.put("common_ingestion_time", timestamp);
                return json;
            } catch (RuntimeException e) {
                logger.error("KafkaConsumer deserialize failed, the exception is: " + e.getMessage());
            }
        }
        return null;
    }
}
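For context, a consumer built around this schema might look like the following sketch; the topic name and consumerProperties are assumptions, and the raw KafkaDeserializationSchema makes this an unchecked conversion:

    // Hypothetical wiring; "session_record" and consumerProperties are assumptions.
    FlinkKafkaConsumer<Map<String, Object>> consumer =
            new FlinkKafkaConsumer<Map<String, Object>>(
                    "session_record",
                    new TimestampDeserializationSchema(),
                    consumerProperties);
    DataStream<Map<String, Object>> source = env.addSource(consumer);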
@@ -1,84 +0,0 @@
package com.zdjizhi.utils.system;

import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;

import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;

/**
 * @author Administrator
 */
public final class FlowWriteConfigurations {

    private static Properties propKafka = new Properties();
    private static Properties propService = new Properties();
    private static Properties propFileType = new Properties();
    private static Map<String, String> fileTypeMap;

    public static boolean judgeFileType(String fileType) {
        return fileTypeMap.containsKey(fileType);
    }

    public static String getStringProperty(Integer type, String key) {
        if (type == 0) {
            return propService.getProperty(key);
        } else if (type == 1) {
            return propKafka.getProperty(key);
        } else {
            return null;
        }
    }

    public static Integer getIntProperty(Integer type, String key) {
        if (type == 0) {
            return Integer.parseInt(propService.getProperty(key));
        } else if (type == 1) {
            return Integer.parseInt(propKafka.getProperty(key));
        } else {
            return null;
        }
    }

    public static Long getLongProperty(Integer type, String key) {
        if (type == 0) {
            return Long.parseLong(propService.getProperty(key));
        } else if (type == 1) {
            return Long.parseLong(propKafka.getProperty(key));
        } else {
            return null;
        }
    }

    public static Boolean getBooleanProperty(Integer type, String key) {
        // Lower-case before comparing with "true"; the original upper-cased the value,
        // which could never match the lower-case literal
        if (type == 0) {
            return StringUtil.equals(propService.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
        } else if (type == 1) {
            return StringUtil.equals(propKafka.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
        } else {
            return null;
        }
    }

    static {
        try {
            propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
            propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("default_config.properties"));
            propFileType.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("file_type.properties"));
            fileTypeMap = new HashMap<String, String>((Map) propFileType);
        } catch (IOException | RuntimeException e) {
            propKafka = null;
            propService = null;
        }
    }
}
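A lookup sketch under stated assumptions; the keys below are illustrative, not keys defined in this repo. Type 0 resolves against service_flow_config.properties, type 1 against default_config.properties:

    // Hypothetical keys for illustration only
    String servers  = FlowWriteConfigurations.getStringProperty(1, "bootstrap.servers");
    Integer retries = FlowWriteConfigurations.getIntProperty(1, "retries");
    Boolean enabled = FlowWriteConfigurations.getBooleanProperty(0, "sink.enabled");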
@@ -1,8 +1,8 @@
 #Log4j
-log4j.rootLogger=info,console,file
+log4j.rootLogger=warn,console,file
 # Console log settings
 log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.Threshold=info
+log4j.appender.console.Threshold=warn
 log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
@@ -18,8 +18,8 @@ log4j.appender.file.layout=org.apache.log4j.PatternLayout
 #log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
 log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
 # MyBatis configuration; com.nis.web.dao is the package containing the MyBatis mapper interfaces
-log4j.logger.com.nis.web.dao=debug
+log4j.logger.com.nis.web.dao=info
 # BoneCP data source configuration
-log4j.category.com.jolbox=debug,console
+log4j.category.com.jolbox=info,console
@@ -1,42 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

    <!-- Output pattern: %date is the date, %thread the thread name, %-5level the level padded to 5 characters, %msg the log message, %n a newline -->
    <property name="LOG_PATTERN" value="%date{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" />
    <!-- Log file storage path; do not use a relative path -->
    <property name="LOG_FILE_PATH" value="E:/logs/demo.%d{yyyy-MM-dd}.%i.log" />

    <!-- Console appender -->
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <!-- Print logs using the LOG_PATTERN configured above -->
            <pattern>${LOG_PATTERN}</pattern>
        </encoder>
    </appender>

    <!-- Roll one log file per day and keep 30 days of history; rollingFile splits the files -->
    <appender name="FILE"
              class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${LOG_FILE_PATH}</fileNamePattern>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- Maximum size of a single log file -->
                <maxFileSize>20MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
        </rollingPolicy>

        <encoder>
            <pattern>${LOG_PATTERN}</pattern>
        </encoder>
    </appender>
    <!-- Project default log level -->
    <logger name="com.example.demo" level="DEBUG" />

    <!-- Root log level; common levels from high to low: ERROR, WARN, INFO, DEBUG -->
    <root level="INFO">
        <appender-ref ref="CONSOLE" />
        <appender-ref ref="FILE" /><!-- refers to appender name="FILE" -->
    </root>
</configuration>
75
src/test/java/com/zdjizhi/function/Base64Test.java
Normal file
@@ -0,0 +1,75 @@
package com.zdjizhi.function;

import cn.hutool.core.codec.Base64;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.junit.Test;

import java.io.UnsupportedEncodingException;

/**
 * @author qidaijie
 * @Package com.zdjizhi.function
 * @Description:
 * @date 2022/11/3 9:36
 */
public class Base64Test {
    private static final Log logger = LogFactory.get();

    /**
     * Decode Base64 strings with hutool.
     */
    @Test
    public void decodeBase64Hutool() {
        try {
            System.out.println(Base64.decodeStr("bWFpbF90ZXN0X2VuZ2xpc2gudHh0"));
            System.out.println(Base64.decodeStr("aGVsbG8="));
        } catch (RuntimeException e) {
            logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * URL-safe Base64 encoding of the same string under different charsets.
     */
    @Test
    public void encodeBase64() {
        try {
            System.out.println(java.util.Base64.getUrlEncoder().encodeToString("runoob?java8".getBytes("ISO-8859-1")));
            System.out.println(java.util.Base64.getUrlEncoder().encodeToString("runoob?java8".getBytes("utf-8")));

        } catch (RuntimeException e) {
            logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }

    /**
     * Decode Base64 with java.util.Base64.
     */
    @Test
    public void decodeBase64() {
        try {
            byte[] base64decodedBytes = java.util.Base64.getDecoder().decode("bWFpbF90ZXN0X2VuZ2xpc2gudHh0");

            System.out.println("Original string: " + new String(base64decodedBytes, "utf-8"));
            System.out.println("Original string: " + new String(base64decodedBytes));

        } catch (RuntimeException e) {
            logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
            e.printStackTrace();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
    }
}
53
src/test/java/com/zdjizhi/function/EncryptorTest.java
Normal file
@@ -0,0 +1,53 @@
package com.zdjizhi.function;

import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.junit.Test;

/**
 * @author qidaijie
 * @Package com.zdjizhi
 * @Description:
 * @date 2022/3/16 10:55
 */
public class EncryptorTest {

    @Test
    public void passwordTest() {
        StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
        // Configure the password (salt) used for encryption and decryption
        encryptor.setPassword("galaxy");
        // Encrypt the raw passwords, e.g. "raw_password" -> S5kR+Y7CI8k7MaecZpde25yK8NKUnd6p
        String kafkaUser = encryptor.encrypt("admin");
        String kafkaPin = encryptor.encrypt("galaxy2019");
        String nacosPin = encryptor.encrypt("nacos");
        String nacosUser = encryptor.encrypt("nacos");

        System.out.println("Kafka:\n" + "The username is: " + kafkaUser);
        System.out.println("The pin is: " + kafkaPin);
        System.out.println("Nacos:\n" + "The username is: " + nacosUser);
        System.out.println("The pin is: " + nacosPin);
        // Decrypt them back to the raw passwords
        System.out.println("Kafka:\n" + "The username is: " + encryptor.decrypt(kafkaUser));
        System.out.println("The pin is: " + encryptor.decrypt(kafkaPin));

        System.out.println("Nacos:\n" + "The username is: " + encryptor.decrypt(nacosUser));
        System.out.println("The pin is: " + encryptor.decrypt(nacosPin));
        System.out.println("------------------------------------------------------");

        System.out.println("The vknRT6U4I739rLIha9CvojM+4uFyXZLEYpO2HZayLnRak1HPW0K2yZ3vnQBA2foo decryption result is: " + encryptor.decrypt("vknRT6U4I739rLIha9CvojM+4uFyXZLEYpO2HZayLnRak1HPW0K2yZ3vnQBA2foo"));
        System.out.println("The SU05WiYTLLrF+cVZ410gJdrfEgxgnYG9RGsI+3dZZq54XFLOXEzOvRuMAvLlBf4k decryption result is: " + encryptor.decrypt("SU05WiYTLLrF+cVZ410gJdrfEgxgnYG9RGsI+3dZZq54XFLOXEzOvRuMAvLlBf4k"));
        System.out.println("The qUA355VopKSx6kwwwXZwqWWEYSu76Slz decryption result is: " + encryptor.decrypt("qUA355VopKSx6kwwwXZwqWWEYSu76Slz"));
        System.out.println("The sJ9iiTeE/7moI2hKn8asMg== decryption result is: " + encryptor.decrypt("sJ9iiTeE/7moI2hKn8asMg=="));
        System.out.println("The 63aTpwv2vH0vPikW+3Jjig== decryption result is: " + encryptor.decrypt("63aTpwv2vH0vPikW+3Jjig=="));
        System.out.println("The Ei1P4R1e5KTdJR+ZVnBmug== decryption result is: " + encryptor.decrypt("Ei1P4R1e5KTdJR+ZVnBmug=="));
        System.out.println("The iW8ekP1SZC6v/7cfJKAqXXrjApJox+cH decryption result is: " + encryptor.decrypt("iW8ekP1SZC6v/7cfJKAqXXrjApJox+cH"));
        System.out.println("The TV7Jm4dQCE/LJznp4iTm4ICkBscquv9G decryption result is: " + encryptor.decrypt("TV7Jm4dQCE/LJznp4iTm4ICkBscquv9G"));
        System.out.println("The LDEb2OekU7iZWiFw6pUYBSozVKP27r1y decryption result is: " + encryptor.decrypt("LDEb2OekU7iZWiFw6pUYBSozVKP27r1y"));

        StandardPBEStringEncryptor encryptorCM = new StandardPBEStringEncryptor();
        encryptorCM.setPassword("bifang-api");
        System.out.println("The Zp65tFN3CsAXwpNfgfjZ0PrquSrokM1c decryption result is: " + encryptorCM.decrypt("Zp65tFN3CsAXwpNfgfjZ0PrquSrokM1c"));
    }

}
47
src/test/java/com/zdjizhi/function/GtpcTest.java
Normal file
@@ -0,0 +1,47 @@
package com.zdjizhi.function;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.hbase.HBaseUtils;
import com.zdjizhi.tools.json.JsonPathUtil;
import org.junit.Test;

import java.util.HashMap;

public class GtpcTest {
    private static final Log logger = LogFactory.get();

    @Test
    public void gtpcMatch() {
        String param = "$.[?(@.tunnels_schema_type=='GTP')].gtp_endpoint_a2b_teid,$.[?(@.tunnels_schema_type=='GTP')].gtp_endpoint_b2a_teid";
        String logValue = "[{\"tunnels_schema_type\":\"GTP\",\"gtp_endpoint_a2b_teid\":4129335432,\"gtp_endpoint_b2a_teid\":4129335434,\"gtp_sgw_ip\":\"120.36.3.97\",\"gtp_pgw_ip\":\"43.224.53.100\",\"gtp_sgw_port\":2152,\"gtp_pgw_port\":51454},{\"tunnels_schema_type\":\"ETHERNET\",\"source_mac\":\"80:69:33:ea:a5:57\",\"destination_mac\":\"14:09:dc:df:a3:40\"}]";
        String appendToKey = "common_imsi,common_imei,common_phone_number";

        try {
            String teid = null;
            String[] exprs = param.split(FlowWriteConfig.FORMAT_SPLITTER);
            for (String expr : exprs) {
                // Check the JsonPath result for null before calling toString()
                Object value = JsonPathUtil.analysis(logValue, expr);
                if (value != null) {
                    teid = value.toString();
                    break;
                }
            }
            System.out.println(teid);
            if (teid != null) {
                String[] appendToKeys = appendToKey.split(FlowWriteConfig.FORMAT_SPLITTER);
                HashMap<String, Object> userData = HBaseUtils.getGtpData(teid);
                if (userData != null) {
                    for (String key : appendToKeys) {
                        System.out.println(userData.get(key).toString());
                    }
                } else {
                    logger.warn("No user matched for TEID " + teid + "!");
                }
            }
        } catch (RuntimeException re) {
            logger.error("An exception occurred while converting the TEID type or parsing user information!" + re);
        }
    }
}
239
src/test/java/com/zdjizhi/function/HBaseTest.java
Normal file
@@ -0,0 +1,239 @@
package com.zdjizhi.function;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @author qidaijie
 * @Package com.zdjizhi
 * @Description:
 * @date 2021/12/3 10:42
 */
public class HBaseTest {
    private static final Log logger = LogFactory.get();
    private static Map<String, String> radiusMap = new ConcurrentHashMap<>(16);

    private static Map<String, HashMap<String, Object>> gtpcMap = new ConcurrentHashMap<>(16);

    @Test
    public void getColumn() {
        // HBase client configuration
        Configuration configuration = HBaseConfiguration.create();
        // Set the ZooKeeper quorum
        configuration.set("hbase.zookeeper.quorum", "192.168.44.12:2181");
        configuration.set("hbase.client.retries.number", "1");
        configuration.set("hbase.client.pause", "50");
        configuration.set("hbase.rpc.timeout", "3000");
        configuration.set("zookeeper.recovery.retry", "1");
        configuration.set("zookeeper.recovery.retry.intervalmill", "200");
        try {
            System.out.println(System.currentTimeMillis());
            Connection connection = ConnectionFactory.createConnection(configuration);
            Table table = connection.getTable(TableName.valueOf("tsg_galaxy:relation_framedip_account"));
            Scan scan2 = new Scan();
            ResultScanner scanner = table.getScanner(scan2);
            for (Result result : scanner) {
                int acctStatusType;
                boolean hasType = result.containsColumn(Bytes.toBytes("radius"), Bytes.toBytes("acct_status_type"));
                if (hasType) {
                    acctStatusType = Bytes.toInt(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("acct_status_type")));
                } else {
                    acctStatusType = 3;
                }
                String framedIp = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("framed_ip")));
                String account = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("account")));
                System.out.println("status: " + acctStatusType + ", key: " + framedIp + ", value: " + account);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            System.out.println(System.currentTimeMillis());
        }
    }

    @Test
    public void getGtpcData() {
        // HBase client configuration
        Configuration configuration = HBaseConfiguration.create();
        // Set the ZooKeeper quorum
        configuration.set("hbase.zookeeper.quorum", "192.168.44.12:2181");
        configuration.set("hbase.client.retries.number", "1");
        configuration.set("hbase.client.pause", "50");
        configuration.set("hbase.rpc.timeout", "3000");
        configuration.set("zookeeper.recovery.retry", "1");
        configuration.set("zookeeper.recovery.retry.intervalmill", "200");
        long begin = System.currentTimeMillis();
        ResultScanner scanner = null;
        try {
            Connection connection = ConnectionFactory.createConnection(configuration);
            Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
            Scan scan2 = new Scan();
            scanner = table.getScanner(scan2);
            for (Result result : scanner) {
                String upLinkTeid = getTeid(result, "uplink_teid");
                String downLinkTeid = getTeid(result, "downlink_teid");
                String phoneNumber = getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
                String imsi = getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
                String imei = getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
                Long lastUpdateTime = getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");

                HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);

                if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
                    String vsysId = getVsysId(result).trim();
                    updateCache(gtpcMap, upLinkTeid + vsysId, buildUserData, lastUpdateTime);
                    updateCache(gtpcMap, downLinkTeid + vsysId, buildUserData, lastUpdateTime);
                } else {
                    updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
                    updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
                }
            }
            logger.warn("Number of GTP-C relationships obtained: " + gtpcMap.size());
            logger.warn("Time spent obtaining GTP-C relationships (ms): " + (System.currentTimeMillis() - begin));
        } catch (IOException | RuntimeException e) {
            logger.error("Failed to read the USER/TEID relationships from HBase! The message is: " + e);
            e.printStackTrace();
        } finally {
            if (scanner != null) {
                scanner.close();
            }
        }

        for (String key : gtpcMap.keySet()) {
            System.out.println(key + "---" + gtpcMap.get(key));
        }
    }

    /**
     * Get a String value from HBase.
     *
     * @param result     result set
     * @param familyName column family name
     * @param columnName column name
     * @return the value, or "" if absent
     */
    private static String getString(Result result, String familyName, String columnName) {
        byte[] familyBytes = Bytes.toBytes(familyName);
        byte[] columnBytes = Bytes.toBytes(columnName);
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            String data = Bytes.toString(result.getValue(familyBytes, columnBytes)).trim();
            if (StringUtil.isNotBlank(data)) {
                return data;
            }
        }

        return "";
    }

    /**
     * Get a Long value from HBase.
     *
     * @param result     result set
     * @param familyName column family name
     * @param columnName column name
     * @return the value, or 0L if absent
     */
    private static Long getLong(Result result, String familyName, String columnName) {
        byte[] familyBytes = Bytes.toBytes(familyName);
        byte[] columnBytes = Bytes.toBytes(columnName);
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            return Bytes.toLong(result.getValue(familyBytes, columnBytes));
        }
        return 0L;
    }

    /**
     * Get a TEID from HBase as a string.
     *
     * @param result     result set
     * @param columnName column name
     * @return the TEID, or "0" if absent
     */
    private static String getTeid(Result result, String columnName) {
        byte[] familyBytes = Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME);
        byte[] columnBytes = Bytes.toBytes(columnName);
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            String data = String.valueOf(Bytes.toLong(result.getValue(familyBytes, columnBytes))).trim();
            if (StringUtil.isNotBlank(data)) {
                return data;
            }
        }
        return "0";
    }

    /**
     * Build the user info map.
     *
     * @param phoneNumber    phone number
     * @param imsi           subscriber identity
     * @param imei           device identity
     * @param lastUpdateTime time the record was last updated
     * @return user info
     */
    private static HashMap<String, Object> buildUserData(String phoneNumber, String imsi, String imei, Long lastUpdateTime) {
        HashMap<String, Object> tmpMap = new HashMap<>(4);
        tmpMap.put("common_phone_number", phoneNumber);
        tmpMap.put("common_imsi", imsi);
        tmpMap.put("common_imei", imei);
        tmpMap.put("last_update_time", lastUpdateTime);
        return tmpMap;
    }

    /**
     * Get the vsys_id from HBase.
     *
     * @param result result set
     * @return the vsys_id, or "1" if absent
     */
    static String getVsysId(Result result) {
        byte[] familyBytes = Bytes.toBytes("common");
        byte[] columnBytes = Bytes.toBytes("vsys_id");
        boolean contains = result.containsColumn(familyBytes, columnBytes);
        if (contains) {
            String data = String.valueOf(Bytes.toInt(result.getValue(familyBytes, columnBytes))).trim();
            if (StringUtil.isNotBlank(data)) {
                return data;
            }
        }
        return "1";
    }

    /**
     * Compare the new record's timestamp with the cached one and
     * overwrite the cache entry only if the new record is more recent.
     *
     * @param gtpcMap        cache map
     * @param key            uplink/downlink TEID
     * @param userData       user info fetched from HBase
     * @param lastUpdateTime time the user info was last updated
     */
    private static void updateCache(Map<String, HashMap<String, Object>> gtpcMap, String key, HashMap<String, Object> userData, Long lastUpdateTime) {
        if (StringUtil.isNotBlank(key)) {
            if (gtpcMap.containsKey(key)) {
                Long oldUpdateTime = Long.parseLong(gtpcMap.get(key).get("last_update_time").toString());
                if (lastUpdateTime > oldUpdateTime) {
                    gtpcMap.put(key, userData);
                }
            } else {
                gtpcMap.put(key, userData);
            }
        }
    }

}
165
src/test/java/com/zdjizhi/function/IpLookupTest.java
Normal file
@@ -0,0 +1,165 @@
package com.zdjizhi.function;

import cn.hutool.core.io.IoUtil;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.GalaxyDataBaseReader;
import com.geedgenetworks.utils.IpLookupV2;
import com.maxmind.db.CHMCache;
import com.maxmind.db.Reader;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.http.HttpClientService;
import org.apache.commons.io.IOUtils;
import org.junit.Test;

import java.io.*;
import java.net.InetAddress;
import java.util.Map;

/**
 * @author qidaijie
 * @Package com.zdjizhi
 * @Description:
 * @date 2021/11/6 11:38
 */
public class IpLookupTest {
    private static final Log logger = LogFactory.get();
    private static final String DAT_FILEPATH = "D:\\workerspace\\dat\\";

    @Test
    public void ipLookupDatabaseTest() {

        IpLookupV2.Builder ipLookupBuilder = new IpLookupV2.Builder(false);

        ipLookupBuilder.loadDataFileV4(DAT_FILEPATH + "ip_v4_built_in.mmdb");
        // ipLookupBuilder.loadDataFileV6(DAT_FILEPATH + "ip_v6_built_in.mmdb");
        // ipLookupBuilder.loadDataFilePrivateV4(DAT_FILEPATH + "ip_v4_user_defined.mmdb");
        // ipLookupBuilder.loadDataFilePrivateV6(DAT_FILEPATH + "ip_v6_user_defined.mmdb");
        // ipLookupBuilder.loadAsnDataFile(DAT_FILEPATH + "asn_v4.mmdb");
        // ipLookupBuilder.loadAsnDataFileV6(DAT_FILEPATH + "asn_v6.mmdb");
        IpLookupV2 ipLookup = ipLookupBuilder.build();

        // String ip = "23.200.74.224";
        String ip = "121.14.89.209";

        try {
            System.out.println(ipLookup.cityLookup(ip));
            System.out.println(ipLookup.cityLookupDetail(ip));
            System.out.println(ipLookup.cityLatLngLookup(ip));
            System.out.println(ipLookup.provinceLookup(ip));
            System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
            System.out.println(ipLookup.countryLookup(ip));
            System.out.println(ipLookup.locationLookupDetail(ip));
            System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
            System.out.println(ipLookup.infoLookupToJSONString(ip));

            // ASN
            System.out.println(ipLookup.asnLookup(ip));
            System.out.println(ipLookup.asnLookupInfo(ip));
            System.out.println(ipLookup.asnLookupDetail(ip));
            System.out.println(ipLookup.asnLookupOrganization(ip));

        } catch (NullPointerException npe) {
            logger.error("The MMDB file is not loaded or IP is null! " + npe.getMessage());
        } catch (RuntimeException e) {
            logger.error("Get clientIP location error! " + e.getMessage());
        }
    }

    @Test
    public void ipLookupForBytesTest() {
        InputStream inputStream = null;
        try {
            String url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/d2ab3313-1941-4847-84fa-5dbbd8a9007f-aXBfdjRfYnVpbHRfaW4=.mmdb";
            HttpClientService httpClientService = new HttpClientService();
            inputStream = httpClientService.httpGetInputStream(url, 3000);
            Reader reader = new Reader(inputStream, new CHMCache());
            InetAddress ipAddress = InetAddress.getByName("121.14.89.209");
            Map map = reader.get(ipAddress, Map.class);
            System.out.println(map.toString());
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IoUtil.close(inputStream);
        }
    }

    @Test
    public void ipLookupInputStreamTest() {
        InputStream asnInputStream = null;
        InputStream ipv4InputStream = null;
        String ip = "114.64.231.114";
        try {
            HttpClientService httpClientService = new HttpClientService();
            String asnUrl = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/1b96764c-59dd-4d6b-8edb-623705f708a5-YXNuX3Y0.mmdb";
            String ipv4Url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/24cb6a74-f048-4672-988f-112858427a3b-aXBfdjRfYnVpbHRfaW4=.mmdb";
            asnInputStream = httpClientService.httpGetInputStream(asnUrl, 3000);
            ipv4InputStream = httpClientService.httpGetInputStream(ipv4Url, 3000);

            IpLookupV2 ipLookup = new IpLookupV2.Builder(false)
                    .loadDataFileV4(ipv4InputStream)
                    .loadAsnDataFileV4(asnInputStream)
                    .build();

            System.out.println(ipLookup.cityLookup(ip));
            System.out.println(ipLookup.cityLookupDetail(ip));
            System.out.println(ipLookup.cityLatLngLookup(ip));
            System.out.println(ipLookup.provinceLookup(ip));
            System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
            System.out.println(ipLookup.countryLookup(ip));
            System.out.println(ipLookup.locationLookupDetail(ip));
            System.out.println(ipLookup.asnLookup(ip));
            System.out.println(ipLookup.administrativeAreaLookupDetail(ip));

        } catch (NullPointerException npe) {
            logger.error("The MMDB file is not loaded or IP is null! " + npe.getMessage());
        } catch (RuntimeException e) {
            logger.error("Get clientIP location error! " + e.getMessage());
        } finally {
            IoUtil.close(asnInputStream);
            IoUtil.close(ipv4InputStream);
        }
    }

    @Test
    public void inputLookupTest() {
        // String ip = "121.14.89.209";

        byte[] localFile = new FileReader(DAT_FILEPATH + "built_in_ip_location.mmdb").readBytes();
        String localFileSha256 = DigestUtil.sha256Hex(localFile);
        System.out.println("Local file SHA256: " + localFileSha256);

        // IpLookupV2 ipLookup = new IpLookupV2.Builder(false)
        //         .loadDataFileV4(DAT_FILEPATH + "built_in_ip_location.mmdb").build();
        // System.out.println(ipLookup.infoLookup(ip));

        try {
            HttpClientService httpClientService = new HttpClientService();
            InputStream inputStream = httpClientService.httpGetInputStream("http://192.168.44.55:9098/hos/knowledge_base_hos_bucket/9b1ce6b4-024d-4343-80d5-6e6dc0ad0863-aXA0.mmdb", FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
            byte[] bytes = IOUtils.toByteArray(inputStream);
            String downloadFileSha256 = DigestUtil.sha256Hex(bytes);
            InputStream byteArrayInputStream = new ByteArrayInputStream(bytes);

            System.out.println("HOS downloaded file 2 SHA256: " + downloadFileSha256);
            System.out.println("HOS downloaded file 2 size: " + bytes.length);

            // IpLookupV2 ipLookup2 = new IpLookupV2.Builder(false)
            //         .loadDataFileV4(byteArrayInputStream).build();
            new GalaxyDataBaseReader.Builder(byteArrayInputStream).withCache(new CHMCache()).build();
            InputStream inputStream1 = httpClientService.httpGetInputStream("http://192.168.44.55:9098/hos/knowledge_base_hos_bucket/9b1ce6b4-024d-4343-80d5-6e6dc0ad0863-aXA0.mmdb", FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
            new GalaxyDataBaseReader.Builder(inputStream1).withCache(new CHMCache()).build();
            // System.out.println(ipLookup2.infoLookup(ip));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
89
src/test/java/com/zdjizhi/function/TimestampTest.java
Normal file
@@ -0,0 +1,89 @@
package com.zdjizhi.function;

import org.junit.Test;

import java.time.*;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.concurrent.TimeUnit;

public class TimestampTest {

    @Test
    public void timestampToDate() {
        System.out.println(getLocalDateTime(1693905281L, 0, "Asia/Shanghai"));
        System.out.println(getLocalDateTime(1693905281048L, 3, "Asia/Shanghai"));
        System.out.println(getLocalDateTime(1693905281048L, 6, "Asia/Shanghai"));
        System.out.println(getLocalDateTime(1693905281048L, 9, "UTC+03:00"));
        System.out.println(getZoneDateTime(1693905281L, 3, "Asia/Shanghai"));
        System.out.println(getZoneDateTime(1693905281048L, 6, "Asia/Shanghai"));
    }

    @Test
    public void timestampConversion() {
        long nanosTimestamp = 1630988475000000000L; // nanosecond timestamp
        System.out.println("Nanosecond timestamp: " + timestampToSeconds(nanosTimestamp));

        long microsTimestamp = 1630988475000000L; // microsecond timestamp
        System.out.println("Microsecond timestamp: " + timestampToSeconds(microsTimestamp));

        long millisTimestamp = 1693969952127L; // millisecond timestamp
        System.out.println("Millisecond timestamp: " + timestampToSeconds(millisTimestamp));

        long errorTimestamp = 169396995L; // malformed timestamp (unexpected digit count)
        System.out.println("Malformed timestamp: " + timestampToSeconds(errorTimestamp));
    }

    private Long timestampToSeconds(long timestamp) {
        // Infer the unit from the digit count: 13 = ms, 16 = µs, 19 = ns
        int timestampLength = Long.toString(timestamp).length();
        switch (timestampLength) {
            case 13:
                return TimeUnit.MILLISECONDS.toSeconds(timestamp);
            case 16:
                return TimeUnit.MICROSECONDS.toSeconds(timestamp);
            case 19:
                return TimeUnit.NANOSECONDS.toSeconds(timestamp);
            default:
                // throw new RuntimeException("This timestamp:" + timestamp + " format is not nanosecond, microsecond, millisecond, or second");
                return timestamp;
        }
    }

    private String getLocalDateTime(Long timestamp, int precision, String timeZone) {
        boolean isMillis = String.valueOf(timestamp).length() > 10;
        String timePattern = "yyyy-MM-dd HH:mm:ss";
        if (precision > 0) {
            String s = String.join("", Collections.nCopies(precision, "S"));
            timePattern = String.join(".", timePattern, s);
        }
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(timePattern);

        Instant instant;
        if (isMillis) {
            instant = Instant.ofEpochMilli(timestamp);
        } else {
            instant = Instant.ofEpochSecond(timestamp);
        }
        return LocalDateTime.ofInstant(instant, ZoneId.of(timeZone)).format(formatter);
    }

    private String getZoneDateTime(Long timestamp, int precision, String timeZone) {
        boolean isMillis = String.valueOf(timestamp).length() > 10;
        String timePattern = "yyyy-MM-dd'T'HH:mm:ss.";
        if (precision > 0) {
            String s = String.join("", Collections.nCopies(precision, "S"));
            timePattern = String.join("", timePattern, s, "XXX");
        }
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(timePattern);

        Instant instant;
        if (isMillis) {
            instant = Instant.ofEpochMilli(timestamp);
        } else {
            instant = Instant.ofEpochSecond(timestamp);
        }
        return ZonedDateTime.ofInstant(instant, ZoneId.of(timeZone)).format(formatter);
    }
}
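The digit-count dispatch above amounts to: 13 digits means milliseconds, 16 microseconds, 19 nanoseconds, and anything else (10-digit second-level values included) is returned unchanged. A worked check on the test inputs:

    // 1693969952127L        (13 digits) -> 1693969952
    // 1630988475000000L     (16 digits) -> 1630988475
    // 1630988475000000000L  (19 digits) -> 1630988475
    // 169396995L            (9 digits)  -> falls through the default branch, returned as-is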
39
src/test/java/com/zdjizhi/function/TopDomainTest.java
Normal file
@@ -0,0 +1,39 @@
package com.zdjizhi.function;

import com.google.common.net.InternetDomainName;
import com.geedgenetworks.utils.FormatUtils;
import org.junit.Test;

/**
 * @author qidaijie
 * @Package com.zdjizhi.function
 * @Description:
 * @date 2023/7/24 13:55
 */
public class TopDomainTest {

    @Test
    public void getTopDomainTest() {
        String host = "heartsofsteel-.tumblr.com";
        System.out.println(FormatUtils.getTopPrivateDomain(host));
        host = "heartsofsteel.tumblr.com";
        System.out.println(FormatUtils.getTopPrivateDomain(host));

        // String sni = "www.googleapis.com";
        String sni = "juicebox-.tumblr.com";
        String domain = FormatUtils.getDomain(sni);
        System.out.println(domain);
        System.out.println(FormatUtils.getTopPrivateDomain(sni));

        System.out.println(InternetDomainName.isValid(sni));
        System.out.println(InternetDomainName.isValid(domain));

        InternetDomainName internetDomainName = InternetDomainName.from(domain);
        if (internetDomainName.isUnderPublicSuffix()) {
            System.out.println(internetDomainName.topPrivateDomain().toString());
        }
    }

}
75
src/test/java/com/zdjizhi/hdfs/FileUtilsTest.java
Normal file
@@ -0,0 +1,75 @@
package com.zdjizhi.hdfs;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.connections.hadoop
 * @Description:
 * @date 2022/11/2 17:57
 */
public class FileUtilsTest {
    private static final Log logger = LogFactory.get();

    private static FileSystem fileSystem;

    static {
        Configuration configuration = new Configuration();
        try {
            configuration.set("fs.defaultFS", "hdfs://ns1");
            configuration.set("hadoop.proxyuser.root.hosts", "*");
            configuration.set("hadoop.proxyuser.root.groups", "*");
            configuration.set("ha.zookeeper.quorum", "192.168.44.83:2181,192.168.44.84:2181,192.168.44.85:2181");
            configuration.set("dfs.nameservices", "ns1");
            configuration.set("dfs.ha.namenodes.ns1", "nn1,nn2");
            configuration.set("dfs.namenode.rpc-address.ns1.nn1", "192.168.44.85:9000");
            configuration.set("dfs.namenode.rpc-address.ns1.nn2", "192.168.44.86:9000");
            configuration.set("dfs.client.failover.proxy.provider.ns1", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
            // Create the FileSystem used to connect to HDFS
            fileSystem = FileSystem.get(configuration);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/knowledgebase/test"));
    }

    @Test
    public void create() throws Exception {
        FSDataOutputStream outputStream = fileSystem.create(new Path("/knowledgebase/test/test.txt"));
        outputStream.write("Hello World".getBytes());
        outputStream.flush();
        outputStream.close();
    }

    @Test
    public void cat() throws Exception {
        FSDataInputStream inputStream = fileSystem.open(new Path("/knowledgebase/test/test.txt"));
        IOUtils.copyBytes(inputStream, System.out, 1024);
        inputStream.close();
    }

    @Test
    public void rename() throws Exception {
        fileSystem.rename(new Path("/knowledgebase/test/test.txt"), new Path("/knowledgebase/test/test1.txt"));
    }

    @Test
    public void delete() throws Exception {
        // the boolean enables recursive deletion
        fileSystem.delete(new Path("/knowledgebase/test"), true);
    }
}
100
src/test/java/com/zdjizhi/hos/hosUtilsTest.java
Normal file
@@ -0,0 +1,100 @@
package com.zdjizhi.hos;

import cn.hutool.core.io.IoUtil;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Joiner;
import com.maxmind.db.CHMCache;
import com.maxmind.db.Reader;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.http.HttpClientService;
import com.geedgenetworks.utils.StringUtil;
import org.junit.Test;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.Map;

/**
 * @author qidaijie
 * @Package com.zdjizhi.hos
 * @Description:
 * @date 2022/11/7 13:55
 */
public class hosUtilsTest {

    @Test
    public void downloadToLocalTest() {
        FileOutputStream outputStream = null;
        InputStream inputStream = null;
        try {
            String url = "http://192.168.44.242:9098/hos/knowledge_base_hos_bucket/0773846a-87df-4869-a5b7-815eda384b92-aXBfdjRfYnVpbHRfaW4=.mmdb";

            HttpClientService httpClientService = new HttpClientService();
            inputStream = httpClientService.httpGetInputStream(url, 3000);

            File file = new File(FlowWriteConfig.TOOLS_LIBRARY.concat(File.separator).concat("ip_v4_built_in.mmdb"));
            if (!file.getParentFile().exists()) {
                file.getParentFile().mkdir();
            }
            outputStream = new FileOutputStream(file);
            IoUtil.copy(inputStream, outputStream);
        } catch (IOException | RuntimeException e) {
            e.printStackTrace();
        } finally {
            IoUtil.close(inputStream);
            IoUtil.close(outputStream);
        }
    }

    @Test
    public void locationTest() {
        InputStream inputStream = null;
        try {
            // Header header = new BasicHeader("token", FlowWriteConfig.HOS_TOKEN);
            String url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/ac0ef83d-f23a-41a0-9c66-51c4e2b4cbc0-aXBfdjRfYnVpbHRfaW4=.mmdb";
            HttpClientService httpClientService = new HttpClientService();
            inputStream = httpClientService.httpGetInputStream(url, 3000);
            Reader reader = new Reader(inputStream, new CHMCache());
            InetAddress ipAddress = InetAddress.getByName("114.64.231.114");
            Map map = reader.get(ipAddress, Map.class);
            if (map != null) {
                System.out.println(StringUtil.setDefaultIfEmpty(map.toString(), "unknown").toString());
                System.out.println(Joiner.on(".").useForNull("").join(map.get("COUNTRY"),
                        map.get("SUPER_ADMINISTRATIVE_AREA"), map.get("ADMINISTRATIVE_AREA")));
                System.out.println(Joiner.on(".").useForNull("").join(map.get("COUNTRY"),
                        map.get("SUPER_ADMINISTRATIVE_AREA"), map.get("ADMINISTRATIVE_AREA")).replace("\"", ""));
            }
        } catch (IOException | RuntimeException e) {
            e.printStackTrace();
        } finally {
            IoUtil.close(inputStream);
        }
    }

    @Test
    public void asnTest() {
        InputStream inputStream = null;
        try {
            String url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/95b20b83-e6bd-4d28-85d5-3b4e32da9a3f-aXBfdjRfYnVpbHRfaW4=.mmdb";
            HttpClientService httpClientService = new HttpClientService();
            inputStream = httpClientService.httpGetInputStream(url, 3000);
            Reader reader = new Reader(inputStream, new CHMCache());
            InetAddress ipAddress = InetAddress.getByName("23.200.74.224");
            Map map = reader.get(ipAddress, Map.class);
            if (map != null) {
                System.out.println(StringUtil.setDefaultIfEmpty(map.get("ASN"), "unknown").toString());
                System.out.println(StringUtil.setDefaultIfEmpty(map.get("ASN"), "unknown").toString().replace("\"", ""));
                System.out.println(StringUtil.setDefaultIfEmpty(map.toString(), "unknown").toString());
            }
        } catch (IOException | RuntimeException e) {
            e.printStackTrace();
        } finally {
            IoUtil.close(inputStream);
        }
    }

}
98
src/test/java/com/zdjizhi/json/FastJsonTest.java
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
package com.zdjizhi.json;
|
||||||
|
|
||||||
|
import com.alibaba.fastjson2.JSON;
|
||||||
|
import com.alibaba.fastjson2.JSONArray;
|
||||||
|
import com.alibaba.fastjson2.JSONObject;
|
||||||
|
import com.zdjizhi.json.pojo.UserList;
|
||||||
|
import com.zdjizhi.json.pojo.UserMap;
|
||||||
|
import org.junit.Test;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author qidaijie
|
 * @Package com.zdjizhi.json
 * @Description:
 * @date 2023/5/20 14:02
 */
public class FastJsonTest {

    @Test
    public void pojoTest() {
        //all right
        String message = "{\"name\":\"zhangsan\",\"age\":50,\"idcard\":\"140303199999999999\",\"previousaddress\":{\"first\":\"北京\",\"second\":\"上海\"}}";
        //int error (yes)
//        String message = "{\"name\":\"zhangsan\",\"age\":\"123\",\"idCard\":\"140303199999999999\",\"previousaddress\":{\"first\":\"北京\",\"second\":\"上海\"}}";
        //string error (yes)
//        String message = "{\"name\":123,\"age\":123,\"idCard\":\"140303199999999999\",\"previousaddress\":{\"first\":\"北京\",\"second\":\"上海\"}}";
        //json error (no)
//        String message = "{\"name\":\"zhangsan\",\"age\":50,\"idCard\":\"140303199999999999\",\"previousaddress\":\"{\\\"first\\\":\\\"北京\\\",\\\"second\\\":\\\"上海\\\"}\"}";

        UserMap user = JSON.parseObject(message, UserMap.class);
        System.out.println(user.getName());
        System.out.println(user.getDevicetag());
        System.out.println(JSON.toJSONString(user));
    }

    @Test
    public void typeCheckTest() {
        //jsonobject
        String message = "{\"name\":\"zhangsan\",\"age\":50,\"idcard\":\"123456789\",\"devicetag\":\"{\\\"tags\\\":[{\\\"tag\\\":\\\"group\\\",\\\"value\\\":\\\"7400\\\"},{\\\"tag\\\":\\\"center\\\",\\\"value\\\":\\\"7400\\\"}]}\"}";
        JSONObject objectTest = JSONObject.parseObject(message);

        for (Map.Entry<String, Object> entry : objectTest.entrySet()) {
            System.out.println("key:" + entry.getKey() + "————value:" + entry.getValue() + "————class: " + entry.getValue().getClass());
        }
        System.out.println("\nOriginal log: " + objectTest.toString());

        Object previousMap = objectTest.get("devicetag");
        // The nested JSON arrives as an escaped string, so re-parse it into a Map.
        // instanceof is used here: comparing getClass() against Map.class would be
        // true even for a JSONObject and the value would be re-parsed every time.
        if (!(previousMap instanceof Map)) {
            JSONObject previousObject = JSONObject.parseObject(previousMap.toString());
            objectTest.put("devicetag", previousObject);
        }
        System.out.println("Log after converting devicetag to a Map: " + objectTest.toString());

        UserMap userMap = objectTest.toJavaObject(UserMap.class);
        System.out.println(JSON.toJSONString(userMap));

        System.out.println("\n-----------------------------------------------\n");

        //jsonarray
        message = "{\"name\":\"zhangsan\",\"age\":50,\"idcard\":\"123456789\",\"devicetag\":\"[{\\\"tag\\\":\\\"group\\\",\\\"value\\\":\\\"7400\\\"},{\\\"tag\\\":\\\"center\\\",\\\"value\\\":\\\"7400\\\"}]\"}";
        JSONObject arrayTest = JSONObject.parseObject(message);
        for (Map.Entry<String, Object> entry : arrayTest.entrySet()) {
            System.out.println("key:" + entry.getKey() + "————value:" + entry.getValue() + "————class: " + entry.getValue().getClass());
        }
        System.out.println("\nOriginal log: " + arrayTest.toString());

        Object previousList = arrayTest.get("devicetag");
        if (!(previousList instanceof List)) {
            JSONArray previousArray = JSONArray.parseArray(previousList.toString());
            arrayTest.put("devicetag", previousArray);
        }
        System.out.println("Log after converting devicetag to a List: " + arrayTest.toString());

        UserList userList = arrayTest.toJavaObject(UserList.class);
        System.out.println(JSON.toJSONString(userList));

    }

    @Test
    public void typeTest() {
        String message = "{\"name\":\"zhangsan\",\"age\":12,\"object\":{\"name\":\"a\",\"age\":12},\"array\":[{\"one\":1},{\"two\":2}]}";
        JSONObject objectTest = JSONObject.parseObject(message);

        for (Map.Entry<String, Object> entry : objectTest.entrySet()) {
            String key = entry.getKey();
            Class<?> aClass = entry.getValue().getClass();

            System.out.println(key + "---------" + aClass.getSimpleName());
        }
        Object bbb = objectTest.get("bbb");
        if (bbb == null) {
            System.out.println("null");
        }
    }

}
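A minimal sketch (not part of the commit) of the normalization pattern the test above exercises: fields such as devicetag sometimes arrive as escaped JSON strings and must be re-parsed before mapping onto a POJO. The class and method names here are hypothetical.

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

public final class EmbeddedJsonNormalizer {

    private EmbeddedJsonNormalizer() {
    }

    /** Re-parses the named fields when their value is still a raw JSON string. */
    public static JSONObject normalizeEmbeddedJson(JSONObject log, String... fields) {
        for (String field : fields) {
            Object value = log.get(field);
            if (value instanceof String) {
                String text = ((String) value).trim();
                if (text.startsWith("{")) {
                    log.put(field, JSON.parseObject(text));
                } else if (text.startsWith("[")) {
                    log.put(field, JSON.parseArray(text));
                }
            }
        }
        return log;
    }
}

With this helper, the two branches in typeCheckTest collapse to a single call such as normalizeEmbeddedJson(objectTest, "devicetag") before toJavaObject.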
102
src/test/java/com/zdjizhi/json/JsonPathTest.java
Normal file
@@ -0,0 +1,102 @@
package com.zdjizhi.json;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.*;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import org.junit.Test;

import java.util.Properties;


/**
 * @author qidaijie
 * @Package com.zdjizhi.json
 * @Description:
 * @date 2022/3/24 10:22
 */
public class JsonPathTest {

    private static Properties properties = new Properties();


    static {
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
        properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
        properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
    }

    @Test
    public void knowledgeBaseTest() {
        try {
            ConfigService configService = NacosFactory.createConfigService(properties);
            String schema = configService.getConfig("knowledge_base.json", "DEFAULT_GROUP", 5000);
//            String KNOWLEDGE_EXPR = "[?(@.version=='latest' && (@.name in ('ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined','asn_v4','asn_v6')))]";
//            String KNOWLEDGE_EXPR = "[?(@.version=='latest' && (@.type in ('ip_location','asn','ip')))]";
//            String KNOWLEDGE_EXPR = "[?(@.version=='latest' && (@.name in ('ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined','asn_v4','asn_v6')) && (@.type in ('ip_location','asn','ip')))]";
//            String types = "[?(@.type in ('ip_location','asn','ip_user_defined'))]";
//            String names = "[?(@.name in ('ip_v4_built_in','ip_v6_built_in','asn_v4','asn_v6','内置IP定位库'))]";
//            String expr = "[?(@.version=='latest')][?(@.type in ('ip_location','asn','ip_user_defined'))]";

//            String expr = "[?(@.version=='latest')][?(@.name == 'QQQ' || (@.type == 'ip_user_defined'))]";
            // Note: each JSONPath predicate needs the ?() filter marker; the middle
            // segment was originally written as [@.type in (...)], unlike every other
            // expression in this file.
            String expr = "[?(@.version=='latest')][?(@.type in ('ip_location','asn','ip'))][?(@.name in ('QQQ'))]";

//            JSONPath jsonPath = JSONPath.of(combinationFilterList());
            JSONPath jsonPath = JSONPath.of(expr);

            String extract = jsonPath.extract(JSONReader.of(schema)).toString();


            JSONArray jsonArray = JSON.parseArray(extract);
            for (int i = 0; i < jsonArray.size(); i++) {
                System.out.println(jsonArray.getString(i));
//                KnowlegeBaseMeta knowlegeBaseMeta = JSONObject.parseObject(jsonArray.getString(i), KnowlegeBaseMeta.class);
//                System.out.println(knowlegeBaseMeta.toString());
            }

        } catch (NacosException e) {
            e.printStackTrace();
        }
    }

    public static String combinationFilterList() {
        String[] typeList = CommonConfig.KNOWLEDGEBASE_TYPE_LIST.split(",");
        String[] nameList = CommonConfig.KNOWLEDGEBASE_NAME_LIST.split(",");
        String expr = "[?(@.version=='latest')]";
        // ip_location > 'ip_location'

        // A type/name filter is only appended when more than one value is configured.
        if (typeList.length > 1) {
            StringBuilder typeBuilder = new StringBuilder();
            typeBuilder.append("[?(@.type in (");
            for (int i = 0; i < typeList.length; i++) {
                if (i == typeList.length - 1) {
                    typeBuilder.append("'").append(typeList[i]).append("'))]");
                } else {
                    typeBuilder.append("'").append(typeList[i]).append("',");
                }
            }
            expr = expr + typeBuilder.toString();
        }

        if (nameList.length > 1) {
            StringBuilder nameBuilder = new StringBuilder();
            nameBuilder.append("[?(@.name in (");
            for (int i = 0; i < nameList.length; i++) {
                if (i == nameList.length - 1) {
                    nameBuilder.append("'").append(nameList[i]).append("'))]");
                } else {
                    nameBuilder.append("'").append(nameList[i]).append("',");
                }
            }
            expr = expr + nameBuilder.toString();
        }

        return expr;
    }

}
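A small illustration (not in the commit) of the expression combinationFilterList() builds; the config values below are invented for the example.

public class CombinationFilterExample {
    public static void main(String[] args) {
        // Given KNOWLEDGEBASE_TYPE_LIST = "ip_location,asn"
        // and   KNOWLEDGEBASE_NAME_LIST = "ip_v4_built_in,asn_v4",
        // the method yields three chained JSONPath filters:
        String expected =
                "[?(@.version=='latest')]"
              + "[?(@.type in ('ip_location','asn'))]"
              + "[?(@.name in ('ip_v4_built_in','asn_v4'))]";
        System.out.println(expected);
    }
}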
223
src/test/java/com/zdjizhi/json/NewSchemaTest.java
Normal file
@@ -0,0 +1,223 @@
package com.zdjizhi.json;

import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import org.junit.Test;

import java.util.*;

/**
 * Applicable to schemas >= TSG22.08
 *
 * @author qidaijie
 * @Package com.zdjizhi.nacos
 * @Description:
 * @date 2022/3/17 14:57
 */
public class NewSchemaTest {

    private static Properties properties = new Properties();

    /**
     * Fields that should be dropped from the schema.
     */
    private static ArrayList<String> dropList = new ArrayList<>();

    /**
     * Valid fields and their types as declared by the schema.
     */
    private static HashMap<String, Class<?>> jsonFieldsMap;

    /**
     * Fields that carry a default value.
     */
    private static HashMap<String, Object> defaultFieldsMap = new HashMap<>(16);


    static {
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.11:8848");
        properties.setProperty(PropertyKeyConst.NAMESPACE, "f507879a-8b1b-4330-913e-83d4fcdc14bb");
        properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
        properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
    }


    @Test
    public void newSchemaTest() {
        try {
            ConfigService configService = NacosFactory.createConfigService(properties);
            String dataId = "session_record.json";
            String group = "Galaxy";
            String schema = configService.getConfig(dataId, group, 5000);

            ArrayList<String[]> newJobList = getNewJobList(schema);
            for (String[] job : newJobList) {
                System.out.println(Arrays.toString(job));
            }

            HashMap<String, Class<?>> fieldsFromSchema = getFieldsFromSchema(schema);
            for (String key : fieldsFromSchema.keySet()) {
                System.out.println("fieldName:" + key + " Class:" + fieldsFromSchema.get(key));
            }
        } catch (NacosException e) {
            e.printStackTrace();
        }
    }


    /**
     * Pattern-matches a type string to a class object.
     *
     * @param type type name from the schema
     * @return the corresponding class
     */
    private static Class<?> getClassName(String type) {
        Class<?> clazz;

        switch (type) {
            case "int":
                clazz = Integer.class;
                break;
            case "string":
                clazz = String.class;
                break;
            case "long":
                clazz = long.class;
                break;
            case "array":
                clazz = List.class;
                break;
            case "double":
                clazz = double.class;
                break;
            case "float":
                clazz = float.class;
                break;
            case "char":
                clazz = char.class;
                break;
            case "byte":
                clazz = byte.class;
                break;
            case "boolean":
                clazz = boolean.class;
                break;
            case "short":
                clazz = short.class;
                break;
            default:
                clazz = String.class;
        }
        return clazz;
    }

    /**
     * Extracts the required fields and their types from the schema.
     *
     * @return a map used to generate schema-typed objects via reflection
     */
    private static HashMap<String, Class<?>> getFieldsFromSchema(String schema) {
        HashMap<String, Class<?>> map = new HashMap<>(256);

        // Fetch "fields" and treat it as an array; each element carries a name, doc and type.
        JSONObject schemaJson = JSONObject.parseObject(schema);
        JSONArray fields = schemaJson.getJSONArray("fields");

        for (Object field : fields) {
            String fieldStr = field.toString();
            if (checkKeepField(fieldStr)) {
                JSONObject fieldJson = JSONObject.parseObject(fieldStr);
                String name = fieldJson.getString("name");
                String type = fieldJson.getString("type");
                if (type.contains("{")) {
                    JSONObject types = JSONObject.parseObject(type);
                    type = types.getString("type");
                }

                if (fieldJson.containsKey("default")) {
                    defaultFieldsMap.put(name, fieldJson.get("default"));
                }
                // Assemble the map used to generate the entity class via reflection.
                map.put(name, getClassName(type));
            } else {
                dropList.add(fieldStr);
            }
        }
        return map;
    }

    /**
     * Decides whether a field should be kept.
     *
     * @param message a single field JSON
     * @return true or false
     */
    private static boolean checkKeepField(String message) {
        boolean isKeepField = true;
        JSONObject fieldJson = JSONObject.parseObject(message);
        boolean hasDoc = fieldJson.containsKey("doc");
        if (hasDoc) {
            JSONObject doc = JSONObject.parseObject(fieldJson.getString("doc"));
            if (doc.containsKey("visibility")) {
                String visibility = doc.getString("visibility");
                if ("disabled".equals(visibility)) {
                    isKeepField = false;
                }
            }
        }
        return isKeepField;
    }


    /**
     * Fetches the schema via HTTP, parses it and returns a task list
     * (useList, toList, funcList, paramList).
     *
     * @return task list
     */
    private static ArrayList<String[]> getNewJobList(String schema) {
        ArrayList<String[]> list = new ArrayList<>();

        JSONObject schemaJson = JSONObject.parseObject(schema);
        JSONArray fields = schemaJson.getJSONArray("fields");
        for (Object field : fields) {
            JSONObject fieldJson = JSONObject.parseObject(field.toString());
            boolean hasDoc = fieldJson.containsKey("doc");
            if (hasDoc) {
                JSONObject docJson = JSONObject.parseObject(fieldJson.getString("doc"));
                boolean hasFormat = docJson.containsKey("format");
                if (hasFormat) {
                    String name = fieldJson.getString("name");
                    JSONArray formatList = docJson.getJSONArray("format");
                    for (Object format : formatList) {
                        JSONObject formatJson = JSONObject.parseObject(format.toString());
                        String function = formatJson.getString("function");
                        String appendTo;
                        String params = null;

                        if (formatJson.containsKey("appendTo")) {
                            appendTo = formatJson.getString("appendTo");
                        } else {
                            appendTo = name;
                        }
                        if (formatJson.containsKey("param")) {
                            params = formatJson.getString("param");
                        }
                        list.add(new String[]{name, appendTo, function, params});
                    }
                }
            }
        }
        return list;
    }

}
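A hypothetical fragment (not from the commit) of a >= TSG22.08 field entry, to make the parsing above concrete: "doc" carries a visibility flag plus a "format" array of {function, appendTo, param} objects. The field and function names are invented.

public class NewSchemaSample {
    public static void main(String[] args) {
        String field = "{\"name\":\"common_client_ip\",\"type\":\"string\","
                + "\"doc\":{\"visibility\":\"enabled\","
                + "\"format\":[{\"function\":\"ip2location\","
                + "\"appendTo\":\"common_client_location\"}]}}";
        // For this field, getNewJobList would emit the job tuple:
        //   [common_client_ip, common_client_location, ip2location, null]
        // and checkKeepField would return true (visibility != "disabled").
        System.out.println(field);
    }
}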
121
src/test/java/com/zdjizhi/json/OldSchemaTest.java
Normal file
@@ -0,0 +1,121 @@
package com.zdjizhi.json;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.junit.Test;

import java.util.*;

/**
 * Applicable to schemas < TSG22.08
 *
 * @author qidaijie
 * @Package com.zdjizhi.nacos
 * @Description:
 * @date 2022/3/17 14:57
 */
public class OldSchemaTest {

    private static Properties properties = new Properties();


    static {
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
        properties.setProperty(PropertyKeyConst.NAMESPACE, "prod");
        properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
        properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
    }


    @Test
    public void oldSchemaTest() {

        try {
            ConfigService configService = NacosFactory.createConfigService(properties);
            String dataId = "session_record.json";
            String group = "Galaxy";
            String schema = configService.getConfig(dataId, group, 5000);

            ArrayList<String[]> oldJobList = getOldJobList(schema);

            for (String[] job : oldJobList) {
                System.out.println(Arrays.toString(job));
            }

        } catch (NacosException e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses the schema and returns a task list (useList, toList, funcList, paramList).
     *
     * @param schema log schema
     * @return task list
     */
    private static ArrayList<String[]> getOldJobList(String schema) {
        ArrayList<String[]> list = new ArrayList<>();

        // Fetch "fields" and treat it as an array; each element carries a name, doc and type.
        JSONObject schemaJson = JSON.parseObject(schema);
        JSONArray fields = (JSONArray) schemaJson.get("fields");

        for (Object field : fields) {
            JSONObject fieldJson = JSON.parseObject(field.toString());
            if (fieldJson.containsKey("doc")) {
                JSONObject docJson = JSON.parseObject(fieldJson.getString("doc"));

                if (docJson.containsKey("format")) {
                    String name = fieldJson.getString("name");
                    JSONObject formatObject = JSON.parseObject(docJson.getString("format"));

                    String functions = formatObject.getString("transform");
                    String appendTo = null;
                    String params = null;

                    if (formatObject.containsKey("appendTo")) {
                        appendTo = formatObject.getString("appendTo");
                    }

                    if (formatObject.containsKey("param")) {
                        params = formatObject.getString("param");
                    }

                    // The splitter-separated lists are assumed to be aligned:
                    // the i-th function pairs with the i-th appendTo/param entry.
                    if (StringUtil.isNotBlank(appendTo) && StringUtil.isBlank(params)) {
                        String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
                        String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);

                        for (int i = 0; i < functionArray.length; i++) {
                            list.add(new String[]{name, appendToArray[i], functionArray[i], null});
                        }

                    } else if (StringUtil.isNotBlank(appendTo) && StringUtil.isNotBlank(params)) {
                        String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
                        String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
                        String[] paramArray = params.split(FlowWriteConfig.FORMAT_SPLITTER);

                        for (int i = 0; i < functionArray.length; i++) {
                            list.add(new String[]{name, appendToArray[i], functionArray[i], paramArray[i]});
                        }
                    } else {
                        list.add(new String[]{name, name, functions, params});
                    }

                }
            }

        }
        return list;
    }
}
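A hypothetical pre-TSG22.08 "doc" fragment for the parser above. The actual value of FlowWriteConfig.FORMAT_SPLITTER is not visible in this commit; ";" is assumed here purely for illustration, as are the field and function names.

public class OldSchemaSample {
    public static void main(String[] args) {
        // A field named common_client_ip with this doc...
        String doc = "{\"format\":{\"transform\":\"trim;ip2location\","
                + "\"appendTo\":\"common_domain;common_client_location\"}}";
        // ...would, with splitter ";", make getOldJobList emit two tuples:
        //   [common_client_ip, common_domain, trim, null]
        //   [common_client_ip, common_client_location, ip2location, null]
        System.out.println(doc);
    }
}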
63
src/test/java/com/zdjizhi/json/pojo/KnowlegeBaseMeta.java
Normal file
@@ -0,0 +1,63 @@
package com.zdjizhi.json.pojo;

/**
 * @author qidaijie
 * @Package com.zdjizhi.json
 * @Description:
 * @date 2023/5/19 18:42
 */
public class KnowlegeBaseMeta {
    private String name;
    private String sha256;
    private String format;
    private String path;

    public KnowlegeBaseMeta(String name, String sha256, String format, String path) {
        this.name = name;
        this.sha256 = sha256;
        this.format = format;
        this.path = path;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getSha256() {
        return sha256;
    }

    public void setSha256(String sha256) {
        this.sha256 = sha256;
    }

    public String getFormat() {
        return format;
    }

    public void setFormat(String format) {
        this.format = format;
    }

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    @Override
    public String toString() {
        return "KnowlegeBaseMeta{" +
                "name='" + name + '\'' +
                ", sha256='" + sha256 + '\'' +
                ", format='" + format + '\'' +
                ", path='" + path + '\'' +
                '}';
    }
}
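A minimal usage sketch (not in the commit) tying this POJO to the JSONPath extraction in JsonPathTest; the sample values are made up.

import com.alibaba.fastjson2.JSON;

public class KnowlegeBaseMetaExample {
    public static void main(String[] args) {
        // One entry as it would come out of the knowledge_base.json extraction.
        KnowlegeBaseMeta meta = new KnowlegeBaseMeta(
                "asn_v4", "abc123", "mmdb", "/data/knowledge/asn_v4.mmdb");
        // Serializing with fastjson2 round-trips the four metadata fields.
        System.out.println(JSON.toJSONString(meta));
        System.out.println(meta);
    }
}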
56
src/test/java/com/zdjizhi/json/pojo/UserList.java
Normal file
@@ -0,0 +1,56 @@
package com.zdjizhi.json.pojo;


import java.util.List;

/**
 * @author qidaijie
 * @Package com.zdjizhi.json.pojo
 * @Description:
 * @date 2023/5/20 14:06
 */
public class UserList {
    private String name;
    private Integer age;
    private Long idcard;
    private List devicetag;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public Long getIdcard() {
        return idcard;
    }

    public void setIdcard(Long idcard) {
        this.idcard = idcard;
    }

    public List getDevicetag() {
        return devicetag;
    }

    public void setDevicetag(List devicetag) {
        this.devicetag = devicetag;
    }

    public UserList(String name, Integer age, Long idcard, List devicetag) {
        this.name = name;
        this.age = age;
        this.idcard = idcard;
        this.devicetag = devicetag;
    }
}
55
src/test/java/com/zdjizhi/json/pojo/UserMap.java
Normal file
@@ -0,0 +1,55 @@
package com.zdjizhi.json.pojo;


import java.util.Map;

/**
 * @author qidaijie
 * @Package com.zdjizhi.json.pojo
 * @Description:
 * @date 2023/5/20 14:06
 */
public class UserMap {
    private String name;
    private Integer age;
    private Long idcard;
    private Map devicetag;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public Long getIdcard() {
        return idcard;
    }

    public void setIdcard(Long idcard) {
        this.idcard = idcard;
    }

    public Map getDevicetag() {
        return devicetag;
    }

    public void setDevicetag(Map devicetag) {
        this.devicetag = devicetag;
    }

    public UserMap(String name, Integer age, Long idcard, Map devicetag) {
        this.name = name;
        this.age = age;
        this.idcard = idcard;
        this.devicetag = devicetag;
    }
}
102
src/test/java/com/zdjizhi/nacos/NacosTest.java
Normal file
@@ -0,0 +1,102 @@
package com.zdjizhi.nacos;

import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.alibaba.nacos.client.config.impl.LocalConfigInfoProcessor;
import org.junit.Test;

import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;
import java.util.concurrent.Executor;


/**
 * @author qidaijie
 * @Package com.zdjizhi
 * @Description:
 * @date 2022/3/10 16:58
 */
public class NacosTest {

    /**
     * <dependency>
     *     <groupId>com.alibaba.nacos</groupId>
     *     <artifactId>nacos-client</artifactId>
     *     <version>1.2.0</version>
     * </dependency>
     */
    private static Properties properties = new Properties();
    /**
     * config data id = config name
     */
    private static final String DATA_ID = "dos_detection.properties";
    /**
     * config group
     */
    private static final String GROUP = "Galaxy";

    private void getProperties() {
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.241:8848");
        properties.setProperty(PropertyKeyConst.NAMESPACE, "prod");
        properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
        properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
    }


    @Test
    public void getConfigurationTest() {
        try {
            getProperties();
            ConfigService configService = NacosFactory.createConfigService(properties);
            String content = configService.getConfig(DATA_ID, GROUP, 5000);
            Properties nacosConfigMap = new Properties();
            nacosConfigMap.load(new StringReader(content));
            System.out.println(nacosConfigMap.getProperty("source.kafka.servers"));
            System.out.println(content);
        } catch (NacosException | IOException e) {
            e.printStackTrace();
        }

    }

    @Test
    public void listenerConfigurationTest() {
        getProperties();
        try {
            // first get config
            ConfigService configService = NacosFactory.createConfigService(properties);
            String config = configService.getConfig(DATA_ID, GROUP, 5000);
            System.out.println(config);

            // start listener
            configService.addListener(DATA_ID, GROUP, new Listener() {
                @Override
                public Executor getExecutor() {
                    return null;
                }

                @Override
                public void receiveConfigInfo(String configMsg) {
                    System.out.println(configMsg);
                }
            });
        } catch (NacosException e) {
            e.printStackTrace();
        }

        // Keep running; change the config in Nacos and the new value is printed.
        for (int i = 0; i < 3; i++) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
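A variant sketch (not in the commit): blocking on a CountDownLatch instead of a fixed sleep loop lets a listener test run until the first config push actually arrives. The class name is hypothetical; the data id and group reuse the values from NacosTest.

import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;

public class NacosListenerLatchExample {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties(); // fill in server/namespace/user as in NacosTest
        ConfigService configService = NacosFactory.createConfigService(props);
        CountDownLatch firstUpdate = new CountDownLatch(1);
        configService.addListener("dos_detection.properties", "Galaxy", new Listener() {
            @Override
            public Executor getExecutor() {
                return null; // run the callback on the client's notifier thread
            }

            @Override
            public void receiveConfigInfo(String configMsg) {
                System.out.println(configMsg);
                firstUpdate.countDown();
            }
        });
        // Wait up to 60s for one config push instead of sleeping blindly.
        firstUpdate.await(60, TimeUnit.SECONDS);
    }
}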
117
src/test/java/com/zdjizhi/nacos/SchemaListenerTest.java
Normal file
@@ -0,0 +1,117 @@
package com.zdjizhi.nacos;

import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Executor;

/**
 * @author qidaijie
 * @Package com.zdjizhi.nacos
 * @Description:
 * @date 2022/3/17 14:57
 */
public class SchemaListenerTest {

    private static Properties properties = new Properties();
    private static ArrayList<String[]> jobList;


    static {
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
        properties.setProperty(PropertyKeyConst.NAMESPACE, "prod");
        properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
        properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");

        try {
            ConfigService configService = NacosFactory.createConfigService(properties);
            String dataId = "session_record.json";
            String group = "Galaxy";
            jobList = getJobListFromHttp(configService.getConfig(dataId, group, 5000));
            System.out.println(jobList);
            configService.addListener(dataId, group, new Listener() {
                @Override
                public Executor getExecutor() {
                    return null;
                }

                @Override
                public void receiveConfigInfo(String configMsg) {
                    jobList = getJobListFromHttp(configMsg);
                }
            });
        } catch (NacosException e) {
            e.printStackTrace();
        }
    }


    @Test
    public void dealCommonMessage() {
        // Keep running; change the config in Nacos and the refreshed job list is printed.
        for (int i = 0; i < 1; i++) {
            try {
                for (String[] job : jobList) {
                    System.out.println(Arrays.toString(job));
                }
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Fetches the schema via HTTP, parses it and returns a task list
     * (useList, toList, funcList, paramList).
     *
     * @return task list
     */
    private static ArrayList<String[]> getJobListFromHttp(String schema) {
        ArrayList<String[]> list = new ArrayList<>();

        JSONObject schemaJson = new JSONObject(schema, false, true);
        JSONArray fields = schemaJson.getJSONArray("fields");
        for (Object field : fields) {
            JSONObject fieldJson = new JSONObject(field, false, true);
            boolean hasDoc = fieldJson.containsKey("doc");
            if (hasDoc) {
                JSONObject docJson = fieldJson.getJSONObject("doc");
                boolean hasFormat = docJson.containsKey("format");
                if (hasFormat) {
                    String name = fieldJson.getStr("name");
                    JSONArray formatList = docJson.getJSONArray("format");
                    for (Object format : formatList) {
                        JSONObject formatJson = new JSONObject(format, false, true);
                        String function = formatJson.getStr("function");
                        String appendTo;
                        String params = null;

                        if (formatJson.containsKey("appendTo")) {
                            appendTo = formatJson.getStr("appendTo");
                        } else {
                            appendTo = name;
                        }

                        if (formatJson.containsKey("param")) {
                            params = formatJson.getStr("param");
                        }

                        list.add(new String[]{name, appendTo, function, params});
                    }
                }
            }
        }

        return list;
    }
}
8
src/test/java/com/zdjizhi/testingDataset/gtpc
Normal file
@@ -0,0 +1,8 @@
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"delete","gtp_downlink_teid":392955378,"gtp_uplink_teid":579533063,"gtp_phone_number":"8617239723344","gtp_imei":"86989005126503","gtp_imsi":"460077015061987","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0}

{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"modify","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":2}

{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"delete","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663753228,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":3}

{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"modify","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":4}
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"modify","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":3}
7
src/test/java/com/zdjizhi/testingDataset/radius
Normal file
@@ -0,0 +1,7 @@
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62228,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754402,"common_con_duration_ms":0,"common_stream_trace_id":107327506993450,"common_l4_protocol":"IPv4_UDP","common_address_list":"62228-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test1","radius_acct_status_type":1,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.26","radius_event_timestamp":1663580387}
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62229,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754402,"common_con_duration_ms":0,"common_stream_trace_id":107327506993452,"common_l4_protocol":"IPv4_UDP","common_address_list":"62229-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test2","radius_acct_status_type":1,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.16","radius_event_timestamp":1663580387,"common_vsys_id":3}
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62229,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754402,"common_con_duration_ms":0,"common_stream_trace_id":107327506993452,"common_l4_protocol":"IPv4_UDP","common_address_list":"62229-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test2","radius_acct_status_type":1,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.16","radius_event_timestamp":1663580387,"common_vsys_id":4}



{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62229,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754653,"common_con_duration_ms":0,"common_stream_trace_id":107327506993452,"common_l4_protocol":"IPv4_UDP","common_address_list":"62229-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test2","radius_acct_status_type":2,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.16","radius_event_timestamp":1663580387,"common_vsys_id":3}
2
src/test/java/com/zdjizhi/testingDataset/session
Normal file
@@ -0,0 +1,2 @@
{"common_schema_type":"HTTP","common_sessions":1,"http_request_line":"GET sampleFile.html HTTP/1.1","http_host":"www.texaslotto.com","http_url":"www.texaslotto.com/sampleFile.html","http_user_agent":"xPTS/2.0","http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html","http_isn":1953597368,"http_proxy_flag":0,"http_version":"http1","http_response_latency_ms":1,"http_session_duration_ms":2,"http_sequence":80,"common_protocol_label":"ETHERNET.IPv4.UDP.GTP.IPv4.TCP","common_c2s_byte_diff":17110,"common_c2s_pkt_diff":119,"common_s2c_byte_diff":16490,"common_s2c_pkt_diff":81,"common_c2s_ipfrag_num":0,"common_s2c_ipfrag_num":0,"common_first_ttl":64,"common_c2s_tcp_unorder_num":0,"common_s2c_tcp_unorder_num":0,"common_c2s_tcp_lostlen":0,"common_s2c_tcp_lostlen":0,"common_c2s_pkt_retrans":240,"common_s2c_pkt_retrans":162,"common_c2s_byte_retrans":12800,"common_s2c_byte_retrans":18400,"common_direction":69,"common_l7_protocol":"HTTP","common_app_label":"unknown","common_app_id":{"UNKNOWN":[{"app_name":"unknown","app_id":4,"surrogate_id":0,"packet_sequence":29}]},"common_app_identify_info":[{"app_name":"unknown","packet_sequence":29}],"common_tcp_client_isn":1953597368,"common_tcp_server_isn":1950649408,"common_server_ip":"10.201.35.10","common_client_ip":"1.1.1.27","common_server_port":80,"common_client_port":2000,"common_stream_dir":3,"common_address_type":4,"common_address_list":"IPv4_TCP<2000-80-1.1.1.27-10.201.35.10>|GTP<111534000-665547833>|IPv4_UDP<2152-2152-192.56.5.2-192.56.10.20>|MAC<000c299b2fa4-000c2915b4f4>","common_start_time":1660272209,"common_end_time":1660272424,"common_con_duration_ms":215201,"common_s2c_pkt_num":243,"common_s2c_byte_num":49470,"common_c2s_pkt_num":360,"common_c2s_byte_num":51600,"common_establish_latency_ms":1,"common_client_location":"china1.beijing.beijing1","common_tunnels":[{"tunnels_schema_type":"GTP","gtp_endpoint_a2b_teid":247749709,"gtp_endpoint_b2a_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152},{"tunnels_schema_type":"MULTIPATH_ETHERNET","c2s_source_mac":"00:0c:29:9b:2f:a4","c2s_destination_mac":"00:0c:29:15:b4:f4","s2c_source_mac":"00:0c:29:15:b4:f4","s2c_destination_mac":"00:0c:29:9b:2f:a4"}],"common_stream_trace_id":"869231578438992199","common_l4_protocol":"IPv4_TCP","common_sled_ip":"192.168.40.81","common_device_id":"21426003","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg-three\"},{\"tag\":\"data_center\",\"value\":\"center-xxg-three\"}]}","common_policy_id":0,"common_service":2,"common_action":0}
{"common_schema_type":"HTTP","common_sessions":1,"http_request_line":"GET sampleFile.html HTTP/1.1","http_host":"www.texaslotto.com","http_url":"www.texaslotto.com/sampleFile.html","http_user_agent":"xPTS/2.0","http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html","http_isn":1953597368,"http_proxy_flag":0,"http_version":"http1","http_response_latency_ms":1,"http_session_duration_ms":2,"http_sequence":80,"common_protocol_label":"ETHERNET.IPv4.UDP.GTP.IPv4.TCP","common_c2s_byte_diff":17110,"common_c2s_pkt_diff":119,"common_s2c_byte_diff":16490,"common_s2c_pkt_diff":81,"common_c2s_ipfrag_num":0,"common_s2c_ipfrag_num":0,"common_first_ttl":64,"common_c2s_tcp_unorder_num":0,"common_s2c_tcp_unorder_num":0,"common_c2s_tcp_lostlen":0,"common_s2c_tcp_lostlen":0,"common_c2s_pkt_retrans":240,"common_s2c_pkt_retrans":162,"common_c2s_byte_retrans":12800,"common_s2c_byte_retrans":18400,"common_direction":69,"common_l7_protocol":"HTTP","common_app_label":"unknown","common_app_id":{"UNKNOWN":[{"app_name":"unknown","app_id":4,"surrogate_id":0,"packet_sequence":29}]},"common_app_identify_info":[{"app_name":"unknown","packet_sequence":29}],"common_tcp_client_isn":1953597368,"common_tcp_server_isn":1950649408,"common_server_ip":"10.201.35.10","common_client_ip":"1.1.1.27","common_server_port":80,"common_client_port":2000,"common_stream_dir":3,"common_address_type":4,"common_address_list":"IPv4_TCP<2000-80-1.1.1.27-10.201.35.10>|GTP<111534000-665547833>|IPv4_UDP<2152-2152-192.56.5.2-192.56.10.20>|MAC<000c299b2fa4-000c2915b4f4>","common_start_time":1660272209,"common_end_time":1660272424,"common_con_duration_ms":215201,"common_s2c_pkt_num":243,"common_s2c_byte_num":49470,"common_c2s_pkt_num":360,"common_c2s_byte_num":51600,"common_establish_latency_ms":1,"common_client_location":"china1.beijing.beijing1","common_tunnels":[{"tunnels_schema_type":"GTP","gtp_endpoint_a2b_teid":247749709,"gtp_endpoint_b2a_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152},{"tunnels_schema_type":"MULTIPATH_ETHERNET","c2s_source_mac":"00:0c:29:9b:2f:a4","c2s_destination_mac":"00:0c:29:15:b4:f4","s2c_source_mac":"00:0c:29:15:b4:f4","s2c_destination_mac":"00:0c:29:9b:2f:a4"}],"common_stream_trace_id":"869231578438992199","common_l4_protocol":"IPv4_TCP","common_sled_ip":"192.168.40.81","common_device_id":"21426003","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg-three\"},{\"tag\":\"data_center\",\"value\":\"center-xxg-three\"}]}","common_policy_id":0,"common_service":2,"common_action":0,"common_vsys_id":4}