7 Commits

Author SHA1 Message Date
wangchengcheng
c50e2436bc feat: change common_action value filling logic 2024-01-22 10:16:56 +08:00
wangchengcheng
68b4805c4f feat: adapt to version 24.01 session-record 2024-01-17 20:02:27 +08:00
wangchengcheng
5c0a108393 1. Adapt to TSG 23.07 and later: add data-transfer statistics metrics and push them to the pushgateway. (GAL-409)
2. The URL parameter domain, previously taken from the http_domain field, now takes its value from the common_server_domain field. (GAL-410)
2023-09-28 15:59:26 +08:00
wangchengcheng
c3ad8140e8 1. Adapt to the Percent (百分点) OSS V3 interface (GAL-384).
2. Rename the FileMeta property sourceList to source_list.
2023-08-07 18:09:09 +08:00
wangchengcheng
8c94028363 Complete pom.xml 2022-09-27 10:47:30 +08:00
wangchengcheng
d6226fef5c Update the P19 dual-write program based on the version-04 completion program. 2022-06-17 16:54:38 +08:00
wangchengcheng
935dcfa702 Update the P19 dual-write program based on the version-04 completion program. 2022-06-17 16:53:16 +08:00
66 changed files with 6576 additions and 0 deletions

295
pom.xml Normal file
View File

@@ -0,0 +1,295 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.zdjizhi</groupId>
<artifactId>log-completion-schema</artifactId>
<version>230907</version>
<name>log-completion-schema</name>
<url>http://www.example.com</url>
<repositories>
<repository>
<id>nexus</id>
<name>Team Nexus Repository</name>
<url>http://192.168.40.153:8099/content/groups/public</url>
</repository>
<repository>
<id>maven-ali</id>
<url>http://maven.aliyun.com/nexus/content/groups/public/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
<checksumPolicy>fail</checksumPolicy>
</snapshots>
</repository>
</repositories>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<galaxy.tools.version>1.2</galaxy.tools.version>
<zookeeper.version>3.4.10</zookeeper.version>
<flink.version>1.13.1</flink.version>
<hadoop.version>2.7.1</hadoop.version>
<kafka.version>1.0.0</kafka.version>
<hbase.version>2.2.3</hbase.version>
<nacos.version>1.2.0</nacos.version>
<fastjson.version>2.0.40</fastjson.version>
<hutool.version>5.7.17</hutool.version>
<scope.type>provided</scope.type>
<!--<scope.type>compile</scope.type>-->
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.2</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>com.zdjizhi.topology.LogFlowWriteTopology</mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>io.github.zlika</groupId>
<artifactId>reproducible-build-maven-plugin</artifactId>
<version>0.2</version>
<executions>
<execution>
<goals>
<goal>strip-jar</goal>
</goals>
<phase>package</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
<resources>
<resource>
<directory>properties</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
</includes>
<filtering>false</filtering>
</resource>
<resource>
<directory>src/main/java</directory>
<includes>
<include>log4j.properties</include>
</includes>
<filtering>false</filtering>
</resource>
</resources>
</build>
<dependencies>
<dependency>
<groupId>com.geedgenetworks</groupId>
<artifactId>galaxy</artifactId>
<version>${galaxy.tools.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>${fastjson.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-core -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-core</artifactId>
<version>${flink.version}</version>
<scope>${scope.type}</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-streaming-java -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>${flink.version}</version>
<scope>${scope.type}</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-clients -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>${flink.version}</version>
<scope>${scope.type}</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-kafka -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.12</artifactId>
<version>${flink.version}</version>
<!--<scope>${scope.type}</scope>-->
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-java -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
<scope>${scope.type}</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper -->
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>${zookeeper.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-over-slf4j</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>${hbase.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-over-slf4j</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-over-slf4j</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-over-slf4j</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib-nodep</artifactId>
<version>3.2.4</version>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.3.2</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.2</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-all</artifactId>
<version>${hutool.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/com.alibaba.nacos/nacos-client -->
<dependency>
<groupId>com.alibaba.nacos</groupId>
<artifactId>nacos-client</artifactId>
<version>${nacos.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.jasypt/jasypt -->
<dependency>
<groupId>org.jasypt</groupId>
<artifactId>jasypt</artifactId>
<version>1.9.3</version>
</dependency>
<dependency>
<groupId>io.prometheus</groupId>
<artifactId>simpleclient_pushgateway</artifactId>
<version>0.9.0</version>
</dependency>
</dependencies>
</project>

35
action_definition.properties Normal file
View File

@@ -0,0 +1,35 @@
none=0
Monitor=1
monitor=1
Intercept=2
intercept=2
NoIntercept=3
nointercept=3
ActiveDefence=4
activedefence=4
WANNAT=8
wannat=8
Reject=16
reject=16
Deny=16
deny=16
Shaping=32
shaping=32
Manipulate=48
manipulate=48
ServiceChaining=64
servicechaining=64
Allow=96
allow=96
Bypass=96
bypass=96
Shunt=128
shunt=128
Statistics=129
statistics=129
redirect=48
replace=48
hijack=48
insert=48
edit_element=48
run_script=48

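Note: TypeMapCompleted (later in this diff) loads this table and resolves common_action by stripping spaces from the device's action string before the lookup. A minimal sketch of that lookup, assuming the table above ships on the classpath as action_definition.properties:

import java.io.IOException;
import java.util.Properties;

Properties actionProp = new Properties();
actionProp.load(Thread.currentThread().getContextClassLoader()
        .getResourceAsStream("action_definition.properties")); // throws IOException
// "Active Defence" -> "ActiveDefence" -> 4. Several proxy verbs (redirect, replace, hijack,
// insert, edit_element, run_script) deliberately collapse to Manipulate=48, which is why
// TypeMapCompleted preserves the original verb separately as common_sub_action.
int commonAction = Integer.parseInt(actionProp.getProperty("Active Defence".replace(" ", "")));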
103
default_config.properties Normal file
View File

@@ -0,0 +1,103 @@
#====================Kafka KafkaConsumer====================#
#kafka source connection timeout
session.timeout.ms=60000
#kafka source poll
max.poll.records=5000
#kafka source poll bytes
max.partition.fetch.bytes=31457280
#====================Kafka KafkaProducer====================#
#number of producer retries
retries=0
#maximum time a batch may wait before it is sent, whether or not it is full
linger.ms=10
#if no response is received before the timeout, the client resends the request when necessary
request.timeout.ms=30000
#producers send in batches; batch size in bytes (Kafka default: 16384)
batch.size=1048576
#size of the buffer the producer uses to cache messages
#128M
buffer.memory=134217728
#maximum size of a single request sent to the Kafka broker
#default: 10485760 = 10M
max.request.size=10485760
#producer ack
producer.ack=1
#====================kafka default====================#
#kafka SASL/SSL username (encryption)
kafka.user=nsyGpHKGFA4KW0zro9MDdw==
#kafka SASL/SSL pin (encryption)
kafka.pin=6MleDyA3Z73HSaXiKsDJ2k7Ys8YWLhEJ
#====================nacos default====================#
#nacos username (encryption)
nacos.username=kANxu/Zi5rBnZVxa5zAjrQ==
#nacos pin (encryption)
nacos.pin=YPIBDIXjJUtVBjjk2op0Dg==
#nacos connection timeout default 60s
nacos.connection.timeout=60000
#nacos table schema use group
nacos.schema.group=Galaxy
#nacos public use group
nacos.public.group=DEFAULT_GROUP
#public namespace name
nacos.public.namespace=
#knowledge base data id name
nacos.knowledgebase.data.id=knowledge_base.json
#==================== HTTP config ====================#
#max connection
http.pool.max.connection=20
#one route max connection
http.pool.max.per.route=4
#connect timeout(ms)
http.pool.connect.timeout=30000
#request timeout(ms)
http.pool.request.timeout=90000
#response timeout(ms)
http.socket.timeout=90000
#====================HBase Default conf====================#
hbase.rpc.timeout=60000
#====================Topology Default====================#
#hbase radius relation table name
hbase.radius.table.name=tsg_galaxy:relation_framedip_account
#hbase gtpc relation table name
hbase.gtpc.table.name=tsg_galaxy:relation_user_teid
#0 no-op: parse the JSON directly.
#1 check field types against the schema and perform type conversions.
log.transform.type=1
#maximum time between two outputs (milliseconds)
buffer.timeout=-1
#max rows per gtpc data scan, 0 = no limit.
hbase.gtpc.scan.max.rows=100000
#max rows per radius data scan, 0 = no limit.
hbase.radius.scan.max.rows=100000
#whether vsys_id is part of the relationship key between gtpc and radius.
#vsys or global
data.relationship.model=vsys

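The producer block above corresponds to standard Kafka producer settings. A minimal sketch of wiring these defaults into a producer config; the keys are the standard Kafka ones, and the bootstrap address is the sample value from service_flow_config.properties:

import java.util.Properties;

Properties producerProps = new Properties();
producerProps.put("bootstrap.servers", "192.168.44.12:9094"); // sample address
producerProps.put("acks", "1");                    // producer.ack
producerProps.put("retries", "0");                 // retries
producerProps.put("linger.ms", "10");              // linger.ms
producerProps.put("batch.size", "1048576");        // batch.size
producerProps.put("buffer.memory", "134217728");   // 128M
producerProps.put("max.request.size", "10485760"); // 10M
producerProps.put("compression.type", "snappy");   // producer.kafka.compression.type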
5
file_type.properties Normal file
View File

@@ -0,0 +1,5 @@
txt
html
eml
jpg
png

68
proxy_event_mapping_table.properties Normal file
View File

@@ -0,0 +1,68 @@
recv_time=common_recv_time
log_id=common_log_id
start_timestamp_ms=common_start_timestamp_ms
end_timestamp_ms=common_end_timestamp_ms
processing_time=common_processing_time
device_id=common_device_id
data_center=common_data_center
sled_ip=common_sled_ip
device_tag=common_device_tag
client_ip=common_client_ip
client_port=common_client_port
client_asn=common_client_asn
subscriber_id=common_subscriber_id
imei=common_imei
imsi=common_imsi
phone_number=common_phone_number
server_ip=common_server_ip
server_port=common_server_port
server_asn=common_server_asn
address_type=common_address_type
http_url=http_url
http_host=http_host
http_request_line=http_request_line
http_response_line=http_response_line
http_request_body=http_request_body
http_response_body=http_response_body
http_cookie=http_cookie
http_referer=http_referer
http_user_agent=http_user_agent
http_request_content_length=http_request_content_length
http_request_content_type=http_request_content_type
http_response_content_length=http_response_content_length
http_response_content_type=http_response_content_type
http_set_cookie=http_set_cookie
http_version=http_version
http_action_file_size=http_action_file_size
doh_url=doh_url
doh_host=doh_host
doh_cookie=doh_cookie
doh_referer=doh_referer
doh_user_agent=doh_user_agent
doh_version=doh_version
doh_message_id=doh_message_id
doh_qr=doh_qr
doh_opcode=doh_opcode
doh_aa=doh_aa
doh_tc=doh_tc
doh_rd=doh_rd
doh_ra=doh_ra
doh_rcode=doh_rcode
doh_qdcount=doh_qdcount
doh_ancount=doh_ancount
doh_nscount=doh_nscount
doh_arcount=doh_arcount
doh_qname=doh_qname
doh_qtype=doh_qtype
doh_qclass=doh_qclass
doh_cname=doh_cname
doh_sub=doh_sub
doh_rr=doh_rr
client_geolocation=common_client_location
server_geolocation=common_server_location
ip_protocol=common_l4_protocol
sent_bytes=common_c2s_byte_num
received_bytes=common_s2c_byte_num
decoded_as=common_schema_type
proxy_rule_list=proxy_rule_list
session_id=common_stream_trace_id

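Reading this table: the left-hand name appears to be the output (Percent-side) field and the right-hand name the session-record source field. ConvertRecordToPERCENT itself is not part of this diff, so the following rename pass is only a hedged sketch of what the table implies:

import java.util.Properties;
import com.alibaba.fastjson2.JSONObject;

// Hypothetical helper; the real ConvertRecordToPERCENT is not shown in this diff.
static JSONObject renameFields(JSONObject record, Properties mapping) {
    JSONObject out = new JSONObject();
    for (String target : mapping.stringPropertyNames()) {
        Object value = record.get(mapping.getProperty(target)); // e.g. common_recv_time
        if (value != null) {
            out.put(target, value);                             // -> recv_time
        }
    }
    return out;
}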
101
security_event_mapping_table.properties Normal file
View File

@@ -0,0 +1,101 @@
recv_time=common_recv_time
log_id=common_log_id
start_timestamp_ms=common_start_timestamp_ms
end_timestamp_ms=common_end_timestamp_ms
duration_ms=common_con_duration_ms
processing_time=common_processing_time
device_id=common_device_id
data_center=common_data_center
sled_ip=common_sled_ip
device_tag=common_device_tag
client_ip=common_client_ip
client_port=common_client_port
client_asn=common_client_asn
subscriber_id=common_subscriber_id
imei=common_imei
imsi=common_imsi
phone_number=common_phone_number
server_ip=common_server_ip
server_port=common_server_port
server_asn=common_server_asn
address_type=common_address_type
http_url=http_url
http_host=http_host
http_request_body=http_request_body
http_response_body=http_response_body
http_proxy_flag=http_proxy_flag
http_sequence=http_sequence
http_cookie=http_cookie
http_referer=http_referer
http_user_agent=http_user_agent
http_request_content_length=http_request_content_length
http_request_content_type=http_request_content_type
http_response_content_length=http_response_content_length
http_response_content_type=http_response_content_type
http_set_cookie=http_set_cookie
http_version=http_version
http_response_latency_ms=http_response_latency_ms
http_session_duration_ms=http_session_duration_ms
mail_protocol_type=mail_protocol_type
mail_account=mail_account
mail_password=mail_passwd
mail_from_cmd=mail_from_cmd
mail_to_cmd=mail_to_cmd
mail_from=mail_from
mail_to=mail_to
mail_cc=mail_cc
mail_bcc=mail_bcc
mail_subject=mail_subject
mail_subject_charset=mail_subject_charset
mail_attachment_name=mail_attachment_name
mail_attachment_name_charset=mail_attachment_name_charset
mail_eml_file=mail_eml_file
dns_message_id=dns_message_id
dns_qr=dns_qr
dns_opcode=dns_opcode
dns_aa=dns_aa
dns_tc=dns_tc
dns_rd=dns_rd
dns_ra=dns_ra
dns_rcode=dns_rcode
dns_qdcount=dns_qdcount
dns_ancount=dns_ancount
dns_nscount=dns_nscount
dns_arcount=dns_arcount
dns_qname=dns_qname
dns_qtype=dns_qtype
dns_qclass=dns_qclass
dns_cname=dns_cname
dns_sub=dns_sub
dns_rr=dns_rr
quic_version=quic_version
quic_sni=quic_sni
quic_user_agent=quic_user_agent
ftp_account=ftp_account
ftp_url=ftp_url
ftp_link_type=ftp_link_type
out_link_id=common_egress_link_id
in_link_id=common_ingress_link_id
client_geolocation=common_client_location
server_geolocation=common_server_location
app=common_app_label
ip_protocol=common_l4_protocol
sent_pkts=common_c2s_pkt_num
received_pkts=common_s2c_pkt_num
sent_bytes=common_c2s_byte_num
received_bytes=common_s2c_byte_num
tcp_client_isn=common_tcp_client_isn
tcp_server_isn=common_tcp_server_isn
decoded_as=common_schema_type
session_id=common_stream_trace_id
ssl_version=ssl_version
ssl_sni=ssl_sni
ssl_san=ssl_san
ssl_cn=ssl_cn
ssl_handshake_latency_ms=ssl_con_latency_ms
ssl_ja3_hash=ssl_ja3_hash
ssl_cert_issuer=ssl_cert_issuer
ssl_cert_subject=ssl_cert_subject
security_rule_list=security_rule_list
monitor_rule_list=monitor_rule_list
tcp_handshake_latency_ms=common_establish_latency_ms

79
service_flow_config.properties Normal file
View File

@@ -0,0 +1,79 @@
#-------------------------------- Addresses ------------------------------#
#management Kafka address
source.kafka.servers=192.168.44.12:9094
#source.kafka.servers=192.168.44.11:9094,192.168.44.14:9094,192.168.44.15:9094
#Kafka address for the Percent output
percent.sink.kafka.servers=192.168.44.12:9094
#Kafka address for the file source-data topic
sink.file.data.kafka.servers=192.168.44.12:9094
#ZooKeeper address used to allocate log_id and to connect to HBase
zookeeper.servers=192.168.44.12:2181
#HDFS address, used to fetch the geolocation databases
hdfs.servers=192.168.40.151:9000,192.168.40.152:9000
#-------------------------------- HTTP / geolocation databases ------------------------------#
#tools library path, holding key files and the like
tools.library=D:\\workerspace\\dat\\
#-------------------------------- nacos ------------------------------#
#nacos address
nacos.server=192.168.44.12:8848
#schema namespace name
nacos.schema.namespace=P19
#schema data id name
nacos.schema.data.id=proxy_event.json
#-------------------------------- Kafka consumer/producer ------------------------------#
#kafka topic the source reads from
source.kafka.topic=SESSION-RECORD
sink.percent.kafka.topic=PERCENT-RECORD
sink.file.data.kafka.topic=test-file-data
#consumer group for the source topic; offsets are stored under this id (conventionally named
#after the topology), which determines where the next read resumes without re-consuming data
group.id=session-record-log-20211114-test-2
#-------------------------------- topology ------------------------------#
#consumer parallelism
source.parallelism=1
#transform function parallelism
transform.parallelism=1
deal.file.parallelism=1
sink.file.data.parallelism=1
sink.percent.parallelism=1
#data center id, valid range 0-31
data.center.id.num=0
#hbase cache refresh interval in seconds; 0 disables cache updates
hbase.tick.tuple.freq.secs=180
#-------------------------------- defaults ------------------------------#
#producer compression mode: none or snappy
producer.kafka.compression.type=snappy
#------------------------------------ OOS ------------------------------------#
#oos address
oos.servers=10.3.45.100:8057
#prometheus-httpserver
prometheus.pushgateway.address=192.168.44.12:9091
pushgateway.statistics.time=300
deal.file.statistics.time=60
#------------------------------------ knowledge ------------------------------------#
knowledge.execution.minutes=600
knowledge.base.uri=http://192.168.44.12:9999
knowledge.base.path=/v1/knowledge_base
ip.user.defined.kd.id=004390bc-3135-4a6f-a492-3662ecb9e289
ip.builtin.kd.id=64af7077-eb9b-4b8f-80cf-2ceebc89bea9
asn.builtin.kd.id=f9f6bc91-2142-4673-8249-e097c00fe1ea
hos.url=http://192.168.44.12:9098/hos/traffic_file_bucket/

81
src/main/java/com/zdjizhi/common/CommonConfig.java Normal file
View File

@@ -0,0 +1,81 @@
package com.zdjizhi.common;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.tools.connections.nacos.NacosConnection;
import com.zdjizhi.tools.general.ConfigurationsUtils;
import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
* @author qidaijie
* @Package com.zdjizhi.common
* @Description:
* @date 2023/7/28 11:00
*/
public class CommonConfig {
private static final Log logger = LogFactory.get();
private static final String dataId = "olap_platform.properties";
private static final Properties configProperties;
public static String KNOWLEDGEBASE_TYPE_LIST;
public static String KNOWLEDGEBASE_NAME_LIST;
public static Boolean SCHEMA_UPDATE_ENABLED;
static {
configProperties = new Properties();
NacosConnection nacosConnection = new NacosConnection();
ConfigService configService = nacosConnection.getPublicService();
try {
String group = FlowWriteConfig.NACOS_PUBLIC_GROUP;
String config = configService.getConfigAndSignListener(dataId, group, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT, new Listener() {
@Override
public Executor getExecutor() {
return null;
}
@Override
public void receiveConfigInfo(String configInfo) {
if (StringUtil.isNotBlank(configInfo)) {
logger.info("Reload the common config when it changes.");
clearProperties();
updateConfigProperties(configInfo);
}
}
});
if (StringUtil.isNotBlank(config)) {
updateConfigProperties(config);
}
} catch (NacosException e) {
logger.error("Get common config from Nacos error,The exception message is :{}" + e);
}
}
private static void updateConfigProperties(String config) {
try {
configProperties.load(new StringReader(config));
KNOWLEDGEBASE_TYPE_LIST = ConfigurationsUtils.getStringProperty(configProperties, "etl.knowledgebase.type.list");
KNOWLEDGEBASE_NAME_LIST = ConfigurationsUtils.getStringProperty(configProperties, "etl.knowledgebase.name.list");
SCHEMA_UPDATE_ENABLED = ConfigurationsUtils.getBooleanProperty(configProperties, "etl.schema.update.enabled");
} catch (IOException e) {
logger.error("IOException:{}", e);
}
}
private static void clearProperties() {
configProperties.clear();
}
}

178
src/main/java/com/zdjizhi/common/FlowWriteConfig.java Normal file
View File

@@ -0,0 +1,178 @@
package com.zdjizhi.common;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.tools.general.ConfigurationsUtils;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
* @author Administrator
*/
public class FlowWriteConfig {
private static final Log logger = LogFactory.get();
private static final StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
private static final Properties propDefault = new Properties();
private static final Properties propService = new Properties();
private static Properties propfiletype = new Properties();
private static Map<String, String> fileTypeMap;
static {
encryptor.setPassword("galaxy");
try {
propService.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
propDefault.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("default_config.properties"));
propfiletype.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("file_type.properties"));
fileTypeMap = new HashMap<String, String>((Map) propfiletype);
} catch (IOException | RuntimeException e) {
logger.error("Loading the configuration file Failed!,The error message is:{}" + e);
System.exit(0);
}
}
public static boolean judgeFileType(String filetype){
return fileTypeMap.containsKey(filetype);
}
/**
* 默认的文件系统标识
*/
public static final String FILE_SYSTEM_TYPE = "hdfs";
/**
* 默认的切分符号
*/
public static final String FORMAT_SPLITTER = ",";
public static final String GTPC_FAMILY_NAME = "gtp";
public static final String RADIUS_FAMILY_NAME = "radius";
public static final String DEFAULT_RELATIONSHIP_MODULE = "vsys";
/**
* Nacos
*/
public static final String NACOS_SERVER = ConfigurationsUtils.getStringProperty(propService, "nacos.server");
public static final String NACOS_USERNAME = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "nacos.username"));
public static final String NACOS_PIN = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "nacos.pin"));
public static final Integer NACOS_CONNECTION_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "nacos.connection.timeout");
public static final String NACOS_SCHEMA_NAMESPACE = ConfigurationsUtils.getStringProperty(propService, "nacos.schema.namespace");
public static final String NACOS_SCHEMA_DATA_ID = ConfigurationsUtils.getStringProperty(propService, "nacos.schema.data.id");
public static final String NACOS_SCHEMA_GROUP = ConfigurationsUtils.getStringProperty(propDefault, "nacos.schema.group");
public static final String NACOS_PUBLIC_NAMESPACE = ConfigurationsUtils.getStringProperty(propDefault, "nacos.public.namespace");
public static final String NACOS_PUBLIC_GROUP = ConfigurationsUtils.getStringProperty(propDefault, "nacos.public.group");
public static final String NACOS_KNOWLEDGEBASE_DATA_ID = ConfigurationsUtils.getStringProperty(propDefault, "nacos.knowledgebase.data.id");
/**
* System config
*/
public static final Integer SOURCE_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "source.parallelism");
public static final Integer TRANSFORM_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "transform.parallelism");
public static final Integer DATA_CENTER_ID_NUM = ConfigurationsUtils.getIntProperty(propService, "data.center.id.num");
public static final Integer LOG_TRANSFORM_TYPE = ConfigurationsUtils.getIntProperty(propDefault, "log.transform.type");
public static final String DATA_RELATIONSHIP_MODEL = ConfigurationsUtils.getStringProperty(propDefault, "data.relationship.model");
public static final Integer BUFFER_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "buffer.timeout");
public static final Integer DEAL_FILE_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "deal.file.parallelism");
public static final Integer SINK_FILE_DATA_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "sink.file.data.parallelism");
public static final Integer SINK_PERCENT_PARALLELISM = ConfigurationsUtils.getIntProperty(propService, "sink.percent.parallelism");
/**
* HBase
*/
public static final Integer HBASE_TICK_TUPLE_FREQ_SECS = ConfigurationsUtils.getIntProperty(propService, "hbase.tick.tuple.freq.secs");
public static final Integer HBASE_GTPC_SCAN_MAX_ROWS = ConfigurationsUtils.getIntProperty(propDefault, "hbase.gtpc.scan.max.rows");
public static final Integer HBASE_RADIUS_SCAN_MAX_ROWS = ConfigurationsUtils.getIntProperty(propDefault, "hbase.radius.scan.max.rows");
public static final String HBASE_RADIUS_TABLE_NAME = ConfigurationsUtils.getStringProperty(propDefault, "hbase.radius.table.name");
public static final String HBASE_GTPC_TABLE_NAME = ConfigurationsUtils.getStringProperty(propDefault, "hbase.gtpc.table.name");
public static final String HBASE_RPC_TIMEOUT = ConfigurationsUtils.getStringProperty(propDefault, "hbase.rpc.timeout");
/**
* HDFS
*/
public static final String HDFS_SERVERS = ConfigurationsUtils.getStringProperty(propService, "hdfs.servers");
/**
* HTTP
*/
public static final Integer HTTP_POOL_MAX_CONNECTION = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.max.connection");
public static final Integer HTTP_POOL_MAX_PER_ROUTE = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.max.per.route");
public static final Integer HTTP_POOL_REQUEST_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.request.timeout");
public static final Integer HTTP_POOL_CONNECT_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "http.pool.connect.timeout");
public static final Integer HTTP_SOCKET_TIMEOUT = ConfigurationsUtils.getIntProperty(propDefault, "http.socket.timeout");
/**
* kafka common
*/
public static final String KAFKA_SASL_JAAS_USER = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "kafka.user"));
public static final String KAFKA_SASL_JAAS_PIN = encryptor.decrypt(ConfigurationsUtils.getStringProperty(propDefault, "kafka.pin"));
/**
* kafka source config
*/
public static final String SOURCE_KAFKA_TOPIC = ConfigurationsUtils.getStringProperty(propService, "source.kafka.topic");
public static final String GROUP_ID = ConfigurationsUtils.getStringProperty(propService, "group.id");
public static final String SESSION_TIMEOUT_MS = ConfigurationsUtils.getStringProperty(propDefault, "session.timeout.ms");
public static final String MAX_POLL_RECORDS = ConfigurationsUtils.getStringProperty(propDefault, "max.poll.records");
public static final String MAX_PARTITION_FETCH_BYTES = ConfigurationsUtils.getStringProperty(propDefault, "max.partition.fetch.bytes");
/**
* kafka sink config
*/
public static final String PRODUCER_ACK = ConfigurationsUtils.getStringProperty(propDefault, "producer.ack");
public static final String PRODUCER_KAFKA_COMPRESSION_TYPE = ConfigurationsUtils.getStringProperty(propService, "producer.kafka.compression.type");
public static final String PERCENT_SINK_KAFKA_SERVERS = ConfigurationsUtils.getStringProperty(propService,"percent.sink.kafka.servers");
public static final String SINK_FILE_DATA_KAFKA_SERVERS = ConfigurationsUtils.getStringProperty(propService,"sink.file.data.kafka.servers");
public static final String SINK_PERCENT_KAFKA_TOPIC = ConfigurationsUtils.getStringProperty(propService, "sink.percent.kafka.topic");
public static final String SINK_FILE_DATA_SINK_KAFKA_TOPIC = ConfigurationsUtils.getStringProperty(propService, "sink.file.data.kafka.topic");
/**
* connection kafka
*/
public static final String RETRIES = ConfigurationsUtils.getStringProperty(propDefault, "retries");
public static final String LINGER_MS = ConfigurationsUtils.getStringProperty(propDefault, "linger.ms");
public static final Integer REQUEST_TIMEOUT_MS = ConfigurationsUtils.getIntProperty(propDefault, "request.timeout.ms");
public static final Integer BATCH_SIZE = ConfigurationsUtils.getIntProperty(propDefault, "batch.size");
public static final Integer BUFFER_MEMORY = ConfigurationsUtils.getIntProperty(propDefault, "buffer.memory");
public static final Integer MAX_REQUEST_SIZE = ConfigurationsUtils.getIntProperty(propDefault, "max.request.size");
/**
* common config
*/
public static final String SOURCE_KAFKA_SERVERS = ConfigurationsUtils.getStringProperty(propService, "source.kafka.servers");
public static final String ZOOKEEPER_SERVERS = ConfigurationsUtils.getStringProperty(propService, "zookeeper.servers");
public static final String TOOLS_LIBRARY = ConfigurationsUtils.getStringProperty(propService, "tools.library");
/**
* OOS config
*/
public static final String OOS_SERVERS = ConfigurationsUtils.getStringProperty(propService, "oos.servers");
public static final String PROMETHEUS_PUSHGATEWAY_ADDRESS = ConfigurationsUtils.getStringProperty(propService, "prometheus.pushgateway.address");
public static final Integer PUSHGATEWAY_STATISTICS_TIME = ConfigurationsUtils.getIntProperty(propService, "pushgateway.statistics.time");
public static final Integer DEAL_FILE_STATISTICS_TIME = ConfigurationsUtils.getIntProperty(propService, "deal.file.statistics.time");
public static final String KNOWLEDGE_BASE_URL = ConfigurationsUtils.getStringProperty(propService, "knowledge.base.uri");
public static final String KNOWLEDGE_BASE_PATH = ConfigurationsUtils.getStringProperty(propService, "knowledge.base.path");
public static final String ASN_BUILTIN_KD_ID = ConfigurationsUtils.getStringProperty(propService, "asn.builtin.kd.id");
public static final String IP_BUILTIN_KD_ID = ConfigurationsUtils.getStringProperty(propService, "ip.builtin.kd.id");
public static final String IP_USER_DEFINED_KD_ID = ConfigurationsUtils.getStringProperty(propService, "ip.user.defined.kd.id");
public static final String HOS_URL = ConfigurationsUtils.getStringProperty(propService, "hos.url");
public static final Long KNOWLEDGE_EXECUTION_MINUTES = ConfigurationsUtils.getLongProperty(propService,"knowledge.execution.minutes");
}

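The encrypted values in default_config.properties (kafka.user, kafka.pin, nacos.username, nacos.pin) are decrypted above by a StandardPBEStringEncryptor keyed with "galaxy". A minimal sketch of how such a value could be produced; the plaintext here is purely illustrative:

import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;

StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
encryptor.setPassword("galaxy");                   // must match the password set in FlowWriteConfig
String cipherText = encryptor.encrypt("admin");    // hypothetical plaintext
String plainText = encryptor.decrypt(cipherText);  // round-trips to "admin"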
52
src/main/java/com/zdjizhi/common/pojo/FileMeta.java Normal file
View File

@@ -0,0 +1,52 @@
package com.zdjizhi.common.pojo;
import com.alibaba.fastjson2.JSONArray;
public class FileMeta {
private long common_log_id;
protected int common_recv_time;
private String common_schema_type;
private JSONArray source_list;
private int processing_time;
public long getCommon_log_id() {
return common_log_id;
}
public void setCommon_log_id(long common_log_id) {
this.common_log_id = common_log_id;
}
public int getCommon_recv_time() {
return common_recv_time;
}
public void setCommon_recv_time(int common_recv_time) {
this.common_recv_time = common_recv_time;
}
public String getCommon_schema_type() {
return common_schema_type;
}
public void setCommon_schema_type(String common_schema_type) {
this.common_schema_type = common_schema_type;
}
public JSONArray getSource_list() {
return source_list;
}
public void setSource_list(JSONArray source_list) {
this.source_list = source_list;
}
public int getProcessing_time() {
return processing_time;
}
public void setProcessing_time(int processing_time) {
this.processing_time = processing_time;
}
}

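FileMeta is serialized with fastjson2 in DealFileProcessFunction (later in this diff); fastjson2 derives the JSON keys from the getters, so the snake_case names survive as-is. A small sketch with illustrative values:

import com.alibaba.fastjson2.JSONObject;

FileMeta meta = new FileMeta();
meta.setCommon_log_id(123456789L);
meta.setCommon_recv_time(1695888000); // illustrative epoch seconds
meta.setCommon_schema_type("proxy_event");
meta.setProcessing_time((int) (System.currentTimeMillis() / 1000));
// -> {"common_log_id":123456789,"common_recv_time":1695888000,"common_schema_type":"proxy_event",...}
String json = JSONObject.toJSONString(meta);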
75
src/main/java/com/zdjizhi/common/pojo/KnowlegeBaseMeta.java Normal file
View File

@@ -0,0 +1,75 @@
package com.zdjizhi.common.pojo;
/**
* @author qidaijie
* @Package com.zdjizhi.common.pojo
* @Description:
* @date 2023/5/20 11:18
*/
public class KnowlegeBaseMeta {
private String id;
private String name;
private String sha256;
private String format;
private String path;
public KnowlegeBaseMeta(String id, String name, String sha256, String format, String path) {
this.id = id;
this.name = name;
this.sha256 = sha256;
this.format = format;
this.path = path;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getSha256() {
return sha256;
}
public void setSha256(String sha256) {
this.sha256 = sha256;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
@Override
public String toString() {
return "KnowlegeBaseMeta{" +
"id='" + id + '\'' +
", name='" + name + '\'' +
", sha256='" + sha256 + '\'' +
", format='" + format + '\'' +
", path='" + path + '\'' +
'}';
}
}

22
src/main/java/com/zdjizhi/common/pojo/SourceList.java Normal file
View File

@@ -0,0 +1,22 @@
package com.zdjizhi.common.pojo;
public class SourceList {
private String destination_oss_path;
private String source_oss_path;
public String getDestination_oss_path() {
return destination_oss_path;
}
public void setDestination_oss_path(String destination_oss_path) {
this.destination_oss_path = destination_oss_path;
}
public String getSource_oss_path() {
return source_oss_path;
}
public void setSource_oss_path(String source_oss_path) {
this.source_oss_path = source_oss_path;
}
}

97
src/main/java/com/zdjizhi/operator/count/SendCountProcess.java Normal file
View File

@@ -0,0 +1,97 @@
package com.zdjizhi.operator.count;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Gauge;
import io.prometheus.client.exporter.PushGateway;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import java.io.IOException;
import java.util.Timer;
import java.util.TimerTask;
public class SendCountProcess extends ProcessFunction<Tuple5<Long, Long, Long, Long, Long>, String> {
private static final Log logger = LogFactory.get();
private long recordCount = 0L;
private long failedCount = 0L;
private long httpRequestCount = 0L;
private long httpResponseCount = 0L;
private long mailEmlCount = 0L;
static final Gauge recordCountsGauge = Gauge.build()
.name("recordCount").labelNames("ServerName", "Duration").help("Input log records in the reporting window").register();
static final Gauge failedCountGauge = Gauge.build()
.name("failedCount").labelNames("ServerName", "Duration").help("Records that failed processing in the reporting window").register();
static final Gauge httpRequestCountGauge = Gauge.build()
.name("httpRequestCount").labelNames("ServerName", "Duration").help("HTTP request bodies handled in the reporting window").register();
static final Gauge httpResponseCountGauge = Gauge.build()
.name("httpResponseCount").labelNames("ServerName", "Duration").help("HTTP response bodies handled in the reporting window").register();
static final Gauge mailEmlCountGauge = Gauge.build()
.name("mailEmlCount").labelNames("ServerName", "Duration").help("Mail EML files handled in the reporting window").register();
@Override
public void open(Configuration parameters) {
Timer timer = new Timer();
//Minute-level metrics
timer.schedule(new TimerTask() {
@Override
public void run() {
if (recordCount > 0 || failedCount > 0 || httpRequestCount > 0 || httpResponseCount > 0 || mailEmlCount > 0) {
recordCountsGauge.labels("recordCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(recordCount);
failedCountGauge.labels("failedCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(failedCount);
httpRequestCountGauge.labels("httpRequestCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(httpRequestCount);
httpResponseCountGauge.labels("httpResponseCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(httpResponseCount);
mailEmlCountGauge.labels("mailEmlCount", String.valueOf(FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME / 60)).set(mailEmlCount);
try {
//Push the metrics to the pushgateway
push();
} catch (IOException e) {
e.printStackTrace();
}
recordCount = 0;
failedCount = 0;
httpRequestCount = 0;
httpResponseCount = 0;
mailEmlCount = 0;
}
}
}, 0, FlowWriteConfig.PUSHGATEWAY_STATISTICS_TIME * 1000);
}
@Override
public void processElement(Tuple5<Long, Long, Long, Long, Long> value, Context ctx, Collector<String> out) {
try {
recordCount = recordCount + value.f0;
failedCount = failedCount + value.f1;
httpRequestCount = httpRequestCount + value.f2;
httpResponseCount = httpResponseCount + value.f3;
mailEmlCount = mailEmlCount + value.f4;
} catch (Exception e) {
logger.error("统计指标处理失败,原因为" + e);
}
}
public void push() throws IOException {
CollectorRegistry registry = CollectorRegistry.defaultRegistry;
PushGateway pg = new PushGateway(FlowWriteConfig.PROMETHEUS_PUSHGATEWAY_ADDRESS);
pg.push(registry, FlowWriteConfig.SOURCE_KAFKA_TOPIC);
}
}

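push() above replaces all metrics stored for the job on the pushgateway; the job name is the source Kafka topic. A standalone equivalent, where the address and topic are the sample values from service_flow_config.properties:

import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.exporter.PushGateway;

PushGateway pg = new PushGateway("192.168.44.12:9091");
// push() replaces the job's metric group; pushAdd() would merge with existing metrics instead
pg.push(CollectorRegistry.defaultRegistry, "SESSION-RECORD");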
15
src/main/java/com/zdjizhi/operator/filter/FilterNull.java Normal file
View File

@@ -0,0 +1,15 @@
package com.zdjizhi.operator.filter;
import com.geedgenetworks.utils.StringUtil;
import org.apache.flink.api.common.functions.FilterFunction;
/**
* @author qidaijie
* @version 2021/5/27 15:01
*/
public class FilterNull implements FilterFunction<String> {
@Override
public boolean filter(String message) {
return StringUtil.isNotBlank(message);
}
}

65
src/main/java/com/zdjizhi/operator/map/MapCompleted.java Normal file
View File

@@ -0,0 +1,65 @@
package com.zdjizhi.operator.map;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.general.ConfigurationsUtils;
import com.zdjizhi.tools.logtransformation.ConvertRecordToPERCENT;
import com.zdjizhi.tools.transform.TransForm;
import com.zdjizhi.tools.json.MetaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import java.util.Properties;
/**
* @author qidaijie
* @version 2021/5/27 15:01
*/
public class MapCompleted extends ProcessFunction<String, com.alibaba.fastjson2.JSONObject> {
private static final Log logger = LogFactory.get();
private ConvertRecordToPERCENT securityEventConvert;
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
try {
Properties securityProp = new Properties();
securityProp.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("security_event_mapping_table.properties"));
securityEventConvert = new ConvertRecordToPERCENT(securityProp);
logger.info("security_event log schema loaded successfully");
} catch (Exception e) {
logger.error("Failed to load the security_event log schema: " + e);
}
}
@Override
public void processElement(String message, ProcessFunction<String, com.alibaba.fastjson2.JSONObject>.Context ctx, Collector<com.alibaba.fastjson2.JSONObject> out) {
try {
JSONObject record = JSONObject.parseObject(message);
JSONObject jsonObject = null;
if (record.containsKey("security_rule_list") || record.containsKey("monitor_rule_list")) {
jsonObject = securityEventConvert.convertToPERCENT(record);
}
if (jsonObject != null) {
jsonObject.put("common_ingestion_time", ctx.timestamp() / 1000);
MetaUtil.dropJsonField(jsonObject);
TransForm.transformLog(jsonObject);
out.collect(jsonObject);
}
} catch (RuntimeException e) {
logger.error("TransForm log failed ( The field type is not verified ),The exception is :{}\n The error Message is:{}", e, message);
}
}
}

110
src/main/java/com/zdjizhi/operator/map/TypeMapCompleted.java Normal file
View File

@@ -0,0 +1,110 @@
package com.zdjizhi.operator.map;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.general.ConfigurationsUtils;
import com.zdjizhi.tools.json.MetaUtil;
import com.zdjizhi.tools.logtransformation.ConvertRecordToPERCENT;
import com.zdjizhi.tools.transform.TransForm;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import java.util.HashMap;
import java.util.Properties;
/**
* @author qidaijie
* @version 2021/5/27 15:01
*/
public class TypeMapCompleted extends ProcessFunction<String, JSONObject> {
private static final Log logger = LogFactory.get();
private ConvertRecordToPERCENT convertRecordToPERCENT;
Properties prop = new Properties();
Properties actionProp = new Properties();
private HashMap<String, Integer> actionMap = new HashMap<String, Integer>();
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
try {
if (FlowWriteConfig.NACOS_SCHEMA_DATA_ID.equals("security_event.json")) {
prop.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("security_event_mapping_table.properties"));
} else if (FlowWriteConfig.NACOS_SCHEMA_DATA_ID.equals("proxy_event.json")) {
prop.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("proxy_event_mapping_table.properties"));
}
convertRecordToPERCENT = new ConvertRecordToPERCENT(prop);
logger.info(FlowWriteConfig.NACOS_SCHEMA_DATA_ID + " log schema loaded successfully");
try {
actionProp.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("action_definition.properties"));
for (String key : actionProp.stringPropertyNames()) {
final String action = actionProp.getProperty(key);
actionMap.put(key, Integer.valueOf(action));
}
logger.info("action_definition.properties loaded successfully");
} catch (Exception e) {
logger.error("Failed to load action_definition.properties: " + e);
}
} catch (Exception e) {
logger.error("Failed to load the " + FlowWriteConfig.NACOS_SCHEMA_DATA_ID + " log schema: " + e);
}
}
@Override
public void processElement(String message, ProcessFunction<String, JSONObject>.Context ctx, Collector<JSONObject> out) {
try {
JSONObject record = JSONObject.parseObject(message);
JSONObject jsonObject = null;
if (FlowWriteConfig.NACOS_SCHEMA_DATA_ID.equals("security_event.json")) {
if (record.containsKey("security_rule_list") || record.containsKey("monitor_rule_list")) {
jsonObject = convertRecordToPERCENT.convertToPERCENT(record);
}
}
if (FlowWriteConfig.NACOS_SCHEMA_DATA_ID.equals("proxy_event.json")) {
if (record.containsKey("proxy_rule_list")) {
jsonObject = convertRecordToPERCENT.convertToPERCENT(record);
}
}
if (jsonObject != null) {
if (record.containsKey("security_rule_list")) {
jsonObject.put("common_policy_id", JSONArray.from(record.get("security_rule_list")).get(0));
jsonObject.put("common_action", actionMap.get(record.get("security_action").toString().replace(" ", "")));
}
if (record.containsKey("monitor_rule_list")) {
jsonObject.put("common_policy_id", JSONArray.from(record.get("monitor_rule_list")).get(0));
jsonObject.put("common_action", 1);
}
if (record.containsKey("proxy_rule_list")) {
jsonObject.put("common_policy_id", JSONArray.from(record.get("proxy_rule_list")).get(0));
jsonObject.put("common_action", actionMap.get(record.get("proxy_action").toString().replace(" ", "")));
if ((int) jsonObject.get("common_action") == 48) {
jsonObject.put("common_sub_action", record.get("proxy_action"));
}
}
jsonObject.put("common_ingestion_time", ctx.timestamp() / 1000);
TransForm.transformLog(jsonObject);
MetaUtil.typeTransform(jsonObject);
out.collect(jsonObject);
}
} catch (RuntimeException e) {
logger.error("TransForm logs failed( The field type is verified ),The exception is :{}\n The error Message is:{}", e, message);
}
}
}

153
src/main/java/com/zdjizhi/operator/process/DealFileProcessFunction.java Normal file
View File

@@ -0,0 +1,153 @@
package com.zdjizhi.operator.process;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.pojo.FileMeta;
import com.zdjizhi.common.pojo.SourceList;
import com.zdjizhi.tools.general.FileEdit;
import org.apache.flink.api.java.tuple.Tuple5;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import java.util.Timer;
import java.util.TimerTask;
/**
* @author wangchengcheng
* @Package com.zdjizhi.utils.functions
* @Description:
* @date 2023/09/28
*/
public class DealFileProcessFunction extends ProcessFunction<JSONObject, String> {
private static final Log logger = LogFactory.get();
public static final OutputTag<Tuple5<Long, Long, Long, Long, Long>> dealFileMetircTag = new OutputTag<Tuple5<Long, Long, Long, Long, Long>>("DealFileMetircTag") {
};
private String rpUrlValue;
private String rqUrlValue;
private String emailUrlValue;
private long cfgId = 0; // common_policy_id
private String sIp = null; // common_client_ip
private int sPort = 0; // common_client_port
private String dIp = null; // common_server_ip
private int dPort = 0; // common_server_port
private long foundTime = 0; // common_recv_time
private String account = null;
private String domain = null;
private String schemaType = null;
private long recordCount = 0L;
private long failedCount = 0L;
private long httpRequestCount = 0L;
private long httpResponseCount = 0L;
private long mailEmlCount = 0L;
private boolean metricSendFlag = true;
//Output tag for the side-output stream of file metadata to Kafka
public static OutputTag<String> metaToKafa = new OutputTag<String>("metaToKafka") {
};
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
Timer timer = new Timer();
//Register a timer that periodically re-arms the metric-send flag
timer.schedule(new TimerTask() {
@Override
public void run() {
if (!metricSendFlag) {
metricSendFlag = true;
}
}
}, 0, FlowWriteConfig.DEAL_FILE_STATISTICS_TIME * 1000);
}
@SuppressWarnings("unchecked")
@Override
public void processElement(JSONObject message, Context context, Collector<String> collector) throws Exception {
try {
//Periodically emit the accumulated metrics downstream
if (metricSendFlag) {
metricSendFlag = false;
if (recordCount > 0 || failedCount > 0 || httpRequestCount > 0 || httpResponseCount > 0 || mailEmlCount > 0) {
context.output(dealFileMetircTag, Tuple5.of(recordCount, failedCount, httpRequestCount, httpResponseCount, mailEmlCount));
recordCount = 0L;
failedCount = 0L;
httpRequestCount = 0;
httpResponseCount = 0;
mailEmlCount = 0L;
}
}
recordCount++;
if (message.size() > 0) {
rpUrlValue = (String) message.get("http_response_body");
rqUrlValue = (String) message.get("http_request_body");
emailUrlValue = (String) message.get("mail_eml_file");
if (StringUtil.isNotBlank(rpUrlValue) || StringUtil.isNotBlank(rqUrlValue) || StringUtil.isNotBlank(emailUrlValue)) {
cfgId = (long) message.getOrDefault("common_policy_id", 0L);
sIp = (String) message.get("common_client_ip");
sPort = (int) message.get("common_client_port");
dIp = (String) message.get("common_server_ip");
dPort = (int) message.get("common_server_port");
foundTime = (long) message.get("common_recv_time");
schemaType = (String) message.get("common_schema_type");
domain = (String) message.getOrDefault("common_server_domain", "");
account = (String) message.getOrDefault("common_subscribe_id", "");
FileMeta fileMeta = new FileMeta();
JSONArray jsonarray = new JSONArray();
if (StringUtil.isNotBlank(rqUrlValue)) {
String fileId = FileEdit.getFileId(rqUrlValue, "_1");
message.put("http_request_body", FileEdit.getFileDownloadUrl(fileId));
SourceList request = new SourceList();
request.setSource_oss_path(FlowWriteConfig.HOS_URL+rqUrlValue);
request.setDestination_oss_path(FileEdit.getFileUploadUrl(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, schemaType, fileId));
jsonarray.add(request);
httpRequestCount++;
}
if (StringUtil.isNotBlank(rpUrlValue)) {
String fileId = FileEdit.getFileId(rpUrlValue, "_2");
message.put("http_response_body", FileEdit.getFileDownloadUrl(fileId));
SourceList response = new SourceList();
response.setSource_oss_path(FlowWriteConfig.HOS_URL+rpUrlValue);
response.setDestination_oss_path(FileEdit.getFileUploadUrl(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, schemaType, fileId));
jsonarray.add(response);
httpResponseCount++;
}
if (StringUtil.isNotBlank(emailUrlValue)) {
String fileId = FileEdit.getFileId(emailUrlValue, "_9");
message.put("mail_eml_file", FileEdit.getFileDownloadUrl(fileId));
SourceList emailFile = new SourceList();
emailFile.setSource_oss_path(FlowWriteConfig.HOS_URL+emailUrlValue);
emailFile.setDestination_oss_path(FileEdit.getFileUploadUrl(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, schemaType, fileId));
jsonarray.add(emailFile);
mailEmlCount++;
}
fileMeta.setSource_list(jsonarray);
fileMeta.setCommon_log_id((long) message.get("common_log_id"));
fileMeta.setCommon_recv_time(Integer.parseInt(message.get("common_recv_time").toString()));
fileMeta.setCommon_schema_type((String) message.get("common_schema_type"));
fileMeta.setProcessing_time((int) (System.currentTimeMillis() / 1000));
context.output(metaToKafa, JSONObject.toJSONString(fileMeta));
}
collector.collect(JSONObject.toJSONString(message));
}
} catch (RuntimeException e) {
logger.error("处理带有非结构结构化字段的日志出错:" + e + "\n" + message);
failedCount++;
}
}
}

130
src/main/java/com/zdjizhi/tools/connections/hadoop/HadoopUtils.java Normal file
View File

@@ -0,0 +1,130 @@
package com.zdjizhi.tools.connections.hadoop;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
/**
* @author qidaijie
* @version 2022/11/2 17:57
*/
public class HadoopUtils {
private static final Log logger = LogFactory.get();
private static HadoopUtils hadoopUtils;
private static FileSystem fileSystem;
private static void getInstance() {
hadoopUtils = new HadoopUtils();
}
/**
* Constructor
*/
private HadoopUtils() {
//Open the connection
getConnection();
}
private static void getConnection() {
Configuration configuration = new Configuration();
try {
//Specify the HDFS user
System.setProperty("HADOOP_USER_NAME", "etl");
//Configure HDFS-related settings
configuration.set("fs.defaultFS", "hdfs://ns1");
configuration.set("hadoop.proxyuser.root.hosts", "*");
configuration.set("hadoop.proxyuser.root.groups", "*");
configuration.set("dfs.nameservices", "ns1");
configuration.set("dfs.ha.namenodes.ns1", "nn1,nn2");
String[] servers = StringUtil.split(FlowWriteConfig.HDFS_SERVERS, FlowWriteConfig.FORMAT_SPLITTER);
configuration.set("dfs.namenode.rpc-address.ns1.nn1", servers[0]);
configuration.set("dfs.namenode.rpc-address.ns1.nn2", servers[1]);
configuration.set("dfs.client.failover.proxy.provider.ns1", "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
//Create the FileSystem used to connect to HDFS
fileSystem = FileSystem.get(configuration);
} catch (IOException | RuntimeException e) {
logger.error("Failed to create HDFS connection. message is: " + e.getMessage());
e.printStackTrace();
}
}
/**
* Download a file from HDFS
*
* @param filePath file path
* @return file contents, or null on error
*/
public static byte[] downloadFileByBytes(String filePath) {
if (hadoopUtils == null) {
getInstance();
}
Path path = new Path(filePath);
try (FSDataInputStream open = fileSystem.open(path)) {
//available() is not a reliable file length; size the buffer from the file status and read fully
byte[] bytes = new byte[(int) fileSystem.getFileStatus(path).getLen()];
open.readFully(0, bytes);
return bytes;
} catch (IOException e) {
logger.error("An I/O exception when files are download from HDFS. Message is :" + e.getMessage());
}
return null;
}
/**
* Upload a file to HDFS
*
* @param filePath file path
* @param bytes file contents
*/
public static void uploadFileByBytes(String filePath, byte[] bytes) {
if (hadoopUtils == null) {
getInstance();
}
try (FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path(filePath), true)) {
fsDataOutputStream.write(bytes);
// fsDataOutputStream.flush();
} catch (RuntimeException e) {
logger.error("Uploading files to the HDFS is abnormal. Message is :" + e.getMessage());
} catch (IOException e) {
logger.error("An I/O exception when files are uploaded to HDFS. Message is :" + e.getMessage());
}
}
}

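Both helpers lazily initialize a single shared FileSystem via getInstance(). A hedged usage sketch; the paths are hypothetical:

// Hypothetical geolocation database path under the tools library
byte[] dat = HadoopUtils.downloadFileByBytes("/galaxy/dat/ip_builtin.dat");
if (dat != null) {
    HadoopUtils.uploadFileByBytes("/galaxy/dat/ip_builtin.bak", dat); // hypothetical backup path
}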
192
src/main/java/com/zdjizhi/tools/connections/hbase/GtpCRelation.java Normal file
View File

@@ -0,0 +1,192 @@
package com.zdjizhi.tools.connections.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* @author qidaijie
* @version 2022/7/15 10:12
*/
class GtpCRelation {
private static final Log logger = LogFactory.get();
/**
* Fetch the full set of GTP-C relations
*/
static void getAllGtpCRelation(Connection connection, Map<String, HashMap<String, Object>> gtpcMap) {
long begin = System.currentTimeMillis();
ResultScanner scanner = null;
try {
Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
Scan scan = new Scan();
if (FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = GtpCRelation.getMsgType(result);
if (acctStatusType == 1) {
String upLinkTeid = HBaseUtils.getTeid(result, "uplink_teid");
String downLinkTeid = HBaseUtils.getTeid(result, "downlink_teid");
String phoneNumber = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
String imsi = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
String imei = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
Long lastUpdateTime = HBaseUtils.getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");
HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
updateCache(gtpcMap, upLinkTeid+vsysId, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid+vsysId, buildUserData, lastUpdateTime);
} else {
updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
}
}
}
logger.warn("The obtain the number of GTP-C relationships : " + gtpcMap.size());
logger.warn("The time spent to obtain GTP-C relationships : " + (System.currentTimeMillis() - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("The relationship between USER and TEID obtained from HBase is abnormal! message is :" + e);
} finally {
if (scanner != null) {
scanner.close();
}
}
}
/**
* Incrementally update the GTP-C relations
*
* @param connection HBase connection
* @param gtpcMap GTP-C relation cache
* @param startTime start time
* @param endTime end time
*/
static void upgradeGtpCRelation(Connection connection, Map<String, HashMap<String, Object>> gtpcMap, Long startTime, Long endTime) {
Long begin = System.currentTimeMillis();
Table table = null;
ResultScanner scanner = null;
Scan scan = new Scan();
try {
table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
scan.setTimeRange(startTime, endTime);
if (FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = GtpCRelation.getMsgType(result);
String upLinkTeid = HBaseUtils.getTeid(result, "uplink_teid");
String downLinkTeid = HBaseUtils.getTeid(result, "downlink_teid");
if (acctStatusType == 1) {
String phoneNumber = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
String imsi = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
String imei = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
Long lastUpdateTime = HBaseUtils.getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");
HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
updateCache(gtpcMap, upLinkTeid+vsysId, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid+vsysId, buildUserData, lastUpdateTime);
} else {
updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
}
} else if (acctStatusType == 2) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
gtpcMap.remove(upLinkTeid+vsysId);
gtpcMap.remove(downLinkTeid+vsysId);
} else {
gtpcMap.remove(upLinkTeid);
gtpcMap.remove(downLinkTeid);
}
}
}
Long end = System.currentTimeMillis();
logger.warn("The current number of GTPC relationships is: " + gtpcMap.keySet().size());
logger.warn("The time used to update the GTPC relationship is: " + (end - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("GTPC relationship update exception, the content is:" + e);
} finally {
if (scanner != null) {
scanner.close();
}
if (table != null) {
try {
table.close();
} catch (IOException e) {
logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
}
/**
 * Get the online/offline status of the current user.
 *
 * @param result row fetched from HBase
 * @return msg_type status: 1 = online, 2 = offline (0 if the column is absent)
 */
private static int getMsgType(Result result) {
boolean hasType = result.containsColumn(Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME), Bytes.toBytes("msg_type"));
if (hasType) {
return Bytes.toInt(result.getValue(Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME), Bytes.toBytes("msg_type")));
} else {
return 0;
}
}
/**
 * Build the user info map.
 *
 * @param phoneNumber    phone number
 * @param imsi           subscriber identity (IMSI)
 * @param imei           device identity (IMEI)
 * @param lastUpdateTime last update time of this record
 * @return user info map
 */
private static HashMap<String, Object> buildUserData(String phoneNumber, String imsi, String imei, Long lastUpdateTime) {
HashMap<String, Object> tmpMap = new HashMap<>(4);
tmpMap.put("common_phone_number", phoneNumber);
tmpMap.put("common_imsi", imsi);
tmpMap.put("common_imei", imei);
tmpMap.put("last_update_time", lastUpdateTime);
return tmpMap;
}
/**
 * Compare the new record's timestamp with the cached one; if the new record
 * is newer, replace the cache entry.
 *
 * @param gtpcMap        relationship cache
 * @param key            uplink/downlink TEID (optionally suffixed with vsys_id)
 * @param userData       user info fetched from HBase
 * @param lastUpdateTime last update time of this record
 */
private static void updateCache(Map<String, HashMap<String, Object>> gtpcMap, String key, HashMap<String, Object> userData, Long lastUpdateTime) {
if (StringUtil.isNotBlank(key)) {
if (gtpcMap.containsKey(key)) {
Long oldUpdateTime = Long.parseLong(gtpcMap.get(key).get("last_update_time").toString());
if (lastUpdateTime > oldUpdateTime) {
gtpcMap.put(key, userData);
}
} else {
gtpcMap.put(key, userData);
}
}
}
}

View File

@@ -0,0 +1,216 @@
package com.zdjizhi.tools.connections.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * HBase utility class: caches RADIUS and GTP-C relationships and refreshes them periodically.
 *
 * @author qidaijie
 */
public class HBaseUtils {
private static final Log logger = LogFactory.get();
private static Map<String, String> radiusMap = new ConcurrentHashMap<>(16);
private static Map<String, HashMap<String, Object>> gtpcMap = new ConcurrentHashMap<>(16);
private static Connection connection;
private static Long time;
private static HBaseUtils hBaseUtils;
private static void getInstance() {
hBaseUtils = new HBaseUtils();
}
/**
 * Constructor: initializes the connection and the relationship caches.
 */
private HBaseUtils() {
// acquire the connection
getConnection();
// load all relationships once
RadiusRelation.getAllRadiusRelation(connection, radiusMap);
GtpCRelation.getAllGtpCRelation(connection, gtpcMap);
// schedule periodic incremental updates
updateCache();
}
private static void getConnection() {
try {
Configuration configuration = HBaseConfiguration.create();
configuration.set("hbase.zookeeper.quorum", FlowWriteConfig.ZOOKEEPER_SERVERS);
configuration.set("hbase.client.retries.number", "1");
configuration.set("hbase.client.pause", "100");
configuration.set("hbase.rpc.timeout", FlowWriteConfig.HBASE_RPC_TIMEOUT);
configuration.set("zookeeper.recovery.retry", "1");
configuration.set("zookeeper.recovery.retry.intervalmill", "200");
connection = ConnectionFactory.createConnection(configuration);
time = System.currentTimeMillis();
logger.warn("HBaseUtils get HBase connection,now to getAll().");
} catch (IOException ioe) {
logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
} catch (RuntimeException e) {
logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
}
}
/**
 * Get a String value from HBase.
 *
 * @param result     result set
 * @param familyName column family name
 * @param columnName column name
 * @return the value, or "" if absent or blank
 */
static String getString(Result result, String familyName, String columnName) {
byte[] familyBytes = Bytes.toBytes(familyName);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = Bytes.toString(result.getValue(familyBytes, columnBytes)).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "";
}
/**
 * Get a long value from HBase.
 *
 * @param result     result set
 * @param familyName column family name
 * @param columnName column name
 * @return the value, or 0L if the column is absent
 */
static Long getLong(Result result, String familyName, String columnName) {
byte[] familyBytes = Bytes.toBytes(familyName);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
return Bytes.toLong(result.getValue(familyBytes, columnBytes));
}
return 0L;
}
/**
 * Get a TEID from HBase (stored as a long, returned as a String).
 *
 * @param result     result set
 * @param columnName column name
 * @return the TEID, or "0" if the column is absent
 */
static String getTeid(Result result, String columnName) {
byte[] familyBytes = Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = String.valueOf(Bytes.toLong(result.getValue(familyBytes, columnBytes))).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "0";
}
/**
 * Get the vsys_id from HBase (stored as an int, returned as a String).
 *
 * @param result result set
 * @return the vsys_id, or "1" if the column is absent
 */
static String getVsysId(Result result) {
byte[] familyBytes = Bytes.toBytes("common");
byte[] columnBytes = Bytes.toBytes("vsys_id");
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = String.valueOf(Bytes.toInt(result.getValue(familyBytes, columnBytes))).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "1";
}
/**
 * Refresh the caches incrementally.
 */
private static void change() {
if (hBaseUtils == null) {
getInstance();
}
long nowTime = System.currentTimeMillis();
RadiusRelation.upgradeRadiusRelation(connection, radiusMap, time - 1000, nowTime + 500);
GtpCRelation.upgradeGtpCRelation(connection, gtpcMap, time - 1000, nowTime + 500);
time = nowTime;
}
/**
 * Scheduled task that refreshes the relationship caches at a fixed interval.
 */
private void updateCache() {
ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
executorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
change();
}
} catch (RuntimeException e) {
logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
}
}
}, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
}
/**
 * Get the RADIUS account for a client IP.
 *
 * @param clientIp client_ip
 * @return account, or "" if unknown
 */
public static String getAccount(String clientIp) {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
if (hBaseUtils == null) {
getInstance();
}
return radiusMap.getOrDefault(clientIp, "");
}
return "";
}
/**
 * Get the GTP-C user info for a TEID.
 *
 * @param teid TEID
 * @return user info map, or null if unknown
 */
public static HashMap<String, Object> getGtpData(String teid) {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
if (hBaseUtils == null) {
getInstance();
}
return gtpcMap.get(teid);
}
return null;
}
}
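A minimal usage sketch for the two accessors above, assuming an enrichment step that has already parsed one log into a map; the field names client_ip, uplink_teid and common_account are illustrative, not taken from this repository:

// Hypothetical enrichment step built on HBaseUtils (field names are assumptions).
static void enrichLog(java.util.Map<String, Object> log) {
    // keyed by client_ip; returns "" when no RADIUS relationship is cached
    String account = HBaseUtils.getAccount((String) log.get("client_ip"));
    if (!account.isEmpty()) {
        log.put("common_account", account);
    }
    // keyed by TEID; returns null when no GTP-C relationship is cached
    java.util.HashMap<String, Object> user = HBaseUtils.getGtpData((String) log.get("uplink_teid"));
    if (user != null) {
        log.put("common_phone_number", user.get("common_phone_number"));
        log.put("common_imsi", user.get("common_imsi"));
        log.put("common_imei", user.get("common_imei"));
    }
}

Under the default relationship model the lookup keys would also carry the vsys_id suffix, matching how the caches above are keyed.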

View File

@@ -0,0 +1,130 @@
package com.zdjizhi.tools.connections.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.Map;
/**
 * @author qidaijie
 * @version 2022/7/15 10:12
 */
class RadiusRelation {
private static final Log logger = LogFactory.get();
/**
 * Fetch the full set of RADIUS relationships from HBase.
 */
static void getAllRadiusRelation(Connection connection, Map<String, String> radiusMap) {
long begin = System.currentTimeMillis();
Table table = null;
ResultScanner scanner = null;
try {
table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_RADIUS_TABLE_NAME));
Scan scan = new Scan();
if (FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = RadiusRelation.getAcctStatusType(result);
String framedIp = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "framed_ip").trim();
String account = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "account").trim();
if (acctStatusType == 1) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
radiusMap.put(framedIp + vsysId, account);
} else {
radiusMap.put(framedIp, account);
}
}
}
logger.warn("The obtain the number of RADIUS relationships : " + radiusMap.size());
logger.warn("The time spent to obtain radius relationships : " + (System.currentTimeMillis() - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("The relationship between framedIP and account obtained from HBase is abnormal! message is :" + e);
} finally {
if (scanner != null) {
scanner.close();
}
if (table != null) {
try {
table.close();
} catch (IOException e) {
logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
}
/**
 * Incrementally update the RADIUS relationships.
 *
 * @param connection HBase connection
 * @param radiusMap  RADIUS relationship cache
 * @param startTime  scan window start
 * @param endTime    scan window end
 */
static void upgradeRadiusRelation(Connection connection, Map<String, String> radiusMap, Long startTime, Long endTime) {
Long begin = System.currentTimeMillis();
Table table = null;
ResultScanner scanner = null;
Scan scan = new Scan();
try {
table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_RADIUS_TABLE_NAME));
scan.setTimeRange(startTime, endTime);
if (FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = RadiusRelation.getAcctStatusType(result);
String framedIp = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "framed_ip").trim();
String account = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "account").trim();
if (acctStatusType == 1) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
radiusMap.put(framedIp + vsysId, account);
} else {
radiusMap.put(framedIp, account);
}
} else if (acctStatusType == 2) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
radiusMap.remove(framedIp + vsysId);
} else {
radiusMap.remove(framedIp);
}
}
}
Long end = System.currentTimeMillis();
logger.warn("The current number of Radius relationships is: " + radiusMap.keySet().size());
logger.warn("The time used to update the Radius relationship is: " + (end - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("Radius relationship update exception, the content is:" + e);
} finally {
if (scanner != null) {
scanner.close();
}
if (table != null) {
try {
table.close();
} catch (IOException e) {
logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
}
/**
 * Get the online/offline status of the current user.
 *
 * @param result row fetched from HBase
 * @return status: 1 = online, 2 = offline (defaults to 1 if the column is absent)
 */
private static int getAcctStatusType(Result result) {
boolean hasType = result.containsColumn(Bytes.toBytes(FlowWriteConfig.RADIUS_FAMILY_NAME), Bytes.toBytes("acct_status_type"));
if (hasType) {
return Bytes.toInt(result.getValue(Bytes.toBytes(FlowWriteConfig.RADIUS_FAMILY_NAME), Bytes.toBytes("acct_status_type")));
} else {
return 1;
}
}
}

View File

@@ -0,0 +1,329 @@
package com.zdjizhi.tools.connections.http;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.exception.FlowWriteException;
import com.geedgenetworks.utils.StringUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.*;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.*;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import javax.net.ssl.*;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.net.*;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Map;
public class HttpClientService {
private static final Log log = LogFactory.get();
public static final String ERROR_MESSAGE = "-1";
/**
 * Override the trust checks to skip SSL certificate validation, then
 * create a ConnectionManager with the pool configuration.
 *
 * @return connection manager for an HttpClient that supports https
 */
private PoolingHttpClientConnectionManager getSslClientManager() {
try {
// Override the trust manager so SSL certificates are not validated
X509TrustManager trustManager = new X509TrustManager() {
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkClientTrusted(X509Certificate[] xcs, String str) {
}
@Override
public void checkServerTrusted(X509Certificate[] xcs, String str) {
}
};
SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
ctx.init(null, new TrustManager[]{trustManager}, null);
SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
.register("http", PlainConnectionSocketFactory.INSTANCE)
.register("https", socketFactory).build();
// Create the ConnectionManager and apply the pool settings
PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
// maximum total connections
connManager.setMaxTotal(FlowWriteConfig.HTTP_POOL_MAX_CONNECTION);
// maximum connections per route
connManager.setDefaultMaxPerRoute(FlowWriteConfig.HTTP_POOL_MAX_PER_ROUTE);
return connManager;
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new FlowWriteException(e.getMessage());
}
}
/**
 * Get an HTTP client connection object.
 *
 * @param socketTimeOut socket (response) timeout
 * @return HTTP client connection object
 */
private CloseableHttpClient getHttpClient(int socketTimeOut) {
// Build the request configuration
RequestConfig requestConfig = RequestConfig.custom()
// timeout for leasing a connection from the pool
.setConnectionRequestTimeout(FlowWriteConfig.HTTP_POOL_REQUEST_TIMEOUT)
// connect timeout
.setConnectTimeout(FlowWriteConfig.HTTP_POOL_CONNECT_TIMEOUT)
// socket (response) timeout
.setSocketTimeout(socketTimeOut)
.build();
/**
 * Retry handler, added in case timeouts do not take effect.
 * Returning false disables the retry; each exception type is judged individually.
 */
HttpRequestRetryHandler retry = (exception, executionCount, context) -> {
if (executionCount >= 3) {// give up after three retries
return false;
}
if (exception instanceof NoHttpResponseException) {// retry if the server dropped the connection
return true;
}
if (exception instanceof SSLHandshakeException) {// do not retry SSL handshake exceptions
return false;
}
if (exception instanceof SocketTimeoutException) {
if (exception.getMessage().contains("Read timed out")) {
return false;
}
}
if (exception instanceof UnknownHostException) {// target server unreachable
return false;
}
if (exception instanceof ConnectTimeoutException) {// connection refused / connect timed out
return false;
}
if (exception instanceof SSLException) {// other SSL exceptions
return false;
}
if (exception instanceof InterruptedIOException) {// other I/O timeouts
return true;
}
HttpClientContext clientContext = HttpClientContext.adapt(context);
HttpRequest request = clientContext.getRequest();
// retry only if the request is idempotent (no enclosed entity)
if (!(request instanceof HttpEntityEnclosingRequest)) {
return true;
}
return false;
};
ConnectionKeepAliveStrategy myStrategy = (response, context) -> {
HeaderElementIterator it = new BasicHeaderElementIterator
(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
while (it.hasNext()) {
HeaderElement he = it.nextElement();
String param = he.getName();
String value = he.getValue();
if (value != null && param.equalsIgnoreCase("timeout")) {
return Long.parseLong(value) * 1000;
}
}
return 60 * 1000;// default keep-alive of 60s when the server does not specify one
};
// Build the httpClient
return HttpClients.custom()
// apply the request timeouts
.setDefaultRequestConfig(requestConfig)
// apply the retry handler
.setRetryHandler(retry)
.setKeepAliveStrategy(myStrategy)
// apply the pooled connection manager
.setConnectionManager(getSslClientManager())
.build();
}
public InputStream httpGetInputStream(String url, int socketTimeout, Header... headers) {
InputStream result = null;
// get the client connection object
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
// build the GET request
HttpGet httpGet = new HttpGet(url);
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpGet.addHeader(h);
}
}
CloseableHttpResponse response = null;
try {
// execute the request
response = httpClient.execute(httpGet);
// buffer the response entity
result = IOUtils.toBufferedInputStream(response.getEntity().getContent());
EntityUtils.consume(response.getEntity());
} catch (ClientProtocolException e) {
log.error("current file: {},Protocol error:{}", url, e.getMessage());
} catch (ParseException e) {
log.error("current file: {}, Parser error:{}", url, e.getMessage());
} catch (IOException e) {
log.error("current file: {},IO error:{}", url, e.getMessage());
} finally {
if (null != response) {
try {
EntityUtils.consume(response.getEntity());
response.close();
} catch (IOException e) {
log.error("Release Connection error:{}", e.getMessage());
}
}
}
// returning from inside finally would swallow exceptions, so return here instead
return result;
}
public byte[] httpGetByte(String url, int socketTimeout, Header... headers) {
byte[] result = null;
// get the client connection object
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
// build the GET request
HttpGet httpGet = new HttpGet(url);
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpGet.addHeader(h);
}
}
CloseableHttpResponse response = null;
try {
// execute the request
response = httpClient.execute(httpGet);
// read the response entity
result = IOUtils.toByteArray(response.getEntity().getContent());
EntityUtils.consume(response.getEntity());
} catch (ClientProtocolException e) {
log.error("current file: {},Protocol error:{}", url, e.getMessage());
} catch (ParseException e) {
log.error("current file: {}, Parser error:{}", url, e.getMessage());
} catch (IOException e) {
log.error("current file: {},IO error:{}", url, e.getMessage());
} finally {
if (null != response) {
try {
EntityUtils.consume(response.getEntity());
response.close();
} catch (IOException e) {
log.error("Release Connection error:{}", e.getMessage());
}
}
}
return result;
}
/**
 * GET request.
 *
 * @param uri request URI
 * @return response body, or "-1" if the request failed before a body was read
 */
public String httpGet(URI uri, int socketTimeout, Header... headers) {
String msg = ERROR_MESSAGE;
// get the client connection object
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
CloseableHttpResponse response = null;
try {
log.info("http get uri {}", uri);
// build the GET request
HttpGet httpGet = new HttpGet(uri);
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpGet.addHeader(h);
log.info("request header : {}", h);
}
}
// execute the request
response = httpClient.execute(httpGet);
int statusCode = response.getStatusLine().getStatusCode();
// get the response entity
HttpEntity entity = response.getEntity();
// read the response body
msg = EntityUtils.toString(entity, "UTF-8");
if (statusCode != HttpStatus.SC_OK) {
log.error("Http get content is :{}", msg);
}
} catch (ClientProtocolException e) {
log.error("Protocol error: {}", e.getMessage());
} catch (ParseException e) {
log.error("Parse error: {}", e.getMessage());
} catch (IOException e) {
log.error("IO error: {}", e.getMessage());
} finally {
if (null != response) {
try {
EntityUtils.consume(response.getEntity());
response.close();
} catch (IOException e) {
log.error("Release connection error: {}", e.getMessage());
}
}
}
return msg;
}
public void setUrlWithParams(URIBuilder uriBuilder, String path, Map<String, Object> params) {
try {
uriBuilder.setPath(path);
if (params != null && !params.isEmpty()) {
for (Map.Entry<String, Object> kv : params.entrySet()) {
uriBuilder.setParameter(kv.getKey(), kv.getValue().toString());
}
}
} catch (Exception e) {
log.error("Failed to build url, uri : {}, path : {}, params: {}", uriBuilder.toString(), path, params);
}
}
}
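A hedged call-site sketch for the helpers above; the gateway host, path and parameter are placeholders, not values from this project:

import org.apache.http.client.utils.URIBuilder;
import java.util.HashMap;
import java.util.Map;

class HttpClientServiceSketch {
    public static void main(String[] args) throws Exception {
        HttpClientService http = new HttpClientService();
        URIBuilder builder = new URIBuilder("http://gateway.example:9999"); // placeholder host
        Map<String, Object> params = new HashMap<>();
        params.put("kb_id", "ip_builtin"); // placeholder parameter
        http.setUrlWithParams(builder, "/v1/knowledge_base", params); // placeholder path
        String body = http.httpGet(builder.build(), 5000);
        if (!HttpClientService.ERROR_MESSAGE.equals(body)) {
            System.out.println(body); // "-1" is only returned when the request failed outright
        }
    }
}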

View File

@@ -0,0 +1,46 @@
package com.zdjizhi.tools.connections.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Properties;
/**
 * @author qidaijie
 * @version 2021/9/6 10:37
 */
class CertUtils {
/**
 * Kafka SASL authentication port
 */
private static final String SASL_PORT = "9094";
/**
 * Kafka SSL authentication port
 */
private static final String SSL_PORT = "9095";
/**
 * Choose the authentication mode based on the port in the connection string.
 *
 * @param servers    kafka connection string
 * @param properties kafka connection properties
 */
static void chooseCert(String servers, Properties properties) {
if (servers.contains(SASL_PORT)) {
properties.put("security.protocol", "SASL_PLAINTEXT");
properties.put("sasl.mechanism", "PLAIN");
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username="
+ FlowWriteConfig.KAFKA_SASL_JAAS_USER + " password=" + FlowWriteConfig.KAFKA_SASL_JAAS_PIN + ";");
} else if (servers.contains(SSL_PORT)) {
properties.put("security.protocol", "SSL");
properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
properties.put("ssl.keystore.location", FlowWriteConfig.TOOLS_LIBRARY + "keystore.jks");
properties.put("ssl.keystore.password", FlowWriteConfig.KAFKA_SASL_JAAS_PIN);
properties.put("ssl.truststore.location", FlowWriteConfig.TOOLS_LIBRARY + "truststore.jks");
properties.put("ssl.truststore.password", FlowWriteConfig.KAFKA_SASL_JAAS_PIN);
properties.put("ssl.key.password", FlowWriteConfig.KAFKA_SASL_JAAS_PIN);
}
}
}

View File

@@ -0,0 +1,45 @@
package com.zdjizhi.tools.connections.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.util.Properties;
/**
 * @author qidaijie
 * @version 2021/6/8 13:54
 */
public class KafkaConsumer {
private static Properties createConsumerConfig() {
Properties properties = new Properties();
properties.put("bootstrap.servers", FlowWriteConfig.SOURCE_KAFKA_SERVERS);
properties.put("group.id", FlowWriteConfig.GROUP_ID);
properties.put("session.timeout.ms", FlowWriteConfig.SESSION_TIMEOUT_MS);
properties.put("max.poll.records", FlowWriteConfig.MAX_POLL_RECORDS);
properties.put("max.partition.fetch.bytes", FlowWriteConfig.MAX_PARTITION_FETCH_BYTES);
properties.put("partition.discovery.interval.ms", "10000");
CertUtils.chooseCert(FlowWriteConfig.SOURCE_KAFKA_SERVERS, properties);
return properties;
}
/**
 * Kafka source using the standard string deserialization schema.
 *
 * @return kafka logs
 */
public static FlinkKafkaConsumer<String> flinkConsumer() {
FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(FlowWriteConfig.SOURCE_KAFKA_TOPIC,
new SimpleStringSchema(), createConsumerConfig());
// commit offsets to kafka as checkpoints complete
kafkaConsumer.setCommitOffsetsOnCheckpoints(true);
// start consuming from the consumer group's current offsets
kafkaConsumer.setStartFromGroupOffsets();
return kafkaConsumer;
}
}
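A sketch of how this source is typically wired into a Flink 1.13 job; the checkpoint interval, print sink and job name are illustrative, and the real topology applies the completion operators instead:

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

class ConsumerWiringSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(60_000L); // offsets are committed to Kafka with each checkpoint
        DataStream<String> logs = env.addSource(KafkaConsumer.flinkConsumer());
        logs.print(); // placeholder sink
        env.execute("log-completion-sketch"); // placeholder job name
    }
}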

View File

@@ -0,0 +1,58 @@
package com.zdjizhi.tools.connections.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import java.util.Optional;
import java.util.Properties;
/**
 * @author qidaijie
 * @version 2021/6/8 14:04
 */
public class KafkaProducer {
private static Properties createProducerConfig(String kafkaServers) {
Properties properties = new Properties();
properties.put("bootstrap.servers", kafkaServers);
properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
properties.put("retries", FlowWriteConfig.RETRIES);
properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
CertUtils.chooseCert(kafkaServers, properties);
return properties;
}
public static FlinkKafkaProducer<String> getPercentKafkaProducer() {
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
FlowWriteConfig.SINK_PERCENT_KAFKA_TOPIC,
new SimpleStringSchema(),
createProducerConfig(FlowWriteConfig.PERCENT_SINK_KAFKA_SERVERS), Optional.empty());
// log produce failures instead of catching and rethrowing them
kafkaProducer.setLogFailuresOnly(true);
return kafkaProducer;
}
public static FlinkKafkaProducer<String> getTrafficFileMetaKafkaProducer() {
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
FlowWriteConfig.SINK_FILE_DATA_SINK_KAFKA_TOPIC,
new SimpleStringSchema(),
createProducerConfig(FlowWriteConfig.SINK_FILE_DATA_KAFKA_SERVERS), Optional.empty());
// log produce failures instead of catching and rethrowing them
kafkaProducer.setLogFailuresOnly(true);
return kafkaProducer;
}
}

View File

@@ -0,0 +1,53 @@
package com.zdjizhi.tools.connections.nacos;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import java.util.Properties;
/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.connections.nacos
 * @Description:
 * @date 2023/7/27 14:49
 */
public class NacosConnection {
private static final Log logger = LogFactory.get();
private ConfigService configService;
public ConfigService getSchemaService() {
Properties properties = new Properties();
properties.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
properties.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_SCHEMA_NAMESPACE);
properties.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
properties.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
try {
configService = NacosFactory.createConfigService(properties);
} catch (NacosException e) {
logger.error("NacosException:{}", e);
}
return configService;
}
public ConfigService getPublicService() {
Properties properties = new Properties();
properties.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
properties.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_PUBLIC_NAMESPACE);
properties.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
properties.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
try {
configService = NacosFactory.createConfigService(properties);
} catch (NacosException e) {
logger.error("NacosException:{}", e);
}
return configService;
}
}

View File

@@ -0,0 +1,190 @@
package com.zdjizhi.tools.connections.zookeeper;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
/**
* @author qidaijie
*/
public class DistributedLock implements Lock, Watcher {
private static final Log logger = LogFactory.get();
private ZooKeeper zk = null;
/**
 * Root node
 */
private final String ROOT_LOCK = "/locks";
/**
 * Resource being contended for
 */
private String lockName;
/**
 * Previous lock to wait on
 */
private String waitLock;
/**
 * Current lock
 */
private String currentLock;
/**
 * Countdown latch used to wait for the previous node to vanish
 */
private CountDownLatch countDownLatch;
private int sessionTimeout = 2000;
private List<Exception> exceptionList = new ArrayList<Exception>();
/**
 * Configure the distributed lock.
 *
 * @param config   connection url
 * @param lockName contended resource
 */
public DistributedLock(String config, String lockName) {
this.lockName = lockName;
try {
// connect to zookeeper
zk = new ZooKeeper(config, sessionTimeout, this);
Stat stat = zk.exists(ROOT_LOCK, false);
if (stat == null) {
// create the root node if it does not exist
zk.create(ROOT_LOCK, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
} catch (IOException | InterruptedException | KeeperException e) {
logger.error("Failed to initialize the lock root node! " + e);
}
}
// node watcher
@Override
public void process(WatchedEvent event) {
if (this.countDownLatch != null) {
this.countDownLatch.countDown();
}
}
@Override
public void lock() {
if (exceptionList.size() > 0) {
throw new LockException(exceptionList.get(0));
}
try {
if (this.tryLock()) {
logger.info(Thread.currentThread().getName() + " " + lockName + " acquired the lock");
} else {
// wait for the lock
waitForLock(waitLock, sessionTimeout);
}
} catch (InterruptedException | KeeperException e) {
logger.error("Exception while acquiring the lock" + e);
}
}
@Override
public boolean tryLock() {
try {
String splitStr = "_lock_";
if (lockName.contains(splitStr)) {
throw new LockException("Invalid lock name");
}
// create an ephemeral sequential node
currentLock = zk.create(ROOT_LOCK + "/" + lockName + splitStr, new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
// list all child nodes
List<String> subNodes = zk.getChildren(ROOT_LOCK, false);
// collect all locks for this lockName
List<String> lockObjects = new ArrayList<String>();
for (String node : subNodes) {
String tmpNode = node.split(splitStr)[0];
if (tmpNode.equals(lockName)) {
lockObjects.add(node);
}
}
Collections.sort(lockObjects);
// if the current node is the smallest, the lock is acquired
if (currentLock.equals(ROOT_LOCK + "/" + lockObjects.get(0))) {
return true;
}
// otherwise find the node immediately before ours
String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
} catch (InterruptedException | KeeperException e) {
logger.error("Exception while trying to acquire the lock" + e);
}
return false;
}
@Override
public boolean tryLock(long timeout, TimeUnit unit) {
try {
if (this.tryLock()) {
return true;
}
return waitForLock(waitLock, timeout);
} catch (KeeperException | InterruptedException | RuntimeException e) {
logger.error("Exception while checking lock state" + e);
}
return false;
}
// wait for the lock
private boolean waitForLock(String prev, long waitTime) throws KeeperException, InterruptedException {
Stat stat = zk.exists(ROOT_LOCK + "/" + prev, true);
if (stat != null) {
this.countDownLatch = new CountDownLatch(1);
// wait until the previous node disappears; process() then counts down and the lock can be taken
this.countDownLatch.await(waitTime, TimeUnit.MILLISECONDS);
this.countDownLatch = null;
}
return true;
}
@Override
public void unlock() {
try {
zk.delete(currentLock, -1);
currentLock = null;
zk.close();
} catch (InterruptedException | KeeperException e) {
logger.error("Exception while releasing the lock" + e);
}
}
@Override
public Condition newCondition() {
return null;
}
@Override
public void lockInterruptibly() throws InterruptedException {
this.lock();
}
public class LockException extends RuntimeException {
private static final long serialVersionUID = 1L;
public LockException(String e) {
super(e);
}
public LockException(Exception e) {
super(e);
}
}
}
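A hedged usage sketch; the quorum address and lock name are placeholders. Note that unlock() also closes the ZooKeeper session, so one instance serves exactly one lock/unlock cycle:

class DistributedLockSketch {
    public static void main(String[] args) {
        DistributedLock lock = new DistributedLock("zk1.example:2181", "workerId"); // placeholders
        lock.lock(); // blocks until our ephemeral sequential node is the smallest
        try {
            // critical section, e.g. a read-modify-write of a shared znode
        } finally {
            lock.unlock(); // deletes the node and closes the session
        }
    }
}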

View File

@@ -0,0 +1,138 @@
package com.zdjizhi.tools.connections.zookeeper;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
/**
 * @author qidaijie
 * @version 2020/11/14 11:28
 */
public class ZookeeperUtils implements Watcher {
private static final Log logger = LogFactory.get();
private static final int ID_MAX = 255;
private ZooKeeper zookeeper;
private static final int SESSION_TIME_OUT = 20000;
private CountDownLatch countDownLatch = new CountDownLatch(1);
@Override
public void process(WatchedEvent event) {
if (event.getState() == Event.KeeperState.SyncConnected) {
countDownLatch.countDown();
}
}
/**
 * Read, increment and write back the node value.
 *
 * @param path        node path
 * @param zookeeperIp zookeeper address
 * @return the worker id read from the node
 */
public int modifyNode(String path, String zookeeperIp) {
createNode(path, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, zookeeperIp);
int workerId = 0;
try {
connectZookeeper(zookeeperIp);
Stat stat = zookeeper.exists(path, true);
workerId = Integer.parseInt(getNodeDate(path));
if (workerId > ID_MAX) {
workerId = 0;
zookeeper.setData(path, "1".getBytes(), stat.getVersion());
} else {
String result = String.valueOf(workerId + 1);
if (stat != null) {
zookeeper.setData(path, result.getBytes(), stat.getVersion());
} else {
logger.error("Node does not exist!,Can't modify");
}
}
} catch (KeeperException | InterruptedException e) {
logger.error("modify error Can't modify," + e);
} finally {
closeConn();
}
logger.warn("workerID is" + workerId);
return workerId;
}
/**
 * Connect to zookeeper.
 *
 * @param host address
 */
public void connectZookeeper(String host) {
try {
zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await();
} catch (IOException | InterruptedException e) {
logger.error("Connection to the Zookeeper Exception! message:" + e);
}
}
/**
 * Close the connection
 */
public void closeConn() {
try {
if (zookeeper != null) {
zookeeper.close();
}
} catch (InterruptedException e) {
logger.error("Close the Zookeeper connection Exception! message:" + e);
}
}
/**
 * Get node content.
 *
 * @param path node path
 * @return content, or null on exception
 */
public String getNodeDate(String path) {
String result = null;
Stat stat = new Stat();
try {
byte[] resByte = zookeeper.getData(path, true, stat);
result = StrUtil.str(resByte, "UTF-8");
} catch (KeeperException | InterruptedException e) {
logger.error("Get node information exception" + e);
}
return result;
}
/**
 * @param path        path of the node to create
 * @param date        byte[] of the data stored at the node
 * @param acls        ACL policy
 * @param zookeeperIp zookeeper address
 */
public void createNode(String path, byte[] date, List<ACL> acls, String zookeeperIp) {
try {
connectZookeeper(zookeeperIp);
Stat exists = zookeeper.exists(path, true);
if (exists == null) {
Stat existsSnowflakeld = zookeeper.exists("/Snowflake", true);
if (existsSnowflakeld == null) {
zookeeper.create("/Snowflake", null, acls, CreateMode.PERSISTENT);
}
zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
} else {
logger.warn("Node already exists ! Don't need to create");
}
} catch (KeeperException | InterruptedException e) {
logger.error(e);
} finally {
closeConn();
}
}
}

View File

@@ -0,0 +1,16 @@
package com.zdjizhi.tools.exception;
/**
 * @author qidaijie
 * @version 2021/3/25 9:42
 */
public class FlowWriteException extends RuntimeException {
public FlowWriteException() {
}
public FlowWriteException(String message) {
super(message);
}
}

View File

@@ -0,0 +1,41 @@
package com.zdjizhi.tools.general;
import java.util.Properties;
/**
 * @author qidaijie
 * @Package com.zdjizhi.common
 * @Description:
 * @date 2023/7/27 14:22
 */
public class ConfigurationsUtils {
public static String getStringProperty(Properties properties, String key) {
if (!properties.isEmpty() && properties.containsKey(key)) {
return properties.getProperty(key).trim();
}
return "";
}
public static Integer getIntProperty(Properties properties, String key) {
if (!properties.isEmpty() && properties.containsKey(key)) {
return Integer.parseInt(properties.getProperty(key).trim());
}
return 0;
}
public static Long getLongProperty(Properties properties, String key) {
if (!properties.isEmpty() && properties.containsKey(key)) {
return Long.parseLong(properties.getProperty(key).trim());
}
return 0L;
}
public static Boolean getBooleanProperty(Properties properties, String key) {
if (!properties.isEmpty() && properties.containsKey(key)) {
return Boolean.parseBoolean(properties.getProperty(key).trim());
}
return false;
}
}

View File

@@ -0,0 +1,46 @@
package com.zdjizhi.tools.general;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.ordinary.MD5Utils;
import static com.zdjizhi.common.FlowWriteConfig.judgeFileType;
/**
 * Utility for building file-related fields
 */
public class FileEdit {
public static String getFileUploadUrl(long cfgId, String sIp, int sPort, String dIp, int dPort, long foundTime, String account, String domain, String schemaType, String fileId) {
String fileType = null;
if (schemaType.equals("HTTP")) {
fileType = "html";
}
if (schemaType.equals("MAIL")) {
fileType = "eml";
}
return "http://" + FlowWriteConfig.OOS_SERVERS + "/v3/upload?cfg_id=" + cfgId + "&file_id=" + fileId + "&file_type=" + fileType + "&found_time=" + foundTime + "&s_ip=" + sIp + "&s_port=" + sPort + "&d_ip=" + dIp + "&d_port=" + dPort + "&domain=" + domain + "&account=" + account;
}
public static String getFileDownloadUrl(String fileId) {
return "http://" + FlowWriteConfig.OOS_SERVERS + "/v3/download?file_id=" + fileId;
}
public static String getFileType(String url) {
String[] split = url.split("\\.");
return split[split.length - 1];
}
public static String getFileId(String filename, String fileSuffix) {
return filename + fileSuffix;
}
}

View File

@@ -0,0 +1,207 @@
package com.zdjizhi.tools.general;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.*;
import com.geedgenetworks.utils.IpLookupV2;
import com.geedgenetworks.utils.StringUtil;
import com.google.common.base.Joiner;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.pojo.KnowlegeBaseMeta;
import com.zdjizhi.tools.connections.http.HttpClientService;
import org.apache.http.client.utils.URIBuilder;
import java.io.ByteArrayInputStream;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
/**
* @author wangchengcheng
* @version 2023/11/10 15:23
*/
public class IpLookupUtils {
private static final Log logger = LogFactory.get();
private static final String ipBuiltInName = "ip_builtin.mmdb";
private static final String ipUserDefinedName = "ip_user_defined.mmdb";
private static final String asnName = "asn_builtin.mmdb";
/**
 * IP geolocation database
 */
private static IpLookupV2 ipLookup;
/**
 * Default separator for database file names
 */
private static final String LOCATION_SEPARATOR = ".";
/**
 * Maximum retry count
 */
private static final int TRY_TIMES = 5;
/**
 * http connections
 */
private static final HttpClientService httpClientService;
/**
 * Knowledge-base metadata cache
 */
private static final HashMap<String, KnowlegeBaseMeta> knowledgeMetaCache = new HashMap<>(16);
private static String currentSha256IpUserDefined = "";
private static String currentSha256IpBuiltin = "";
private static String currentSha256AsnBuiltin = "";
static {
httpClientService = new HttpClientService();
try {
stuffKnowledgeMetaCache();
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
stuffKnowledgeMetaCache();
}
}, 0, FlowWriteConfig.KNOWLEDGE_EXECUTION_MINUTES * 1000 * 60);
} catch (Exception e) {
logger.error("Failed to load the knowledge base: " + e);
}
}
private static void stuffKnowledgeMetaCache() {
final KnowlegeBaseMeta ipBuiltinknowlegeBaseMeta = getKnowlegeBaseMeta(FlowWriteConfig.IP_BUILTIN_KD_ID);
if (!currentSha256IpBuiltin.equals(ipBuiltinknowlegeBaseMeta.getSha256())) {
String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(ipBuiltinknowlegeBaseMeta.getName(), ipBuiltinknowlegeBaseMeta.getFormat());
knowledgeMetaCache.put(fileName, ipBuiltinknowlegeBaseMeta);
}
final KnowlegeBaseMeta ipUserDefinedknowlegeBaseMeta = getKnowlegeBaseMeta(FlowWriteConfig.IP_USER_DEFINED_KD_ID);
if (!currentSha256IpUserDefined.equals(ipUserDefinedknowlegeBaseMeta.getSha256())) {
String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(ipUserDefinedknowlegeBaseMeta.getName(), ipUserDefinedknowlegeBaseMeta.getFormat());
knowledgeMetaCache.put(fileName, ipUserDefinedknowlegeBaseMeta);
}
final KnowlegeBaseMeta asnBuiltinknowlegeBaseMeta = getKnowlegeBaseMeta(FlowWriteConfig.ASN_BUILTIN_KD_ID);
if (!currentSha256AsnBuiltin.equals(asnBuiltinknowlegeBaseMeta.getSha256())) {
String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(asnBuiltinknowlegeBaseMeta.getName(), asnBuiltinknowlegeBaseMeta.getFormat());
knowledgeMetaCache.put(fileName, asnBuiltinknowlegeBaseMeta);
}
if (!currentSha256IpUserDefined.equals(ipUserDefinedknowlegeBaseMeta.getSha256()) || !currentSha256IpBuiltin.equals(ipBuiltinknowlegeBaseMeta.getSha256()) || !currentSha256AsnBuiltin.equals(asnBuiltinknowlegeBaseMeta.getSha256())) {
currentSha256IpBuiltin = ipBuiltinknowlegeBaseMeta.getSha256();
currentSha256IpUserDefined = ipUserDefinedknowlegeBaseMeta.getSha256();
currentSha256AsnBuiltin = asnBuiltinknowlegeBaseMeta.getSha256();
reloadIpLookup();
logger.info("知识库加载成功.");
}
}
/**
 * Download the database files via HOS and rebuild the IpLookup
 */
private static void reloadIpLookup() {
IpLookupV2.Builder builder = new IpLookupV2.Builder(false);
for (String fileName : knowledgeMetaCache.keySet()) {
int retryNum = 0;
KnowlegeBaseMeta knowlegeBaseMeta = knowledgeMetaCache.get(fileName);
String metaSha256 = knowlegeBaseMeta.getSha256();
while (retryNum < TRY_TIMES) {
System.out.println("download file " + fileName + ",HOS path :" + knowlegeBaseMeta.getPath());
Long startTime = System.currentTimeMillis();
byte[] httpGetByte = httpClientService.httpGetByte(knowlegeBaseMeta.getPath(), FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
if (httpGetByte != null && httpGetByte.length > 0) {
String downloadFileSha256 = DigestUtil.sha256Hex(httpGetByte);
if (metaSha256.equals(downloadFileSha256)) {
ByteArrayInputStream inputStream = new ByteArrayInputStream(httpGetByte);
switch (fileName) {
case ipBuiltInName:
builder.loadDataFile(inputStream);
break;
case ipUserDefinedName:
builder.loadDataFilePrivate(inputStream);
break;
case asnName:
builder.loadAsnDataFile(inputStream);
break;
default:
}
System.out.println("update " + fileName + " finished, speed :" + (System.currentTimeMillis() - startTime) + "ms");
retryNum = TRY_TIMES;
} else {
logger.error("sha256 of {} downloaded via HOS is {}, but the gateway records {}; mismatch, starting download retry {}", fileName, downloadFileSha256, metaSha256, retryNum);
retryNum++;
}
} else {
logger.error("Stream of {} downloaded via HOS is empty, starting download retry {}", fileName, retryNum);
retryNum++;
}
}
}
ipLookup = builder.build();
}
public static IpLookupV2 getIpLookup() {
return ipLookup;
}
/**
 * Build the filter expression for knowledge-base metadata.
 *
 * @return filter expression
 */
private static String getFilterParameter() {
return "[?(@.version=='latest')][?(@.name in ('ip_builtin','ip_user_defined','asn_builtin'))]";
}
public static String getCountryLookup(String ip) {
return ipLookup.countryLookup(ip);
}
private static KnowlegeBaseMeta getKnowlegeBaseMeta(String kd_id) {
KnowlegeBaseMeta knowlegeBaseMeta = null;
String knowledgeInfo = null;
try {
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.KNOWLEDGE_BASE_URL);
HashMap<String, Object> parms = new HashMap<>();
parms.put("kb_id", kd_id);
httpClientService.setUrlWithParams(uriBuilder, FlowWriteConfig.KNOWLEDGE_BASE_PATH, parms);
knowledgeInfo = httpClientService.httpGet(uriBuilder.build(), FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
if (knowledgeInfo.contains("200")) {
final Map<String, Object> jsonObject = JSONObject.parseObject(knowledgeInfo, Map.class);
JSONPath jsonPath = JSONPath.of(getFilterParameter());
String extract = jsonPath.extract(JSONReader.of(jsonObject.get("data").toString())).toString();
if (StringUtil.isNotBlank(extract)) {
JSONArray jsonArray = JSON.parseArray(extract);
if (jsonArray.size() > 0) {
for (int i = 0; i < jsonArray.size(); i++) {
knowlegeBaseMeta = JSONObject.parseObject(jsonArray.getString(i), KnowlegeBaseMeta.class);
}
}
}
} else {
logger.error("Failed to fetch knowledge_base, response was " + knowledgeInfo);
}
} catch (URISyntaxException e) {
logger.error("Failed to build the URI", e);
} catch (Exception e) {
logger.error("Failed to fetch knowledge_base", e);
}
return knowlegeBaseMeta;
}
}
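An illustrative lookup once the mmdb files above have been loaded; the IP literal is only an example:

class IpLookupSketch {
    static String countryOf(String ip) {
        // delegates to the IpLookupV2 instance rebuilt on each knowledge-base change
        return IpLookupUtils.getCountryLookup(ip);
    }

    public static void main(String[] args) {
        System.out.println(countryOf("8.8.8.8")); // example IP
    }
}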

View File

@@ -0,0 +1,213 @@
package com.zdjizhi.tools.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.zookeeper.DistributedLock;
import com.zdjizhi.tools.connections.zookeeper.ZookeeperUtils;
/**
 * Snowflake ID generator
 *
 * @author qidaijie
 */
public class SnowflakeId {
private static final Log logger = LogFactory.get();
/**
 * 64 bits in total; the first (sign) bit is always 0.
 * Timestamp: 39 bits (~17 years); dataCenterId (one per environment/job): 5 bits (0-31);
 * workerId (one per process): 8 bits (0-255); sequence: 11 bits (2047/ms).
 *
 * sequence per ms = (-1L ^ (-1L << 11))
 * max years = (1L << 39) / (1000L * 60 * 60 * 24 * 365)
 */
/**
 * Epoch start timestamp (2023-08-29); usable for ~17 years
 */
private final long twepoch = 1693274481297L;
/**
 * Number of bits for the worker id
 */
private final long workerIdBits = 8L;
/**
 * Number of bits for the data-center id
 */
private final long dataCenterIdBits = 5L;
/**
 * Maximum worker id: 255 (this shift trick quickly computes the largest
 * decimal number representable in a given number of binary digits)
 * M << n = M * 2^n
 */
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
 * Maximum data-center id: 31
 */
private final long maxDataCenterId = -1L ^ (-1L << dataCenterIdBits);
/**
 * Number of bits for the sequence within the id
 */
private final long sequenceBits = 11L;
/**
 * Worker id is shifted left by 11 bits
 */
private final long workerIdShift = sequenceBits;
/**
 * Data-center id is shifted left by 19 bits (11+8)
 */
private final long dataCenterIdShift = sequenceBits + workerIdBits;
/**
 * Timestamp is shifted left by 24 bits (11+8+5)
 */
private final long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits;
/**
 * Sequence mask, 2047 here
 */
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
 * Worker id (0~255)
 */
private long workerId;
/**
 * Data-center id (0~31)
 */
private long dataCenterId;
/**
 * Sequence within the millisecond (0~2047)
 */
private long sequence = 0L;
/**
 * Timestamp of the last generated ID
 */
private long lastTimestamp = -1L;
/**
 * Maximum tolerated clock rollback: 10s
 */
private static final long rollBackTime = 10000L;
private static SnowflakeId idWorker;
private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
static {
idWorker = new SnowflakeId(FlowWriteConfig.ZOOKEEPER_SERVERS, FlowWriteConfig.DATA_CENTER_ID_NUM);
}
//==============================Constructors=====================================
/**
 * Constructor
 */
private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
DistributedLock lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
try {
lock.lock();
int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDataCenterId));
}
this.workerId = tmpWorkerId;
this.dataCenterId = dataCenterIdNum;
} catch (RuntimeException e) {
logger.error("This is not a usual error!!!===>>>" + e + "<<<===");
} finally {
lock.unlock();
}
}
// ==============================Methods==========================================
/**
 * Get the next ID (this method is thread-safe).
 *
 * @return SnowflakeId
 */
private synchronized long nextId() {
long timestamp = timeGen();
// Bounded rollback window: if the system clock moved back by less than rollBackTime, wait for it to catch up
if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
timestamp = tilNextMillis(lastTimestamp);
}
// If the current time is still before the last ID's timestamp, the clock rolled back too far; throw
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
// Same millisecond: advance the sequence
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
// Sequence overflow within the millisecond
if (sequence == 0) {
// Block until the next millisecond for a fresh timestamp
timestamp = tilNextMillis(lastTimestamp);
}
}
// New millisecond: reset the sequence
else {
sequence = 0L;
}
// Remember the timestamp of this ID
lastTimestamp = timestamp;
// Shift the parts and OR them together into the 64-bit ID
return ((timestamp - twepoch) << timestampLeftShift)
| (dataCenterId << dataCenterIdShift)
| (workerId << workerIdShift)
| sequence;
}
/**
 * Block until the next millisecond, i.e. until a new timestamp is obtained.
 *
 * @param lastTimestamp timestamp of the last generated ID
 * @return current timestamp
 */
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
 * Return the current time in milliseconds.
 *
 * @return current time (ms)
 */
protected long timeGen() {
return System.currentTimeMillis();
}
/**
 * Static factory.
 *
 * @return the next generated ID
 */
public static Long generateId() {
return idWorker.nextId();
}
}
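A small sketch that decomposes a generated ID back into its fields, mirroring the shift constants above (11-bit sequence, 8-bit worker id, 5-bit data-center id, 39-bit timestamp offset from the epoch); handy for sanity-checking the layout:

class SnowflakeIdDecodeSketch {
    public static void main(String[] args) {
        long id = SnowflakeId.generateId();
        long sequence = id & 0x7FFL;                    // low 11 bits
        long workerId = (id >> 11) & 0xFFL;             // next 8 bits
        long dataCenterId = (id >> 19) & 0x1FL;         // next 5 bits
        long timestampMs = (id >> 24) + 1693274481297L; // 39 bits plus the epoch
        System.out.printf("seq=%d worker=%d dc=%d ts=%d%n", sequence, workerId, dataCenterId, timestampMs);
    }
}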

View File

@@ -0,0 +1,114 @@
package com.zdjizhi.tools.json;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.exception.FlowWriteException;
/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.json
 * @Description:
 * @date 2023/5/20 14:58
 */
class DataTypeCheck {
/**
 * Convert raw-log values whose type disagrees with the type the schema specifies.
 *
 * @param jsonObject raw log json
 * @param fieldName  name of the field to convert
 * @param fieldValue value to convert
 * @param fieldType  type specified by the schema
 * @throws RuntimeException on conversion failure
 */
static void typeConverter(JSONObject jsonObject, String fieldName, Object fieldValue, String fieldType) throws RuntimeException {
switch (fieldType) {
case "String":
jsonObject.put(fieldName, fieldValue.toString());
break;
case "Integer":
jsonObject.put(fieldName, castToInt(fieldValue));
break;
case "long":
jsonObject.put(fieldName, castToLong(fieldValue));
break;
case "List":
jsonObject.put(fieldName, JSONArray.parseArray(fieldValue.toString()));
break;
case "Map":
jsonObject.put(fieldName, JSONObject.parseObject(fieldValue.toString()));
break;
case "double":
jsonObject.put(fieldName, castToDouble(fieldValue));
break;
default:
}
}
/**
 * Integer conversion.
 *
 * @param value json value
 * @return Integer value
 */
private static Integer castToInt(Object value) {
if (value instanceof Integer) {
return (Integer) value;
}
if (value instanceof Number) {
return ((Number) value).intValue();
}
if (value instanceof String) {
String strVal = value.toString();
return Integer.parseInt(strVal);
}
throw new FlowWriteException("can not cast to int, value : " + value);
}
/**
 * Long conversion.
 *
 * @param value json value
 * @return (Long) value or null
 */
private static Long castToLong(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof String) {
String strVal = (String) value;
return Long.parseLong(strVal);
}
throw new FlowWriteException("can not cast to long, value : " + value);
}
/**
 * Double conversion.
 *
 * @param value json value
 * @return double value
 */
private static Double castToDouble(Object value) {
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
if (value instanceof String) {
String strVal = (String) value;
return Double.parseDouble(strVal);
}
throw new FlowWriteException("can not cast to double, value : " + value);
}
}

View File

@@ -0,0 +1,45 @@
package com.zdjizhi.tools.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONPath;
import com.alibaba.fastjson2.JSONReader;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.json
 * @Description:
 * @date 2023/5/19 17:51
 */
public class JsonPathUtil {
private static final Log logger = LogFactory.get();
private static Map<String, JSONPath> jsonPathMap = new ConcurrentHashMap<>(16);
/**
 * Parse data with JSONPath according to the given expression.
 *
 * @param message json
 * @param expr    parse expression
 * @return parse result
 */
public static Object analysis(String message, String expr) {
Object flattenResult = "";
try {
JSONReader parser = JSONReader.of(message);
if (jsonPathMap.containsKey(expr)) {
flattenResult = jsonPathMap.get(expr).extract(parser);
} else {
JSONPath jsonPath = JSONPath.of(expr);
jsonPathMap.put(expr, jsonPath);
flattenResult = jsonPath.extract(parser);
}
} catch (ClassCastException | ArrayIndexOutOfBoundsException e) {
logger.error("The label resolution exception or [expr] analytic expression error" + e.getMessage());
}
return flattenResult;
}
}
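An illustrative call; the payload and expression are made up for the example:

class JsonPathSketch {
    public static void main(String[] args) {
        String json = "{\"common\":{\"server_domain\":\"example.com\"}}";
        // the compiled JSONPath is cached under the expression on first use
        Object domain = JsonPathUtil.analysis(json, "$.common.server_domain");
        System.out.println(domain); // example.com
    }
}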

View File

@@ -0,0 +1,303 @@
package com.zdjizhi.tools.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.nacos.NacosConnection;
import java.util.*;
import java.util.concurrent.Executor;
/**
 * Utility class that parses json with FastJson.
 *
 * @author qidaijie
 */
public class MetaUtil {
private static final Log logger = LogFactory.get();
/**
 * Fields carrying this marker are disabled and excluded from the final log
 */
private static final String VISIBILITY = "disabled";
/**
 * List of fields to drop
 */
private static final ArrayList<String> dropList = new ArrayList<>();
/**
 * Valid fields and their types as specified by the schema
 */
private static HashMap<String, Class> schemaFieldsTypeMap;
/**
 * Fields that carry default values
 */
private static final HashMap<String, Object> defaultFieldsMap = new HashMap<>(16);
/**
 * Job list.
 * Each element is a four-string array (field with a format marker, field to fill,
 * function to apply, function parameters), e.g.:
 * (mail_subject mail_subject decode_of_base64 mail_subject_charset)
 */
private static ArrayList<String[]> jobList;
static {
NacosConnection nacosConnection = new NacosConnection();
ConfigService schemaService = nacosConnection.getSchemaService();
try {
String schema = schemaService.getConfigAndSignListener(FlowWriteConfig.NACOS_SCHEMA_DATA_ID, FlowWriteConfig.NACOS_SCHEMA_GROUP, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT, new Listener() {
@Override
public Executor getExecutor() {
return null;
}
@Override
public void receiveConfigInfo(String configInfo) {
if (StringUtil.isNotBlank(configInfo)) {
if (CommonConfig.SCHEMA_UPDATE_ENABLED) {
logger.info("Reload the " + FlowWriteConfig.NACOS_SCHEMA_DATA_ID + "schema when it changes.");
clearCache();
schemaFieldsTypeMap = getFieldsFromSchema(configInfo);
jobList = getJobListFromHttp(configInfo);
} else {
logger.info("The schema changes, but the dynamic update configuration is disabled and no operation is performed!");
}
}
}
});
if (StringUtil.isNotBlank(schema)) {
schemaFieldsTypeMap = getFieldsFromSchema(schema);
jobList = getJobListFromHttp(schema);
}
} catch (NacosException e) {
logger.error("Get Schema config from Nacos error,The exception message is :" + e.getMessage());
}
}
/**
 * Pattern match: given a type string, return the corresponding class.
 *
 * @param type type name
 * @return class type
 */
private static Class getClassName(String type) {
Class clazz;
switch (type) {
case "int":
clazz = Integer.class;
break;
case "string":
clazz = String.class;
break;
case "long":
clazz = long.class;
break;
case "array":
clazz = List.class;
break;
case "double":
clazz = double.class;
break;
case "float":
clazz = float.class;
break;
case "char":
clazz = char.class;
break;
case "byte":
clazz = byte.class;
break;
case "boolean":
clazz = boolean.class;
break;
case "short":
clazz = short.class;
break;
default:
clazz = String.class;
}
return clazz;
}
/**
 * Type conversion.
 *
 * @param jsonObject raw log map
 */
public static void typeTransform(JSONObject jsonObject) throws RuntimeException {
dropJsonField(jsonObject);
MetaUtil.setFieldDefault(jsonObject);
for (Map.Entry<String, Object> entry : jsonObject.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if (value != null) {
if (schemaFieldsTypeMap.containsKey(key)) {
try {
Class<?> schemaFieldClass = schemaFieldsTypeMap.get(key);
if (schemaFieldClass != value.getClass()) {
String simpleName = schemaFieldClass.getSimpleName();
DataTypeCheck.typeConverter(jsonObject, key, value, simpleName);
}
} catch (RuntimeException e) {
logger.error("The {} field type conversion is abnormal! message is:", key, e);
}
}
}
}
}
/**
* Removes the fields marked invalid in the schema.
*
* @param jsonObject raw log
*/
public static void dropJsonField(JSONObject jsonObject) {
for (String field : dropList) {
jsonObject.remove(field);
}
}
/**
* Assigns schema-declared default values to fields missing from the data.
*
* @param jsonObject raw log
*/
private static void setFieldDefault(JSONObject jsonObject) {
if (!defaultFieldsMap.isEmpty()) {
for (Map.Entry<String, Object> defaultEntry : defaultFieldsMap.entrySet()) {
if (jsonObject.get(defaultEntry.getKey()) == null) {
jsonObject.put(defaultEntry.getKey(), defaultEntry.getValue());
}
}
}
}
/**
* Extracts the required fields and their types from the schema.
*
* @return a map used to build schema-typed objects via reflection
*/
private static HashMap<String, Class> getFieldsFromSchema(String schema) {
HashMap<String, Class> map = new HashMap<>(256);
//Get "fields" and convert it to an array; each element contains a name, doc and type.
JSONObject schemaJson = JSONObject.parseObject(schema);
JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
String fieldStr = field.toString();
JSONObject fieldJson = JSONObject.parseObject(fieldStr);
String name = fieldJson.getString("name");
if (checkKeepField(fieldStr)) {
String type = fieldJson.getString("type");
if (type.contains("{")) {
JSONObject types = JSONObject.parseObject(type);
type = types.getString("type");
}
if (fieldJson.containsKey("default")) {
defaultFieldsMap.put(name, fieldJson.get("default"));
}
//Assemble the map used to generate the entity class.
map.put(name, getClassName(type));
} else {
dropList.add(name);
}
}
return map;
}
/**
* Checks whether a field should be kept.
*
* @param message a single field JSON
* @return true if the field is kept, false otherwise
*/
private static boolean checkKeepField(String message) {
boolean isKeepField = true;
JSONObject fieldJson = JSONObject.parseObject(message);
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
JSONObject doc = JSONObject.parseObject(fieldJson.getString("doc"));
if (doc.containsKey("visibility")) {
String visibility = doc.getString("visibility");
if (VISIBILITY.equals(visibility)) {
isKeepField = false;
}
}
}
return isKeepField;
}
/**
* Parses the schema and returns the job list; each entry is (source field, target field, function, parameter).
*
* @param schema log schema
* @return job list
*/
private static ArrayList<String[]> getJobListFromHttp(String schema) {
ArrayList<String[]> list = new ArrayList<>();
JSONObject schemaJson = JSONObject.parseObject(schema);
JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
JSONObject fieldJson = JSONObject.parseObject(field.toString());
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
JSONObject docJson = JSONObject.parseObject(fieldJson.getString("doc"));
boolean hasFormat = docJson.containsKey("format");
if (hasFormat) {
String name = fieldJson.getString("name");
JSONArray formatList = docJson.getJSONArray("format");
for (Object format : formatList) {
JSONObject formatJson = JSONObject.parseObject(format.toString());
String function = formatJson.getString("function");
String appendTo;
String params = null;
if (formatJson.containsKey("appendTo")) {
appendTo = formatJson.getString("appendTo");
} else {
appendTo = name;
}
if (formatJson.containsKey("param")) {
params = formatJson.getString("param");
}
list.add(new String[]{name, appendTo, function, params});
}
}
}
}
return list;
}
public static ArrayList<String[]> getJobList() {
return jobList;
}
/**
* Clears the caches so they are rebuilt when the configuration changes.
*/
private static void clearCache() {
jobList.clear();
schemaFieldsTypeMap.clear();
dropList.clear();
defaultFieldsMap.clear();
}
}
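
For reference, a minimal sketch of a schema field that exercises the paths above; the field layout follows the parsing code in MetaUtil, but the concrete values are hypothetical. The "$." prefix marks a log-field reference (see isJsonValue in TransformFunctionImpl below):

{
"name": "mail_subject",
"type": "string",
"default": "",
"doc": {
"visibility": "enabled",
"format": [
{"function": "decode_of_base64", "appendTo": "mail_subject", "param": "$.mail_subject_charset"}
]
}
}

getJobListFromHttp turns this field into the job entry {"mail_subject", "mail_subject", "decode_of_base64", "$.mail_subject_charset"}; with "visibility": "disabled" the field would instead land in dropList and be removed from every record.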

View File

@@ -0,0 +1,60 @@
package com.zdjizhi.tools.logtransformation;
import com.alibaba.fastjson2.JSONObject;
import java.util.*;
public class ConvertRecordToPERCENT {
private Properties securityProp;
private HashMap<String, String> recordSchema;
public ConvertRecordToPERCENT(Properties securityProp) {
this.securityProp = securityProp;
final HashMap<String, String> schemaMap = new HashMap<String, String>();
for (String key : securityProp.stringPropertyNames()) {
final String schema = securityProp.getProperty(key);
schemaMap.put(key, schema);
}
this.recordSchema = schemaMap;
}
public JSONObject convertToPERCENT(JSONObject record) {
final JSONObject percent = new JSONObject();
for (Map.Entry<String, Object> entry : record.entrySet()) {
if (recordSchema.containsKey(entry.getKey())) {
percent.put(recordSchema.get(entry.getKey()), entry.getValue());
}
}
//Fill common_start_time and common_end_time (milliseconds to seconds).
percent.put("common_start_time", ((Number) record.get("start_timestamp_ms")).longValue() / 1000);
percent.put("common_end_time", ((Number) record.get("end_timestamp_ms")).longValue() / 1000);
//Fill common_sessions.
percent.put("common_sessions", 1);
//Fill common_internal_ip, common_external_ip, common_direction and common_stream_dir from the flags bitmask.
if (record.containsKey("flags")) {
final int flags = ((Number) record.get("flags")).intValue();
if (flags > 0) {
if ((8L & flags) == 8L && (16L & flags) != 16L) {
percent.put("common_internal_ip", record.get("common_client_ip"));
percent.put("common_external_ip", record.get("common_server_ip"));
percent.put("common_direction", 69);
} else if ((8L & flags) != 8L && (16L & flags) == 16L) {
percent.put("common_internal_ip", record.get("common_server_ip"));
percent.put("common_external_ip", record.get("common_client_ip"));
percent.put("common_direction", 73);
}
if ((8192L & flags) == 8192L && (16384L & flags) == 16384L) {
percent.put("common_stream_dir", 3);
} else if ((8192L & flags) == 8192L) {
percent.put("common_stream_dir", 1);
} else if ((16384L & flags) == 16384L) {
percent.put("common_stream_dir", 2);
}
}
}
return percent;
}
}
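
Reading the bitmask logic above: bit 0x8 appears to mark the client side as internal (direction 69) and bit 0x10 the server side (direction 73), while 0x2000/0x4000 mark uplink/downlink traffic; these semantics are inferred from the code, not documented in the source. A minimal usage sketch with hypothetical values:

Properties mapping = new Properties();
mapping.setProperty("common_client_ip", "common_client_ip"); // hypothetical field mapping
JSONObject record = new JSONObject();
record.put("start_timestamp_ms", 1693905281048L);
record.put("end_timestamp_ms", 1693905291048L);
record.put("common_client_ip", "10.0.0.8");
record.put("common_server_ip", "8.8.8.8");
record.put("flags", 8 | 8192); // client internal, uplink traffic
JSONObject percent = new ConvertRecordToPERCENT(mapping).convertToPERCENT(record);
// -> common_direction = 69, common_internal_ip = "10.0.0.8", common_stream_dir = 1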

View File

@@ -0,0 +1,64 @@
package com.zdjizhi.tools.ordinary;
import org.apache.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
/**
* Utility class for computing MD5 digests.
*
* @author Administrator
* @create 2018-08-13 15:11
*/
public class MD5Utils {
private static Logger logger = Logger.getLogger(MD5Utils.class);
public static String md5Encode(String msg) throws Exception {
try {
byte[] msgBytes = msg.getBytes(StandardCharsets.UTF_8);
/*
* Obtain a MessageDigest instance for the MD5 algorithm.
*/
MessageDigest md5 = MessageDigest.getInstance("MD5");
/*
* Update the digest with the given bytes.
*/
md5.update(msgBytes);
/*
* Complete the hash computation and obtain the digest.
*/
byte[] digest = md5.digest();
/*
* The two calls above are equivalent to:
* byte[] digest = md5.digest(msgBytes);
*/
return byteArr2hexString(digest);
} catch (Exception e) {
logger.error("Error converting to MD5! " + msg);
return "";
}
}
/**
* Converts a byte array to its hexadecimal string form.
*
* @param bys byte array
* @return hex string
*/
public static String byteArr2hexString(byte[] bys) {
StringBuilder hexVal = new StringBuilder();
int val;
for (byte by : bys) {
//Convert the byte to an int; negative bytes must be masked with 0xff.
val = ((int) by) & 0xff;
if (val < 16) {
hexVal.append("0");
}
hexVal.append(Integer.toHexString(val));
}
return hexVal.toString();
}
}
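
A quick usage sketch; "5d41402abc4b2a76b9719d911017c592" is the well-known MD5 digest of "hello":

public static void main(String[] args) throws Exception {
System.out.println(MD5Utils.md5Encode("hello"));
// prints 5d41402abc4b2a76b9719d911017c592
}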

View File

@@ -0,0 +1,115 @@
package com.zdjizhi.tools.transform;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.tools.json.MetaUtil;
import com.zdjizhi.tools.transform.impl.TransformFunctionImpl;
/**
* Utility class for transforming or completing log fields.
*
* @author qidaijie
*/
public class TransForm {
private static TransformFunctionImpl transformFunction = new TransformFunctionImpl();
public static void transformLog(JSONObject jsonObject) {
for (String[] strings : MetaUtil.getJobList()) {
//Value of the source log field.
Object logValue = jsonObject.get(strings[0]);
//Key of the log field the result maps to.
String appendToKey = strings[1];
//Name of the transform function to apply.
String function = strings[2];
//Value of the extra parameter.
String param = strings[3];
//Original value of the target log field.
Object appendToValue = jsonObject.get(appendToKey);
functionSet(function, jsonObject, appendToKey, appendToValue, logValue, param);
}
}
/**
* Dispatches to the transform function that the schema names for a field.
*
* @param function      name of the transform function
* @param jsonObject    parsed raw log
* @param appendToKey   key of the field to complete
* @param appendToValue current value of the field to complete
* @param logValue      value of the source field
* @param param         extra parameter
*/
private static void functionSet(String function, JSONObject jsonObject, String appendToKey, Object appendToValue, Object logValue, String param) {
switch (function) {
case "current_timestamp":
if (!(appendToValue instanceof Long)) {
jsonObject.put(appendToKey, transformFunction.getCurrentTime());
}
break;
case "snowflake_id":
jsonObject.put(appendToKey, transformFunction.getSnowflakeId());
break;
case "geo_ip_detail":
if (logValue != null && appendToValue == null) {
jsonObject.put(appendToKey, transformFunction.getGeoIpDetail(logValue.toString()));
}
break;
case "geo_asn":
if (logValue != null && appendToValue == null) {
jsonObject.put(appendToKey, transformFunction.getGeoAsn(logValue.toString()));
}
break;
case "geo_ip_country":
if (logValue != null && appendToValue == null) {
jsonObject.put(appendToKey, transformFunction.getGeoIpCountry(logValue.toString()));
}
break;
case "flattenSpec":
if (logValue != null && param != null) {
jsonObject.put(appendToKey, transformFunction.flattenSpec(logValue.toString(), param));
}
break;
case "if":
if (param != null) {
jsonObject.put(appendToKey, transformFunction.condition(jsonObject, param));
}
break;
case "decode_of_base64":
if (logValue != null) {
jsonObject.put(appendToKey, transformFunction.decodeBase64(jsonObject, logValue.toString(), param));
}
break;
case "sub_domain":
if (appendToValue == null && logValue != null) {
jsonObject.put(appendToKey, transformFunction.getTopDomain(logValue.toString()));
}
break;
case "radius_match":
if (logValue != null) {
jsonObject.put(appendToKey, transformFunction.radiusMatch(jsonObject, logValue.toString()));
}
break;
case "gtpc_match":
if (logValue != null) {
transformFunction.gtpcMatch(jsonObject, logValue.toString(), appendToKey, param);
}
break;
case "set_value":
if (param != null) {
transformFunction.setValue(jsonObject, appendToKey, param);
}
break;
case "get_value":
if (logValue != null) {
transformFunction.getValue(jsonObject, appendToKey, logValue);
}
break;
default:
}
}
}
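
To make the dispatch concrete, here is a hypothetical "if" job entry, assuming FlowWriteConfig.FORMAT_SPLITTER is "," (its value is configured elsewhere):

String[] job = {"common_direction", "common_internal_ip", "if",
"$.common_direction=69,$.common_client_ip,$.common_server_ip"};
// For a log {"common_direction":69,"common_client_ip":"10.0.0.8","common_server_ip":"8.8.8.8"},
// condition() evaluates common_direction == 69 as true, so common_internal_ip becomes "10.0.0.8".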

View File

@@ -0,0 +1,40 @@
package com.zdjizhi.tools.transform;
import com.alibaba.fastjson2.JSONObject;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.general
* @Description: Transform function contract used by TransForm.
* @date 2023/5/20 10:11
*/
public interface TransformFunction {
long getCurrentTime();
long getSnowflakeId();
String getGeoIpDetail(String ip);
String getGeoIpCountry(String ip);
String getGeoAsn(String ip);
String radiusMatch(JSONObject jsonObject, String ip);
void gtpcMatch(JSONObject jsonObject, String logValue, String appendToKey, String param);
String getTopDomain(String domain);
String decodeBase64(JSONObject jsonObject, String message, String param);
Object flattenSpec(String message, String expr);
Object condition(JSONObject jsonObject, String ifParam);
void setValue(JSONObject jsonObject, String appendToKey, String param);
void getValue(JSONObject jsonObject, String appendToKey, Object logValue);
}

View File

@@ -0,0 +1,307 @@
package com.zdjizhi.tools.transform.impl;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONObject;
import com.geedgenetworks.utils.FormatUtils;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.general.IpLookupUtils;
import com.zdjizhi.tools.general.SnowflakeId;
import com.zdjizhi.tools.transform.TransformFunction;
import com.zdjizhi.tools.connections.hbase.HBaseUtils;
import com.zdjizhi.tools.json.JsonPathUtil;
import java.io.UnsupportedEncodingException;
import java.util.Base64;
import java.util.HashMap;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.transform.impl
* @Description:
* @date 2023/5/20 10:12
*/
public class TransformFunctionImpl implements TransformFunction {
private static final Log logger = LogFactory.get();
private static final int IF_PARAM_LENGTH = 3;
/**
* Equality separator for "if" conditions.
*/
private static final String IF_CONDITION_SPLITTER = "=";
private static final String SEPARATOR = "!=";
/**
* Marks a parameter as a log-field reference rather than a literal string.
*/
private static final String IS_JSON_KEY_TAG = "$.";
/**
* Returns the current timestamp in seconds.
*/
@Override
public long getCurrentTime() {
return System.currentTimeMillis() / 1000;
}
@Override
public long getSnowflakeId() {
return SnowflakeId.generateId();
}
/**
* Looks up detailed location information for a client IP.
*
* @param ip client IP
* @return detailed IP location information
*/
@Override
public String getGeoIpDetail(String ip) {
String detail = "";
try {
detail = IpLookupUtils.getIpLookup().cityLookupDetail(ip);
} catch (NullPointerException npe) {
logger.error("The IP Location MMDB file is not loaded or IP is null! " + npe);
} catch (RuntimeException e) {
logger.error("Get clientIP location error! " + e.getMessage());
}
return detail;
}
/**
* Looks up the country for an IP.
*
* @param ip server IP
* @return country
*/
@Override
public String getGeoIpCountry(String ip) {
String country = "";
try {
country = IpLookupUtils.getIpLookup().countryLookup(ip);
} catch (NullPointerException npe) {
logger.error("The IP Location MMDB file is not loaded or IP is null! " + npe);
} catch (RuntimeException e) {
logger.error("Get ServerIP location error! " + e.getMessage());
}
return country;
}
/**
* Looks up ASN information for an IP.
*
* @param ip client/server IP
* @return ASN
*/
@Override
public String getGeoAsn(String ip) {
String asn = "";
try {
asn = IpLookupUtils.getIpLookup().asnLookup(ip);
} catch (NullPointerException npe) {
logger.error("The ASN MMDB file is not loaded or IP is null! " + npe);
} catch (RuntimeException e) {
logger.error("Get IP ASN error! " + e.getMessage());
}
return asn;
}
/**
* Completes the RADIUS account via HBase.
*
* @param ip client IP
* @return account
*/
@Override
public String radiusMatch(JSONObject jsonObject, String ip) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
int vsysId = jsonObject.getIntValue("common_vsys_id", 1);
return HBaseUtils.getAccount(ip + vsysId);
} else {
return HBaseUtils.getAccount(ip);
}
}
/**
* Completes GTP-C user information via HBase; when parsing the tunnels info,
* gtp_uplink_teid is preferred, falling back to gtp_downlink_teid.
* <p>
* "common_tunnels":[{"tunnels_schema_type":"GTP","gtp_endpoint_a2b_teid":235261261,"gtp_endpoint_b2a_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152}]
*
* @param jsonObject  raw log JSON
* @param logValue    uplink TEID
* @param appendToKey keys of the log fields the results map to
* @param param       JSONPath expressions that locate the GTP info in the tunnels array
*/
@Override
public void gtpcMatch(JSONObject jsonObject, String logValue, String appendToKey, String param) {
try {
String teid = null;
String[] exprs = param.split(FlowWriteConfig.FORMAT_SPLITTER);
for (String expr : exprs) {
Object result = JsonPathUtil.analysis(logValue, expr);
if (result != null) {
teid = result.toString();
break;
}
}
if (teid != null) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
int vsysId = jsonObject.getIntValue("common_vsys_id", 1);
teid = teid + vsysId;
}
String[] appendToKeys = appendToKey.split(FlowWriteConfig.FORMAT_SPLITTER);
HashMap<String, Object> userData = HBaseUtils.getGtpData(teid);
if (userData != null) {
for (String key : appendToKeys) {
Object userValue = userData.get(key);
//Guard against missing keys to avoid a NullPointerException.
if (userValue != null) {
jsonObject.put(key, userValue.toString());
}
}
} else {
logger.warn("The user whose TEID is " + teid + " was not matched!");
}
}
} catch (RuntimeException re) {
logger.error("An exception occurred in TEID type conversion or parsing of user information!" + re.getMessage());
}
}
/**
* Parses the top-level domain.
*
* @param domain original domain
* @return top-level domain
*/
@Override
public String getTopDomain(String domain) {
String topDomain = "";
try {
topDomain = FormatUtils.getTopPrivateDomain(domain);
} catch (StringIndexOutOfBoundsException outException) {
logger.error("Parse top-level domain exceptions, exception domain names:" + domain);
}
return topDomain;
}
/**
* Decodes Base64 using the resolved character encoding.
*
* @param jsonObject raw log JSON
* @param message    Base64 text
* @param param      parameter used to resolve the encoding
* @return decoded string
*/
@Override
public String decodeBase64(JSONObject jsonObject, String message, String param) {
String decodeResult = "";
Object charset = isJsonValue(jsonObject, param);
try {
if (StringUtil.isNotBlank(message)) {
byte[] base64decodedBytes = Base64.getDecoder().decode(message);
if (charset == null) {
decodeResult = new String(base64decodedBytes);
} else {
decodeResult = new String(base64decodedBytes, charset.toString());
}
}
} catch (RuntimeException e) {
logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
} catch (UnsupportedEncodingException e) {
logger.error("The Character Encoding [" + charset.toString() + "] is not supported.exception information:" + e.getMessage());
}
return decodeResult;
}
/**
* Parses JSON with a JSONPath-style expression.
*
* @param message JSON text
* @param expr    extraction expression
* @return extraction result
*/
@Override
public Object flattenSpec(String message, String expr) {
return JsonPathUtil.analysis(message, expr);
}
/**
* "if" function: builds a ternary expression from the log. Numeric operands are
* parsed and compared as numbers before the matching result is returned.
*
* @param jsonObject deserialized raw log
* @param ifParam    field name / literal string
* @return resultA, resultB, or null
*/
@Override
public Object condition(JSONObject jsonObject, String ifParam) {
Object result = null;
try {
String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
if (split.length == IF_PARAM_LENGTH) {
String expression = split[0];
Object resultA = isJsonValue(jsonObject, split[1]);
Object resultB = isJsonValue(jsonObject, split[2]);
if (expression.contains(SEPARATOR)) {
String[] regexp = expression.split(SEPARATOR);
Object direction = isJsonValue(jsonObject, regexp[0]);
if (direction instanceof Number) {
result = Integer.parseInt(direction.toString()) != Integer.parseInt(regexp[1]) ? resultA : resultB;
} else if (direction instanceof String) {
//A "!=" condition selects resultA when the values differ.
result = !direction.equals(regexp[1]) ? resultA : resultB;
}
} else {
String[] regexp = expression.split(IF_CONDITION_SPLITTER);
Object direction = isJsonValue(jsonObject, regexp[0]);
if (direction instanceof Number) {
result = Integer.parseInt(direction.toString()) == Integer.parseInt(regexp[1]) ? resultA : resultB;
} else if (direction instanceof String) {
result = direction.equals(regexp[1]) ? resultA : resultB;
}
}
}
} catch (RuntimeException e) {
logger.error("IF function execution exception, exception information:" + e.getMessage());
}
return result;
}
/**
* Assigns a fixed value to a key in the JSON.
*
* @param jsonObject  raw log JSON
* @param appendToKey key to assign
* @param param       parameter (the fixed value)
*/
@Override
public void setValue(JSONObject jsonObject, String appendToKey, String param) {
jsonObject.put(appendToKey, param);
}
/**
* Copies the value of field A in the JSON to field B.
*
* @param jsonObject  raw log JSON
* @param appendToKey key to assign
* @param logValue    value fetched from the source field
*/
@Override
public void getValue(JSONObject jsonObject, String appendToKey, Object logValue) {
jsonObject.put(appendToKey, logValue);
}
/**
* If the parameter is a log-field reference (prefixed with "$."), returns the field's
* value; otherwise returns the literal string.
*
* @param jsonObject deserialized raw log
* @param param      field name / literal string
* @return JSON value or the original string
*/
private static Object isJsonValue(JSONObject jsonObject, String param) {
//param may be null when a job entry carries no parameter.
if (param != null && param.contains(IS_JSON_KEY_TAG)) {
return jsonObject.get(param.substring(2));
} else {
return param;
}
}
}
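
A minimal sketch of decodeBase64 with a field-referenced charset; the field names and values here are hypothetical:

JSONObject log = new JSONObject();
log.put("mail_subject", "aGVsbG8="); // Base64 for "hello"
log.put("mail_subject_charset", "UTF-8");
TransformFunctionImpl fn = new TransformFunctionImpl();
String subject = fn.decodeBase64(log, log.getString("mail_subject"), "$.mail_subject_charset");
// subject == "hello"; with a null param the platform default charset is used instead.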

View File

@@ -0,0 +1,54 @@
package com.zdjizhi.topology;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.operator.count.SendCountProcess;
import com.zdjizhi.operator.map.MapCompleted;
import com.zdjizhi.operator.map.TypeMapCompleted;
import com.zdjizhi.operator.process.DealFileProcessFunction;
import com.zdjizhi.tools.connections.kafka.KafkaConsumer;
import com.zdjizhi.tools.connections.kafka.KafkaProducer;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class LogFlowWriteTopology {
private static final Log logger = LogFactory.get();
public static void main(String[] args) {
final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
//Maximum time between two output flushes (in milliseconds).
environment.setBufferTimeout(FlowWriteConfig.BUFFER_TIMEOUT);
SingleOutputStreamOperator<JSONObject> completedStream;
if (FlowWriteConfig.LOG_TRANSFORM_TYPE == 0) {//Do not validate log field types.
completedStream = environment.addSource(KafkaConsumer.flinkConsumer()).name(FlowWriteConfig.SOURCE_KAFKA_TOPIC).setParallelism(FlowWriteConfig.SOURCE_PARALLELISM)
.process(new MapCompleted()).name("MapCompletedFunction").setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
} else {//Weakly validate log field types; values can be coerced according to the schema.
completedStream = environment.addSource(KafkaConsumer.flinkConsumer()).name(FlowWriteConfig.SOURCE_KAFKA_TOPIC).setParallelism(FlowWriteConfig.SOURCE_PARALLELISM)
.process(new TypeMapCompleted()).name("TypeMapCompletedFunction").setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
}
//Process records that carry unstructured file fields.
SingleOutputStreamOperator<String> dealFileProcessFunction = completedStream.process(new DealFileProcessFunction()).name("DealFileProcessFunction").uid("DealFile-ProcessFunction").setParallelism(FlowWriteConfig.DEAL_FILE_PARALLELISM);
//Send the completed data to the Percent Kafka cluster.
dealFileProcessFunction.addSink(KafkaProducer.getPercentKafkaProducer()).name("ToPercentKafka").uid("To-Percent-Kafka").setParallelism(FlowWriteConfig.SINK_PERCENT_PARALLELISM);
//Send file metadata to TRAFFIC-FILE-METADATA.
dealFileProcessFunction.getSideOutput(DealFileProcessFunction.metaToKafa).addSink(KafkaProducer.getTrafficFileMetaKafkaProducer()).name("toTrafficFileMeta").uid("to-Traffic-FileMeta").setParallelism(FlowWriteConfig.SINK_FILE_DATA_PARALLELISM);
dealFileProcessFunction.getSideOutput(DealFileProcessFunction.dealFileMetircTag).process(new SendCountProcess()).name("SendCountProcess").uid("Send-Count-Process").setParallelism(1);
try {
environment.execute(args[0]);
} catch (Exception e) {
logger.error("This Flink task start ERROR! Exception information is :" + e);
e.printStackTrace();
}
}
}

25
src/main/log4j.properties Normal file
View File

@@ -0,0 +1,25 @@
#Log4j
log4j.rootLogger=warn,console,file
# Console appender settings
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=warn
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
# File appender settings
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
#Use a relative path; verify in testing that output lands under the application directory.
log4j.appender.file.file=${nis.root}/log/galaxy-name.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
#MyBatis configuration; com.nis.web.dao is the package that holds the MyBatis mapper interfaces.
log4j.logger.com.nis.web.dao=info
#BoneCP data source configuration
log4j.category.com.jolbox=info,console

View File

@@ -0,0 +1,75 @@
package com.zdjizhi.function;
import cn.hutool.core.codec.Base64;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
/**
* @author qidaijie
* @Package com.zdjizhi.function
* @Description:
* @date 2022/11/3 9:36
*/
public class Base64Test {
private static final Log logger = LogFactory.get();
/**
* Decodes Base64 with the given encoding (hutool).
*/
@Test
public void decodeBase64Hutool() {
try {
System.out.println(Base64.decodeStr("bWFpbF90ZXN0X2VuZ2xpc2gudHh0"));
System.out.println(Base64.decodeStr("aGVsbG8="));
} catch (RuntimeException e) {
logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
e.printStackTrace();
}
}
/**
* Encodes Base64 (URL-safe) with the given character encodings.
*/
@Test
public void encodeBase64() {
try {
System.out.println(java.util.Base64.getUrlEncoder().encodeToString("runoob?java8".getBytes("ISO-8859-1")));
System.out.println(java.util.Base64.getUrlEncoder().encodeToString("runoob?java8".getBytes("utf-8")));
} catch (RuntimeException e) {
logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
/**
* Decodes Base64 with java.util.Base64.
*/
@Test
public void decodeBase64() {
try {
byte[] base64decodedBytes = java.util.Base64.getDecoder().decode("bWFpbF90ZXN0X2VuZ2xpc2gudHh0");
System.out.println("原始字符串: " + new String(base64decodedBytes, "utf-8"));
System.out.println("原始字符串: " + new String(base64decodedBytes));
} catch (RuntimeException e) {
logger.error("Resolve Base64 exception, exception information:" + e.getMessage());
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
}

View File

@@ -0,0 +1,53 @@
package com.zdjizhi.function;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.junit.Test;
/**
* @author qidaijie
* @Package com.zdjizhi
* @Description:
* @date 2022/3/16 10:55
*/
public class EncryptorTest {
@Test
public void passwordTest(){
StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
// Configure the encryption/decryption password (salt).
encryptor.setPassword("galaxy");
// 对"raw_password"进行加密S5kR+Y7CI8k7MaecZpde25yK8NKUnd6p
String kafkaUser = encryptor.encrypt("admin");
String kafkaPin = encryptor.encrypt("galaxy2019");
String nacosPin = encryptor.encrypt("nacos");
String nacosUser = encryptor.encrypt("nacos");
System.out.println("Kafka:\n"+"The username is: "+kafkaUser);
System.out.println("The pin is: "+kafkaPin);
System.out.println("Nacos:\n"+"The username is: "+nacosUser);
System.out.println("The pin is: "+nacosPin);
// Decrypt back to the raw passwords.
System.out.println("Kafka:\n"+"The username is: "+encryptor.decrypt(kafkaUser));
System.out.println("The pin is: "+encryptor.decrypt(kafkaPin));
System.out.println("Nacos:\n"+"The username is: "+encryptor.decrypt(nacosUser));
System.out.println("The pin is: "+encryptor.decrypt(nacosPin));
System.out.println("------------------------------------------------------");
System.out.println("The vknRT6U4I739rLIha9CvojM+4uFyXZLEYpO2HZayLnRak1HPW0K2yZ3vnQBA2foo decryption result is: "+encryptor.decrypt("vknRT6U4I739rLIha9CvojM+4uFyXZLEYpO2HZayLnRak1HPW0K2yZ3vnQBA2foo"));
System.out.println("The SU05WiYTLLrF+cVZ410gJdrfEgxgnYG9RGsI+3dZZq54XFLOXEzOvRuMAvLlBf4k decryption result is: "+encryptor.decrypt("SU05WiYTLLrF+cVZ410gJdrfEgxgnYG9RGsI+3dZZq54XFLOXEzOvRuMAvLlBf4k"));
System.out.println("The qUA355VopKSx6kwwwXZwqWWEYSu76Slz decryption result is: "+encryptor.decrypt("qUA355VopKSx6kwwwXZwqWWEYSu76Slz"));
System.out.println("The sJ9iiTeE/7moI2hKn8asMg== decryption result is: "+encryptor.decrypt("sJ9iiTeE/7moI2hKn8asMg=="));
System.out.println("The 63aTpwv2vH0vPikW+3Jjig== decryption result is: "+encryptor.decrypt("63aTpwv2vH0vPikW+3Jjig=="));
System.out.println("The Ei1P4R1e5KTdJR+ZVnBmug== decryption result is: "+encryptor.decrypt("Ei1P4R1e5KTdJR+ZVnBmug=="));
System.out.println("The iW8ekP1SZC6v/7cfJKAqXXrjApJox+cH decryption result is: "+encryptor.decrypt("iW8ekP1SZC6v/7cfJKAqXXrjApJox+cH"));
System.out.println("The TV7Jm4dQCE/LJznp4iTm4ICkBscquv9G decryption result is: "+encryptor.decrypt("TV7Jm4dQCE/LJznp4iTm4ICkBscquv9G"));
System.out.println("The LDEb2OekU7iZWiFw6pUYBSozVKP27r1y decryption result is: "+encryptor.decrypt("LDEb2OekU7iZWiFw6pUYBSozVKP27r1y"));
StandardPBEStringEncryptor encryptorCM = new StandardPBEStringEncryptor();
encryptorCM.setPassword("bifang-api");
System.out.println("The Zp65tFN3CsAXwpNfgfjZ0PrquSrokM1c decryption result is: "+encryptorCM.decrypt("Zp65tFN3CsAXwpNfgfjZ0PrquSrokM1c"));
}
}

View File

@@ -0,0 +1,47 @@
package com.zdjizhi.function;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.hbase.HBaseUtils;
import com.zdjizhi.tools.json.JsonPathUtil;
import org.junit.Test;
import java.util.HashMap;
public class GtpcTest {
private static final Log logger = LogFactory.get();
@Test
public void gtpcMatch() {
String param = "$.[?(@.tunnels_schema_type=='GTP')].gtp_endpoint_a2b_teid,$.[?(@.tunnels_schema_type=='GTP')].gtp_endpoint_b2a_teid";
String logValue = "[{\"tunnels_schema_type\":\"GTP\",\"gtp_endpoint_a2b_teid\":4129335432,\"gtp_endpoint_b2a_teid\":4129335434,\"gtp_sgw_ip\":\"120.36.3.97\",\"gtp_pgw_ip\":\"43.224.53.100\",\"gtp_sgw_port\":2152,\"gtp_pgw_port\":51454},{\"tunnels_schema_type\":\"ETHERNET\",\"source_mac\":\"80:69:33:ea:a5:57\",\"destination_mac\":\"14:09:dc:df:a3:40\"}]";
String appendToKey = "common_imsi,common_imei,common_phone_number";
try {
String teid = null;
String[] exprs = param.split(FlowWriteConfig.FORMAT_SPLITTER);
for (String expr : exprs) {
//analysis() may return null, so check before calling toString().
Object result = JsonPathUtil.analysis(logValue, expr);
if (result != null) {
teid = result.toString();
break;
}
}
System.out.println(teid);
if (teid != null) {
String[] appendToKeys = appendToKey.split(FlowWriteConfig.FORMAT_SPLITTER);
HashMap<String, Object> userData = HBaseUtils.getGtpData(teid);
if (userData != null) {
for (String key : appendToKeys) {
System.out.println(userData.get(key).toString());
}
} else {
logger.warn("Description The user whose TEID is " + teid + " was not matched!");
}
}
} catch (RuntimeException re) {
logger.error("An exception occurred in teid type conversion or parsing of user information!" + re);
}
}
}

View File

@@ -0,0 +1,239 @@
package com.zdjizhi.function;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author qidaijie
* @Package com.zdjizhi
* @Description:
* @date 2021/12/3 10:42
*/
public class HBaseTest {
private static final Log logger = LogFactory.get();
private static Map<String, String> radiusMap = new ConcurrentHashMap<>(16);
private static Map<String,HashMap<String, Object>> gtpcMap = new ConcurrentHashMap<>(16);
@Test
public void getColumn() {
// Manage the HBase configuration.
Configuration configuration = HBaseConfiguration.create();
// Set the ZooKeeper quorum.
configuration.set("hbase.zookeeper.quorum", "192.168.44.12:2181");
configuration.set("hbase.client.retries.number", "1");
configuration.set("hbase.client.pause", "50");
configuration.set("hbase.rpc.timeout", "3000");
configuration.set("zookeeper.recovery.retry", "1");
configuration.set("zookeeper.recovery.retry.intervalmill", "200");
try {
System.out.println(System.currentTimeMillis());
Connection connection = ConnectionFactory.createConnection(configuration);
Table table = connection.getTable(TableName.valueOf("tsg_galaxy:relation_framedip_account"));
Scan scan2 = new Scan();
ResultScanner scanner = table.getScanner(scan2);
for (Result result : scanner) {
int acctStatusType;
boolean hasType = result.containsColumn(Bytes.toBytes("radius"), Bytes.toBytes("acct_status_type"));
if (hasType) {
acctStatusType = Bytes.toInt(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("acct_status_type")));
} else {
acctStatusType = 3;
}
String framedIp = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("framed_ip")));
String account = Bytes.toString(result.getValue(Bytes.toBytes("radius"), Bytes.toBytes("account")));
System.out.println("status" + acctStatusType + "key:" + framedIp + "value:" + account);
}
} catch (IOException e) {
e.printStackTrace();
}finally {
System.out.println(System.currentTimeMillis());
}
}
@Test
public void getGtpcData() {
// Manage the HBase configuration.
Configuration configuration = HBaseConfiguration.create();
// Set the ZooKeeper quorum.
configuration.set("hbase.zookeeper.quorum", "192.168.44.12:2181");
configuration.set("hbase.client.retries.number", "1");
configuration.set("hbase.client.pause", "50");
configuration.set("hbase.rpc.timeout", "3000");
configuration.set("zookeeper.recovery.retry", "1");
configuration.set("zookeeper.recovery.retry.intervalmill", "200");
long begin = System.currentTimeMillis();
ResultScanner scanner = null;
try {
Connection connection = ConnectionFactory.createConnection(configuration);
Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
Scan scan2 = new Scan();
scanner = table.getScanner(scan2);
for (Result result : scanner) {
String upLinkTeid = getTeid(result, "uplink_teid");
String downLinkTeid = getTeid(result, "downlink_teid");
String phoneNumber = getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
String imsi = getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
String imei = getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
Long lastUpdateTime = getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");
HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = getVsysId(result).trim();
updateCache(gtpcMap, upLinkTeid+vsysId, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid+vsysId, buildUserData, lastUpdateTime);
} else {
updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
}
}
logger.warn("The obtain the number of GTP-C relationships : " + gtpcMap.size());
logger.warn("The time spent to obtain GTP-C relationships : " + (System.currentTimeMillis() - begin));
} catch (IOException | RuntimeException e) {
logger.error("The relationship between USER and TEID obtained from HBase is abnormal! message is :" + e);
e.printStackTrace();
} finally {
if (scanner != null) {
scanner.close();
}
}
for (String key : gtpcMap.keySet()){
System.out.println(key +"---"+gtpcMap.get(key));
}
}
/**
* Gets a String value from HBase.
*
* @param result     result set
* @param familyName column family name
* @param columnName column name
* @return value
*/
private static String getString(Result result, String familyName, String columnName) {
byte[] familyBytes = Bytes.toBytes(familyName);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = Bytes.toString(result.getValue(familyBytes, columnBytes)).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "";
}
/**
* Gets a Long value from HBase.
*
* @param result     result set
* @param familyName column family name
* @param columnName column name
* @return value, or 0 when absent
*/
private static Long getLong(Result result, String familyName, String columnName) {
byte[] familyBytes = Bytes.toBytes(familyName);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
return Bytes.toLong(result.getValue(familyBytes, columnBytes));
}
return 0L;
}
/**
* Gets a TEID (stored as a long) from HBase as a String.
*
* @param result     result set
* @param columnName column name
* @return TEID string, or "0" when absent
*/
private static String getTeid(Result result, String columnName) {
byte[] familyBytes = Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = String.valueOf(Bytes.toLong(result.getValue(familyBytes, columnBytes))).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "0";
}
/**
* Builds the user information map.
*
* @param phoneNumber phone number
* @param imsi        subscriber identity
* @param imei        device identity
* @return user information
*/
private static HashMap<String, Object> buildUserData(String phoneNumber, String imsi, String imei, Long lastUpdateTime) {
HashMap<String, Object> tmpMap = new HashMap<>(4);
tmpMap.put("common_phone_number", phoneNumber);
tmpMap.put("common_imsi", imsi);
tmpMap.put("common_imei", imei);
tmpMap.put("last_update_time", lastUpdateTime);
return tmpMap;
}
/**
* Gets the vsys_id from HBase.
*
* @param result result set
* @return vsys_id string, or "1" when absent
*/
static String getVsysId(Result result) {
byte[] familyBytes = Bytes.toBytes("common");
byte[] columnBytes = Bytes.toBytes("vsys_id");
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = String.valueOf(Bytes.toInt(result.getValue(familyBytes, columnBytes))).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "1";
}
/**
* Compares the cached timestamp with the newly fetched one and updates the cache
* when the new record is more recent.
*
* @param gtpcMap        cache map
* @param key            uplink/downlink TEID
* @param userData       user information fetched from HBase
* @param lastUpdateTime last update time of the user information
*/
private static void updateCache(Map<String, HashMap<String, Object>> gtpcMap, String key, HashMap<String, Object> userData, Long lastUpdateTime) {
if (StringUtil.isNotBlank(key)){
if (gtpcMap.containsKey(key)) {
Long oldUpdateTime = Long.parseLong(gtpcMap.get(key).get("last_update_time").toString());
if (lastUpdateTime > oldUpdateTime) {
gtpcMap.put(key, userData);
}
} else {
gtpcMap.put(key, userData);
}
}
}
}

View File

@@ -0,0 +1,165 @@
package com.zdjizhi.function;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.GalaxyDataBaseReader;
import com.geedgenetworks.utils.IpLookupV2;
import com.maxmind.db.CHMCache;
import com.maxmind.db.Reader;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.http.HttpClientService;
import org.apache.commons.io.IOUtils;
import org.junit.Test;
import java.io.*;
import java.net.InetAddress;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi
* @Description:
* @date 2021/11/6 11:38
*/
public class IpLookupTest {
private static final Log logger = LogFactory.get();
private static final String DAT_FILEPATH = "D:\\workerspace\\dat\\";
@Test
public void ipLookupDatabaseTest() {
IpLookupV2.Builder ipLookupBuilder = new IpLookupV2.Builder(false);
ipLookupBuilder.loadDataFileV4(DAT_FILEPATH + "ip_v4_built_in.mmdb");
// ipLookupBuilder.loadDataFileV6(DAT_FILEPATH + "ip_v6_built_in.mmdb");
// ipLookupBuilder.loadDataFilePrivateV4(DAT_FILEPATH + "ip_v4_user_defined.mmdb");
// ipLookupBuilder.loadDataFilePrivateV6(DAT_FILEPATH + "ip_v6_user_defined.mmdb");
// ipLookupBuilder.loadAsnDataFile(DAT_FILEPATH + "asn_v4.mmdb");
// ipLookupBuilder.loadAsnDataFileV6(DAT_FILEPATH + "asn_v6.mmdb");
IpLookupV2 ipLookup = ipLookupBuilder.build();
// String ip = "23.200.74.224";
String ip = "121.14.89.209";
try {
System.out.println(ipLookup.cityLookup(ip));
System.out.println(ipLookup.cityLookupDetail(ip));
System.out.println(ipLookup.cityLatLngLookup(ip));
System.out.println(ipLookup.provinceLookup(ip));
System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
System.out.println(ipLookup.countryLookup(ip));
System.out.println(ipLookup.locationLookupDetail(ip));
System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
System.out.println(ipLookup.infoLookupToJSONString(ip));
//ASN
System.out.println(ipLookup.asnLookup(ip));
System.out.println(ipLookup.asnLookupInfo(ip));
System.out.println(ipLookup.asnLookupDetail(ip));
System.out.println(ipLookup.asnLookupOrganization(ip));
} catch (NullPointerException npe) {
logger.error("The MMDB file is not loaded or IP is null! " + npe.getMessage());
} catch (RuntimeException e) {
logger.error("Get clientIP location error! " + e.getMessage());
}
}
@Test
public void ipLookupForBytesTest() {
InputStream inputStream = null;
try {
String url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/d2ab3313-1941-4847-84fa-5dbbd8a9007f-aXBfdjRfYnVpbHRfaW4=.mmdb";
HttpClientService httpClientService = new HttpClientService();
inputStream = httpClientService.httpGetInputStream(url, 3000);
Reader reader = new Reader(inputStream, new CHMCache());
InetAddress ipAddress = InetAddress.getByName("121.14.89.209");
Map map = reader.get(ipAddress, Map.class);
System.out.println(map.toString());
} catch (IOException e) {
e.printStackTrace();
} finally {
IoUtil.close(inputStream);
}
}
@Test
public void ipLookupInputStreamTest() {
InputStream asnInputStream = null;
InputStream ipv4InputStream = null;
String ip = "114.64.231.114";
try {
HttpClientService httpClientService = new HttpClientService();
String asnUrl = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/1b96764c-59dd-4d6b-8edb-623705f708a5-YXNuX3Y0.mmdb";
String ipv4Url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/24cb6a74-f048-4672-988f-112858427a3b-aXBfdjRfYnVpbHRfaW4=.mmdb";
asnInputStream = httpClientService.httpGetInputStream(asnUrl, 3000);
ipv4InputStream = httpClientService.httpGetInputStream(ipv4Url, 3000);
IpLookupV2 ipLookup = new IpLookupV2.Builder(false)
.loadDataFileV4(ipv4InputStream)
.loadAsnDataFileV4(asnInputStream)
.build();
System.out.println(ipLookup.cityLookup(ip));
System.out.println(ipLookup.cityLookupDetail(ip));
System.out.println(ipLookup.cityLatLngLookup(ip));
System.out.println(ipLookup.provinceLookup(ip));
System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
System.out.println(ipLookup.countryLookup(ip));
System.out.println(ipLookup.locationLookupDetail(ip));
System.out.println(ipLookup.asnLookup(ip));
System.out.println(ipLookup.administrativeAreaLookupDetail(ip));
} catch (NullPointerException npe) {
logger.error("The MMDB file is not loaded or IP is null! " + npe.getMessage());
} catch (RuntimeException e) {
logger.error("Get clientIP location error! " + e.getMessage());
} finally {
IoUtil.close(asnInputStream);
IoUtil.close(ipv4InputStream);
}
}
@Test
public void inputLookupTest() {
// String ip = "121.14.89.209";
//
byte[] localFile = new FileReader(DAT_FILEPATH + "built_in_ip_location.mmdb").readBytes();
String localFileSha256 = DigestUtil.sha256Hex(localFile);
System.out.println("本地文件SHA256"+localFileSha256);
//
// IpLookupV2 ipLookup = new IpLookupV2.Builder(false)
// .loadDataFileV4(DAT_FILEPATH + "built_in_ip_location.mmdb").build();
//
// System.out.println(ipLookup.infoLookup(ip));
try {
HttpClientService httpClientService = new HttpClientService();
InputStream inputStream = httpClientService.httpGetInputStream("http://192.168.44.55:9098/hos/knowledge_base_hos_bucket/9b1ce6b4-024d-4343-80d5-6e6dc0ad0863-aXA0.mmdb", FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
byte[] bytes = IOUtils.toByteArray(inputStream);
String downloadFileSha256 = DigestUtil.sha256Hex(bytes);
InputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
System.out.println("HOS下载文件2 SHA256" + downloadFileSha256);
System.out.println("HOS下载文件2 size" + bytes.length);
// IpLookupV2 ipLookup2 = new IpLookupV2.Builder(false)
// .loadDataFileV4(byteArrayInputStream).build();
new GalaxyDataBaseReader.Builder(byteArrayInputStream).withCache(new CHMCache()).build();
InputStream inputStream1 = httpClientService.httpGetInputStream("http://192.168.44.55:9098/hos/knowledge_base_hos_bucket/9b1ce6b4-024d-4343-80d5-6e6dc0ad0863-aXA0.mmdb", FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
new GalaxyDataBaseReader.Builder(inputStream1).withCache(new CHMCache()).build();
// System.out.println(ipLookup2.infoLookup(ip));
} catch (IOException e) {
e.printStackTrace();
}
}
}

View File

@@ -0,0 +1,89 @@
package com.zdjizhi.function;
import org.junit.Test;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
public class TimestampTest {
@Test
public void timestampToDate() {
System.out.println(getLocalDateTime(1693905281L, 0, "Asia/Shanghai"));
System.out.println(getLocalDateTime(1693905281048L, 3, "Asia/Shanghai"));
System.out.println(getLocalDateTime(1693905281048L, 6, "Asia/Shanghai"));
System.out.println(getLocalDateTime(1693905281048L, 9, "UTC+03:00"));
System.out.println(getZoneDateTime(1693905281L, 3, "Asia/Shanghai"));
System.out.println(getZoneDateTime(1693905281048L, 6, "Asia/Shanghai"));
}
@Test
public void timestampConversion() {
long nanosTimestamp = 1630988475000000000L; // nanosecond timestamp
System.out.println("Nanosecond timestamp: " + timestampToSeconds(nanosTimestamp));
long microsTimestamp = 1630988475000000L; // microsecond timestamp
System.out.println("Microsecond timestamp: " + timestampToSeconds(microsTimestamp));
long millisTimestamp = 1693969952127L; // millisecond timestamp
System.out.println("Millisecond timestamp: " + timestampToSeconds(millisTimestamp));
long errorTimestamp = 169396995L; // malformed timestamp
System.out.println("Malformed timestamp: " + timestampToSeconds(errorTimestamp));
}
private Long timestampToSeconds(long timestamp) {
int timestampLength = Long.toString(timestamp).length();
switch (timestampLength) {
case 13:
return TimeUnit.MILLISECONDS.toSeconds(timestamp);
case 16:
return TimeUnit.MICROSECONDS.toSeconds(timestamp);
case 19:
return TimeUnit.NANOSECONDS.toSeconds(timestamp);
default:
// throw new RuntimeException("This timestamp:" + timestamp + " format is not nanosecond, microsecond, millisecond, or second");
return timestamp;
}
}
private String getLocalDateTime(Long timestamp, int precision, String timeZone) {
boolean isMillis = String.valueOf(timestamp).length() > 10;
String timePattern = "yyyy-MM-dd HH:mm:ss";
if (precision > 0) {
String s = String.join("", Collections.nCopies(precision, "S"));
timePattern = String.join(".", timePattern, s);
}
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(timePattern);
Instant instant;
if (isMillis) {
instant = Instant.ofEpochMilli(timestamp);
} else {
instant = Instant.ofEpochSecond(timestamp);
}
return LocalDateTime.ofInstant(instant, ZoneId.of(timeZone)).format(formatter);
}
private String getZoneDateTime(Long timestamp, int precision, String timeZone) {
boolean isMillis = String.valueOf(timestamp).length() > 10;
String timePattern = "yyyy-MM-dd'T'HH:mm:ss.";
if (precision > 0) {
String s = String.join("", Collections.nCopies(precision, "S"));
timePattern = String.join("", timePattern, s, "XXX");
}
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(timePattern);
Instant instant;
if (isMillis) {
instant = Instant.ofEpochMilli(timestamp);
} else {
instant = Instant.ofEpochSecond(timestamp);
}
return ZonedDateTime.ofInstant(instant, ZoneId.of(timeZone)).format(formatter);
}
}

View File

@@ -0,0 +1,39 @@
package com.zdjizhi.function;
import com.google.common.net.InternetDomainName;
import com.geedgenetworks.utils.FormatUtils;
import org.junit.Test;
/**
* @author qidaijie
* @Package com.zdjizhi.function
* @Description:
* @date 2023/7/24 13:55
*/
public class TopDomainTest {
@Test
public void getTopDomainTest() {
String host = "heartsofsteel-.tumblr.com";
System.out.println(FormatUtils.getTopPrivateDomain(host));
host = "heartsofsteel.tumblr.com";
System.out.println(FormatUtils.getTopPrivateDomain(host));
// String sni = "www.googleapis.com";
String sni = "juicebox-.tumblr.com";
String domain = FormatUtils.getDomain(sni);
System.out.println(domain);
System.out.println(FormatUtils.getTopPrivateDomain(sni));
System.out.println(InternetDomainName.isValid(sni));
System.out.println(InternetDomainName.isValid(domain));
InternetDomainName internetDomainName = InternetDomainName.from(domain);
if (internetDomainName.isUnderPublicSuffix()) {
System.out.println(internetDomainName.topPrivateDomain().toString());
}
}
}

View File

@@ -0,0 +1,75 @@
package com.zdjizhi.hdfs;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.connections.hadoop
* @Description:
* @date 2022/11/2 17:57
*/
public class FileUtilsTest {
private static final Log logger = LogFactory.get();
private static FileSystem fileSystem;
static {
Configuration configuration = new Configuration();
try {
configuration.set("fs.defaultFS","hdfs://ns1");
configuration.set("hadoop.proxyuser.root.hosts","*");
configuration.set("hadoop.proxyuser.root.groups","*");
configuration.set("ha.zookeeper.quorum","192.168.44.83:2181,192.168.44.84:2181,192.168.44.85:2181");
configuration.set("dfs.nameservices","ns1");
configuration.set("dfs.ha.namenodes.ns1","nn1,nn2");
configuration.set("dfs.namenode.rpc-address.ns1.nn1","192.168.44.85:9000");
configuration.set("dfs.namenode.rpc-address.ns1.nn2","192.168.44.86:9000");
configuration.set("dfs.client.failover.proxy.provider.ns1","org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
//Create the FileSystem used to connect to HDFS.
fileSystem = FileSystem.get(configuration);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Test
public void mkdir() throws Exception{
fileSystem.mkdirs(new Path("/knowledgebase/test"));
}
@Test
public void create() throws Exception{
FSDataOutputStream outputStream = fileSystem.create(new Path("/knowledgebase/test/test.txt"));
outputStream.write("Hello World".getBytes());
outputStream.flush();
outputStream.close();
}
@Test
public void cat() throws Exception{
FSDataInputStream inputStream = fileSystem.open(new Path("/knowledgebase/test/test.txt"));
IOUtils.copyBytes(inputStream, System.out, 1024);
inputStream.close();
}
@Test
public void rename() throws Exception{
fileSystem.rename(new Path("/knowledgebase/test/test.txt"), new Path("/knowledgebase/test/test1.txt"));
}
@Test
public void delete() throws Exception{
fileSystem.delete(new Path("/knowledgebase/test"), true);//true = delete recursively
}
}

View File

@@ -0,0 +1,100 @@
package com.zdjizhi.hos;
import cn.hutool.core.io.IoUtil;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Joiner;
import com.maxmind.db.CHMCache;
import com.maxmind.db.Reader;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.connections.http.HttpClientService;
import com.geedgenetworks.utils.StringUtil;
import org.junit.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.hos
* @Description:
* @date 2022/11/7 13:55
*/
public class hosUtilsTest {
@Test
public void downloadToLocalTest() {
FileOutputStream outputStream = null;
InputStream inputStream = null;
try {
String url = "http://192.168.44.242:9098/hos/knowledge_base_hos_bucket/0773846a-87df-4869-a5b7-815eda384b92-aXBfdjRfYnVpbHRfaW4=.mmdb";
HttpClientService httpClientService = new HttpClientService();
inputStream = httpClientService.httpGetInputStream(url, 3000);
File file = new File(FlowWriteConfig.TOOLS_LIBRARY.concat(File.separator).concat("ip_v4_built_in.mmdb"));
if (!file.getParentFile().exists()) {
file.getParentFile().mkdir();
}
outputStream = new FileOutputStream(file);
IoUtil.copy(inputStream, outputStream);
} catch (IOException | RuntimeException e) {
e.printStackTrace();
} finally {
IoUtil.close(inputStream);
IoUtil.close(outputStream);
}
}
@Test
public void locationTest() {
InputStream inputStream = null;
try {
// Header header = new BasicHeader("token", FlowWriteConfig.HOS_TOKEN);
String url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/ac0ef83d-f23a-41a0-9c66-51c4e2b4cbc0-aXBfdjRfYnVpbHRfaW4=.mmdb";
HttpClientService httpClientService = new HttpClientService();
inputStream = httpClientService.httpGetInputStream(url, 3000);
Reader reader = new Reader(inputStream, new CHMCache());
InetAddress ipAddress = InetAddress.getByName("114.64.231.114");
Map map = reader.get(ipAddress, Map.class);
if (map != null) {
System.out.println(StringUtil.setDefaultIfEmpty(map.toString(), "unkonw").toString());
System.out.println(Joiner.on(".").useForNull("").join(map.get("COUNTRY"),
map.get("SUPER_ADMINISTRATIVE_AREA"), map.get("ADMINISTRATIVE_AREA")));
System.out.println(Joiner.on(".").useForNull("").join(map.get("COUNTRY"),
map.get("SUPER_ADMINISTRATIVE_AREA"), map.get("ADMINISTRATIVE_AREA")).replace("\"", ""));
}
} catch (IOException | RuntimeException e) {
e.printStackTrace();
} finally {
IoUtil.close(inputStream);
}
}
@Test
public void asnTest() {
InputStream inputStream = null;
try {
String url = "http://192.168.44.12:9098/hos/knowledge_base_hos_bucket/95b20b83-e6bd-4d28-85d5-3b4e32da9a3f-aXBfdjRfYnVpbHRfaW4=.mmdb";
HttpClientService httpClientService = new HttpClientService();
inputStream = httpClientService.httpGetInputStream(url, 3000);
Reader reader = new Reader(inputStream, new CHMCache());
InetAddress ipAddress = InetAddress.getByName("23.200.74.224");
Map map = reader.get(ipAddress, Map.class);
if (map != null) {
System.out.println(StringUtil.setDefaultIfEmpty(map.get("ASN"), "unkonw").toString());
System.out.println(StringUtil.setDefaultIfEmpty(map.get("ASN"), "unkonw").toString().replace("\"", ""));
System.out.println(StringUtil.setDefaultIfEmpty(map.toString(), "unkonw").toString());
}
} catch (IOException | RuntimeException e) {
e.printStackTrace();
} finally {
IoUtil.close(inputStream);
}
}
}

View File

@@ -0,0 +1,98 @@
package com.zdjizhi.json;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.json.pojo.UserList;
import com.zdjizhi.json.pojo.UserMap;
import org.junit.Test;
import java.util.List;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.json
* @Description:
* @date 2023/5/20 14:02
*/
public class FastJsonTest {
@Test
public void pojoTest() {
//all right
String message = "{\"name\":\"zhangsan\",\"age\":50,\"idcard\":\"140303199999999999\",\"previousaddress\":{\"first\":\"北京\",\"second\":\"上海\"}}";
//int error (yes)
// String message = "{\"name\":\"zhangsan\",\"age\":\"123\",\"idCard\":\"140303199999999999\",\"previousaddress\":{\"first\":\"北京\",\"second\":\"上海\"}}";
//string error (yes)
// String message = "{\"name\":123,\"age\":123,\"idCard\":\"140303199999999999\",\"previousaddress\":{\"first\":\"北京\",\"second\":\"上海\"}}";
//json error (no)
// String message = "{\"name\":\"zhangsan\",\"age\":50,\"idCard\":\"140303199999999999\",\"previousaddress\":\"{\\\"first\\\":\\\"北京\\\",\\\"second\\\":\\\"上海\\\"}\"}";
UserMap user = JSON.parseObject(message, UserMap.class);
System.out.println(user.getName());
System.out.println(user.getDevicetag());
System.out.println(JSON.toJSONString(user));
}
@Test
public void typeCheckTest() {
//jsonobject
String message = "{\"name\":\"zhangsan\",\"age\":50,\"idcard\":\"123456789\",\"devicetag\":\"{\\\"tags\\\":[{\\\"tag\\\":\\\"group\\\",\\\"value\\\":\\\"7400\\\"},{\\\"tag\\\":\\\"center\\\",\\\"value\\\":\\\"7400\\\"}]}\"}";
JSONObject objectTest = JSONObject.parseObject(message);
for (Map.Entry<String, Object> entry : objectTest.entrySet()) {
System.out.println("key:" + entry.getKey() + "————value:" + entry.getValue() + "————class: " + entry.getValue().getClass());
}
System.out.println("\n输出原始日志" + objectTest.toString());
Object previousMap = objectTest.get("devicetag");
if (previousMap.getClass() != Map.class) {
JSONObject previousObject = JSONObject.parseObject(previousMap.toString());
objectTest.put("devicetag", previousObject);
}
System.out.println("输出转换map类型后的日志" + objectTest.toString());
UserMap userMap = objectTest.toJavaObject(UserMap.class);
System.out.println(JSON.toJSONString(userMap));
System.out.println("\n-----------------------------------------------\n");
//jsonarray
message = "{\"name\":\"zhangsan\",\"age\":50,\"idcard\":\"123456789\",\"devicetag\":\"[{\\\"tag\\\":\\\"group\\\",\\\"value\\\":\\\"7400\\\"},{\\\"tag\\\":\\\"center\\\",\\\"value\\\":\\\"7400\\\"}]\"}";
JSONObject arrayTest = JSONObject.parseObject(message);
for (Map.Entry<String, Object> entry : arrayTest.entrySet()) {
System.out.println("key:" + entry.getKey() + "————value:" + entry.getValue() + "————class: " + entry.getValue().getClass());
}
System.out.println("\n输出原始日志" + arrayTest.toString());
Object previousList = arrayTest.get("devicetag");
if (previousList.getClass() != List.class) {
JSONArray previousArray = JSONArray.parseArray(previousList.toString());
arrayTest.put("devicetag", previousArray);
}
System.out.println("输出转换list类型后的日志" + arrayTest.toString());
UserList userList = arrayTest.toJavaObject(UserList.class);
System.out.println(JSON.toJSONString(userList));
}
@Test
public void typeTest() {
String message = "{\"name\":\"zhangsan\",\"age\":12,\"object\":{\"name\":\"a\",\"age\":12},\"array\":[{\"one\":1},{\"two\":2}]}";
JSONObject objectTest = JSONObject.parseObject(message);
for (Map.Entry<String, Object> entry : objectTest.entrySet()) {
String key = entry.getKey();
Class<?> aClass = entry.getValue().getClass();
System.out.println(key + "---------" + aClass.getSimpleName());
}
Object bbb = objectTest.get("bbb");
if (bbb == null){
System.out.println("null");
}
}
}

View File

@@ -0,0 +1,102 @@
package com.zdjizhi.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.*;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import org.junit.Test;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.json
* @Description:
* @date 2022/3/24 10:22
*/
public class JsonPathTest {
private static Properties properties = new Properties();
static {
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
}
@Test
public void knowledgeBaseTest() {
try {
ConfigService configService = NacosFactory.createConfigService(properties);
String schema = configService.getConfig("knowledge_base.json", "DEFAULT_GROUP", 5000);
// String KNOWLEDGE_EXPR = "[?(@.version=='latest' && (@.name in ('ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined','asn_v4','asn_v6')))]";
// String KNOWLEDGE_EXPR = "[?(@.version=='latest' && (@.type in ('ip_location','asn','ip')))]";
// String KNOWLEDGE_EXPR = "[?(@.version=='latest' && (@.name in ('ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined','asn_v4','asn_v6')) && (@.type in ('ip_location','asn','ip')))]";
// String types = "[?(@.type in ('ip_location','asn','ip_user_defined'))]";
// String names = "[?(@.name in ('ip_v4_built_in','ip_v6_built_in','asn_v4','asn_v6','内置IP定位库'))]";
// String expr = "[?(@.version=='latest')][?(@.type in ('ip_location','asn','ip_user_defined'))]";
// String expr = "[?(@.version=='latest')][?(@.name == 'QQQ' || (@.type == 'ip_user_defined'))]";
String expr = "[?(@.version=='latest')][@.type in ('ip_location','asn','ip')][?(@.name in ('QQQ'))]";
// JSONPath jsonPath = JSONPath.of(combinationFilterList());
JSONPath jsonPath = JSONPath.of(expr);
String extract = jsonPath.extract(JSONReader.of(schema)).toString();
JSONArray jsonArray = JSON.parseArray(extract);
for (int i = 0; i < jsonArray.size(); i++) {
System.out.println(jsonArray.getString(i));
// KnowledgeBaseMeta knowledgeBaseMeta = JSONObject.parseObject(jsonArray.getString(i), KnowledgeBaseMeta.class);
// System.out.println(knowledgeBaseMeta.toString());
}
} catch (NacosException e) {
e.printStackTrace();
}
}
public static String combinationFilterList() {
String[] typeList = CommonConfig.KNOWLEDGEBASE_TYPE_LIST.split(",");
String[] nameList = CommonConfig.KNOWLEDGEBASE_NAME_LIST.split(",");
String expr = "[?(@.version=='latest')]";
// quote each entry, e.g. ip_location -> 'ip_location'
if (typeList.length > 1) {
StringBuilder typeBuilder = new StringBuilder();
typeBuilder.append("[?(@.type in (");
for (int i = 0; i < typeList.length; i++) {
if (i == typeList.length - 1) {
typeBuilder.append("'").append(typeList[i]).append("'))]");
} else {
typeBuilder.append("'").append(typeList[i]).append("',");
}
}
expr = expr + typeBuilder.toString();
}
if (nameList.length > 1) {
StringBuilder nameBuilder = new StringBuilder();
nameBuilder.append("[?(@.name in (");
for (int i = 0; i < nameList.length; i++) {
if (i == nameList.length - 1) {
nameBuilder.append("'").append(nameList[i]).append("'))]");
} else {
nameBuilder.append("'").append(nameList[i]).append("',");
}
}
expr = expr + nameBuilder.toString();
}
return expr;
}
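/*
* Illustrative walk-through (the configured values are assumptions): with
* KNOWLEDGEBASE_TYPE_LIST = "ip_location,asn" and KNOWLEDGEBASE_NAME_LIST =
* "asn_v4,asn_v6" the method returns
* [?(@.version=='latest')][?(@.type in ('ip_location','asn'))][?(@.name in ('asn_v4','asn_v6'))]
* Note the length > 1 guard: a single-element list appends no filter at all.
*/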
}

View File

@@ -0,0 +1,223 @@
package com.zdjizhi.json;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import org.junit.Test;
import java.util.*;
/**
* Applicable to schemas >= TSG22.08
*
* @author qidaijie
* @Package com.zdjizhi.nacos
* @Description:
* @date 2022/3/17 14:57
*/
public class NewSchemaTest {
private static Properties properties = new Properties();
/**
* Fields that should be dropped from the log
*/
private static ArrayList<String> dropList = new ArrayList<>();
/**
* Valid fields and their types as specified by the schema
*/
private static HashMap<String, Class> jsonFieldsMap;
/**
* Fields that declare a default value
*/
private static HashMap<String, Object> defaultFieldsMap = new HashMap<>(16);
static {
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.11:8848");
properties.setProperty(PropertyKeyConst.NAMESPACE, "f507879a-8b1b-4330-913e-83d4fcdc14bb");
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
}
@Test
public void newSchemaTest() {
try {
ConfigService configService = NacosFactory.createConfigService(properties);
String dataId = "session_record.json";
String group = "Galaxy";
String schema = configService.getConfig(dataId, group, 5000);
ArrayList<String[]> newJobList = getNewJobList(schema);
for (String[] job : newJobList) {
System.out.println(Arrays.toString(job));
}
HashMap<String, Class> fieldsFromSchema = getFieldsFromSchema(schema);
for (String key : fieldsFromSchema.keySet()) {
System.out.println("fileName:" + key + " Class:" + fieldsFromSchema.get(key));
}
} catch (NacosException e) {
e.printStackTrace();
}
}
/**
* Pattern match: given a schema type string, return the corresponding Java class.
*
* @param type the schema type name
* @return the matching class
*/
private static Class getClassName(String type) {
Class clazz;
// map schema type names to boxed Java classes; unknown types default to String
switch (type) {
case "int":
clazz = Integer.class;
break;
case "string":
clazz = String.class;
break;
case "long":
clazz = Long.class;
break;
case "array":
clazz = List.class;
break;
case "double":
clazz = Double.class;
break;
case "float":
clazz = Float.class;
break;
case "char":
clazz = Character.class;
break;
case "byte":
clazz = Byte.class;
break;
case "boolean":
clazz = Boolean.class;
break;
case "short":
clazz = Short.class;
break;
default:
clazz = String.class;
}
return clazz;
}
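// Illustrative mapping (inputs assumed): getClassName("long") -> Long.class,
// getClassName("array") -> List.class, and an unrecognized type such as
// "uuid" falls back to String.class via the default branch.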
/**
* Extract the required fields and their types from the schema.
*
* @return a map used to reflectively build objects of the schema type
*/
private static HashMap<String, Class> getFieldsFromSchema(String schema) {
HashMap<String, Class> map = new HashMap<>(256);
// fetch "fields" as an array; each element holds a name, doc, and type
com.alibaba.fastjson2.JSONObject schemaJson = com.alibaba.fastjson2.JSONObject.parseObject(schema);
com.alibaba.fastjson2.JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
String fieldStr = field.toString();
if (checkKeepField(fieldStr)) {
com.alibaba.fastjson2.JSONObject fieldJson = com.alibaba.fastjson2.JSONObject.parseObject(fieldStr);
String name = fieldJson.getString("name");
String type = fieldJson.getString("type");
if (type.contains("{")) {
com.alibaba.fastjson2.JSONObject types = com.alibaba.fastjson2.JSONObject.parseObject(type);
type = types.getString("type");
}
if (fieldJson.containsKey("default")) {
defaultFieldsMap.put(name, fieldJson.get("default"));
}
// assemble the map used to reflectively generate the entity class
map.put(name, getClassName(type));
} else {
dropList.add(fieldStr);
}
}
return map;
}
/**
* Decide whether a field should be kept.
*
* @param message a single field JSON string
* @return true if the field is kept, false if it is dropped
*/
private static boolean checkKeepField(String message) {
boolean isKeepField = true;
com.alibaba.fastjson2.JSONObject fieldJson = com.alibaba.fastjson2.JSONObject.parseObject(message);
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
com.alibaba.fastjson2.JSONObject doc = com.alibaba.fastjson2.JSONObject.parseObject(fieldJson.getString("doc"));
if (doc.containsKey("visibility")) {
String visibility = doc.getString("visibility");
if ("disabled".equals(visibility)) {
isKeepField = false;
}
}
}
return isKeepField;
}
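/*
* Illustrative example (the field content is an assumption, not a real schema):
* {"name":"x","doc":"{\"visibility\":\"disabled\"}"} is dropped, while a field
* without a doc, or whose doc lacks a visibility key, is kept.
*/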
/**
* Parse the schema and return a job list; each entry is (name, appendTo, function, param).
*
* @return the job list
*/
private static ArrayList<String[]> getNewJobList(String schema) {
ArrayList<String[]> list = new ArrayList<>();
JSONObject schemaJson = JSONObject.parseObject(schema);
JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
JSONObject fieldJson = JSONObject.parseObject(field.toString());
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
JSONObject docJson = JSONObject.parseObject(fieldJson.getString("doc"));
boolean hasFormat = docJson.containsKey("format");
if (hasFormat) {
String name = fieldJson.getString("name");
JSONArray formatList = docJson.getJSONArray("format");
for (Object format : formatList) {
JSONObject formatJson = JSONObject.parseObject(format.toString());
String function = formatJson.getString("function");
String appendTo;
String params = null;
if (formatJson.containsKey("appendTo")) {
appendTo = formatJson.getString("appendTo");
} else {
appendTo = name;
}
if (formatJson.containsKey("param")) {
params = formatJson.getString("param");
}
list.add(new String[]{name, appendTo, function, params});
}
}
}
}
return list;
}
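/*
* Illustrative example (the field content is an assumption, not a real schema):
* {"name":"common_server_domain","doc":{"format":[{"function":"top_domain","appendTo":"common_top_domain"}]}}
* yields the single job tuple
* ["common_server_domain", "common_top_domain", "top_domain", null].
*/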
}

View File

@@ -0,0 +1,121 @@
package com.zdjizhi.json;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import com.geedgenetworks.utils.StringUtil;
import org.junit.Test;
import java.util.*;
/**
* Applicable to schemas < TSG22.08
*
* @author qidaijie
* @Package com.zdjizhi.nacos
* @Description:
* @date 2022/3/17 14:57
*/
public class OldSchemaTest {
private static Properties properties = new Properties();
static {
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
properties.setProperty(PropertyKeyConst.NAMESPACE, "prod");
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
}
@Test
public void oldSchemaTest() {
try {
ConfigService configService = NacosFactory.createConfigService(properties);
String dataId = "session_record.json";
String group = "Galaxy";
String schema = configService.getConfig(dataId, group, 5000);
ArrayList<String[]> oldJobList = getOldJobList(schema);
for (String[] job : oldJobList) {
System.out.println(Arrays.toString(job));
}
} catch (NacosException e) {
e.printStackTrace();
}
}
/**
* Parse the schema and return a job list; each entry is (name, appendTo, function, param).
*
* @param schema the log schema
* @return the job list
*/
private static ArrayList<String[]> getOldJobList(String schema) {
ArrayList<String[]> list = new ArrayList<>();
// fetch "fields" as an array; each element holds a name, doc, and type
JSONObject schemaJson = JSON.parseObject(schema);
JSONArray fields = (JSONArray) schemaJson.get("fields");
for (Object field : fields) {
JSONObject fieldJson = JSON.parseObject(field.toString());
if (fieldJson.containsKey("doc")) {
JSONObject docJson = JSON.parseObject(fieldJson.getString("doc"));
if (docJson.containsKey("format")) {
String name = fieldJson.getString("name");
JSONObject formatObject = JSON.parseObject(docJson.getString("format"));
String functions = formatObject.getString("transform");
String appendTo = null;
String params = null;
if (formatObject.containsKey("appendTo")) {
appendTo = formatObject.get("appendTo").toString();
}
if (formatObject.containsKey("param")) {
params = formatObject.get("param").toString();
}
if (StringUtil.isNotBlank(appendTo) && StringUtil.isBlank(params)) {
String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
for (int i = 0; i < functionArray.length; i++) {
list.add(new String[]{name, appendToArray[i], functionArray[i], null});
}
} else if (StringUtil.isNotBlank(appendTo) && StringUtil.isNotBlank(params)) {
String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
String[] paramArray = params.split(FlowWriteConfig.FORMAT_SPLITTER);
for (int i = 0; i < functionArray.length; i++) {
list.add(new String[]{name, appendToArray[i], functionArray[i], paramArray[i]});
}
} else {
list.add(new String[]{name, name, functions, params});
}
}
}
}
return list;
}
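/*
* Illustrative example, assuming FORMAT_SPLITTER is ";": a field doc of
* {"format":{"transform":"top_domain;domain_hash","appendTo":"t1;t2"}} expands
* into two jobs, [name, "t1", "top_domain", null] and [name, "t2", "domain_hash", null];
* when appendTo is absent the single job [name, name, transform, param] is emitted.
*/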
}

View File

@@ -0,0 +1,63 @@
package com.zdjizhi.json.pojo;
/**
* @author qidaijie
* @Package com.zdjizhi.json
* @Description:
* @date 2023/5/19 18:42
*/
public class KnowledgeBaseMeta {
private String name;
private String sha256;
private String format;
private String path;
public KnowledgeBaseMeta(String name, String sha256, String format, String path) {
this.name = name;
this.sha256 = sha256;
this.format = format;
this.path = path;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getSha256() {
return sha256;
}
public void setSha256(String sha256) {
this.sha256 = sha256;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
@Override
public String toString() {
return "KnowlegeBaseMeta{" +
"name='" + name + '\'' +
", sha256='" + sha256 + '\'' +
", format='" + format + '\'' +
", path='" + path + '\'' +
'}';
}
}

View File

@@ -0,0 +1,56 @@
package com.zdjizhi.json.pojo;
import java.util.List;
/**
* @author qidaijie
* @Package com.zdjizhi.json.pojo
* @Description:
* @date 2023/5/20 14:06
*/
public class UserList {
private String name;
private Integer age;
private Long idcard;
private List devicetag;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
public Long getIdcard() {
return idcard;
}
public void setIdcard(Long idcard) {
this.idcard = idcard;
}
public List getDevicetag() {
return devicetag;
}
public void setDevicetag(List devicetag) {
this.devicetag = devicetag;
}
public UserList(String name, Integer age, Long idcard, List devicetag) {
this.name = name;
this.age = age;
this.idcard = idcard;
this.devicetag = devicetag;
}
}

View File

@@ -0,0 +1,55 @@
package com.zdjizhi.json.pojo;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.json.pojo
* @Description:
* @date 2023/5/20 14:06
*/
public class UserMap {
private String name;
private Integer age;
private Long idcard;
private Map devicetag;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
public Long getIdcard() {
return idcard;
}
public void setIdcard(Long idcard) {
this.idcard = idcard;
}
public Map getDevicetag() {
return devicetag;
}
public void setDevicetag(Map devicetag) {
this.devicetag = devicetag;
}
public UserMap(String name, Integer age, Long idcard, Map devicetag) {
this.name = name;
this.age = age;
this.idcard = idcard;
this.devicetag = devicetag;
}
}

View File

@@ -0,0 +1,102 @@
package com.zdjizhi.nacos;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.alibaba.nacos.client.config.impl.LocalConfigInfoProcessor;
import org.junit.Test;
import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
* @author qidaijie
* @Package com.zdjizhi
* @Description:
* @date 2022/3/10 16:58
*/
public class NacosTest {
/**
* <dependency>
* <groupId>com.alibaba.nacos</groupId>
* <artifactId>nacos-client</artifactId>
* <version>1.2.0</version>
* </dependency>
*/
private static Properties properties = new Properties();
/**
* config data id = config name
*/
private static final String DATA_ID = "dos_detection.properties";
/**
* config group
*/
private static final String GROUP = "Galaxy";
private void initProperties() {
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.241:8848");
properties.setProperty(PropertyKeyConst.NAMESPACE, "prod");
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
}
@Test
public void getConfigurationTest() {
try {
initProperties();
ConfigService configService = NacosFactory.createConfigService(properties);
String content = configService.getConfig(DATA_ID, GROUP, 5000);
Properties nacosConfigMap = new Properties();
nacosConfigMap.load(new StringReader(content));
System.out.println(nacosConfigMap.getProperty("source.kafka.servers"));
System.out.println(content);
} catch (NacosException | IOException e) {
e.printStackTrace();
}
}
@Test
public void listenerConfigurationTest() {
initProperties();
try {
// initial fetch of the config
ConfigService configService = NacosFactory.createConfigService(properties);
String config = configService.getConfig(DATA_ID, GROUP, 5000);
System.out.println(config);
// start the listener
configService.addListener(DATA_ID, GROUP, new Listener() {
@Override
public Executor getExecutor() {
return null;
}
@Override
public void receiveConfigInfo(String configMsg) {
System.out.println(configMsg);
}
});
} catch (NacosException e) {
e.printStackTrace();
}
// keep running; change the Nacos config and the new content will be printed
for (int i = 0; i < 3; i++) {
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
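/*
* A minimal alternative sketch for the sleep loop above, assuming the test may
* simply block until one config change arrives. CountDownLatch and TimeUnit
* (java.util.concurrent) would need importing, the 30-second timeout is
* arbitrary, and await throws InterruptedException:
*
* CountDownLatch latch = new CountDownLatch(1);
* configService.addListener(DATA_ID, GROUP, new Listener() {
*     @Override
*     public Executor getExecutor() { return null; }
*     @Override
*     public void receiveConfigInfo(String configMsg) {
*         System.out.println(configMsg);
*         latch.countDown();
*     }
* });
* latch.await(30, TimeUnit.SECONDS);
*/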
}

View File

@@ -0,0 +1,117 @@
package com.zdjizhi.nacos;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
* @author qidaijie
* @Package com.zdjizhi.nacos
* @Description:
* @date 2022/3/17 14:57
*/
public class SchemaListenerTest {
private static Properties properties = new Properties();
private static ArrayList<String[]> jobList;
static {
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
properties.setProperty(PropertyKeyConst.NAMESPACE, "prod");
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
try {
ConfigService configService = NacosFactory.createConfigService(properties);
String dataId = "session_record.json";
String group = "Galaxy";
jobList = getJobListFromHttp(configService.getConfig(dataId, group, 5000));
System.out.println(jobList);
configService.addListener(dataId, group, new Listener() {
@Override
public Executor getExecutor() {
return null;
}
@Override
public void receiveConfigInfo(String configMsg) {
jobList = getJobListFromHttp(configMsg);
}
});
} catch (NacosException e) {
e.printStackTrace();
}
}
@Test
public void dealCommonMessage() {
// keep running; change the Nacos config and the new job list will be printed
for (int i = 0; i < 1; i++) {
try {
for (String[] job : jobList) {
System.out.println(Arrays.toString(job));
}
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
/**
* Parse the schema and return a job list; each entry is (name, appendTo, function, param).
*
* @return the job list
*/
private static ArrayList<String[]> getJobListFromHttp(String schema) {
ArrayList<String[]> list = new ArrayList<>();
JSONObject schemaJson = new JSONObject(schema, false, true);
JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
JSONObject fieldJson = new JSONObject(field, false, true);
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
JSONObject docJson = fieldJson.getJSONObject("doc");
boolean hasFormat = docJson.containsKey("format");
if (hasFormat) {
String name = fieldJson.getStr("name");
JSONArray formatList = docJson.getJSONArray("format");
for (Object format : formatList) {
JSONObject formatJson = new JSONObject(format, false, true);
String function = formatJson.getStr("function");
String appendTo;
String params = null;
if (formatJson.containsKey("appendTo")) {
appendTo = formatJson.getStr("appendTo");
} else {
appendTo = name;
}
if (formatJson.containsKey("param")) {
params = formatJson.getStr("param");
}
list.add(new String[]{name, appendTo, function, params});
}
}
}
}
return list;
}
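/*
* This is the hutool counterpart of NewSchemaTest#getNewJobList: the same
* doc.format walk. For example (content assumed, not a real schema),
* {"name":"f","doc":{"format":[{"function":"fn"}]}} -> ["f", "f", "fn", null],
* since a missing appendTo falls back to the field's own name.
*/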
}

View File

@@ -0,0 +1,22 @@
package com.zdjizhi.schema;
import com.zdjizhi.tools.general.ConfigurationsUtils;
import java.io.IOException;
import java.util.HashMap;
import java.util.Properties;
public class SecurityEventSchema {
public static void main(String[] args) throws IOException {
Properties prop = new Properties();
prop.load(ConfigurationsUtils.class.getClassLoader().getResourceAsStream("security_event_mapping_table.properties"));
final HashMap<String, String> securityEventSchema = new HashMap<>();
for (String key : prop.stringPropertyNames()) {
final String schema = prop.getProperty(key);
securityEventSchema.put(key, schema);
}
System.out.println(securityEventSchema);
}
}

View File

@@ -0,0 +1,7 @@
package com.zdjizhi.schema;
public class Test {
public static void main(String[] args) {
System.out.println(26286 & 2132321);
}
}

View File

@@ -0,0 +1,8 @@
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"delete","gtp_downlink_teid":392955378,"gtp_uplink_teid":579533063,"gtp_phone_number":"8617239723344","gtp_imei":"86989005126503","gtp_imsi":"460077015061987","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0}
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"modify","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":2}
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"delete","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663753228,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":3}
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"modify","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":4}
{"common_schema_type":"GTP-C","gtp_version":"v2","gtp_msg_type":"modify","gtp_downlink_teid":247749709,"gtp_uplink_teid":226022706,"gtp_phone_number":"8615859199999","gtp_imei":"86941104639999","gtp_imsi":"460028591549999","common_direction":69,"common_l7_protocol":"UNCATEGORIZED","common_app_label":"gtpv2","common_app_id":{"THIRD":[{"app_name":"gtpv2","app_id":735,"surrogate_id":0,"packet_sequence":1}]},"common_app_identify_info":[{"app_name":"gtpv2","packet_sequence":1}],"common_server_ip":"172.50.0.100","common_client_ip":"172.50.0.1","common_server_port":2123,"common_client_port":12564,"common_stream_dir":2,"common_address_type":4,"common_address_list":"12564-2123-172.50.0.1-172.50.0.100","common_start_time":1663752297,"common_end_time":1663752711,"common_con_duration_ms":305,"common_s2c_pkt_num":107,"common_s2c_byte_num":9951,"common_c2s_pkt_num":0,"common_c2s_byte_num":0,"common_client_location":"United States.Other.Other","common_server_location":"United States.Other.Other","common_stream_trace_id":"579013265830096219","common_l4_protocol":"IPv4_UDP","common_sled_ip":"192.168.40.161","common_device_id":"unknown","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","common_vsys_id":1,"common_policy_id":0,"common_service":8,"common_action":0,"common_vsys_id":3}

View File

@@ -0,0 +1,7 @@
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62228,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754402,"common_con_duration_ms":0,"common_stream_trace_id":107327506993450,"common_l4_protocol":"IPv4_UDP","common_address_list":"62228-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test1","radius_acct_status_type":1,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.26","radius_event_timestamp":1663580387}
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62229,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754402,"common_con_duration_ms":0,"common_stream_trace_id":107327506993452,"common_l4_protocol":"IPv4_UDP","common_address_list":"62229-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test2","radius_acct_status_type":1,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.16","radius_event_timestamp":1663580387,"common_vsys_id":3}
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62229,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754402,"common_con_duration_ms":0,"common_stream_trace_id":107327506993452,"common_l4_protocol":"IPv4_UDP","common_address_list":"62229-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test2","radius_acct_status_type":1,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.16","radius_event_timestamp":1663580387,"common_vsys_id":4}
{"common_stream_dir":1,"common_address_type":4,"common_client_ip":"192.168.50.26","common_server_ip":"192.168.40.190","common_client_port":62229,"common_server_port":1813,"common_c2s_pkt_num":1,"common_s2c_pkt_num":0,"common_c2s_byte_num":52,"common_s2c_byte_num":0,"common_start_time":1663754402,"common_end_time":1663754653,"common_con_duration_ms":0,"common_stream_trace_id":107327506993452,"common_l4_protocol":"IPv4_UDP","common_address_list":"62229-1813-192.168.50.26-192.168.40.190","common_policy_id":0,"common_service":162,"common_sled_ip":"192.168.40.161","common_schema_type":"RADIUS","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg\"},{\"tag\":\"data_center\",\"value\":\"center-xxg\"}]}","radius_packet_type":4,"radius_account":"test2","radius_acct_status_type":2,"radius_acct_session_id":"10964","radius_framed_ip":"192.168.50.16","radius_event_timestamp":1663580387,"common_vsys_id":3}

View File

@@ -0,0 +1,2 @@
{"common_schema_type":"HTTP","common_sessions":1,"http_request_line":"GET sampleFile.html HTTP/1.1","http_host":"www.texaslotto.com","http_url":"www.texaslotto.com/sampleFile.html","http_user_agent":"xPTS/2.0","http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html","http_isn":1953597368,"http_proxy_flag":0,"http_version":"http1","http_response_latency_ms":1,"http_session_duration_ms":2,"http_sequence":80,"common_protocol_label":"ETHERNET.IPv4.UDP.GTP.IPv4.TCP","common_c2s_byte_diff":17110,"common_c2s_pkt_diff":119,"common_s2c_byte_diff":16490,"common_s2c_pkt_diff":81,"common_c2s_ipfrag_num":0,"common_s2c_ipfrag_num":0,"common_first_ttl":64,"common_c2s_tcp_unorder_num":0,"common_s2c_tcp_unorder_num":0,"common_c2s_tcp_lostlen":0,"common_s2c_tcp_lostlen":0,"common_c2s_pkt_retrans":240,"common_s2c_pkt_retrans":162,"common_c2s_byte_retrans":12800,"common_s2c_byte_retrans":18400,"common_direction":69,"common_l7_protocol":"HTTP","common_app_label":"unknown","common_app_id":{"UNKNOWN":[{"app_name":"unknown","app_id":4,"surrogate_id":0,"packet_sequence":29}]},"common_app_identify_info":[{"app_name":"unknown","packet_sequence":29}],"common_tcp_client_isn":1953597368,"common_tcp_server_isn":1950649408,"common_server_ip":"10.201.35.10","common_client_ip":"1.1.1.27","common_server_port":80,"common_client_port":2000,"common_stream_dir":3,"common_address_type":4,"common_address_list":"IPv4_TCP<2000-80-1.1.1.27-10.201.35.10>|GTP<111534000-665547833>|IPv4_UDP<2152-2152-192.56.5.2-192.56.10.20>|MAC<000c299b2fa4-000c2915b4f4>","common_start_time":1660272209,"common_end_time":1660272424,"common_con_duration_ms":215201,"common_s2c_pkt_num":243,"common_s2c_byte_num":49470,"common_c2s_pkt_num":360,"common_c2s_byte_num":51600,"common_establish_latency_ms":1,"common_client_location":"china1.beijing.beijing1","common_tunnels":[{"tunnels_schema_type":"GTP","gtp_endpoint_a2b_teid":247749709,"gtp_endpoint_b2a_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152},{"tunnels_schema_type":"MULTIPATH_ETHERNET","c2s_source_mac":"00:0c:29:9b:2f:a4","c2s_destination_mac":"00:0c:29:15:b4:f4","s2c_source_mac":"00:0c:29:15:b4:f4","s2c_destination_mac":"00:0c:29:9b:2f:a4"}],"common_stream_trace_id":"869231578438992199","common_l4_protocol":"IPv4_TCP","common_sled_ip":"192.168.40.81","common_device_id":"21426003","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg-three\"},{\"tag\":\"data_center\",\"value\":\"center-xxg-three\"}]}","common_policy_id":0,"common_service":2,"common_action":0}
{"common_schema_type":"HTTP","common_sessions":1,"http_request_line":"GET sampleFile.html HTTP/1.1","http_host":"www.texaslotto.com","http_url":"www.texaslotto.com/sampleFile.html","http_user_agent":"xPTS/2.0","http_response_line":"HTTP/1.1 200 OK","http_response_content_type":"text/html","http_isn":1953597368,"http_proxy_flag":0,"http_version":"http1","http_response_latency_ms":1,"http_session_duration_ms":2,"http_sequence":80,"common_protocol_label":"ETHERNET.IPv4.UDP.GTP.IPv4.TCP","common_c2s_byte_diff":17110,"common_c2s_pkt_diff":119,"common_s2c_byte_diff":16490,"common_s2c_pkt_diff":81,"common_c2s_ipfrag_num":0,"common_s2c_ipfrag_num":0,"common_first_ttl":64,"common_c2s_tcp_unorder_num":0,"common_s2c_tcp_unorder_num":0,"common_c2s_tcp_lostlen":0,"common_s2c_tcp_lostlen":0,"common_c2s_pkt_retrans":240,"common_s2c_pkt_retrans":162,"common_c2s_byte_retrans":12800,"common_s2c_byte_retrans":18400,"common_direction":69,"common_l7_protocol":"HTTP","common_app_label":"unknown","common_app_id":{"UNKNOWN":[{"app_name":"unknown","app_id":4,"surrogate_id":0,"packet_sequence":29}]},"common_app_identify_info":[{"app_name":"unknown","packet_sequence":29}],"common_tcp_client_isn":1953597368,"common_tcp_server_isn":1950649408,"common_server_ip":"10.201.35.10","common_client_ip":"1.1.1.27","common_server_port":80,"common_client_port":2000,"common_stream_dir":3,"common_address_type":4,"common_address_list":"IPv4_TCP<2000-80-1.1.1.27-10.201.35.10>|GTP<111534000-665547833>|IPv4_UDP<2152-2152-192.56.5.2-192.56.10.20>|MAC<000c299b2fa4-000c2915b4f4>","common_start_time":1660272209,"common_end_time":1660272424,"common_con_duration_ms":215201,"common_s2c_pkt_num":243,"common_s2c_byte_num":49470,"common_c2s_pkt_num":360,"common_c2s_byte_num":51600,"common_establish_latency_ms":1,"common_client_location":"china1.beijing.beijing1","common_tunnels":[{"tunnels_schema_type":"GTP","gtp_endpoint_a2b_teid":247749709,"gtp_endpoint_b2a_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152},{"tunnels_schema_type":"MULTIPATH_ETHERNET","c2s_source_mac":"00:0c:29:9b:2f:a4","c2s_destination_mac":"00:0c:29:15:b4:f4","s2c_source_mac":"00:0c:29:15:b4:f4","s2c_destination_mac":"00:0c:29:9b:2f:a4"}],"common_stream_trace_id":"869231578438992199","common_l4_protocol":"IPv4_TCP","common_sled_ip":"192.168.40.81","common_device_id":"21426003","common_device_tag":"{\"tags\":[{\"tag\":\"device_group\",\"value\":\"group-xxg-three\"},{\"tag\":\"data_center\",\"value\":\"center-xxg-three\"}]}","common_policy_id":0,"common_service":2,"common_action":0,"common_vsys_id":4}