74 Commits

Author SHA1 Message Date
wangchengcheng
a17666abff fix: Modify how the DoS Detection task obtains its knowledge base (TSG-17971) 2023-12-25 10:51:02 +08:00
wangchengcheng
322bb1e4cb fix: Adapt to the CM interface (TSG-18052) 2023-12-21 18:25:54 +08:00
wangchengcheng
bdfe5f73db fix: Incorrect recv_time fill value 2023-12-18 13:49:53 +08:00
wangchengcheng
91cb1ce5d2 Adapt to the restructured OLAP DoS Detection log format. (TSG-17836) 2023-12-11 09:50:28 +08:00
wangchengcheng
52336accbd 1. Adapt the loading logic for the merged v4/v6 IP geolocation database (GAL-436).
2. Adapt to the bifang 23.11 interface (TSG-17675).
3. Remove the broadcast stream.
4. Fix a bug in the static threshold evaluation logic.
2023-11-13 16:45:04 +08:00
unknown
c8a2a6b627 TSG-16920 Fix custom static thresholds raising an alert whenever any single threshold condition is met 2023-09-07 18:20:02 +08:00
unknown
24d70f690e TSG-15712 Fix mismatch between alert severity and alert value in DoS baseline threshold alerts 2023-06-27 17:31:56 +08:00
unknown
77e982b22f TSG-15286 Add business test cases for static thresholds 2023-06-09 10:36:54 +08:00
unknown
b3a23686a0 GAL-352 Upgrade zdjizhi 1.1.1 to zdjizhi 1.1.3 2023-06-08 17:01:26 +08:00
unknown
b9a694ddb9 GAL-349 Streamline the knowledge base update workflow of the DoS detection job 2023-06-08 16:57:19 +08:00
unknown
6fb37324ff GAL-352 Adapt SketchLog retrieval and bifang static threshold fetching to Fastjson2 2023-06-07 14:29:53 +08:00
unknown
315b638470 GAL-352 Migrate DoS detection to the Fastjson2 serialization library 2023-06-06 17:53:06 +08:00
unknown
bd48417eb8 Remove commented-out code 2023-05-26 15:51:57 +08:00
unknown
72acc976e3 TSG-15219 Fix the condition-handling logic for static thresholds; add a static threshold unit test class 2023-05-26 15:44:37 +08:00
unknown
6be3ea7f1e TSG-15219 Improve the detection logic under DoS static thresholds 2023-05-24 14:36:29 +08:00
unknown
04ee45f77d TSG-15167 Add knowledge base file verification 2023-05-23 10:38:15 +08:00
unknown
d8b0a7637b Fill in the Profile ID when a static threshold is hit 2023-04-03 17:35:36 +08:00
unknown
b56a2ec31e GAL-306 Fix DoS detection failing to read the IP geolocation database from HDFS; support the yarn per-job run mode 2023-03-27 17:15:27 +08:00
unknown
11747d9964 GAL-296 Resolve dependency conflicts when running in Yarn mode 2023-03-07 18:58:25 +08:00
wanglihui
ce15a27a1b TSG-13094 Fix MVsys id appearing in DoS Event logs 2022-12-21 17:11:14 +08:00
wanglihui
01bbe562c9 Merge branch 'tsg-22.11' of git.mesalab.cn:bigdata/tsg/flink-dos-detection into tsg-22.11 2022-12-19 10:18:44 +08:00
wanglihui
f07651cf14 Rename the vsys id field to common_t_vsys_id 2022-12-19 10:18:17 +08:00
unknown
7c201a8a3f Add a Nacos Namespace configuration; remove the flush operation when updating to HDFS 2022-12-16 16:52:33 +08:00
wanglihui
78435d54ea Merge branch 'knowledge' of https://git.mesalab.cn/bigdata/tsg/flink-dos-detection into tsg-22.11 2022-12-06 19:11:30 +08:00
wanglihui
76c9247bb9 Merge branch 'knowledge' of https://git.mesalab.cn/bigdata/tsg/flink-dos-detection into tsg-22.11 2022-12-06 19:10:28 +08:00
wanglihui
488b7c6644 Adjust some log output 2022-12-06 17:13:09 +08:00
unknown
0662d265dd GAL-224 Remove SSL detection; add HDFS high-availability settings 2022-11-28 16:42:56 +08:00
unknown
87fe11dc93 Improve how data is persisted in standalone mode 2022-11-28 15:38:36 +08:00
unknown
9a2a5b3957 GAL-224 Support dynamic knowledge base loading in DoS detection 2022-11-23 15:30:24 +08:00
fy
c58acdcfc9 Initial groundwork for connecting Flink to the knowledge base 2022-11-14 14:34:00 +08:00
wanglihui
b409150532 Fix dependency conflicts when submitting to a yarn cluster 2022-10-17 18:15:11 +08:00
wanglihui
7e6d5fcfc5 Fix vsys id not being populated 2022-10-17 11:10:35 +08:00
wanglihui
859cd379e5 Support vsys id in DoS detection 2022-09-23 18:37:33 +08:00
wanglihui
47ddef9bca Default the VSYS ID of DoS detection event logs to 1 2022-08-19 10:17:52 +08:00
wanglihui
0a6f36393c Support dynamic configuration of the DoS Detection Bifang Access Token 2022-07-19 17:20:09 +08:00
unknown
84a1e6879a Fix dynamic retrieval of nacos configuration 2022-06-28 11:15:49 +08:00
unknown
ab8f6aba81 Fix dynamic retrieval of nacos configuration 2022-06-28 11:14:54 +08:00
unknown
94e8fb807a Integrate nacos; fix dependency conflicts and a startup error 2022-06-22 15:58:49 +08:00
wanglihui
cead1d4d99 Merge branch 'tsg-22.06' of git.mesalab.cn:bigdata/tsg/flink-dos-detection 2022-06-20 18:15:24 +08:00
徐鹏飞
2d98c3b6e6 Change the static threshold retrieval logic to fetch by the default specified vsysID 2022-06-13 18:00:57 +08:00
徐鹏飞
3dc29a07be Add fetching static thresholds from bifang by vsysId 2022-05-31 18:00:36 +08:00
wanglihui
1fcdb79739 Correct the rate-based detection attribute values in the Conditions field of DoS event logs 2022-05-17 13:54:43 +08:00
wanglihui
3d974217d9 Integrate nacos and move part of the configuration into nacos. 2022-04-08 10:16:29 +08:00
wanglihui
db17064f73 Update the utility library to 1.0.8 for the new MMDB geolocation database 2022-02-11 16:06:24 +08:00
wanglihui
065e5abb09 Add jasypt-based configuration encryption 2021-12-20 13:50:18 +08:00
wanglihui
75bbdd2962 Update the DoS detection job: add reading of the baseline TTL configuration.
Fix a logic issue in the DoS detection Conditions threshold description language.
2021-12-08 13:51:09 +08:00
wanglihui
c46a395d9b Adjust static thresholds, upper/lower bound evaluation, and related configuration. 2021-11-26 09:46:53 +08:00
wanglihui
cc3f0cf620 Fix a NullPointerException when the fetched configuration is empty. 2021-11-05 19:01:12 +08:00
wanglihui
0617b1e614 Change the baseline value strategy: when the baseline type is 3 and the value is below the static sensitivity threshold, replace it. 2021-10-29 18:43:05 +08:00
wanglihui
0125b031dd Format code; override equals and hashCode in entity classes. 2021-10-22 18:38:29 +08:00
wanglihui
177e7461cc Improve how the baseline is built 2021-10-21 18:27:48 +08:00
wanglihui
be916531fb Change the threshold RangeMap construction to key on attack type, avoiding IP conflicts. 2021-10-20 18:23:12 +08:00
wanglihui
c692112445 Add packets and bits as criteria for static condition evaluation.
Fix IP conflicts in the static configuration.
2021-10-19 18:39:13 +08:00
wanglihui
b03ab9642d Fix a conflict between static and baseline conditions 2021-09-28 16:11:52 +08:00
wanglihui
c44250bf73 Add an IP conflict check when reading DoS Detection Profiles
Fix DoS event logs whose end_time is later than the current time
2021-09-26 18:41:36 +08:00
wanglihui
77bc6a844e Fix IP conflicts when reading configuration 2021-09-23 18:36:27 +08:00
wanglihui
e930fa23ed Change the evaluation logic to add the baseline sensitivity threshold as a condition. 2021-09-16 18:47:00 +08:00
wanglihui
8cd4dea19e Use the default bifang token when fetching the static threshold list
Fix the method that computes the baseline index
2021-09-15 10:08:17 +08:00
wanglihui
62f3c65d66 Perform real-time detection on the DoS Sketch 1-tuple 2021-09-14 18:46:23 +08:00
wanglihui
8cfb442c44 Add the 1-tuple as a data source for baseline generation 2021-09-13 14:14:58 +08:00
wanglihui
4f8807dfa1 Change the rate calculation to divide total sessions by the time window 2021-09-13 09:46:02 +08:00
wanglihui
81f6499458 Add a sensitivity threshold to filter alerts
Change the averaging method: aggregate first, then average
2021-09-09 10:46:50 +08:00
wanglihui
b4237bb4a9 Add Kafka SASL authentication 2021-09-06 16:19:33 +08:00
wanglihui
c5943298bd Fix wrong severity levels in log evaluation caused by double-precision issues 2021-08-26 18:42:28 +08:00
wanglihui
b4f919647a Add logic to flag DoS attacks against static thresholds
Add a timer to periodically fetch static thresholds and the baseline
2021-08-24 16:35:31 +08:00
wanglihui
55af33b508 Add static threshold parsing 2021-08-20 18:34:40 +08:00
wanglihui
28e7275674 Add a rangeMap to store configuration for the corresponding IP ranges 2021-08-20 11:52:20 +08:00
wanglihui
f744677021 Add an interface for reading bifang static threshold configuration
Bump the galaxy utility library version
2021-08-18 19:15:49 +08:00
wanglihui
c957f3ec1c Handle a baseline value of 0 by replacing it with the default value. 2021-08-17 18:56:53 +08:00
wanglihui
9bda526d48 Fix the logic that computes the percentage over baseline 2021-08-17 11:09:45 +08:00
wanglihui
e89e1b08c9 Change the processing logic to drop the sled IP and data center from the evaluation key. 2021-08-16 18:24:13 +08:00
wanglihui
e0de04886b Comment the configuration files; remove obsolete configuration files 2021-08-11 18:38:53 +08:00
wanglihui
30a24683e3 Add a partition number to metrics statistics based on the server IP hashcode. 2021-08-09 18:28:52 +08:00
wanglihui
5190654a8f Change processing to cache baseline data in memory 2021-08-05 18:42:34 +08:00
47 changed files with 2952 additions and 648 deletions

pom.xml

@@ -6,19 +6,26 @@
<groupId>com.zdjizhi</groupId>
<artifactId>flink-dos-detection</artifactId>
<version>1.0-SNAPSHOT</version>
<version>23.12</version>
<name>flink-dos-detection</name>
<url>http://www.example.com</url>
<properties>
<galaxy.tools.version>1.2.1</galaxy.tools.version>
<flink.version>1.13.1</flink.version>
<hive.version>2.1.1</hive.version>
<hadoop.version>2.7.1</hadoop.version>
<scala.binary.version>2.12</scala.binary.version>
<jsonpath.version>2.4.0</jsonpath.version>
</properties>
<repositories>
<repository>
<id>nexus</id>
<name>Team Nexus Repository</name>
<url>http://192.168.40.125:8099/content/groups/public</url>
<url>http://192.168.40.153:8099/content/groups/public</url>
</repository>
<repository>
@@ -96,96 +103,74 @@
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>properties</directory>
<includes>
<include>**/*.properties</include>
<include>**/*.xml</include>
</includes>
<filtering>false</filtering>
</resource>
<resource>
<directory>src\main</directory>
<includes>
<include>log4j.properties</include>
</includes>
<filtering>false</filtering>
</resource>
</resources>
</build>
<dependencies>
<!-- https://mvnrepository.com/artifact/org.jasypt/jasypt -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.21</version>
<groupId>org.jasypt</groupId>
<artifactId>jasypt</artifactId>
<version>1.9.3</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.21</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-sql-connector-kafka_2.11</artifactId>
<version>${flink.version}</version>
<!--<scope>provided</scope>-->
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.11</artifactId>
<version>${flink.version}</version>
<!--<scope>provided</scope>-->
</dependency>
<!--&lt;!&ndash; https://mvnrepository.com/artifact/org.apache.flink/flink-table &ndash;&gt;-->
<!--<dependency>-->
<!--<groupId>org.apache.flink</groupId>-->
<!--<artifactId>flink-table</artifactId>-->
<!--<version>${flink.version}</version>-->
<!--&lt;!&ndash;<scope>provided</scope>&ndash;&gt;-->
<!--<groupId>org.slf4j</groupId>-->
<!--<artifactId>slf4j-api</artifactId>-->
<!--<version>1.7.21</version>-->
<!--</dependency>-->
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-json -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-json</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>1.0.0</version>
</dependency>
<!--Flink modules-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java</artifactId>
<version>${flink.version}</version>
<!--<scope>provided</scope>-->
</dependency>
<!--<dependency>-->
<!--<groupId>org.slf4j</groupId>-->
<!--<artifactId>slf4j-log4j12</artifactId>-->
<!--<version>1.7.21</version>-->
<!--</dependency>-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_2.11</artifactId>
<version>${flink.version}</version>
<!--<scope>provided</scope>-->
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<version>${jsonpath.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_2.11</artifactId>
<artifactId>flink-connector-kafka_2.12</artifactId>
<version>${flink.version}</version>
<!--<scope>provided</scope>-->
</dependency>
<!-- CLI dependencies -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<artifactId>flink-clients_2.12</artifactId>
<version>${flink.version}</version>
<!--<scope>provided</scope>-->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.7.1</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<artifactId>zookeeper</artifactId>
@@ -195,6 +180,51 @@
<artifactId>jdk.tools</artifactId>
<groupId>jdk.tools</groupId>
</exclusion>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.hadoop</groupId>-->
<!-- <artifactId>hadoop-hdfs</artifactId>-->
<!-- <version>${hadoop.version}</version>-->
<!-- </dependency>-->
<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>2.2.3</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-over-slf4j</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
</exclusions>
</dependency>
@@ -214,11 +244,10 @@
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-hbase-2.2_2.11</artifactId>
<version>${flink.version}</version>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.6</version>
</dependency>
<dependency>
@@ -228,25 +257,92 @@
</dependency>
<dependency>
<groupId>com.zdjizhi</groupId>
<artifactId>galaxy</artifactId>
<version>1.0.4</version>
<groupId>com.github.seancfoley</groupId>
<artifactId>ipaddress</artifactId>
<version>5.3.3</version>
</dependency>
<dependency>
<groupId>com.alibaba.fastjson2</groupId>
<artifactId>fastjson2</artifactId>
<version>2.0.32</version>
</dependency>
<dependency>
<groupId>com.alibaba.nacos</groupId>
<artifactId>nacos-client</artifactId>
<version>1.2.0</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>log4j-over-slf4j</artifactId>
<groupId>org.slf4j</groupId>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<exclusion>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<artifactId>slf4j-api</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.11</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.9.10</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>22.0</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.2</version>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<version>2.4.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>com.geedgenetworks</groupId>
<artifactId>galaxy</artifactId>
<version>${galaxy.tools.version}</version>
</dependency>
</dependencies>
</project>

CommonConfig.java

@@ -1,49 +0,0 @@
package com.zdjizhi.common;
import com.zdjizhi.utils.CommonConfigurations;
/**
* Created by wk on 2021/1/6.
*/
public class CommonConfig {
public static final int STREAM_EXECUTION_ENVIRONMENT_PARALLELISM = CommonConfigurations.getIntProperty("stream.execution.environment.parallelism");
public static final String STREAM_EXECUTION_JOB_NAME = CommonConfigurations.getStringProperty("stream.execution.job.name");
public static final int KAFKA_INPUT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.input.parallelism");
public static final String KAFKA_INPUT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.input.topic.name");
public static final String KAFKA_INPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.input.bootstrap.servers");
public static final String KAFKA_SCAN_STARTUP_MODE = CommonConfigurations.getStringProperty("kafka.input.scan.startup.mode");
public static final String KAFKA_GROUP_ID = CommonConfigurations.getStringProperty("kafka.input.group.id");
public static final int KAFKA_OUTPUT_METRIC_PARALLELISM = CommonConfigurations.getIntProperty("kafka.output.metric.parallelism");
public static final String KAFKA_OUTPUT_METRIC_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.output.metric.topic.name");
public static final int KAFKA_OUTPUT_EVENT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.output.event.parallelism");
public static final String KAFKA_OUTPUT_EVENT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.output.event.topic.name");
public static final String KAFKA_OUTPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.output.bootstrap.servers");
public static final int HBASE_INPUT_PARALLELISM = CommonConfigurations.getIntProperty("hbase.input.parallelism");
public static final String HBASE_ZOOKEEPER_QUORUM = CommonConfigurations.getStringProperty("hbase.zookeeper.quorum");
public static final int HBASE_CLIENT_OPERATION_TIMEOUT = CommonConfigurations.getIntProperty("hbase.client.operation.timeout");
public static final int HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = CommonConfigurations.getIntProperty("hbase.client.scanner.timeout.period");
public static final String HBASE_BASELINE_TABLE_NAME = CommonConfigurations.getStringProperty("hbase.baseline.table.name");
public static final int HBASE_BASELINE_TOTAL_NUM = CommonConfigurations.getIntProperty("hbase.baseline.total.num");
public static final int FLINK_FIRST_AGG_PARALLELISM = CommonConfigurations.getIntProperty("flink.first.agg.parallelism");
public static final int FLINK_SECOND_AGG_PARALLELISM = CommonConfigurations.getIntProperty("flink.second.agg.parallelism");
public static final int FLINK_WATERMARK_MAX_ORDERNESS = CommonConfigurations.getIntProperty("flink.watermark.max.orderness");
public static final int FLINK_WINDOW_MAX_TIME = CommonConfigurations.getIntProperty("flink.window.max.time");
public static final int SOURCE_IP_LIST_LIMIT = CommonConfigurations.getIntProperty("source.ip.list.limit");
public static final int DATA_CENTER_ID_NUM = CommonConfigurations.getIntProperty("data.center.id.num");
public static final String IP_MMDB_PATH = CommonConfigurations.getStringProperty("ip.mmdb.path");
public static final double BASELINE_SESSIONS_MINOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.minor.threshold");
public static final double BASELINE_SESSIONS_WARNING_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.warning.threshold");
public static final double BASELINE_SESSIONS_MAJOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.major.threshold");
public static final double BASELINE_SESSIONS_SEVERE_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.severe.threshold");
public static final double BASELINE_SESSIONS_CRITICAL_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.critical.threshold");
}

DosBaselineThreshold.java

@@ -0,0 +1,63 @@
package com.zdjizhi.common;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Objects;
public class DosBaselineThreshold implements Serializable {
private ArrayList<Integer> session_rate;
private Integer session_rate_baseline_type;
private Integer session_rate_default_value;
public ArrayList<Integer> getSession_rate() {
return session_rate;
}
public void setSession_rate(ArrayList<Integer> session_rate) {
this.session_rate = session_rate;
}
public Integer getSession_rate_baseline_type() {
return session_rate_baseline_type;
}
public void setSession_rate_baseline_type(Integer session_rate_baseline_type) {
this.session_rate_baseline_type = session_rate_baseline_type;
}
public Integer getSession_rate_default_value() {
return session_rate_default_value;
}
public void setSession_rate_default_value(Integer session_rate_default_value) {
this.session_rate_default_value = session_rate_default_value;
}
@Override
public String toString() {
return "DosBaselineThreshold{" +
"session_rate=" + session_rate +
", session_rate_baseline_type=" + session_rate_baseline_type +
", session_rate_default_value=" + session_rate_default_value +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof DosBaselineThreshold)) {
return false;
}
DosBaselineThreshold that = (DosBaselineThreshold) o;
return Objects.equals(getSession_rate(), that.getSession_rate()) &&
Objects.equals(getSession_rate_baseline_type(), that.getSession_rate_baseline_type()) &&
Objects.equals(getSession_rate_default_value(), that.getSession_rate_default_value());
}
@Override
public int hashCode() {
return Objects.hash(getSession_rate(), getSession_rate_baseline_type(), getSession_rate_default_value());
}
}

DosDetectionThreshold.java

@@ -0,0 +1,118 @@
package com.zdjizhi.common;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Objects;
/**
* @author wlh
*/
public class DosDetectionThreshold implements Serializable {
private long profile_id;
private String attack_type;
private ArrayList<String> server_ip_list;
private String server_ip_addr;
private long packets_per_sec;
private long bits_per_sec;
private long sessions_per_sec;
private int is_valid;
private int vsys_id;
private Integer[] superior_ids;
public long getProfile_id() {
return profile_id;
}
public void setProfile_id(long profile_id) {
this.profile_id = profile_id;
}
public String getAttack_type() {
return attack_type;
}
public void setAttack_type(String attack_type) {
this.attack_type = attack_type;
}
public ArrayList<String> getServer_ip_list() {
return server_ip_list;
}
public void setServer_ip_list(ArrayList<String> server_ip_list) {
this.server_ip_list = server_ip_list;
}
public String getServer_ip_addr() {
return server_ip_addr;
}
public void setServer_ip_addr(String server_ip_addr) {
this.server_ip_addr = server_ip_addr;
}
public long getPackets_per_sec() {
return packets_per_sec;
}
public void setPackets_per_sec(long packets_per_sec) {
this.packets_per_sec = packets_per_sec;
}
public long getBits_per_sec() {
return bits_per_sec;
}
public void setBits_per_sec(long bits_per_sec) {
this.bits_per_sec = bits_per_sec;
}
public long getSessions_per_sec() {
return sessions_per_sec;
}
public void setSessions_per_sec(long sessions_per_sec) {
this.sessions_per_sec = sessions_per_sec;
}
public int getIs_valid() {
return is_valid;
}
public void setIs_valid(int is_valid) {
this.is_valid = is_valid;
}
public int getVsys_id() {
return vsys_id;
}
public void setVsys_id(int vsys_id) {
this.vsys_id = vsys_id;
}
public Integer[] getSuperior_ids() {
return superior_ids;
}
public void setSuperior_ids(Integer[] superior_ids) {
this.superior_ids = superior_ids;
}
@Override
public String toString() {
return "DosDetectionThreshold{" +
"profile_id=" + profile_id +
", attack_type='" + attack_type + '\'' +
", server_ip_list=" + server_ip_list +
", server_ip_addr='" + server_ip_addr + '\'' +
", packets_per_sec=" + packets_per_sec +
", bits_per_sec=" + bits_per_sec +
", sessions_per_sec=" + sessions_per_sec +
", is_valid=" + is_valid +
", vsys_id=" + vsys_id +
", superior_ids=" + Arrays.toString(superior_ids) +
'}';
}
}

DosEventLog.java

@@ -2,11 +2,13 @@ package com.zdjizhi.common;
import java.io.Serializable;
public class DosEventLog implements Serializable {
public class DosEventLog implements Serializable, Cloneable {
private long recv_time;
private long log_id;
private int vsys_id;
private long start_time;
private long end_time;
private long profile_id;
private String attack_type;
private String severity;
private String conditions;
@@ -18,6 +20,14 @@ public class DosEventLog implements Serializable {
private long packet_rate;
private long bit_rate;
public long getRecv_time() {
return recv_time;
}
public void setRecv_time(long recv_time) {
this.recv_time = recv_time;
}
public long getLog_id() {
return log_id;
}
@@ -26,6 +36,14 @@ public class DosEventLog implements Serializable {
this.log_id = log_id;
}
public int getVsys_id() {
return vsys_id;
}
public void setVsys_id(int vsys_id) {
this.vsys_id = vsys_id;
}
public long getStart_time() {
return start_time;
}
@@ -42,6 +60,14 @@ public class DosEventLog implements Serializable {
this.end_time = end_time;
}
public long getProfile_id() {
return profile_id;
}
public void setProfile_id(long profile_id) {
this.profile_id = profile_id;
}
public String getAttack_type() {
return attack_type;
}
@@ -124,10 +150,13 @@ public class DosEventLog implements Serializable {
@Override
public String toString() {
return "dosEventLog{" +
"log_id=" + log_id +
return "DosEventLog{" +
"recv_time=" + recv_time +
", log_id=" + log_id +
", vsys_id=" + vsys_id +
", start_time=" + start_time +
", end_time=" + end_time +
", profile_id=" + profile_id +
", attack_type='" + attack_type + '\'' +
", severity='" + severity + '\'' +
", conditions='" + conditions + '\'' +
@@ -140,4 +169,9 @@ public class DosEventLog implements Serializable {
", bit_rate=" + bit_rate +
'}';
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
}
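
The Cloneable addition above is a plain shallow copy via Object.clone(). Because every DosEventLog field is a primitive or an immutable String, a shallow copy already yields an independent snapshot. A minimal usage sketch, assuming the class above is on the classpath; the demo class and values are hypothetical:

public class CloneDemo {
    public static void main(String[] args) throws CloneNotSupportedException {
        DosEventLog original = new DosEventLog();
        original.setSeverity("Major");

        // Object.clone() copies field by field; safe here since all fields
        // are primitives or immutable Strings, so no mutable state is shared.
        DosEventLog copy = (DosEventLog) original.clone();
        copy.setSeverity("Critical");

        System.out.println(original.getSeverity()); // Major - unchanged
        System.out.println(copy.getSeverity());     // Critical
    }
}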

DosMetricsLog.java

@@ -1,17 +1,26 @@
package com.zdjizhi.common;
import java.io.Serializable;
import java.util.Objects;
public class DosMetricsLog implements Serializable {
private long sketch_start_time;
private String common_sled_ip;
private String common_data_center;
private String attack_type;
private String destination_ip;
private long session_rate;
private long packet_rate;
private long bit_rate;
private int partition_num;
private int vsys_id;
public int getPartition_num() {
return partition_num;
}
public void setPartition_num(int partition_num) {
this.partition_num = partition_num;
}
public long getSketch_start_time() {
return sketch_start_time;
@@ -21,22 +30,6 @@ public class DosMetricsLog implements Serializable {
this.sketch_start_time = sketch_start_time;
}
public String getCommon_sled_ip() {
return common_sled_ip;
}
public void setCommon_sled_ip(String common_sled_ip) {
this.common_sled_ip = common_sled_ip;
}
public String getCommon_data_center() {
return common_data_center;
}
public void setCommon_data_center(String common_data_center) {
this.common_data_center = common_data_center;
}
public String getAttack_type() {
return attack_type;
}
@@ -77,17 +70,25 @@ public class DosMetricsLog implements Serializable {
this.bit_rate = bit_rate;
}
public int getVsys_id() {
return vsys_id;
}
public void setVsys_id(int vsys_id) {
this.vsys_id = vsys_id;
}
@Override
public String toString() {
return "DosMetricsLog{" +
"sketch_start_time=" + sketch_start_time +
", common_sled_ip='" + common_sled_ip + '\'' +
", common_data_center='" + common_data_center + '\'' +
", attack_type='" + attack_type + '\'' +
", destination_ip='" + destination_ip + '\'' +
", session_rate=" + session_rate +
", packet_rate=" + packet_rate +
", bit_rate=" + bit_rate +
", partition_num=" + partition_num +
", vsys_id=" + vsys_id +
'}';
}
}
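
The new partition_num field lines up with commit 30a24683e3 (partition metrics statistics by server IP hashcode). The derivation itself is outside this diff; a minimal sketch of the idea, with the helper name hypothetical and the partition count assumed to come from configuration:

public class MetricsPartitioner {
    // Hypothetical helper: derive a stable partition number from the destination
    // (server) IP so metrics for the same IP always map to the same partition.
    static int partitionFor(String destinationIp, int partitionCount) {
        // Mask the sign bit rather than calling Math.abs(), which breaks on Integer.MIN_VALUE.
        return (destinationIp.hashCode() & Integer.MAX_VALUE) % partitionCount;
    }

    public static void main(String[] args) {
        System.out.println(partitionFor("10.0.0.1", 16)); // stable value in [0, 16)
    }
}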

DosSketchLog.java

@@ -1,9 +1,11 @@
package com.zdjizhi.common;
import java.io.Serializable;
import java.util.Objects;
public class DosSketchLog implements Serializable {
private long common_recv_time;
private String common_sled_ip;
private String common_data_center;
private long sketch_start_time;
@@ -14,11 +16,14 @@ public class DosSketchLog implements Serializable {
private long sketch_sessions;
private long sketch_packets;
private long sketch_bytes;
private int vsys_id;
@Override
public String toString() {
return "DosSketchLog{" +
"common_sled_ip='" + common_sled_ip + '\'' +
"common_recv_time=" + common_recv_time +
", common_sled_ip='" + common_sled_ip + '\'' +
", common_data_center='" + common_data_center + '\'' +
", sketch_start_time=" + sketch_start_time +
", sketch_duration=" + sketch_duration +
@@ -28,9 +33,18 @@ public class DosSketchLog implements Serializable {
", sketch_sessions=" + sketch_sessions +
", sketch_packets=" + sketch_packets +
", sketch_bytes=" + sketch_bytes +
", vsys_id=" + vsys_id +
'}';
}
public long getCommon_recv_time() {
return common_recv_time;
}
public void setCommon_recv_time(long common_recv_time) {
this.common_recv_time = common_recv_time;
}
public String getCommon_sled_ip() {
return common_sled_ip;
}
@@ -110,4 +124,12 @@ public class DosSketchLog implements Serializable {
public void setSketch_bytes(long sketch_bytes) {
this.sketch_bytes = sketch_bytes;
}
public int getVsys_id() {
return vsys_id;
}
public void setVsys_id(int vsys_id) {
this.vsys_id = vsys_id;
}
}

DosVsysId.java

@@ -0,0 +1,32 @@
package com.zdjizhi.common;
import java.util.Arrays;
public class DosVsysId {
private Integer id;
private Integer[] superior_ids;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Integer[] getSuperior_ids() {
return superior_ids;
}
public void setSuperior_ids(Integer[] superior_ids) {
this.superior_ids = superior_ids;
}
@Override
public String toString() {
return "DosVsysId{" +
"id=" + id +
", superior_ids=" + Arrays.toString(superior_ids) +
'}';
}
}

FlowWriteConfig.java

@@ -0,0 +1,105 @@
package com.zdjizhi.common;
import com.zdjizhi.utils.CommonConfigurations;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
/**
* @author wlh
* @date 2021/1/6
*/
public class FlowWriteConfig {
/**
* Jasypt encryptor used to decrypt encrypted configuration values
*/
private static StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
static {
encryptor.setPassword("galaxy");
}
public static final int STREAM_EXECUTION_ENVIRONMENT_PARALLELISM = CommonConfigurations.getIntProperty("stream.execution.environment.parallelism");
public static final String STREAM_EXECUTION_JOB_NAME = CommonConfigurations.getStringProperty("stream.execution.job.name");
public static final int KAFKA_INPUT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.input.parallelism");
public static final String KAFKA_INPUT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.input.topic.name");
public static final String KAFKA_INPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.input.bootstrap.servers");
public static final String KAFKA_GROUP_ID = CommonConfigurations.getStringProperty("kafka.input.group.id");
public static final int KAFKA_OUTPUT_METRIC_PARALLELISM = CommonConfigurations.getIntProperty("kafka.output.metric.parallelism");
public static final String KAFKA_OUTPUT_METRIC_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.output.metric.topic.name");
public static final int KAFKA_OUTPUT_EVENT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.output.event.parallelism");
public static final String KAFKA_OUTPUT_EVENT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.output.event.topic.name");
public static final String KAFKA_OUTPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.output.bootstrap.servers");
public static final String HBASE_ZOOKEEPER_QUORUM = CommonConfigurations.getStringProperty("hbase.zookeeper.quorum");
public static final int HBASE_CLIENT_OPERATION_TIMEOUT = CommonConfigurations.getIntProperty("hbase.client.operation.timeout");
public static final int HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = CommonConfigurations.getIntProperty("hbase.client.scanner.timeout.period");
public static final String HBASE_BASELINE_TABLE_NAME = CommonConfigurations.getStringProperty("hbase.baseline.table.name");
public static final int HBASE_BASELINE_TOTAL_NUM = CommonConfigurations.getIntProperty("hbase.baseline.total.num");
public static final int HBASE_BASELINE_TTL = CommonConfigurations.getIntProperty("hbase.baseline.ttl");
public static final int FLINK_FIRST_AGG_PARALLELISM = CommonConfigurations.getIntProperty("flink.first.agg.parallelism");
public static final int FLINK_DETECTION_MAP_PARALLELISM = CommonConfigurations.getIntProperty("flink.detection.map.parallelism");
public static final int FLINK_WATERMARK_MAX_ORDERNESS = CommonConfigurations.getIntProperty("flink.watermark.max.orderness");
public static final int FLINK_WINDOW_MAX_TIME = CommonConfigurations.getIntProperty("flink.window.max.time");
public static final int SOURCE_IP_LIST_LIMIT = CommonConfigurations.getIntProperty("source.ip.list.limit");
public static final int DESTINATION_IP_PARTITION_NUM = CommonConfigurations.getIntProperty("destination.ip.partition.num");
public static final int DATA_CENTER_ID_NUM = CommonConfigurations.getIntProperty("data.center.id.num");
public static final String BIFANG_SERVER_URI = CommonConfigurations.getStringProperty("bifang.server.uri");
public static final String BIFANG_SERVER_ENCRYPTPWD_PATH = CommonConfigurations.getStringProperty("bifang.server.encryptpwd.path");
public static final String BIFANG_SERVER_LOGIN_PATH = CommonConfigurations.getStringProperty("bifang.server.login.path");
public static final String BIFANG_SERVER_POLICY_THRESHOLD_PATH = CommonConfigurations.getStringProperty("bifang.server.policy.threshold.path");
public static final String BIFANG_SERVER_POLICY_VSYSID_PATH = CommonConfigurations.getStringProperty("bifang.server.policy.vaysid.path");
public static final int HTTP_POOL_MAX_CONNECTION = CommonConfigurations.getIntProperty("http.pool.max.connection");
public static final int HTTP_POOL_MAX_PER_ROUTE = CommonConfigurations.getIntProperty("http.pool.max.per.route");
public static final int HTTP_POOL_REQUEST_TIMEOUT = CommonConfigurations.getIntProperty("http.pool.request.timeout");
public static final int HTTP_POOL_CONNECT_TIMEOUT = CommonConfigurations.getIntProperty("http.pool.connect.timeout");
public static final int HTTP_POOL_RESPONSE_TIMEOUT = CommonConfigurations.getIntProperty("http.pool.response.timeout");
public static final int STATIC_THRESHOLD_SCHEDULE_MINUTES = CommonConfigurations.getIntProperty("static.threshold.schedule.minutes");
public static final int BASELINE_THRESHOLD_SCHEDULE_DAYS = CommonConfigurations.getIntProperty("baseline.threshold.schedule.days");
public static final String SASL_JAAS_CONFIG_USER = CommonConfigurations.getStringProperty("sasl.jaas.config.user");
public static final String SASL_JAAS_CONFIG_PASSWORD = encryptor.decrypt(CommonConfigurations.getStringProperty("sasl.jaas.config.password"));
public static final int SASL_JAAS_CONFIG_FLAG = CommonConfigurations.getIntProperty("sasl.jaas.config.flag");
public static final Integer HTTP_SOCKET_TIMEOUT = CommonConfigurations.getIntProperty("http.socket.timeout");
public static final Long KNOWLEDGE_EXECUTION_INTERVAL = CommonConfigurations.getLongProperty("knowledge.execution.interval");
public static final String KNOWLEDGE_BASE_URL = CommonConfigurations.getStringProperty("knowledge.base.uri");
public static final String KNOWLEDGE_BASE_PATH = CommonConfigurations.getStringProperty("knowledge.base.path");
public static final String IP_USER_DEFINED_KD_ID = CommonConfigurations.getStringProperty("ip.user.defined.kd.id");
public static final String IP_BUILTIN_KD_ID = CommonConfigurations.getStringProperty("ip.builtin.kd.id");
public static final String BIFANG_SERVER_TOKEN = CommonConfigurations.getStringProperty("bifang.server.token");
public static final Integer STATIC_SENSITIVITY_THRESHOLD = CommonConfigurations.getIntProperty("static.sensitivity.threshold");
public static final Double BASELINE_SENSITIVITY_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sensitivity.threshold");
public static final Double BASELINE_SESSIONS_MINOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.minor.threshold");
public static final Double BASELINE_SESSIONS_WARNING_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.warning.threshold");
public static final Double BASELINE_SESSIONS_MAJOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.major.threshold");
public static final Double BASELINE_SESSIONS_SEVERE_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.severe.threshold");
public static final Double BASELINE_SESSIONS_CRITICAL_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.critical.threshold");
}
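
The static block above decrypts sasl.jaas.config.password with jasypt's StandardPBEStringEncryptor keyed by "galaxy". A minimal sketch of producing such an encrypted value offline; the class name and plaintext are placeholders:

import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;

public class EncryptConfigValue {
    public static void main(String[] args) {
        StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
        encryptor.setPassword("galaxy"); // must match the password set in FlowWriteConfig

        String cipherText = encryptor.encrypt("my-plaintext-password");
        System.out.println(cipherText); // store this value in the properties file

        // Round-trip check: decrypt() is what FlowWriteConfig applies at startup.
        System.out.println(encryptor.decrypt(cipherText));
    }
}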

KnowlegeBaseMeta.java

@@ -0,0 +1,74 @@
package com.zdjizhi.common.pojo;
import java.io.Serializable;
/**
*
*/
public class KnowlegeBaseMeta implements Serializable {
private String kb_id;
private String name;
private String sha256;
private String format;
private String path;
public KnowlegeBaseMeta(String kd_id, String name, String sha256, String format, String path) {
this.kb_id = kd_id;
this.name = name;
this.sha256 = sha256;
this.format = format;
this.path = path;
}
public String getKb_id() {
return kb_id;
}
public void setKb_id(String kb_id) {
this.kb_id = kb_id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getSha256() {
return sha256;
}
public void setSha256(String sha256) {
this.sha256 = sha256;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
@Override
public String toString() {
return "KnowlegeBaseMeta{" +
"kb_id='" + kb_id + '\'' +
", name='" + name + '\'' +
", sha256='" + sha256 + '\'' +
", format='" + format + '\'' +
", path='" + path + '\'' +
'}';
}
}
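
KnowlegeBaseMeta carries a sha256 field, and commit 04ee45f77d (TSG-15167) adds knowledge base file verification. A minimal verification sketch using only the JDK; how the job wires this in is an assumption, and the sample path is hypothetical:

import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;

public class KnowledgeBaseVerifier {
    /** Returns true if the file's SHA-256 digest matches the expected hex string. */
    static boolean verify(String path, String expectedSha256) throws Exception {
        byte[] content = Files.readAllBytes(Paths.get(path));
        byte[] digest = MessageDigest.getInstance("SHA-256").digest(content);

        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString().equalsIgnoreCase(expectedSha256);
    }

    public static void main(String[] args) throws Exception {
        // The digest below is the SHA-256 of empty input, shown only for illustration.
        System.out.println(verify("/tmp/empty.mmdb",
                "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"));
    }
}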

DosDetection.java

@@ -1,116 +1,201 @@
package com.zdjizhi.etl;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.sink.OutputStreamSink;
import com.zdjizhi.utils.IpUtils;
import com.zdjizhi.utils.SnowflakeId;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.MapTypeInfo;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.DateUtils;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.*;
import com.zdjizhi.utils.*;
import inet.ipaddr.IPAddress;
import inet.ipaddr.IPAddressString;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.shaded.guava18.com.google.common.collect.TreeRangeMap;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* @author wlh
* DoS detection decision logic
*/
public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<String, Map<String, List<Integer>>>, DosEventLog> {
public class DosDetection extends ProcessFunction<DosSketchLog, DosEventLog> {
private static final Logger logger = LoggerFactory.getLogger(DosDetection.class);
private static final Log logger = LogFactory.get();
private static Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
private HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> thresholdRangeMap;
private final static int BASELINE_SIZE = 144;
private final static int STATIC_CONDITION_TYPE = 1;
private final static int BASELINE_CONDITION_TYPE = 2;
private final static int SENSITIVITY_CONDITION_TYPE = 3;
private final static String SESSIONS_TAG = "sessions";
private final static String PACKETS_TAG = "packets";
private final static String BITS_TAG = "bits";
private final static int OTHER_BASELINE_TYPE = 3;
private static MapStateDescriptor<String, Map<String, Map<String, List<Integer>>>> descriptor = new MapStateDescriptor<>("boradcast-state",
Types.STRING,
new MapTypeInfo<>(String.class, new MapTypeInfo<>(String.class, (Class<List<Integer>>) (Class<?>) List.class).getTypeClass()));
private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
@Override
public void open(Configuration parameters) {
ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(2,
new BasicThreadFactory.Builder().namingPattern("Dos-Detection-%d").daemon(true).build());
try {
super.open(parameters);
executorService.scheduleAtFixedRate(() -> thresholdRangeMap = ParseStaticThreshold.createStaticThreshold(), 0,
FlowWriteConfig.STATIC_THRESHOLD_SCHEDULE_MINUTES, TimeUnit.MINUTES);
executorService.scheduleAtFixedRate(() -> baselineMap = ParseBaselineThreshold.readFromHbase(), 0,
FlowWriteConfig.BASELINE_THRESHOLD_SCHEDULE_DAYS, TimeUnit.DAYS);
} catch (Exception e) {
logger.error("定时器任务执行失败", e);
}
PERCENT_INSTANCE.setMinimumFractionDigits(2);
}
@Override
public void processElement(DosSketchLog value, ReadOnlyContext ctx, Collector<DosEventLog> out) throws Exception {
public void processElement(DosSketchLog value, Context ctx, Collector<DosEventLog> out) throws Exception {
DosEventLog finalResult = null;
try {
Map<String, Map<String, List<Integer>>> broadcast = ctx.getBroadcastState(descriptor).get("broadcast-state");
String destinationIp = value.getDestination_ip();
int vsysId = value.getVsys_id();
String key = destinationIp + "-" + vsysId;
String attackType = value.getAttack_type();
logger.info("当前判断IP{}, 类型: {}",destinationIp,attackType);
if (broadcast.containsKey(destinationIp)){
List<Integer> baseline = broadcast.get(destinationIp).get(attackType);
if (baseline != null && baseline.size() == BASELINE_SIZE){
int timeIndex = getCurrentTimeIndex(value.getSketch_start_time());
Integer base = baseline.get(timeIndex);
long sketchSessions = value.getSketch_sessions();
long diff = sketchSessions - base;
if (diff > 0){
String percent = getDiffPercent(diff, sketchSessions);
double diffPercentDouble = getDiffPercentDouble(percent);
Severity severity = judgeSeverity(diffPercentDouble);
if (severity != Severity.NORMAL){
DosEventLog result = getResult(value, severity, percent);
logger.info("检测到当前server IP {} 存在 {} 异常,日志详情\n {}",destinationIp,attackType,result.toString());
out.collect(result);
}else {
logger.info("当前server IP{} 未出现 {} 异常,日志详情 {}",destinationIp,attackType,value.toString());
}
}
}
}else {
logger.info("未获取到当前server IP{} 类型 {} baseline数据",destinationIp,attackType);
IPAddress destinationIpAddress = new IPAddressString(destinationIp).getAddress();
DosDetectionThreshold threshold = null;
if (thresholdRangeMap.containsKey(vsysId)) {
threshold = thresholdRangeMap.get(vsysId).getOrDefault(attackType, TreeRangeMap.create()).get(destinationIpAddress);
}
}catch (Exception e){
logger.error("判定失败\n {} \n{}",value,e);
logger.debug("当前判断IP{}, 类型: {}", key, attackType);
if (threshold == null && baselineMap.containsKey(key)) {
finalResult = getDosEventLogByBaseline(value, key);
} else if (threshold == null && !baselineMap.containsKey(key)) {
finalResult = getDosEventLogBySensitivityThreshold(value);
} else if (threshold != null) {
finalResult = getDosEventLogByStaticThreshold(value, threshold);
} else {
logger.debug("未获取到当前server IP{} 类型 {} 静态阈值 和 baseline", key, attackType);
}
} catch (Exception e) {
logger.error("判定失败\n {} \n{}", value, e);
}
if (finalResult != null) {
out.collect(finalResult);
}
}
@Override
public void processBroadcastElement(Map<String, Map<String, List<Integer>>> value, Context ctx, Collector<DosEventLog> out) {
try {
ctx.getBroadcastState(descriptor).put("broadcast-state", value);
}catch (Exception e){
logger.error("更新广播状态失败 {}",e);
private DosEventLog getDosEventLogBySensitivityThreshold(DosSketchLog value) {
long sketchSessions = value.getSketch_sessions();
Integer staticSensitivityThreshold = FlowWriteConfig.STATIC_SENSITIVITY_THRESHOLD;
long diff = sketchSessions - staticSensitivityThreshold;
return getDosEventLog(value, staticSensitivityThreshold, diff, 0, SENSITIVITY_CONDITION_TYPE, SESSIONS_TAG);
}
private DosEventLog getDosEventLogByBaseline(DosSketchLog value, String key) {
String attackType = value.getAttack_type();
long sketchSessions = value.getSketch_sessions();
DosBaselineThreshold dosBaselineThreshold = baselineMap.get(key).get(attackType);
Integer base = getBaseValue(dosBaselineThreshold, value);
long diff = sketchSessions - base;
return getDosEventLog(value, base, diff, 0, BASELINE_CONDITION_TYPE, SESSIONS_TAG);
}
private DosEventLog getDosEventLogByStaticThreshold(DosSketchLog value, DosDetectionThreshold threshold) throws CloneNotSupportedException {
long sessionBase = threshold.getSessions_per_sec();
long pktBase = threshold.getPackets_per_sec();
long bitBase = threshold.getBits_per_sec();
long diffSession = value.getSketch_sessions() - sessionBase;
long diffPkt = value.getSketch_packets() - pktBase;
long diffByte = value.getSketch_bytes() - bitBase;
double diffSessionPercent = 0.0;
double diffPktPercent = 0.0;
double diffBitPercent = 0.0;
// TODO: code review found a bug in this section; fixed in the 23.11 release, needs testing.
if (sessionBase > 0) {
diffSessionPercent = getDiffPercent(diffSession, sessionBase) * 100;
}
if (pktBase > 0) {
diffPktPercent = getDiffPercent(diffPkt, pktBase) * 100;
}
if (bitBase > 0) {
diffBitPercent = getDiffPercent(diffByte, bitBase) * 100;
}
long profileId = 0;
DosEventLog result = null;
if (diffSessionPercent >= diffPktPercent && diffSessionPercent >= diffBitPercent) {
profileId = threshold.getProfile_id();
result = getDosEventLog(value, sessionBase, diffSession, profileId, STATIC_CONDITION_TYPE, SESSIONS_TAG);
} else if (diffPktPercent >= diffSessionPercent && diffPktPercent >= diffBitPercent) {
profileId = threshold.getProfile_id();
result = getDosEventLog(value, pktBase, diffPkt, profileId, STATIC_CONDITION_TYPE, PACKETS_TAG);
} else if (diffBitPercent >= diffPktPercent && diffBitPercent >= diffSessionPercent) {
profileId = threshold.getProfile_id();
result = getDosEventLog(value, bitBase, diffByte, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
}
return result;
}
public static void main(String[] args) {
DosDetection dosDetection = new DosDetection();
// HashSet<String> strings = new HashSet<>();
// strings.add("13.46.241.36");
// strings.add("25.46.241.45");
// strings.add("133.46.241.53");
// strings.add("219.46.242.74");
// strings.add("153.146.241.196");
// strings.add("132.46.241.21");
// String join = StringUtils.join(strings, ",");
// System.out.println(IpUtils.ipLookup.countryLookup("192.168.50.150"));
System.out.println(Severity.CRITICAL.severity);
private DosEventLog getDosEventLog(DosSketchLog value, long base, long diff, long profileId, int type, String tag) {
DosEventLog result = null;
String destinationIp = value.getDestination_ip();
String attackType = value.getAttack_type();
if (diff > 0 && base != 0) {
double percent = getDiffPercent(diff, base);
Severity severity = judgeSeverity(percent);
Integer staticSensitivityThreshold = FlowWriteConfig.STATIC_SENSITIVITY_THRESHOLD;
if (severity != Severity.NORMAL) {
if (type == BASELINE_CONDITION_TYPE && percent < FlowWriteConfig.BASELINE_SENSITIVITY_THRESHOLD) {
logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过基线敏感阈值,日志详情\n{}", destinationIp, attackType, base, percent, value);
} else if ((type == BASELINE_CONDITION_TYPE || type == SENSITIVITY_CONDITION_TYPE) && value.getSketch_sessions() < staticSensitivityThreshold) {
logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过静态敏感阈值,日志详情\n{}", destinationIp, attackType, base, percent, value);
} else {
// result = getResult(value, base, profileId, severity, percent+1, type, tag);
result = getResult(value, base, profileId, severity, percent, type, tag);
if (type == SENSITIVITY_CONDITION_TYPE) {
result.setSeverity(Severity.MAJOR.severity);
}
logger.info("检测到当前server IP {} 存在 {} 异常,超出基线{} {}倍,基于{}:{}检测,日志详情\n {}", destinationIp, attackType, base, percent, type, tag, result);
}
} else {
logger.debug("当前server IP:{} 未出现 {} 异常,日志详情 {}", destinationIp, attackType, value);
}
}
return result;
}
private DosEventLog getResult(DosSketchLog value,Severity severity,String percent){
private DosEventLog getResult(DosSketchLog value, long base, long profileId, Severity severity, double percent, int type, String tag) {
DosEventLog dosEventLog = new DosEventLog();
dosEventLog.setRecv_time(value.getCommon_recv_time());
dosEventLog.setLog_id(SnowflakeId.generateId());
dosEventLog.setVsys_id(value.getVsys_id());
dosEventLog.setStart_time(value.getSketch_start_time());
dosEventLog.setEnd_time(value.getSketch_start_time()+CommonConfig.FLINK_WINDOW_MAX_TIME);
dosEventLog.setEnd_time(value.getSketch_start_time() + value.getSketch_duration());
dosEventLog.setProfile_id(profileId);
dosEventLog.setAttack_type(value.getAttack_type());
dosEventLog.setSeverity(severity.toString());
dosEventLog.setConditions(getConditions(percent));
dosEventLog.setSeverity(severity.severity);
dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), base, value.getSketch_sessions(), type, tag, dosEventLog));
// dosEventLog.setConditions(getConditions(percent, base, value.getSketch_sessions(), type, tag,dosEventLog));
dosEventLog.setDestination_ip(value.getDestination_ip());
dosEventLog.setDestination_country(IpUtils.ipLookup.countryLookup(value.getDestination_ip()));
dosEventLog.setDestination_country(IpLookupUtils.getCountryLookup(value.getDestination_ip()));
String ipList = value.getSource_ip();
dosEventLog.setSource_ip_list(ipList);
dosEventLog.setSource_country_list(getSourceCountryList(ipList));
@@ -120,47 +205,120 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<Str
return dosEventLog;
}
private String getConditions(String percent){
return "sessions > "+percent+" of baseline";
}
private String getSourceCountryList(String sourceIpList){
String[] ipArr = sourceIpList.split(",");
HashSet<String> countrySet = new HashSet<>();
for (String ip:ipArr){
countrySet.add(IpUtils.ipLookup.countryLookup(ip));
private Integer getBaseValue(DosBaselineThreshold dosBaselineThreshold, DosSketchLog value) {
Integer base = 0;
try {
if (dosBaselineThreshold != null) {
ArrayList<Integer> baselines = dosBaselineThreshold.getSession_rate();
Integer defaultVaule = dosBaselineThreshold.getSession_rate_default_value();
Integer sessionRateBaselineType = dosBaselineThreshold.getSession_rate_baseline_type();
if (baselines != null && baselines.size() == BASELINE_SIZE) {
int timeIndex = getCurrentTimeIndex(value.getSketch_start_time());
base = baselines.get(timeIndex);
if (base == 0) {
logger.debug("获取到当前IP: {},类型: {} baseline值为0,替换为P95观测值{}", value.getDestination_ip(), value.getAttack_type(), defaultVaule);
base = defaultVaule;
}
if (sessionRateBaselineType == OTHER_BASELINE_TYPE && base < FlowWriteConfig.STATIC_SENSITIVITY_THRESHOLD) {
base = FlowWriteConfig.STATIC_SENSITIVITY_THRESHOLD;
}
}
}
} catch (Exception e) {
logger.error("解析baseline数据失败,返回默认值0", e);
}
return StringUtils.join(countrySet,",");
return base;
}
private int getCurrentTimeIndex(long sketchStartTime){
long currentDayTime = sketchStartTime / (60 * 60 * 24) * 60 * 60 * 24;
long indexLong = (sketchStartTime - currentDayTime) / 600;
return Integer.parseInt(Long.toString(indexLong));
private String getConditions(String percent, long base, long sessions, int type, String tag, DosEventLog dosEventLog) {
int condition = 0;
if ("Minor".equals(dosEventLog.getSeverity())) {
condition = 50;
} else if ("Warning".equals(dosEventLog.getSeverity())) {
condition = 100;
} else if ("Major".equals(dosEventLog.getSeverity())) {
condition = 250;
} else if ("Severe".equals(dosEventLog.getSeverity())) {
condition = 500;
} else if ("Critical".equals(dosEventLog.getSeverity())) {
condition = 800;
}
switch (type) {
case STATIC_CONDITION_TYPE:
return "Rate > " +
base + " " +
tag + "/s" + "(>" + condition + "%)";
case BASELINE_CONDITION_TYPE:
return tag + " > " +
percent + " of baseline";
case SENSITIVITY_CONDITION_TYPE:
return String.valueOf(sessions) + " " +
tag + "/s Unusually high " +
StringUtils.capitalize(tag);
default:
throw new IllegalArgumentException("Illegal Argument type:" + type + ", known types = [1,2,3]");
}
}
private String getDiffPercent(long diff,long sketchSessions){
double diffDou = Double.parseDouble(Long.toString(diff));
double sessDou = Double.parseDouble(Long.toString(sketchSessions));
return PERCENT_INSTANCE.format(diffDou / sessDou);
private String getSourceCountryList(String sourceIpList) {
if (StringUtil.isNotBlank(sourceIpList)) {
String countryList;
try {
String[] ipArr = sourceIpList.split(",");
HashSet<String> countrySet = new HashSet<>();
for (String ip : ipArr) {
String country = IpLookupUtils.getCountryLookup(ip);
if (StringUtil.isNotBlank(country)) {
countrySet.add(country);
}
}
countryList = StringUtils.join(countrySet, ", ");
return countryList;
} catch (Exception e) {
logger.error("{} source IP lists 获取国家失败", sourceIpList, e);
return StringUtil.EMPTY;
}
} else {
throw new IllegalArgumentException("Illegal Argument sourceIpList = null");
}
}
private double getDiffPercentDouble(String diffPercent) throws ParseException {
return PERCENT_INSTANCE.parse(diffPercent).doubleValue();
private int getCurrentTimeIndex(long sketchStartTime) {
int index = 0;
try {
long currentDayTime = DateUtils.getTimeFloor(new Date(sketchStartTime * 1000L), "P1D").getTime() / 1000;
long indexLong = (sketchStartTime - currentDayTime) / (86400 / BASELINE_SIZE);
index = Integer.parseInt(Long.toString(indexLong));
} catch (Exception e) {
logger.error("获取time index失败", e);
}
return index;
}
private Severity judgeSeverity(double diffPercent){
if (diffPercent >= CommonConfig.BASELINE_SESSIONS_MINOR_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_WARNING_THRESHOLD){
private Double getDiffPercent(long diff, long base) {
try {
return BigDecimal.valueOf((float) diff / base).setScale(4, BigDecimal.ROUND_HALF_UP).doubleValue();
} catch (Exception e) {
logger.info("当前阈值为0,进行下一阈值条件判断", e);
return 0.0;
}
}
private Severity judgeSeverity(double diffPercent) {
if (diffPercent >= FlowWriteConfig.BASELINE_SESSIONS_MINOR_THRESHOLD && diffPercent < FlowWriteConfig.BASELINE_SESSIONS_WARNING_THRESHOLD) {
return Severity.MINOR;
}else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_WARNING_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD){
} else if (diffPercent >= FlowWriteConfig.BASELINE_SESSIONS_WARNING_THRESHOLD && diffPercent < FlowWriteConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD) {
return Severity.WARNING;
}else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD){
} else if (diffPercent >= FlowWriteConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD && diffPercent < FlowWriteConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD) {
return Severity.MAJOR;
}else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD){
} else if (diffPercent >= FlowWriteConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD && diffPercent < FlowWriteConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD) {
return Severity.SEVERE;
}else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD){
} else if (diffPercent >= FlowWriteConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD) {
return Severity.CRITICAL;
}else {
} else {
return Severity.NORMAL;
}
}
@@ -188,4 +346,5 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<Str
}
}
}
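
getCurrentTimeIndex above maps a sketch timestamp onto one of BASELINE_SIZE = 144 daily buckets of 86400 / 144 = 600 seconds each (the production code floors to the day via DateUtils.getTimeFloor, which may apply a local timezone). A worked example, assuming UTC and an epoch timestamp in seconds:

public class TimeIndexDemo {
    public static void main(String[] args) {
        // 2021-09-14 18:46:23 UTC -> epoch 1631645183
        long sketchStartTime = 1631645183L;
        long midnight = sketchStartTime / 86400 * 86400;    // 1631577600, 00:00 UTC
        long secondsIntoDay = sketchStartTime - midnight;   // 67583 s = 18h46m23s
        int index = (int) (secondsIntoDay / (86400 / 144)); // 67583 / 600 = 112
        System.out.println(index); // bucket 112 covers 18:40:00-18:49:59
    }
}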

EtlProcessFunction.java

@@ -1,15 +1,15 @@
package com.zdjizhi.etl;
import com.zdjizhi.common.CommonConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosSketchLog;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.tuple.Tuple6;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.tuple.Tuple7;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
@@ -18,18 +18,22 @@ import static com.zdjizhi.sink.OutputStreamSink.outputTag;
/**
* @author 94976
*/
public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosSketchLog, Tuple4<String,String,String,String>, TimeWindow> {
public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosSketchLog, Tuple3<String,String,Integer>, TimeWindow> {
// private static final Logger logger = LoggerFactory.getLogger(EtlProcessFunction.class);
private static final Log logger = LogFactory.get();
private static final String EMPTY_SOURCE_IP_IPV4 = "0.0.0.0";
private static final String EMPTY_SOURCE_IP_IPV6 = "::";
private static final Logger logger = LoggerFactory.getLogger(EtlProcessFunction.class);
@Override
public void process(Tuple4<String,String, String, String> keys,
public void process(Tuple3<String,String,Integer> keys,
Context context, Iterable<DosSketchLog> elements,
Collector<DosSketchLog> out) {
DosSketchLog middleResult = getMiddleResult(keys, elements);
try {
if (middleResult != null){
out.collect(middleResult);
logger.info("获取中间聚合结果:{}",middleResult.toString());
logger.debug("获取中间聚合结果:{}",middleResult.toString());
context.output(outputTag,TrafficServerIpMetrics.getOutputMetric(middleResult));
}
}catch (Exception e){
@@ -37,22 +41,22 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
}
}
private DosSketchLog getMiddleResult(Tuple4<String,String, String, String> keys,Iterable<DosSketchLog> elements){
private DosSketchLog getMiddleResult(Tuple3<String,String,Integer> keys,Iterable<DosSketchLog> elements){
DosSketchLog midResuleLog = new DosSketchLog();
Tuple6<Long, Long, Long,String,Long,Long> values = sketchAggregate(elements);
Tuple7<Long, Long, Long,String,Long,Long,Long> values = sketchAggregate(elements);
try {
if (values != null){
midResuleLog.setCommon_sled_ip(keys.f0);
midResuleLog.setCommon_data_center(keys.f1);
midResuleLog.setDestination_ip(keys.f3);
midResuleLog.setAttack_type(keys.f2);
midResuleLog.setAttack_type(keys.f0);
midResuleLog.setDestination_ip(keys.f1);
midResuleLog.setVsys_id(keys.f2);
midResuleLog.setSketch_start_time(values.f4);
midResuleLog.setSketch_duration(values.f5);
midResuleLog.setSource_ip(values.f3);
midResuleLog.setSketch_sessions(values.f0);
midResuleLog.setSketch_packets(values.f1);
midResuleLog.setSketch_bytes(values.f2);
midResuleLog.setCommon_recv_time(values.f6);
return midResuleLog;
}
} catch (Exception e){
@@ -61,28 +65,40 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
return null;
}
private Tuple6<Long, Long, Long,String,Long,Long> sketchAggregate(Iterable<DosSketchLog> elements){
int cnt = 1;
private Tuple7<Long, Long, Long,String,Long,Long,Long> sketchAggregate(Iterable<DosSketchLog> elements){
long sessions = 0;
long packets = 0 ;
long bytes = 0;
long startTime = 0;
long startTime = System.currentTimeMillis()/1000;
long endTime = System.currentTimeMillis()/1000;
long duration = 0;
long recvtime = 0;
HashSet<String> sourceIpSet = new HashSet<>();
try {
for (DosSketchLog newSketchLog : elements){
sessions += newSketchLog.getSketch_sessions();
packets += newSketchLog.getSketch_packets();
bytes += newSketchLog.getSketch_bytes();
startTime = newSketchLog.getSketch_start_time();
duration = newSketchLog.getSketch_duration();
cnt += 1;
if (sourceIpSet.size() < CommonConfig.SOURCE_IP_LIST_LIMIT){
sourceIpSet.add(newSketchLog.getSource_ip());
if (recvtime == 0){
recvtime = newSketchLog.getCommon_recv_time();
}else if (recvtime > newSketchLog.getCommon_recv_time()){
recvtime = newSketchLog.getCommon_recv_time();
}
String sourceIp = newSketchLog.getSource_ip();
if (StringUtils.equals(sourceIp,EMPTY_SOURCE_IP_IPV4) || StringUtils.equals(sourceIp,EMPTY_SOURCE_IP_IPV6)){
sessions += newSketchLog.getSketch_sessions();
packets += newSketchLog.getSketch_packets();
bytes += newSketchLog.getSketch_bytes();
startTime = newSketchLog.getSketch_start_time() > startTime ? startTime : newSketchLog.getSketch_start_time();
endTime = newSketchLog.getSketch_start_time() > endTime ? newSketchLog.getSketch_start_time() : endTime;
duration = endTime - startTime == 0 ? 5 : endTime - startTime;
}else {
if (sourceIpSet.size() < FlowWriteConfig.SOURCE_IP_LIST_LIMIT){
sourceIpSet.add(sourceIp);
}
}
}
String sourceIpList = StringUtils.join(sourceIpSet, ",");
return Tuple6.of(sessions/cnt,packets/cnt,bytes/cnt,sourceIpList,startTime,duration);
return Tuple7.of(sessions/ FlowWriteConfig.FLINK_WINDOW_MAX_TIME,packets/ FlowWriteConfig.FLINK_WINDOW_MAX_TIME,
bytes*8/ FlowWriteConfig.FLINK_WINDOW_MAX_TIME,sourceIpList,startTime,duration,recvtime);
}catch (Exception e){
logger.error("聚合中间结果集失败 {}",e);
}
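Note: the Tuple7 returned above converts the window's summed counters into per-second rates by dividing by the window length, multiplying bytes by 8 for a bit rate. A minimal sketch of that arithmetic, with the 5-second window length (FLINK_WINDOW_MAX_TIME) assumed for illustration:

// Minimal sketch of the rate conversion in sketchAggregate(); the
// 5-second window length is an assumption for illustration.
public class RateSketch {
    public static void main(String[] args) {
        long windowSeconds = 5;                        // assumed FLINK_WINDOW_MAX_TIME
        long sessions = 100, packets = 4_000, bytes = 1_500_000;
        System.out.println(sessions / windowSeconds);  // 20 sessions/s
        System.out.println(packets / windowSeconds);   // 800 packets/s
        System.out.println(bytes * 8 / windowSeconds); // 2400000 bits/s
    }
}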

View File

@@ -0,0 +1,87 @@
package com.zdjizhi.etl;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.DateUtils;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosBaselineThreshold;
import com.zdjizhi.utils.HbaseUtils;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.*;
public class ParseBaselineThreshold {
private static final Log logger = LogFactory.get();
private static ArrayList<String> floodTypeList = new ArrayList<>();
private static Table table = null;
private static Scan scan = null;
static {
floodTypeList.add("TCP SYN Flood");
floodTypeList.add("UDP Flood");
floodTypeList.add("ICMP Flood");
floodTypeList.add("DNS Flood");
}
private static void prepareHbaseEnv() throws IOException {
org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
config.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM);
config.set("hbase.client.retries.number", "3");
config.set("hbase.bulkload.retries.number", "3");
config.set("zookeeper.recovery.retry", "3");
config.set("hbase.defaults.for.version", "2.2.3");
config.set("hbase.defaults.for.version.skip", "true");
config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, FlowWriteConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, FlowWriteConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
TableName tableName = TableName.valueOf(FlowWriteConfig.HBASE_BASELINE_TABLE_NAME);
Connection conn = ConnectionFactory.createConnection(config);
table = conn.getTable(tableName);
long currentTimeMillis = System.currentTimeMillis();
scan = new Scan()
.setAllowPartialResults(true)
.setTimeRange(DateUtils.getSomeDate(new Date(currentTimeMillis), Math.negateExact(FlowWriteConfig.HBASE_BASELINE_TTL)).getTime(), currentTimeMillis)
.setLimit(FlowWriteConfig.HBASE_BASELINE_TOTAL_NUM);
logger.info("连接hbase成功正在读取baseline数据");
}
static Map<String, Map<String, DosBaselineThreshold>> readFromHbase() {
Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
try {
prepareHbaseEnv();
logger.info("开始读取baseline数据");
ResultScanner rs = table.getScanner(scan);
for (Result result : rs) {
Map<String, DosBaselineThreshold> floodTypeMap = new HashMap<>();
String rowkey = Bytes.toString(result.getRow());
for (String type:floodTypeList){
DosBaselineThreshold baselineThreshold = new DosBaselineThreshold();
ArrayList<Integer> sessionRate = HbaseUtils.getArraylist(result, type, "session_rate");
if (sessionRate != null && !sessionRate.isEmpty()){
Integer defaultValue = HbaseUtils.getIntegerValue(result, type, "session_rate_default_value");
Integer rateBaselineType = HbaseUtils.getIntegerValue(result, type, "session_rate_baseline_type");
baselineThreshold.setSession_rate(sessionRate);
baselineThreshold.setSession_rate_default_value(defaultValue);
baselineThreshold.setSession_rate_baseline_type(rateBaselineType);
floodTypeMap.put(type,baselineThreshold);
}
}
baselineMap.put(rowkey, floodTypeMap);
}
logger.info("格式化baseline数据成功读取IP共{}", baselineMap.size());
} catch (Exception e) {
logger.error("读取hbase数据失败", e);
}
return baselineMap;
}
}
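Note: prepareHbaseEnv() above restricts the scan to cells written within the baseline TTL via Scan.setTimeRange. A minimal sketch of such a time-bounded scan, assuming a day-based TTL and an arbitrary row limit:

// Sketch of a time-bounded HBase scan like the one built above; the
// day-based TTL and the limit are assumptions for illustration.
import org.apache.hadoop.hbase.client.Scan;
import java.io.IOException;

public class TimeBoundedScan {
    public static Scan lastDays(int days, int limit) throws IOException {
        long now = System.currentTimeMillis();
        long from = now - days * 24L * 3600L * 1000L;
        return new Scan()
                .setAllowPartialResults(true)
                .setTimeRange(from, now)   // only cells written in [from, now)
                .setLimit(limit);
    }
}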

View File

@@ -1,11 +1,11 @@
package com.zdjizhi.etl;
import com.zdjizhi.common.CommonConfig;
import com.alibaba.fastjson2.JSONObject;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.source.DosSketchSource;
import com.zdjizhi.utils.FlinkEnvironmentUtils;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
@@ -14,46 +14,54 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.*;
/**
* @author wlh
*/
public class ParseSketchLog {
private static Logger logger = LoggerFactory.getLogger(ParseSketchLog.class);
public static SingleOutputStreamOperator<DosSketchLog> getSketchSource(){
public static SingleOutputStreamOperator<DosSketchLog> getSketchSource() {
return flatSketchSource().assignTimestampsAndWatermarks(createWatermarkStrategy());
}
private static SingleOutputStreamOperator<DosSketchLog> flatSketchSource(){
private static SingleOutputStreamOperator<DosSketchLog> flatSketchSource() {
return DosSketchSource.createDosSketchSource().flatMap(new FlatSketchLog());
}
private static WatermarkStrategy<DosSketchLog> createWatermarkStrategy(){
private static WatermarkStrategy<DosSketchLog> createWatermarkStrategy() {
return WatermarkStrategy
.<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(CommonConfig.FLINK_WATERMARK_MAX_ORDERNESS))
.<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(FlowWriteConfig.FLINK_WATERMARK_MAX_ORDERNESS))
.withTimestampAssigner((event, timestamp) -> event.getSketch_start_time() * 1000);
}
private static class FlatSketchLog implements FlatMapFunction<String, DosSketchLog> {
@Override
public void flatMap(String s, Collector<DosSketchLog> collector) throws Exception {
public void flatMap(String s, Collector<DosSketchLog> collector) {
try {
if (StringUtil.isNotBlank(s)){
HashMap<String, Object> sketchSource = (HashMap<String, Object>) JsonMapper.fromJsonString(s, Object.class);
String commonSledIp = sketchSource.get("common_sled_ip").toString();
String commonDataCenter = sketchSource.get("common_data_center").toString();
if (StringUtil.isNotBlank(s)) {
final long recv_time = System.currentTimeMillis()/1000;
HashMap<String, Object> sketchSource = JSONObject.parseObject(s, HashMap.class);
long sketchStartTime = Long.parseLong(sketchSource.get("sketch_start_time").toString());
long sketchDuration = Long.parseLong(sketchSource.get("sketch_duration").toString());
String attackType = sketchSource.get("attack_type").toString();
ArrayList<HashMap<String, Object>> reportIpList = (ArrayList<HashMap<String, Object>>) sketchSource.get("report_ip_list");
int vsysId = Integer.parseInt(sketchSource.getOrDefault("common_vsys_id", 1).toString());
String report_ip_list = JSONObject.toJSONString(sketchSource.get("report_ip_list"));
ArrayList<HashMap<String, Object>> reportIpList = JSONObject.parseObject(report_ip_list, ArrayList.class);
for (HashMap<String, Object> obj : reportIpList) {
DosSketchLog dosSketchLog = new DosSketchLog();
dosSketchLog.setCommon_sled_ip(commonSledIp);
dosSketchLog.setCommon_data_center(commonDataCenter);
dosSketchLog.setCommon_recv_time(recv_time);
dosSketchLog.setSketch_start_time(sketchStartTime);
dosSketchLog.setSketch_duration(sketchDuration);
dosSketchLog.setAttack_type(attackType);
dosSketchLog.setVsys_id(vsysId);
String sourceIp = obj.get("source_ip").toString();
String destinationIp = obj.get("destination_ip").toString();
long sketchSessions = Long.parseLong(obj.get("sketch_sessions").toString());
@@ -61,23 +69,16 @@ public class ParseSketchLog {
long sketchBytes = Long.parseLong(obj.get("sketch_bytes").toString());
dosSketchLog.setSource_ip(sourceIp);
dosSketchLog.setDestination_ip(destinationIp);
dosSketchLog.setSketch_sessions(sketchSessions/sketchDuration);
dosSketchLog.setSketch_packets(sketchPackets/sketchDuration);
dosSketchLog.setSketch_bytes(sketchBytes*8/sketchDuration);
dosSketchLog.setSketch_sessions(sketchSessions);
dosSketchLog.setSketch_packets(sketchPackets);
dosSketchLog.setSketch_bytes(sketchBytes);
collector.collect(dosSketchLog);
logger.info("数据解析成功:{}",dosSketchLog.toString());
logger.debug("数据解析成功:{}", dosSketchLog.toString());
}
}
} catch (Exception e) {
logger.error("数据解析错误:{} \n{}",s,e);
logger.error("数据解析错误:{} \n{}", s, e);
}
}
}
public static void main(String[] args) throws Exception {
flatSketchSource().print();
FlinkEnvironmentUtils.streamExeEnv.execute();
}
}
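Note: FlatSketchLog above expects one JSON object per Kafka record, carrying the shared sketch fields plus a report_ip_list array that is fanned out into individual DosSketchLog objects. A minimal fastjson2 sketch of that shape; the sample field values are assumptions:

// Sketch of the input shape consumed by FlatSketchLog; sample values
// are assumptions for illustration.
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONArray;
import com.alibaba.fastjson2.JSONObject;

public class SketchJsonDemo {
    public static void main(String[] args) {
        String s = "{\"sketch_start_time\":1700000000,\"sketch_duration\":5,"
                + "\"attack_type\":\"TCP SYN Flood\",\"common_vsys_id\":1,"
                + "\"report_ip_list\":[{\"source_ip\":\"192.0.2.1\","
                + "\"destination_ip\":\"198.51.100.2\",\"sketch_sessions\":10,"
                + "\"sketch_packets\":100,\"sketch_bytes\":6400}]}";
        JSONObject obj = JSON.parseObject(s);
        JSONArray reportIpList = obj.getJSONArray("report_ip_list");
        for (int i = 0; i < reportIpList.size(); i++) {
            JSONObject ip = reportIpList.getJSONObject(i);
            // one DosSketchLog would be emitted per element here
            System.out.println(ip.getString("destination_ip")
                    + " sessions=" + ip.getLongValue("sketch_sessions"));
        }
    }
}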

View File

@@ -0,0 +1,282 @@
package com.zdjizhi.etl;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosDetectionThreshold;
import com.zdjizhi.common.DosVsysId;
import com.zdjizhi.utils.HttpClientUtils;
import inet.ipaddr.IPAddress;
import inet.ipaddr.IPAddressString;
import org.apache.flink.shaded.guava18.com.google.common.collect.Range;
import org.apache.flink.shaded.guava18.com.google.common.collect.TreeRangeMap;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.message.BasicHeader;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author wlh
*/
public class ParseStaticThreshold {
private static final Log logger = LogFactory.get();
private static String encryptpwd;
static {
// Load the encrypted login password
encryptpwd = getEncryptpwd();
}
/**
* Fetch the encrypted password
*/
private static String getEncryptpwd() {
String psw = HttpClientUtils.ERROR_MESSAGE;
try {
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
HashMap<String, Object> parms = new HashMap<>();
parms.put("password", "admin");
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_ENCRYPTPWD_PATH, parms);
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build());
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr, HashMap.class);
boolean success = (boolean) resposeMap.get("success");
String msg = resposeMap.get("msg").toString();
if (success) {
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
psw = data.get("encryptpwd").toString();
} else {
logger.error(msg);
}
}
} catch (URISyntaxException e) {
logger.error("构造URI异常", e);
} catch (Exception e) {
logger.error("获取encryptpwd失败", e);
}
return psw;
}
/**
* Fetch the vsysId configuration list
*
* @return vsysIdList
*/
private static ArrayList<DosVsysId> getVsysId() {
ArrayList<DosVsysId> vsysIdList = null;
try {
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
HashMap<String, Object> parms = new HashMap<>();
parms.put("page_size", -1);
// parms.put("orderBy", "vsysId desc");
parms.put("type", 1);
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_POLICY_VSYSID_PATH, parms);
String token = FlowWriteConfig.BIFANG_SERVER_TOKEN;
if (!HttpClientUtils.ERROR_MESSAGE.equals(token)) {
BasicHeader authorization = new BasicHeader("Authorization", token);
BasicHeader authorization1 = new BasicHeader("Content-Type", "application/x-www-form-urlencoded");
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build(), authorization, authorization1);
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr, HashMap.class);
boolean success = (boolean) resposeMap.get("success");
String msg = resposeMap.get("msg").toString();
if (success) {
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
Object list = data.get("vsys_list");
if (list != null) {
List<DosVsysId> dosVsysIds = JSON.parseArray(JSONObject.toJSONString(list), DosVsysId.class);
vsysIdList= (ArrayList)dosVsysIds;
logger.info("获取到vsysId {}条", vsysIdList.size());
} else {
logger.warn("vsysIdList为空");
}
} else {
logger.error(msg);
}
}
}
} catch (Exception e) {
logger.error("获取vsysId失败,请检查bifang服务或登录配置信息 ", e);
}
return vsysIdList;
}
/**
* Fetch the static threshold configuration list by vsysId
* @return thresholds
*/
private static ArrayList<DosDetectionThreshold> getDosDetectionThreshold() {
ArrayList<DosDetectionThreshold> vsysThresholds = new ArrayList<>();
ArrayList<DosVsysId> vsysIds = getVsysId();
try {
if (vsysIds != null) {
for (DosVsysId dosVsysId : vsysIds) {
Integer vsysId = dosVsysId.getId() == null ? 1 : dosVsysId.getId();
Integer[] superiorIds = dosVsysId.getSuperior_ids();
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
HashMap<String, Object> parms = new HashMap<>();
parms.put("page_size", -1);
// parms.put("order_by", "profileId asc");
parms.put("is_valid", 1);
parms.put("vsys_id", vsysId);
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_POLICY_THRESHOLD_PATH, parms);
String token = FlowWriteConfig.BIFANG_SERVER_TOKEN;
if (!HttpClientUtils.ERROR_MESSAGE.equals(token)) {
BasicHeader authorization = new BasicHeader("Authorization", token);
BasicHeader authorization1 = new BasicHeader("Content-Type", "application/x-www-form-urlencoded");
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build(), authorization, authorization1);
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr,HashMap.class);
boolean success = (boolean) resposeMap.get("success");
String msg = resposeMap.get("msg").toString();
if (success) {
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
Object list = data.get("dos_detections");
if (list != null) {
List<DosDetectionThreshold> dosDetectionThresholds = JSON.parseArray(JSONObject.toJSONString(list), DosDetectionThreshold.class);
ArrayList<DosDetectionThreshold> thresholds = (ArrayList)dosDetectionThresholds;
for (DosDetectionThreshold dosDetectionThreshold : thresholds) {
dosDetectionThreshold.setSuperior_ids(superiorIds);
vsysThresholds.add(dosDetectionThreshold);
}
logger.info("获取到vsys id是{}静态阈值配置{}条", vsysId, thresholds.size());
} else {
logger.warn("静态阈值配置为空");
}
} else {
logger.error(msg);
}
}
}
}
}
} catch (Exception e) {
logger.error("获取静态阈值配置失败,请检查bifang服务或登录配置信息 ", e);
}
return vsysThresholds;
}
/**
* Build the threshold RangeMap from the static thresholds. Key: IP range or single IP; value: threshold configuration.
*
* @return threshold RangeMap
*/
static HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> createStaticThreshold() {
HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> thresholdRangeMap = new HashMap<>(4);
try {
ArrayList<DosDetectionThreshold> dosDetectionThreshold = getDosDetectionThreshold();
if (dosDetectionThreshold != null && !dosDetectionThreshold.isEmpty()) {
for (DosDetectionThreshold threshold : dosDetectionThreshold) {
String attackType = threshold.getAttack_type();
int vsysId = threshold.getVsys_id();
HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>> rangeMap = thresholdRangeMap.getOrDefault(vsysId, new HashMap<>());
TreeRangeMap<IPAddress, DosDetectionThreshold> treeRangeMap = rangeMap.getOrDefault(attackType, TreeRangeMap.create());
ArrayList<String> serverIpList = threshold.getServer_ip_list();
for (String sip : serverIpList) {
IPAddressString ipAddressString = new IPAddressString(sip);
if (ipAddressString.isIPAddress()) {
IPAddress address = ipAddressString.getAddress();
if (address.isPrefixed()) {
IPAddress lower = address.getLower();
IPAddress upper = address.getUpper();
if (!address.isMultiple()) {
lower = address.adjustPrefixLength(address.getBitCount());
upper = address.toMaxHost().withoutPrefixLength();
}
Map.Entry<Range<IPAddress>, DosDetectionThreshold> lowerEntry = treeRangeMap.getEntry(lower);
Map.Entry<Range<IPAddress>, DosDetectionThreshold> upperEntry = treeRangeMap.getEntry(upper);
if (lowerEntry != null && upperEntry == null) {
Range<IPAddress> lowerEntryKey = lowerEntry.getKey();
DosDetectionThreshold lowerEntryValue = lowerEntry.getValue();
treeRangeMap.put(Range.closedOpen(lowerEntryKey.lowerEndpoint(), lower), lowerEntryValue);
treeRangeMap.put(Range.closed(lower, upper), threshold);
} else if (lowerEntry == null && upperEntry != null) {
Range<IPAddress> upperEntryKey = upperEntry.getKey();
DosDetectionThreshold upperEntryValue = upperEntry.getValue();
treeRangeMap.put(Range.openClosed(upper, upperEntryKey.upperEndpoint()), upperEntryValue);
treeRangeMap.put(Range.closed(lower, upper), threshold);
} else {
treeRangeMap.put(Range.closed(lower, upper), threshold);
}
} else {
treeRangeMap.put(Range.closed(address, address), threshold);
}
}
}
rangeMap.put(attackType, treeRangeMap);
thresholdRangeMap.put(vsysId, rangeMap);
}
}
} catch (Exception e) {
logger.error("构建threshold RangeMap失败", e);
}
return thresholdRangeMap;
}
/**
* Log in to the bifang service and obtain a token
*
* @return token
*/
private static String loginBifangServer() {
String token = HttpClientUtils.ERROR_MESSAGE;
try {
final HashMap<String, Object> parmsMap = new HashMap<>();
String urlString = FlowWriteConfig.BIFANG_SERVER_URI+FlowWriteConfig.BIFANG_SERVER_LOGIN_PATH;
parmsMap.put("username","admin");
parmsMap.put("password",encryptpwd);
parmsMap.put("auth_mode","");
final String jsonInputString = JSON.toJSONString(parmsMap);
final URL url = new URL(urlString);
final HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod("POST");
connection.setRequestProperty("Content-Type", "application/json");
connection.setRequestProperty("Accept", "application/json");
connection.setDoOutput(true);
OutputStream os = connection.getOutputStream();
os.write(jsonInputString.getBytes());
os.flush();
os.close();
int responseCode = connection.getResponseCode();
if (responseCode == 200 ) {
// Read the response body
BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
String line;
StringBuilder response = new StringBuilder();
while ((line = reader.readLine()) != null) {
response.append(line);
}
reader.close();
HashMap<String, Object> body = JSONObject.parseObject(String.valueOf(response), HashMap.class);
final HashMap data = JSONObject.parseObject(String.valueOf( body.get("data")), HashMap.class);
token = (String) data.get("token");
}
} catch (Exception e) {
logger.error("登录失败,未获取到token ", e);
}
return token;
}
}
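Note: createStaticThreshold() above splits entries at the boundaries before inserting a new CIDR so that the non-overlapping parts of an existing range keep their old threshold. The demo below shows the underlying Guava TreeRangeMap truncation this relies on, with Integer keys standing in for IPAddress and the plain Guava import standing in for the flink-shaded copy used above:

// Minimal TreeRangeMap demo; Integer keys stand in for IPAddress.
import com.google.common.collect.Range;
import com.google.common.collect.TreeRangeMap;

public class RangeSplitDemo {
    public static void main(String[] args) {
        TreeRangeMap<Integer, String> map = TreeRangeMap.create();
        map.put(Range.closed(10, 30), "old-threshold");
        map.put(Range.closed(20, 40), "new-threshold"); // overlaps the tail
        System.out.println(map.get(15));          // old-threshold
        System.out.println(map.get(25));          // new-threshold
        System.out.println(map.asMapOfRanges());  // {[10..20)=old-threshold, [20..40]=new-threshold}
    }
}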

View File

@@ -1,32 +1,36 @@
package com.zdjizhi.etl;
import com.zdjizhi.common.CommonConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class TrafficServerIpMetrics {
private static final Logger logger = LoggerFactory.getLogger(TrafficServerIpMetrics.class);
// private static final Logger logger = LoggerFactory.getLogger(TrafficServerIpMetrics.class);
private static final Log logger = LogFactory.get();
static DosMetricsLog getOutputMetric(DosSketchLog midResuleLog) {
DosMetricsLog dosMetricsLog = new DosMetricsLog();
dosMetricsLog.setSketch_start_time(timeFloor(System.currentTimeMillis()/1000));
dosMetricsLog.setCommon_sled_ip(midResuleLog.getCommon_sled_ip());
dosMetricsLog.setCommon_data_center(midResuleLog.getCommon_data_center());
dosMetricsLog.setDestination_ip(midResuleLog.getDestination_ip());
dosMetricsLog.setAttack_type(midResuleLog.getAttack_type());
dosMetricsLog.setSession_rate(midResuleLog.getSketch_sessions());
dosMetricsLog.setPacket_rate(midResuleLog.getSketch_packets());
dosMetricsLog.setBit_rate(midResuleLog.getSketch_bytes());
logger.info("metric 结果已加载:{}",dosMetricsLog.toString());
dosMetricsLog.setVsys_id(midResuleLog.getVsys_id());
dosMetricsLog.setPartition_num(getPartitionNumByIp(midResuleLog.getDestination_ip()));
logger.debug("metric 结果已加载:{}",dosMetricsLog.toString());
return dosMetricsLog;
}
private static long timeFloor(long sketchStartTime){
return sketchStartTime / CommonConfig.FLINK_WINDOW_MAX_TIME * CommonConfig.FLINK_WINDOW_MAX_TIME;
return sketchStartTime / FlowWriteConfig.FLINK_WINDOW_MAX_TIME * FlowWriteConfig.FLINK_WINDOW_MAX_TIME;
}
private static int getPartitionNumByIp(String destinationIp){
return Math.abs(destinationIp.hashCode()) % FlowWriteConfig.DESTINATION_IP_PARTITION_NUM;
}
}
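Note: timeFloor() aligns a timestamp to the start of its window by integer division, and getPartitionNumByIp() buckets destination IPs by hash. A small worked sketch with the 5-second window assumed; Math.floorMod is shown as a defensive alternative, since Math.abs(Integer.MIN_VALUE) stays negative:

// Worked sketch of the two helpers above; the window length is assumed.
public class MetricHelpersDemo {
    public static void main(String[] args) {
        long window = 5;                           // assumed FLINK_WINDOW_MAX_TIME
        long ts = 1_700_000_007L;
        System.out.println(ts / window * window);  // 1700000005, the window start
        // Math.abs(ip.hashCode()) % n goes negative if hashCode() is
        // Integer.MIN_VALUE; Math.floorMod always yields a value in [0, n).
        System.out.println(Math.floorMod("198.51.100.2".hashCode(), 8));
    }
}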

View File

@@ -2,6 +2,10 @@ package com.zdjizhi.main;
import com.zdjizhi.sink.OutputStreamSink;
/**
* @author wlh
* Main entry point of the application
*/
public class DosDetectionApplication {
public static void main(String[] args) {

View File

@@ -1,16 +1,23 @@
package com.zdjizhi.sink;
import com.zdjizhi.common.CommonConfig;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.utils.JsonMapper;
//import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import java.util.Objects;
class DosEventSink {
static void dosEventOutputSink(SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream){
dosEventLogOutputStream.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_EVENT_TOPIC_NAME))
.setParallelism(CommonConfig.KAFKA_OUTPUT_EVENT_PARALLELISM);
dosEventLogOutputStream
.filter(Objects::nonNull)
// .map(JsonMapper::toJsonString)
.map(JSONObject::toJSONString)
.addSink(KafkaUtils.getKafkaSink(FlowWriteConfig.KAFKA_OUTPUT_EVENT_TOPIC_NAME))
.setParallelism(FlowWriteConfig.KAFKA_OUTPUT_EVENT_PARALLELISM);
}
}

View File

@@ -1,137 +1,64 @@
package com.zdjizhi.sink;
import com.zdjizhi.common.CommonConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.etl.EtlProcessFunction;
import com.zdjizhi.etl.DosDetection;
import com.zdjizhi.etl.EtlProcessFunction;
import com.zdjizhi.etl.ParseSketchLog;
import com.zdjizhi.source.BaselineSource;
import com.zdjizhi.utils.FlinkEnvironmentUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.typeutils.MapTypeInfo;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* @author 94976
*/
public class OutputStreamSink {
private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);
// private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);
private static final Log logger = LogFactory.get();
public static OutputTag<DosMetricsLog> outputTag = new OutputTag<DosMetricsLog>("traffic server ip metrics"){};
private static MapStateDescriptor<String, Map<String, Map<String, List<Integer>>>> descriptor = new MapStateDescriptor<>("boradcast-state",
Types.STRING,
new MapTypeInfo<>(String.class, new MapTypeInfo<>(String.class, (Class<List<Integer>>) (Class<?>) List.class).getTypeClass()));
public static void finalOutputSink(){
try {
SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream(middleStream);
DosEventSink.dosEventOutputSink(dosEventLogOutputStream);
DosEventSink.dosEventOutputSink(getEventSinkStream(middleStream));
TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
FlinkEnvironmentUtils.streamExeEnv.execute(CommonConfig.STREAM_EXECUTION_JOB_NAME);
FlinkEnvironmentUtils.streamExeEnv.execute(FlowWriteConfig.STREAM_EXECUTION_JOB_NAME);
} catch (Exception e) {
logger.error("任务启动失败 {}",e);
}
}
public static void main(String[] args) throws Exception {
SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream(middleStream);
DosEventSink.dosEventOutputSink(dosEventLogOutputStream);
TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
dosEventLogOutputStream.print();
FlinkEnvironmentUtils.streamExeEnv.execute();
}
private static SingleOutputStreamOperator<DosEventLog> getEventSinkStream(SingleOutputStreamOperator<DosSketchLog> middleStream){
return middleStream
.process(new DosDetection()).setParallelism(FlowWriteConfig.FLINK_DETECTION_MAP_PARALLELISM);
private static SingleOutputStreamOperator<DosEventLog> getOutputSinkStream(SingleOutputStreamOperator<DosSketchLog> middleStream){
BroadcastStream<Map<String, Map<String,List<Integer>>>> broadcast = FlinkEnvironmentUtils.streamExeEnv
.addSource(new BaselineSource())
.setParallelism(CommonConfig.HBASE_INPUT_PARALLELISM)
.broadcast(descriptor);
logger.info("广播变量加载成功!!");
return middleStream.keyBy(new SecondKeySelector())
// .window(TumblingEventTimeWindows.of(Time.seconds(CommonConfig.FLINK_WINDOW_MAX_TIME)))
.reduce(new SecondReduceFunc())
.connect(broadcast)
.process(new DosDetection())
.setParallelism(CommonConfig.FLINK_SECOND_AGG_PARALLELISM);
}
private static SingleOutputStreamOperator<DosSketchLog> getMiddleStream(){
return ParseSketchLog.getSketchSource()
.keyBy(new FirstKeySelector())
.window(TumblingEventTimeWindows.of(Time.seconds(CommonConfig.FLINK_WINDOW_MAX_TIME)))
.keyBy(new KeysSelector())
.window(TumblingEventTimeWindows.of(Time.seconds(FlowWriteConfig.FLINK_WINDOW_MAX_TIME)))
.process(new EtlProcessFunction())
.setParallelism(CommonConfig.FLINK_FIRST_AGG_PARALLELISM);
.setParallelism(FlowWriteConfig.FLINK_FIRST_AGG_PARALLELISM);
}
private static String groupUniqSourceIp(String sourceIp1,String sourceIp2){
HashSet<String> sourceIpSet = new HashSet<>();
Collections.addAll(sourceIpSet, (sourceIp1 + "," + sourceIp2).split(","));
if (sourceIpSet.size() > CommonConfig.SOURCE_IP_LIST_LIMIT){
return StringUtils.join(takeUniqLimit(sourceIpSet,CommonConfig.SOURCE_IP_LIST_LIMIT),",");
}
return StringUtils.join(sourceIpSet,",");
}
private static<T> Collection<T> takeUniqLimit(Collection<T> collection, int limit){
int i =0;
Collection<T> newSet = new HashSet<>();
for (T t:collection){
if (i < limit){
newSet.add(t);
i += 1;
}
}
return newSet;
}
private static class FirstKeySelector implements KeySelector<DosSketchLog, Tuple4<String, String, String, String>>{
private static class KeysSelector implements KeySelector<DosSketchLog, Tuple3<String, String, Integer>>{
@Override
public Tuple4<String, String, String, String> getKey(DosSketchLog dosSketchLog) throws Exception {
return Tuple4.of(
dosSketchLog.getCommon_sled_ip(),
dosSketchLog.getCommon_data_center(),
public Tuple3<String, String, Integer> getKey(DosSketchLog dosSketchLog){
return Tuple3.of(
dosSketchLog.getAttack_type(),
dosSketchLog.getDestination_ip());
}
}
private static class SecondKeySelector implements KeySelector<DosSketchLog, Tuple2<String, String>> {
@Override
public Tuple2<String, String> getKey(DosSketchLog dosSketchLog) throws Exception {
return Tuple2.of(
dosSketchLog.getAttack_type(),
dosSketchLog.getDestination_ip());
}
}
private static class SecondReduceFunc implements ReduceFunction<DosSketchLog> {
@Override
public DosSketchLog reduce(DosSketchLog value1, DosSketchLog value2) throws Exception {
value1.setSketch_sessions((value1.getSketch_sessions()+value2.getSketch_sessions())/2);
value1.setSketch_bytes((value1.getSketch_bytes()+value2.getSketch_bytes())/2);
value1.setSketch_packets((value1.getSketch_packets()+value2.getSketch_packets())/2);
value1.setSource_ip(groupUniqSourceIp(value1.getSource_ip(),value2.getSource_ip()));
return value1;
dosSketchLog.getDestination_ip(),
dosSketchLog.getVsys_id());
}
}
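Note: the reworked pipeline above keys sketch logs by (attack_type, destination_ip, vsys_id), aggregates them in a tumbling event-time window, then runs DosDetection. A self-contained sketch of the same window mechanics with a local source; the tuple data, 5-second window, and watermark delay are assumptions:

// Minimal tumbling event-time window, mirroring getMiddleStream(); the
// inline source and the 5 s settings are assumptions for illustration.
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import java.time.Duration;

public class WindowSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.fromElements(Tuple2.of("TCP SYN Flood", 1L), Tuple2.of("TCP SYN Flood", 2L))
           .assignTimestampsAndWatermarks(WatermarkStrategy
                   .<Tuple2<String, Long>>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                   .withTimestampAssigner((e, ts) -> e.f1 * 1000))
           .keyBy(e -> e.f0)                                     // attack type as the key
           .window(TumblingEventTimeWindows.of(Time.seconds(5)))
           .sum(1)
           .print();
        env.execute("window-sketch");
    }
}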

View File

@@ -1,9 +1,10 @@
package com.zdjizhi.sink;
import com.zdjizhi.common.CommonConfig;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
@@ -14,8 +15,11 @@ class TrafficServerIpMetricsSink {
static void sideOutputMetricsSink(SingleOutputStreamOperator<DosSketchLog> outputStream){
DataStream<DosMetricsLog> sideOutput = outputStream.getSideOutput(outputTag);
sideOutput.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
.setParallelism(CommonConfig.KAFKA_OUTPUT_METRIC_PARALLELISM);
// sideOutput.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
sideOutput.map(JSONObject::toJSONString).addSink(KafkaUtils.getKafkaSink(FlowWriteConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
.setParallelism(FlowWriteConfig.KAFKA_OUTPUT_METRIC_PARALLELISM);
}
}

View File

@@ -1,128 +0,0 @@
package com.zdjizhi.source;
import com.zdjizhi.common.CommonConfig;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author wlh
*/
public class BaselineSource extends RichSourceFunction<Map<String, Map<String,List<Integer>>>> {
private static final Logger logger = LoggerFactory.getLogger(BaselineSource.class);
private Connection conn = null;
private Table table = null;
private Scan scan = null;
@Override
public void open(Configuration parameters) throws Exception {
org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
config.set("hbase.zookeeper.quorum", CommonConfig.HBASE_ZOOKEEPER_QUORUM);
config.set("hbase.client.retries.number", "3");
config.set("hbase.bulkload.retries.number", "3");
config.set("zookeeper.recovery.retry", "3");
config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, CommonConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, CommonConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
TableName tableName = TableName.valueOf(CommonConfig.HBASE_BASELINE_TABLE_NAME);
conn = ConnectionFactory.createConnection(config);
table = conn.getTable(tableName);
scan = new Scan().setAllowPartialResults(true).setLimit(CommonConfig.HBASE_BASELINE_TOTAL_NUM);
logger.info("连接hbase成功正在读取baseline数据");
// .addFamily(Bytes.toBytes(CommonConfig.HBASE_BASELINE_FAMLIY_NAME));
}
@Override
public void close() throws Exception {
super.close();
}
@Override
public void run(SourceContext<Map<String, Map<String,List<Integer>>>> sourceContext) throws Exception {
logger.info("开始读取baseline数据");
ResultScanner rs = table.getScanner(scan);
// Map<String, List<Integer>[]> baselineMap = new HashMap<>();
Map<String, Map<String,List<Integer>>> baselineMap = new HashMap<>();
for (Result result : rs) {
Map<String, List<Integer>> floodTypeMap = new HashMap<>();
String rowkey = Bytes.toString(result.getRow());
ArrayList<Integer> tcp = getArraylist(result,"TCP SYN Flood", "session_num");
ArrayList<Integer> udp = getArraylist(result,"UDP Flood", "session_num");
ArrayList<Integer> icmp = getArraylist(result,"ICMP Flood", "session_num");
ArrayList<Integer> dns = getArraylist(result,"DNS Amplification", "session_num");
floodTypeMap.put("TCP SYN Flood",tcp);
floodTypeMap.put("UDP Flood",udp);
floodTypeMap.put("ICMP Flood",icmp);
floodTypeMap.put("DNS Amplification",dns);
// List[] arr = new ArrayList[]{tcp,udp,icmp,dns};
baselineMap.put(rowkey,floodTypeMap);
}
sourceContext.collect(baselineMap);
logger.info("格式化baseline数据成功读取IP共{}",baselineMap.size());
}
private static ArrayList<Integer> getArraylist(Result result,String family,String qualifier) throws IOException {
if (!result.containsColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier))){
return null;
}
ArrayWritable w = new ArrayWritable(IntWritable.class);
w.readFields(new DataInputStream(new ByteArrayInputStream(result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier)))));
return fromWritable(w);
}
private static ArrayList<Integer> fromWritable(ArrayWritable writable) {
Writable[] writables = writable.get();
ArrayList<Integer> list = new ArrayList<>(writables.length);
for (Writable wrt : writables) {
list.add(((IntWritable)wrt).get());
}
return list;
}
@Override
public void cancel() {
try {
if (table != null) {
table.close();
}
if (conn != null) {
conn.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.enableCheckpointing(5000);
DataStreamSource<Map<String, Map<String,List<Integer>>>> mapDataStreamSource = env.addSource(new BaselineSource());
DataStream<Map<String, Map<String,List<Integer>>>> broadcast = mapDataStreamSource.broadcast();
mapDataStreamSource.print();
env.execute();
}
}

View File

@@ -1,6 +1,8 @@
package com.zdjizhi.source;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.FlinkEnvironmentUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
@@ -9,18 +11,27 @@ import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.util.Properties;
/**
* @author wlh
*/
public class DosSketchSource {
private static StreamExecutionEnvironment streamExeEnv = FlinkEnvironmentUtils.streamExeEnv;
public static DataStreamSource<String> createDosSketchSource(){
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", CommonConfig.KAFKA_INPUT_BOOTSTRAP_SERVERS);
properties.setProperty("group.id", CommonConfig.KAFKA_GROUP_ID);
properties.setProperty("bootstrap.servers", FlowWriteConfig.KAFKA_INPUT_BOOTSTRAP_SERVERS);
properties.setProperty("group.id", FlowWriteConfig.KAFKA_GROUP_ID);
if (FlowWriteConfig.SASL_JAAS_CONFIG_FLAG == 1){
properties.put("security.protocol", "SASL_PLAINTEXT");
properties.put("sasl.mechanism", "PLAIN");
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_USER+"\" password=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_PASSWORD+"\";");
}
return streamExeEnv.addSource(new FlinkKafkaConsumer<String>(
CommonConfig.KAFKA_INPUT_TOPIC_NAME,
FlowWriteConfig.KAFKA_INPUT_TOPIC_NAME,
new SimpleStringSchema(), properties))
.setParallelism(CommonConfig.KAFKA_INPUT_PARALLELISM);
.setParallelism(FlowWriteConfig.KAFKA_INPUT_PARALLELISM);
}
}

View File

@@ -1,9 +1,9 @@
package com.zdjizhi.utils;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@@ -17,7 +17,8 @@ import java.util.concurrent.locks.Lock;
public class DistributedLock implements Lock, Watcher {
private static final Logger logger = LoggerFactory.getLogger(DistributedLock.class);
// private static final Logger logger = LoggerFactory.getLogger(DistributedLock.class);
private static final Log logger = LogFactory.get();
private ZooKeeper zk = null;
/**

View File

@@ -1,9 +1,7 @@
package com.zdjizhi.utils;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
/**
@@ -12,17 +10,35 @@ import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
public class FlinkEnvironmentUtils {
public static StreamExecutionEnvironment streamExeEnv = StreamExecutionEnvironment.getExecutionEnvironment();
static {
streamExeEnv.setParallelism(FlowWriteConfig.STREAM_EXECUTION_ENVIRONMENT_PARALLELISM);
public static StreamTableEnvironment getStreamTableEnv() {
streamExeEnv.setParallelism(CommonConfig.STREAM_EXECUTION_ENVIRONMENT_PARALLELISM);
/*
// Start a checkpoint every 1000 ms
streamExeEnv.enableCheckpointing(CommonConfig.FLINK_WINDOW_MAX_TIME * 1000);
EnvironmentSettings settings = EnvironmentSettings.newInstance()
.useBlinkPlanner()
.inStreamingMode()
.build();
// Set the checkpointing mode to exactly-once (the default)
streamExeEnv.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
return StreamTableEnvironment.create(streamExeEnv, settings);
// Make sure at least 500 ms pass between checkpoints
streamExeEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
// Checkpoints must complete within one minute or they are discarded
streamExeEnv.getCheckpointConfig().setCheckpointTimeout(60000);
// Tolerate two consecutive checkpoint failures
streamExeEnv.getCheckpointConfig().setTolerableCheckpointFailureNumber(2);
// Allow only one checkpoint at a time
streamExeEnv.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
// Use externalized checkpoints so they are retained after the job is cancelled
streamExeEnv.getCheckpointConfig().enableExternalizedCheckpoints(
CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
// Enable experimental unaligned checkpoints
streamExeEnv.getCheckpointConfig().enableUnalignedCheckpoints();
*/
}
}

View File

@@ -1,5 +1,49 @@
package com.zdjizhi.utils;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.*;
/**
* @author wlh
*/
public class HbaseUtils {
public static Integer getIntegerValue(Result result, String family, String qualifier) {
byte[] value = result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier));
if (value != null){
return Bytes.toInt(value);
}
return 1;
}
public static ArrayList<Integer> getArraylist(Result result, String family, String qualifier) throws IOException {
if (containsColumn(result, family, qualifier)) {
ArrayWritable w = new ArrayWritable(IntWritable.class);
w.readFields(new DataInputStream(new ByteArrayInputStream(result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier)))));
return fromWritable(w);
}
return null;
}
private static ArrayList<Integer> fromWritable(ArrayWritable writable) {
Writable[] writables = writable.get();
ArrayList<Integer> list = new ArrayList<>(writables.length);
for (Writable wrt : writables) {
list.add(((IntWritable) wrt).get());
}
return list;
}
private static boolean containsColumn(Result result, String family, String qualifier) {
return result.containsColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier));
}
}
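Note: getArraylist() above decodes HBase cell values that were written as a Hadoop ArrayWritable of IntWritable. A round-trip sketch of that encoding, assuming the writer side serialized the list the same way:

// Round-trip sketch for the ArrayWritable encoding decoded above.
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import java.io.*;

public class ArrayWritableDemo {
    public static void main(String[] args) throws IOException {
        ArrayWritable out = new ArrayWritable(IntWritable.class,
                new Writable[]{new IntWritable(7), new IntWritable(11)});
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        out.write(new DataOutputStream(buf));            // writer side
        ArrayWritable in = new ArrayWritable(IntWritable.class);
        in.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        for (Writable w : in.get()) {
            System.out.println(((IntWritable) w).get()); // 7, 11
        }
    }
}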

View File

@@ -0,0 +1,274 @@
package com.zdjizhi.utils;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.http.*;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.conn.HttpHostConnectException;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLHandshakeException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.URI;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
* HTTP client utility class
* @author wlh
*/
public class HttpClientUtils {
/** Global connection pool */
private static final PoolingHttpClientConnectionManager CONN_MANAGER = new PoolingHttpClientConnectionManager();
// private static Logger logger = LoggerFactory.getLogger(HttpClientUtils.class);
private static final Log logger = LogFactory.get();
public static final String ERROR_MESSAGE = "-1";
/*
* Static block: configure the connection pool
*/
static {
// Set the maximum total number of connections
CONN_MANAGER.setMaxTotal(FlowWriteConfig.HTTP_POOL_MAX_CONNECTION);
// Set the maximum number of connections per route
CONN_MANAGER.setDefaultMaxPerRoute(FlowWriteConfig.HTTP_POOL_MAX_PER_ROUTE);
}
/**
* Get an HTTP client instance
* @return HTTP client instance
*/
private static CloseableHttpClient getHttpClient() {
// Build the HTTP request configuration
RequestConfig requestConfig = RequestConfig.custom()
// Timeout for obtaining a connection from the pool
.setConnectionRequestTimeout(FlowWriteConfig.HTTP_POOL_REQUEST_TIMEOUT)
// Connect timeout
.setConnectTimeout(FlowWriteConfig.HTTP_POOL_CONNECT_TIMEOUT)
// Socket (response) timeout
.setSocketTimeout(FlowWriteConfig.HTTP_POOL_RESPONSE_TIMEOUT)
.build();
/*
* Retry handler for timeouts, added in case the timeout settings alone do not take effect.
* Returning false immediately means no retry;
* here the decision is made case by case.
*/
HttpRequestRetryHandler retry = (exception, executionCount, context) -> {
if (executionCount >= 3) {// Give up after 3 attempts
return false;
}
if (exception instanceof NoHttpResponseException) {// Retry if the server dropped the connection
return true;
}
if (exception instanceof SSLHandshakeException) {// Do not retry SSL handshake exceptions
return false;
}
if (exception instanceof UnknownHostException) {// Target host unreachable
return false;
}
if (exception instanceof ConnectTimeoutException) {// Connect timed out
return false;
}
if (exception instanceof HttpHostConnectException) {// Connection refused
return false;
}
if (exception instanceof SSLException) {// SSL exception
return false;
}
if (exception instanceof InterruptedIOException) {// Timed out
return true;
}
HttpClientContext clientContext = HttpClientContext.adapt(context);
HttpRequest request = clientContext.getRequest();
// Retry if the request is idempotent
return !(request instanceof HttpEntityEnclosingRequest);
};
ConnectionKeepAliveStrategy myStrategy = (response, context) -> {
HeaderElementIterator it = new BasicHeaderElementIterator
(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
while (it.hasNext()) {
HeaderElement he = it.nextElement();
String param = he.getName();
String value = he.getValue();
if (value != null && "timeout".equalsIgnoreCase(param)) {
return Long.parseLong(value) * 1000;
}
}
return 60 * 1000;// Default keep-alive of 60 s if the server does not specify one
};
// Build the httpClient
return HttpClients.custom()
// Apply the request timeout settings to the client
.setDefaultRequestConfig(requestConfig)
// Apply the retry handler to the client
.setRetryHandler(retry)
.setKeepAliveStrategy(myStrategy)
// Use the pooled connection manager
.setConnectionManager(CONN_MANAGER)
.build();
}
/**
* GET request
*
* @param uri request URI
* @return message
*/
public static String httpGet(URI uri, Header... headers) {
String msg = ERROR_MESSAGE;
// Get the client instance
CloseableHttpClient httpClient = getHttpClient();
CloseableHttpResponse response = null;
try {
logger.info("http get uri {}",uri);
// Create the GET request
HttpGet httpGet = new HttpGet(uri);
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpGet.addHeader(h);
logger.info("request header : {}",h);
}
}
// Execute the request
response = httpClient.execute(httpGet);
int statusCode = response.getStatusLine().getStatusCode();
// Get the response entity
HttpEntity entity = response.getEntity();
// Read the response body
msg = EntityUtils.toString(entity, "UTF-8");
if (statusCode != HttpStatus.SC_OK) {
logger.error("Http get content is :{}" , msg);
}
} catch (ClientProtocolException e) {
logger.error("协议错误: {}", e.getMessage());
} catch (ParseException e) {
logger.error("解析错误: {}", e.getMessage());
} catch (IOException e) {
logger.error("IO错误: {}",e.getMessage());
} finally {
if (null != response) {
try {
EntityUtils.consume(response.getEntity());
response.close();
} catch (IOException e) {
logger.error("释放链接错误: {}", e.getMessage());
}
}
}
return msg;
}
/**
* POST request
* @param uri request URI
* @param requestBody request body
* @return result of the POST request
*/
public static String httpPost(URI uri, String requestBody, Header... headers) {
String msg = ERROR_MESSAGE;
// Get the client instance
CloseableHttpClient httpClient = getHttpClient();
// Create the POST request
CloseableHttpResponse response = null;
try {
logger.info("http post uri:{} http post body:{}", uri, requestBody);
HttpPost httpPost = new HttpPost(uri);
httpPost.setHeader("Content-Type", "application/x-www-form-urlencoded");
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpPost.addHeader(h);
logger.info("request header : {}",h);
}
}
if(StringUtil.isNotBlank(requestBody)) {
byte[] bytes = requestBody.getBytes(StandardCharsets.UTF_8);
httpPost.setEntity(new ByteArrayEntity(bytes));
}
response = httpClient.execute(httpPost);
int statusCode = response.getStatusLine().getStatusCode();
// Get the response entity
HttpEntity entity = response.getEntity();
// Read the response body
msg = EntityUtils.toString(entity, "UTF-8");
if (statusCode != HttpStatus.SC_OK) {
logger.error("Http post content is :{}" , msg);
}
} catch (ClientProtocolException e) {
logger.error("协议错误: {}", e.getMessage());
} catch (ParseException e) {
logger.error("解析错误: {}", e.getMessage());
} catch (IOException e) {
logger.error("IO错误: {}", e.getMessage());
} finally {
if (null != response) {
try {
EntityUtils.consumeQuietly(response.getEntity());
response.close();
} catch (IOException e) {
logger.error("释放链接错误: {}", e.getMessage());
}
}
}
return msg;
}
/**
* Assemble the URL
* url, parameter map
*/
public static void setUrlWithParams(URIBuilder uriBuilder,String path, Map<String, Object> params) {
try {
uriBuilder.setPath(path);
if (params != null && !params.isEmpty()){
for (Map.Entry<String, Object> kv : params.entrySet()) {
uriBuilder.setParameter(kv.getKey(),kv.getValue().toString());
}
}
} catch (Exception e) {
logger.error("拼接url出错,uri : {}, path : {},参数: {}",uriBuilder.toString(),path,params);
}
}
}
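Note: a minimal usage sketch for the helpers above; the host, path, and parameter names are assumptions, and the class is assumed to be on the classpath under com.zdjizhi.utils:

// Usage sketch for HttpClientUtils; host, path and parameter names
// are assumptions for illustration.
import com.zdjizhi.utils.HttpClientUtils;
import org.apache.http.client.utils.URIBuilder;
import java.util.HashMap;

public class HttpGetDemo {
    public static void main(String[] args) throws Exception {
        URIBuilder b = new URIBuilder("http://bifang.example:8080");
        HashMap<String, Object> params = new HashMap<>();
        params.put("page_size", -1);
        HttpClientUtils.setUrlWithParams(b, "/api/v1/policy", params);
        String body = HttpClientUtils.httpGet(b.build());
        if (!HttpClientUtils.ERROR_MESSAGE.equals(body)) { // "-1" signals failure
            System.out.println(body);
        }
    }
}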

View File

@@ -0,0 +1,182 @@
package com.zdjizhi.utils;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.*;
import com.geedgenetworks.utils.IpLookupV2;
import com.geedgenetworks.utils.StringUtil;
import com.google.common.base.Joiner;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.common.pojo.KnowlegeBaseMeta;
import com.zdjizhi.utils.connections.http.HttpClientService;
import org.apache.http.client.utils.URIBuilder;
import java.io.ByteArrayInputStream;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
/**
* @author wangchengcheng
* @version 2023/11/10 15:23
*/
public class IpLookupUtils {
private static final Log logger = LogFactory.get();
private static final String ipBuiltInName = "ip_builtin.mmdb";
private static final String ipUserDefinedName = "ip_user_defined.mmdb";
/**
* IP geolocation database
*/
private static IpLookupV2 ipLookup;
/**
* Default separator for geolocation database file names
*/
private static final String LOCATION_SEPARATOR = ".";
/**
* Maximum number of retries
*/
private static final int TRY_TIMES = 5;
/**
* http connections
*/
private static final HttpClientService httpClientService;
/**
* Geolocation database metadata cache
*/
private static final HashMap<String, KnowlegeBaseMeta> knowledgeMetaCache = new HashMap<>(16);
private static String currentSha256IpUserDefined = "";
private static String currentSha256IpBuiltin = "";
static {
httpClientService = new HttpClientService();
stuffKnowledgeMetaCache();
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
stuffKnowledgeMetaCache();
}
}, 0, FlowWriteConfig.KNOWLEDGE_EXECUTION_INTERVAL);
}
private static void stuffKnowledgeMetaCache(){
final KnowlegeBaseMeta ipBuiltinknowlegeBaseMeta = getKnowlegeBaseMeta(FlowWriteConfig.IP_BUILTIN_KD_ID);
if (!currentSha256IpBuiltin.equals(ipBuiltinknowlegeBaseMeta.getSha256())) {
String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(ipBuiltinknowlegeBaseMeta.getName(), ipBuiltinknowlegeBaseMeta.getFormat());
knowledgeMetaCache.put(fileName, ipBuiltinknowlegeBaseMeta);
}
final KnowlegeBaseMeta ipUserDefinedknowlegeBaseMeta = getKnowlegeBaseMeta(FlowWriteConfig.IP_USER_DEFINED_KD_ID);
if (!currentSha256IpUserDefined.equals(ipUserDefinedknowlegeBaseMeta.getSha256())) {
String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(ipUserDefinedknowlegeBaseMeta.getName(), ipUserDefinedknowlegeBaseMeta.getFormat());
knowledgeMetaCache.put(fileName, ipUserDefinedknowlegeBaseMeta);
}
if (!currentSha256IpUserDefined.equals(ipUserDefinedknowlegeBaseMeta.getSha256()) || !currentSha256IpBuiltin.equals(ipBuiltinknowlegeBaseMeta.getSha256())) {
currentSha256IpBuiltin = ipBuiltinknowlegeBaseMeta.getSha256();
currentSha256IpUserDefined = ipUserDefinedknowlegeBaseMeta.getSha256();
reloadIpLookup();
}
}
/**
* Download files from HDFS to refresh the IpLookup
*/
private static void reloadIpLookup() {
IpLookupV2.Builder builder = new IpLookupV2.Builder(false);
for (String fileName : knowledgeMetaCache.keySet()) {
int retryNum = 0;
KnowlegeBaseMeta knowlegeBaseMeta = knowledgeMetaCache.get(fileName);
String metaSha256 = knowlegeBaseMeta.getSha256();
while (retryNum < TRY_TIMES) {
System.out.println("download file " + fileName + ",HOS path :" + knowlegeBaseMeta.getPath());
Long startTime = System.currentTimeMillis();
byte[] httpGetByte = httpClientService.httpGetByte(knowlegeBaseMeta.getPath(), FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
if (httpGetByte != null && httpGetByte.length > 0) {
String downloadFileSha256 = DigestUtil.sha256Hex(httpGetByte);
if (metaSha256.equals(downloadFileSha256)) {
ByteArrayInputStream inputStream = new ByteArrayInputStream(httpGetByte);
switch (fileName) {
case ipBuiltInName:
builder.loadDataFile(inputStream);
break;
case ipUserDefinedName:
builder.loadDataFilePrivate(inputStream);
break;
default:
}
System.out.println("update " + fileName + " finished, speed :" + (System.currentTimeMillis() - startTime) + "ms");
retryNum = TRY_TIMES;
} else {
logger.error("通过HOS下载{}的sha256为:{} ,网关内记录为:{} ,sha256不相等 开始第{}次重试下载文件", fileName, downloadFileSha256, metaSha256, retryNum);
retryNum++;
}
} else {
logger.error("通过HOS下载{}的流为空 ,开始第{}次重试下载文件", fileName, retryNum);
retryNum++;
}
}
}
ipLookup = builder.build();
}
/**
* Build the knowledge-base metadata filter expression from the configuration
*
* @return filter expression
*/
private static String getFilterParameter() {
String expr = "[?(@.version=='latest')][?(@.name in ('ip_builtin','ip_user_defined'))]";
return expr;
}
public static String getCountryLookup(String ip) {
return ipLookup.countryLookup(ip);
}
private static KnowlegeBaseMeta getKnowlegeBaseMeta(String kd_id) {
KnowlegeBaseMeta knowlegeBaseMeta = null;
String knowledgeInfo = null;
try {
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.KNOWLEDGE_BASE_URL);
HashMap<String, Object> parms = new HashMap<>();
parms.put("kb_id", kd_id);
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.KNOWLEDGE_BASE_PATH, parms);
knowledgeInfo = HttpClientUtils.httpGet(uriBuilder.build());
if (knowledgeInfo.contains("200")) {
final Map<String, Object> jsonObject = JSONObject.parseObject(knowledgeInfo, Map.class);
JSONPath jsonPath = JSONPath.of(getFilterParameter());
String extract = jsonPath.extract(JSONReader.of(jsonObject.get("data").toString())).toString();
if (StringUtil.isNotBlank(extract)) {
JSONArray jsonArray = JSON.parseArray(extract);
if (jsonArray.size() > 0) {
for (int i = 0; i < jsonArray.size(); i++) {
knowlegeBaseMeta = JSONObject.parseObject(jsonArray.getString(i), KnowlegeBaseMeta.class);
}
}
}
} else {
logger.error("获取knowledge_base失败,请求回执为" + knowledgeInfo);
}
} catch (URISyntaxException e) {
logger.error("构造URI异常", e);
} catch (Exception e) {
logger.error("获取knowledge_base失败", e);
}
return knowlegeBaseMeta;
}
}
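Note: getKnowlegeBaseMeta() filters the knowledge-base listing with the chained fastjson2 JSONPath expression from getFilterParameter(). A small sketch of that filter applied to a sample payload; the sample records are assumptions:

// Sketch of the JSONPath filter used above; sample data is assumed.
import com.alibaba.fastjson2.JSONPath;
import com.alibaba.fastjson2.JSONReader;

public class JsonPathDemo {
    public static void main(String[] args) {
        String data = "[{\"name\":\"ip_builtin\",\"version\":\"latest\",\"sha256\":\"aa\"},"
                + "{\"name\":\"ip_builtin\",\"version\":\"v1\",\"sha256\":\"bb\"}]";
        String expr = "[?(@.version=='latest')][?(@.name in ('ip_builtin','ip_user_defined'))]";
        Object hit = JSONPath.of(expr).extract(JSONReader.of(data));
        System.out.println(hit); // only the 'latest' ip_builtin record
    }
}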

View File

@@ -1,23 +0,0 @@
package com.zdjizhi.utils;
import com.zdjizhi.common.CommonConfig;
public class IpUtils {
/**
* IP geolocation lookup utility
*/
public static IpLookup ipLookup = new IpLookup.Builder(false)
// .loadDataFileV4(CommonConfig.IP_MMDB_PATH + "ip_v4.mmdb")
// .loadDataFileV6(CommonConfig.IP_MMDB_PATH + "ip_v6.mmdb")
.loadDataFilePrivateV4(CommonConfig.IP_MMDB_PATH + "ip_private_v4.mmdb")
// .loadDataFilePrivateV6(CommonConfig.IP_MMDB_PATH + "ip_private_v6.mmdb")
.build();
public static void main(String[] args) {
System.out.println(ipLookup.countryLookup("61.128.159.186"));
}
}

View File

@@ -1,26 +1,35 @@
package com.zdjizhi.utils;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import java.util.Optional;
import java.util.Properties;
public class KafkaUtils {
private static Properties getKafkaSinkProperty(){
Properties propertiesproducer = new Properties();
propertiesproducer.setProperty("bootstrap.servers", CommonConfig.KAFKA_OUTPUT_BOOTSTRAP_SERVERS);
Properties properties = new Properties();
properties.setProperty("bootstrap.servers", FlowWriteConfig.KAFKA_OUTPUT_BOOTSTRAP_SERVERS);
if (FlowWriteConfig.SASL_JAAS_CONFIG_FLAG == 1){
properties.put("security.protocol", "SASL_PLAINTEXT");
properties.put("sasl.mechanism", "PLAIN");
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_USER+"\" password=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_PASSWORD+"\";");
}
return propertiesproducer;
return properties;
}
public static FlinkKafkaProducer<String> getKafkaSink(String topic){
return new FlinkKafkaProducer<String>(
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(
topic,
new SimpleStringSchema(),
getKafkaSinkProperty()
getKafkaSinkProperty(),
Optional.empty()
);
kafkaProducer.setLogFailuresOnly(true);
return kafkaProducer;
}
}
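For reference, a minimal usage sketch of the sink above; the stream contents and job name are illustrative, and the topic is the event output topic from the configuration ("DOS-EVENT-LOG"):

import com.zdjizhi.utils.KafkaUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class KafkaSinkDemo {
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// getKafkaSink wires the SASL settings and logs (rather than fails on) send errors
env.fromElements("{\"attack_type\":\"UDP Flood\"}")
.addSink(KafkaUtils.getKafkaSink("DOS-EVENT-LOG"))
.setParallelism(1);
env.execute("kafka-sink-demo");
}
}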

View File

@@ -1,11 +1,12 @@
package com.zdjizhi.utils;
import com.zdjizhi.common.CommonConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
public class SnowflakeId {
private static final Logger logger = LoggerFactory.getLogger(SnowflakeId.class);
// private static final Logger logger = LoggerFactory.getLogger(SnowflakeId.class);
private static final Log logger = LogFactory.get();
/**
* 共64位 第一位为符号位 默认0
@@ -98,7 +99,7 @@ public class SnowflakeId {
private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
static {
idWorker = new SnowflakeId(CommonConfig.HBASE_ZOOKEEPER_QUORUM, CommonConfig.DATA_CENTER_ID_NUM);
idWorker = new SnowflakeId(FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM, FlowWriteConfig.DATA_CENTER_ID_NUM);
}
//==============================Constructors=====================================
@@ -107,7 +108,7 @@ public class SnowflakeId {
* 构造函数
*/
private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
DistributedLock lock = new DistributedLock(CommonConfig.HBASE_ZOOKEEPER_QUORUM, "disLocks1");
DistributedLock lock = new DistributedLock(FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM, "disLocks1");
try {
lock.lock();
int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);

View File

@@ -1,11 +1,11 @@
package com.zdjizhi.utils;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
@@ -13,7 +13,8 @@ import java.util.concurrent.CountDownLatch;
public class ZookeeperUtils implements Watcher {
private static final Logger logger = LoggerFactory.getLogger(ZookeeperUtils.class);
// private static final Logger logger = LoggerFactory.getLogger(ZookeeperUtils.class);
private static final Log logger = LogFactory.get();
private ZooKeeper zookeeper;

View File

@@ -0,0 +1,261 @@
package com.zdjizhi.utils.connections.http;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.exception.FlowWriteException;
import org.apache.commons.io.IOUtils;
import org.apache.http.*;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.conn.ConnectionKeepAliveStrategy;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.EntityUtils;
import javax.net.ssl.*;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
public class HttpClientService {
private static final Log log = LogFactory.get();
/**
* Overrides certificate validation to disable SSL verification before SSL calls,
* then creates a ConnectionManager with the connection settings.
*
* @return connection manager for an HttpClient that supports https
*/
private PoolingHttpClientConnectionManager getSslClientManager() {
try {
// Override the trust checks so SSL certificate verification is skipped
X509TrustManager trustManager = new X509TrustManager() {
@Override
public X509Certificate[] getAcceptedIssuers() {
return null;
}
@Override
public void checkClientTrusted(X509Certificate[] xcs, String str) {
}
@Override
public void checkServerTrusted(X509Certificate[] xcs, String str) {
}
};
SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
ctx.init(null, new TrustManager[]{trustManager}, null);
SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
.register("http", PlainConnectionSocketFactory.INSTANCE)
.register("https", socketFactory).build();
// Create the ConnectionManager and apply connection settings
PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
// maximum total connections in the pool
connManager.setMaxTotal(FlowWriteConfig.HTTP_POOL_MAX_CONNECTION);
// maximum connections per route
connManager.setDefaultMaxPerRoute(FlowWriteConfig.HTTP_POOL_MAX_PER_ROUTE);
return connManager;
} catch (KeyManagementException | NoSuchAlgorithmException e) {
throw new FlowWriteException(e.getMessage());
}
}
/**
* Builds an HTTP client connection object.
*
* @param socketTimeOut socket (response) timeout in milliseconds
* @return pooled HTTP client
*/
private CloseableHttpClient getHttpClient(int socketTimeOut) {
// Request-level timeout configuration
RequestConfig requestConfig = RequestConfig.custom()
// timeout for leasing a connection from the pool
.setConnectionRequestTimeout(FlowWriteConfig.HTTP_POOL_REQUEST_TIMEOUT)
// timeout for establishing the connection
.setConnectTimeout(FlowWriteConfig.HTTP_POOL_CONNECT_TIMEOUT)
// socket (response) timeout
.setSocketTimeout(socketTimeOut)
.build();
/**
* Retry handler, installed so that timeouts do not leave requests hanging.
* Returning false outright would disable retries; instead, each exception
* type is inspected to decide whether a retry is worthwhile.
*/
HttpRequestRetryHandler retry = (exception, executionCount, context) -> {
if (executionCount >= 3) {// give up after three retries
return false;
}
if (exception instanceof NoHttpResponseException) {// retry if the server dropped the connection
return true;
}
if (exception instanceof SSLHandshakeException) {// do not retry SSL handshake failures
return false;
}
if (exception instanceof SocketTimeoutException) {
if (exception.getMessage().contains("Read timed out")) {
return false;
}
}
if (exception instanceof UnknownHostException) {// target host unreachable
return false;
}
if (exception instanceof ConnectTimeoutException) {// connection refused or timed out
return false;
}
if (exception instanceof SSLException) {// other SSL errors
return false;
}
if (exception instanceof InterruptedIOException) {// other interrupted I/O (timeout): retry
return true;
}
HttpClientContext clientContext = HttpClientContext.adapt(context);
HttpRequest request = clientContext.getRequest();
// retry if the request is idempotent (carries no request entity)
if (!(request instanceof HttpEntityEnclosingRequest)) {
return true;
}
return false;
};
ConnectionKeepAliveStrategy myStrategy = (response, context) -> {
HeaderElementIterator it = new BasicHeaderElementIterator
(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
while (it.hasNext()) {
HeaderElement he = it.nextElement();
String param = he.getName();
String value = he.getValue();
if (value != null && param.equalsIgnoreCase("timeout")) {
return Long.parseLong(value) * 1000;
}
}
return 60 * 1000;// default keep-alive of 60s when the server does not specify one
};
// Build the HttpClient
return HttpClients.custom()
// apply the request timeout configuration
.setDefaultRequestConfig(requestConfig)
// install the retry handler
.setRetryHandler(retry)
.setKeepAliveStrategy(myStrategy)
// attach the pooled connection manager
.setConnectionManager(getSslClientManager())
.build();
}
public InputStream httpGetInputStream(String url, int socketTimeout, Header... headers) {
InputStream result = null;
// obtain a client connection object
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
// build the GET request
HttpGet httpGet = new HttpGet(url);
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpGet.addHeader(h);
}
}
CloseableHttpResponse response = null;
try {
// execute the request
response = httpClient.execute(httpGet);
// buffer the response entity so the stream remains usable after the connection is released
result = IOUtils.toBufferedInputStream(response.getEntity().getContent());
} catch (ClientProtocolException e) {
log.error("current file: {},Protocol error:{}", url, e.getMessage());
} catch (ParseException e) {
log.error("current file: {}, Parser error:{}", url, e.getMessage());
} catch (IOException e) {
log.error("current file: {},IO error:{}", url, e.getMessage());
} finally {
if (null != response) {
try {
// release the connection back to the pool
EntityUtils.consume(response.getEntity());
response.close();
} catch (IOException e) {
log.error("Release Connection error:{}", e.getMessage());
}
}
}
// returning after the finally block avoids swallowing exceptions with a return-in-finally
return result;
}
public byte[] httpGetByte(String url, int socketTimeout, Header... headers) {
byte[] result = null;
// obtain a client connection object
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
// build the GET request
HttpGet httpGet = new HttpGet(url);
if (StringUtil.isNotEmpty(headers)) {
for (Header h : headers) {
httpGet.addHeader(h);
}
}
CloseableHttpResponse response = null;
try {
// execute the request
response = httpClient.execute(httpGet);
// read the full response body into a byte array
result = IOUtils.toByteArray(response.getEntity().getContent());
} catch (ClientProtocolException e) {
log.error("current file: {},Protocol error:{}", url, e.getMessage());
} catch (ParseException e) {
log.error("current file: {}, Parser error:{}", url, e.getMessage());
} catch (IOException e) {
log.error("current file: {},IO error:{}", url, e.getMessage());
} finally {
if (null != response) {
try {
// release the connection back to the pool
EntityUtils.consume(response.getEntity());
response.close();
} catch (IOException e) {
log.error("Release Connection error:{}", e.getMessage());
}
}
}
// returning after the finally block avoids swallowing exceptions with a return-in-finally
return result;
}
}
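A minimal sketch of how this client supports the knowledge-base download-and-verify flow shown earlier in this section; the URL, the timeout, and the commons-codec DigestUtils dependency are assumptions made for the example:

import com.zdjizhi.utils.connections.http.HttpClientService;
import org.apache.commons.codec.digest.DigestUtils;

public class KnowledgeDownloadDemo {
public static void main(String[] args) {
HttpClientService client = new HttpClientService();
// Illustrative URL and timeout; real values come from the knowledge-base metadata and config
byte[] body = client.httpGetByte("http://192.168.44.12:9999/v1/knowledge_base/file", 90000);
if (body != null) {
// compute the digest to compare against the gateway-recorded sha256, retrying on mismatch
System.out.println("downloaded " + body.length + " bytes, sha256=" + DigestUtils.sha256Hex(body));
}
}
}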

View File

@@ -0,0 +1,13 @@
package com.zdjizhi.utils.exception;
public class FlowWriteException extends RuntimeException {
public FlowWriteException() {
}
public FlowWriteException(String message) {
super(message);
}
}

View File

@@ -1,41 +1,149 @@
#Flink environment parallelism; lower priority than operator parallelism, used when no operator parallelism is set
stream.execution.environment.parallelism=1
stream.execution.job.name=dos-detection-job
kafka.input.parallelism=1
kafka.input.topic.name=DOS-SKETCH-LOG
kafka.input.bootstrap.servers=192.168.44.12:9092
kafka.input.scan.startup.mode=latest-offset
kafka.input.group.id=2108041426
#kafka.input.group.id=test
#Flink job name, normally unchanged
stream.execution.job.name=DOS-DETECTION-APPLICATION
kafka.output.metric.parallelism=1
kafka.output.metric.topic.name=TRAFFIC-TOP-DESTINATION-IP-METRICS-LOG
kafka.output.event.parallelism=1
kafka.output.event.topic.name=DOS-EVENT-LOG
kafka.output.bootstrap.servers=192.168.44.12:9092
#Kafka source parallelism
kafka.input.parallelism=3
hbase.input.parallelism=1
#Kafka input topic name
kafka.input.topic.name=DOS-SKETCH-RECORD
#Kafka input brokers
#kafka.input.bootstrap.servers=192.168.44.12:9094
kafka.input.bootstrap.servers=192.168.44.11:9094,192.168.44.14:9094,192.168.44.15:9094
#Kafka consumer group id
kafka.input.group.id=dos-detection-job-221125-23132
#kafka.input.group.id=dos-detection-job-210813-1
#Kafka metrics sink parallelism
kafka.output.metric.parallelism=3
#Kafka metrics output topic name
#kafka.output.metric.topic.name=TRAFFIC-TOP-DESTINATION-IP-METRICS
kafka.output.metric.topic.name=test
#Kafka event sink parallelism
kafka.output.event.parallelism=3
#Kafka event output topic name
#kafka.output.event.topic.name=DOS-EVENT
kafka.output.event.topic.name=dos-test
#Kafka output brokers
kafka.output.bootstrap.servers=192.168.44.12:9094
#kafka.output.bootstrap.servers=192.168.44.11:9092,192.168.44.14:9092,192.168.44.15:9092
#ZooKeeper quorum
hbase.zookeeper.quorum=192.168.44.12:2181
#hbase.zookeeper.quorum=192.168.40.151:2181,192.168.40.152:2181,192.168.40.203:2181
#hbase.zookeeper.quorum=192.168.44.11:2181,192.168.44.14:2181,192.168.44.15:2181
#HBase client timeouts (ms)
hbase.client.operation.timeout=30000
hbase.client.scanner.timeout.period=30000
hbase.baseline.table.name=ddos_traffic_baselines
##HBase baseline table name
hbase.baseline.table.name=dos:ddos_traffic_baselines
#Maximum number of baselines to read
hbase.baseline.total.num=1000000
flink.first.agg.parallelism=1
flink.second.agg.parallelism=1
flink.watermark.max.orderness=1
flink.window.max.time=600
#Baseline TTL
hbase.baseline.ttl=10
#First-stage (two-key) aggregation parallelism
flink.first.agg.parallelism=1
#Detection (result judgment) parallelism
flink.detection.map.parallelism=1
#Watermark max out-of-orderness (s)
flink.watermark.max.orderness=300
#Window size, default 600s
flink.window.max.time=60
#Limit on distinct source IPs in a DoS event result
source.ip.list.limit=10000
#Partition count keyed by destination IP; default 10000, normally unchanged
destination.ip.partition.num=10000
data.center.id.num=15
ip.mmdb.path=D:\\data\\dat_test\\
#ip.mmdb.path=/home/bigdata/wlh/topology/dos-detection/dat/
#HTTP request settings
#maximum total connections
http.pool.max.connection=400
baseline.sessions.minor.threshold=0.1
baseline.sessions.warning.threshold=0.5
baseline.sessions.major.threshold=1
baseline.sessions.severe.threshold=3
#maximum connections per route
http.pool.max.per.route=80
#connection request timeout toward the server (ms)
http.pool.request.timeout=60000
#connect timeout toward the server (ms)
http.pool.connect.timeout=60000
#server response timeout (ms)
http.pool.response.timeout=60000
#Static threshold refresh period, default 10 minutes
static.threshold.schedule.minutes=10
#Baseline refresh period, default 7 days
baseline.threshold.schedule.days=1
#Kafka SASL authentication settings
sasl.jaas.config.user=admin
#sasl.jaas.config.password=galaxy2019
sasl.jaas.config.password=6MleDyA3Z73HSaXiKsDJ2k7Ys8YWLhEJ
#Enable Kafka SASL authentication (1 = on, 0 = off)
sasl.jaas.config.flag=1
http.socket.timeout=90000
############################## Knowledge Base settings ######################################
knowledge.execution.interval=30000
knowledge.base.uri=http://192.168.44.12:9999
knowledge.base.path=/v1/knowledge_base
ip.user.defined.kd.id=004390bc-3135-4a6f-a492-3662ecb9e289
ip.builtin.kd.id=64af7077-eb9b-4b8f-80cf-2ceebc89bea9
############################## Bifang Server settings ######################################
bifang.server.token=aa2bdec5518ad131f71944b13ce5c298&1&
#Bifang server address
bifang.server.uri=http://192.168.44.72
#bifang.server.uri=http://192.168.44.3:80
#encrypted password endpoint path
bifang.server.encryptpwd.path=/v1/user/encryptpwd
#Bifang login endpoint path
bifang.server.login.path=/v1/user/login
#endpoint path for fetching vsys IDs
bifang.server.policy.vaysid.path=/v1/admin/vsys
#endpoint path for fetching static thresholds
bifang.server.policy.threshold.path=/v1/policy/profile/dos_detection
############################## Baseline settings ######################################
static.sensitivity.threshold=1
#baseline sensitivity threshold
baseline.sensitivity.threshold=0.2
#severity bounds for baseline-based DoS detection
baseline.sessions.minor.threshold=0.2
baseline.sessions.warning.threshold=1
baseline.sessions.major.threshold=2.5
baseline.sessions.severe.threshold=5
baseline.sessions.critical.threshold=8
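A minimal sketch of consuming this file, assuming it is available on the classpath as dos_baseline.properties (the same data id the Nacos tests below use); in the running job the content is pulled from Nacos instead:

import java.io.InputStream;
import java.util.Properties;

public class ConfigLoadDemo {
public static void main(String[] args) throws Exception {
Properties props = new Properties();
// Illustrative classpath load; FlowWriteConfig is populated from Nacos in the real job
try (InputStream in = ConfigLoadDemo.class.getResourceAsStream("/dos_baseline.properties")) {
props.load(in);
}
double minor = Double.parseDouble(props.getProperty("baseline.sessions.minor.threshold"));
System.out.println("Minor severity starts at " + minor + "x over baseline");
}
}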

View File

@@ -0,0 +1,23 @@
#Log4j
log4j.rootLogger=info,console,file
# Console appender
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
# File appender
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
#Use a relative path and verify that output lands under the application directory
log4j.appender.file.file=${nis.root}/log/flink-dos-detection.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
#MyBatis: com.nis.web.dao is the package containing the MyBatis mapper interfaces
log4j.logger.com.nis.web.dao=debug
#BoneCP data source logging
log4j.category.com.jolbox=debug,console

View File

@@ -0,0 +1,50 @@
package com.zdjizhi.Http;
import com.alibaba.fastjson2.JSON;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.HttpClientUtils;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashMap;
public class HttpTest {
public static void main(String[] args) {
String token = HttpClientUtils.ERROR_MESSAGE;
try {
String urlString = FlowWriteConfig.BIFANG_SERVER_URI+"/v1/user/encryptpwd";
final HashMap<String, Object> parmsMap = new HashMap<>();
parmsMap.put("username","admin");
final String jsonInputString = JSON.toJSONString(parmsMap);
System.out.println("URL:"+urlString);
System.out.println("parmsString:"+jsonInputString);
final URL url = new URL(urlString);
final HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod("POST");
connection.setRequestProperty("Content-Type", "application/json");
connection.setRequestProperty("Accept", "application/json");
connection.setDoOutput(true);
OutputStream os = connection.getOutputStream();
os.write(jsonInputString.getBytes());
os.flush();
os.close();
int responseCode = connection.getResponseCode();
System.out.println("Response Code: " + responseCode);
} catch (Exception e) {
System.out.println("Request failed: " + e.getMessage());
}
}
}

View File

@@ -0,0 +1,8 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277141, sketch_duration=59, attack_type='DNS Flood', source_ip='23.91.128.115', destination_ip='102.219.30.33', sketch_sessions=945, sketch_packets=945, sketch_bytes=446370, vsys_id=23}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277205, sketch_duration=86, attack_type='DNS Flood', source_ip='172.217.160.68', destination_ip='10.113.83.88', sketch_sessions=730, sketch_packets=730, sketch_bytes=344575, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277244, sketch_duration=47, attack_type='DNS Flood', source_ip='45.135.144.112', destination_ip='42.62.192.132', sketch_sessions=0, sketch_packets=0, sketch_bytes=47, vsys_id=1}
--DosDetectionThreshold
{profileId='6091', attackType='DNS Flood', serverIpList=[113.113.83.213, 42.62.192.132/28, 10.113.83.1/25, 102.219.30.33/29], serverIpAddr='null', packetsPerSec=1, bitsPerSec=1, sessionsPerSec=1, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}
{profileId='5679', attackType='DNS Flood', serverIpList=[102.219.30.33], serverIpAddr='null', packetsPerSec=500, bitsPerSec=1000000, sessionsPerSec=100000, isValid=1, vsysId=23, superiorIds=[4, 5]}

View File

@@ -0,0 +1,6 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277232, sketch_duration=59, attack_type='ICMP Flood', source_ip='45.170.244.25', destination_ip='24.152.57.56', sketch_sessions=499, sketch_packets=499, sketch_bytes=111970, vsys_id=1}
--DosDetectionThreshold
{profileId='6093', attackType='ICMP Flood', serverIpList=[31.131.80.88/29, 24.152.57.56/29, 47.93.59.1/25], serverIpAddr='null', packetsPerSec=210, bitsPerSec=0, sessionsPerSec=0, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}

View File

@@ -0,0 +1,7 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1685003938, sketch_duration=63714, attack_type='TCP SYN Flood', source_ip='5.32.144.55', destination_ip='45.188.134.11', sketch_sessions=0, sketch_packets=0, sketch_bytes=4195, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277234, sketch_duration=57, attack_type='TCP SYN Flood', source_ip='18.65.148.128', destination_ip='23.200.74.224', sketch_sessions=54, sketch_packets=54, sketch_bytes=73427, vsys_id=1}
--DosDetectionThreshold
{profileId='6095', attackType='TCP SYN Flood', serverIpList=[23.200.74.224, 45.188.134.11/29, 41.183.0.15/29, 41.183.0.16/30], serverIpAddr='null', packetsPerSec=1, bitsPerSec=1, sessionsPerSec=1, isValid=1, vsysId=1, superiorIds=[5, 4, 12, 27]}

View File

@@ -0,0 +1,8 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277291, sketch_duration=0, attack_type='UDP Flood', source_ip='121.14.89.209', destination_ip='192.168.50.11', sketch_sessions=0, sketch_packets=0, sketch_bytes=0, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277233, sketch_duration=58, attack_type='UDP Flood', source_ip='192.168.50.56,192.168.50.34,192.168.50.11,192.168.50.33,192.168.50.55,192.168.50.58,192.168.50.36,192.168.50.14,192.168.50.35,192.168.50.13,192.168.50.57,192.168.50.30,192.168.50.51,192.168.50.54,192.168.50.10,192.168.50.32,192.168.50.53,192.168.50.31,192.168.50.16,192.168.50.38,192.168.50.15,192.168.50.37,192.168.50.18,192.168.50.17,192.168.50.50,192.168.50.45,192.168.50.23,192.168.50.22,192.168.50.44,192.168.50.25,192.168.50.47,192.168.50.46,192.168.50.24,192.168.50.63,192.168.50.41,192.168.50.40,192.168.50.62,192.168.50.43,192.168.50.21,192.168.50.20,192.168.50.42,192.168.50.27,192.168.50.26,192.168.50.48,192.168.50.28,192.168.50.61,192.168.50.60', destination_ip='121.14.89.209', sketch_sessions=297, sketch_packets=297, sketch_bytes=371404, vsys_id=1}
--DosDetectionThreshold
{profileId='5333', attackType='UDP Flood', serverIpList=[192.168.50.11, 192.168.50.12], serverIpAddr='null', packetsPerSec=50, bitsPerSec=50, sessionsPerSec=50, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}

View File

@@ -19,12 +19,12 @@ public class HbaseTest {
public static void main(String[] args) throws IOException {
org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
config.set("hbase.zookeeper.quorum", CommonConfig.HBASE_ZOOKEEPER_QUORUM);
config.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM);
config.set("hbase.client.retries.number", "3");
config.set("hbase.bulkload.retries.number", "3");
config.set("zookeeper.recovery.retry", "3");
config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, CommonConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, CommonConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, FlowWriteConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, FlowWriteConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
TableName tableName = TableName.valueOf("dos_test");
Connection conn = ConnectionFactory.createConnection(config);

View File

@@ -0,0 +1,7 @@
package com.zdjizhi.common;
public class HttpTest {
public static void main(String[] args) throws Exception {
}
}

View File

@@ -0,0 +1,106 @@
package com.zdjizhi.common;
import inet.ipaddr.Address;
import inet.ipaddr.AddressStringException;
import inet.ipaddr.IPAddress;
import inet.ipaddr.IPAddressString;
import inet.ipaddr.format.util.AddressTrieMap;
import inet.ipaddr.format.util.AssociativeAddressTrie;
import inet.ipaddr.ipv4.IPv4Address;
import inet.ipaddr.ipv4.IPv4AddressAssociativeTrie;
import org.apache.flink.shaded.guava18.com.google.common.collect.Range;
import org.apache.flink.shaded.guava18.com.google.common.collect.TreeRangeMap;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
public class IpTest {
public static void main(String[] args) throws Exception {
IPv4AddressAssociativeTrie<Integer> trie = new IPv4AddressAssociativeTrie<>();
IPAddress str1 = new IPAddressString("1.2.3.4").getAddress();
IPAddress str2 = new IPAddressString("10.0.0.0/15").getAddress();
IPAddress str3 = new IPAddressString("25.4.2.0/23").getAddress();
IPAddress str4 = new IPAddressString("192.168.8.0/21").getAddress();
IPAddress str5 = new IPAddressString("240.0.0.0/4").getAddress();
IPAddress str6 = new IPAddressString("fc00::0/64").getAddress();
IPAddress str7 = new IPAddressString("fc00::10:1").getAddress();
TreeRangeMap<IPAddress, Object> rangeMap = TreeRangeMap.create();
rangeMap.put(Range.closed(str1.getLower(),str1.getUpper()),1);
rangeMap.put(Range.closed(str2.getLower(),str2.getUpper()),2);
rangeMap.put(Range.closed(str3.getLower(),str3.getUpper()),3);
rangeMap.put(Range.closed(str4.getLower(),str4.getUpper()),4);
rangeMap.put(Range.closed(str5.getLower(),str5.getUpper()),5);
rangeMap.put(Range.closed(str6.getLower(),str6.getUpper()),6);
rangeMap.put(Range.closed(str7.getLower(),str7.getUpper()),7);
IPAddress pv4 = new IPAddressString("255.255.14.255").getAddress();
IPAddress pv42 = new IPAddressString("1.2.3.4").getAddress();
IPAddress pv43 = new IPAddressString("fc00::").getAddress();
IPAddress pv44 = new IPAddressString("fc00::10:1").getAddress();
IPAddress pv45 = new IPAddressString("192.168.42.1").getAddress();
IPAddress pv46 = new IPAddressString("192.168.42.1/32").getAddress();
IPAddress pv47 = new IPAddressString("12.56.4.0").getAddress();
IPAddress mask = pv45.getNetwork().getNetworkMask(24, false);
System.out.println(pv45.isMultiple());
System.out.println(pv46.isMultiple());
System.out.println(pv46.isPrefixed());
System.out.println(pv47.isPrefixed());
System.out.println(pv45+"---"+pv45.toMaxHost().withoutPrefixLength()+"---"+pv45.adjustPrefixLength(pv45.getBitCount()));
System.out.println(pv45+"---mask:"+pv45.mask(mask).toString());
System.out.println(pv45.adjustPrefixLength(pv45.getBitCount())+"---"+pv45.toMaxHost().withoutPrefixLength());
/*
System.out.println(str5.getUpper()+"---"+str5.getLower());
System.out.println(rangeMap.span().contains(pv4));
System.out.println(rangeMap.get(pv4));
System.out.println(rangeMap.get(pv42));
System.out.println(rangeMap.get(pv43));
System.out.println(rangeMap.get(pv44));
*/
/*
System.out.println(str5.toSequentialRange());
// System.out.println(str2.contains(new IPAddressString("10.0.0.2")));
// System.out.println(str5.toAddress().toIPv4().toSequentialRange());
trie.put(str1,1);
trie.put(str2,2);
trie.put(str3,3);
trie.put(str4,4);
trie.put(str5,5);
AddressTrieMap<IPv4Address, Integer> trieMap = new AddressTrieMap<>(trie);
trieMap.forEach((k,v) -> {
System.out.println(k.toString() + "--" + v);
});
System.out.println("-----------------");
trie.forEach((k) -> System.out.println(k.toString()));
System.out.println(str5.contains(pv4));
System.out.println(trie.contains(pv4));
System.out.println(trieMap.get(pv4));
System.out.println(trieMap.containsKey(pv4));
// System.out.println(trieMap.getRange());
// IPAddress str3 = new IPAddressString("fc00::10:1").getAddress();
// IPAddress str4 = new IPAddressString("fc00::10:2/64").getAddress();
// System.out.println(Arrays.toString(str1.mergeToPrefixBlocks(str2,str3,str4)));
*/
}
}
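The experiments above feed the threshold matching seen in the test fixtures, where server_ip_list entries mix single addresses and CIDR blocks. A minimal sketch of the containment check this enables, using the same inet.ipaddr library (the addresses are taken from the ICMP Flood fixture above):

import inet.ipaddr.IPAddress;
import inet.ipaddr.IPAddressString;

public class PrefixMatchDemo {
public static void main(String[] args) {
IPAddress block = new IPAddressString("24.152.57.56/29").getAddress();
IPAddress destination = new IPAddressString("24.152.57.56").getAddress();
// true: the destination falls inside the /29 configured in the threshold profile
System.out.println(block.contains(destination));
}
}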

View File

@@ -0,0 +1,101 @@
package com.zdjizhi.common;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import org.junit.Test;
import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
 * @author qidaijie
 * @Package com.zdjizhi
 * @Description: Nacos configuration fetch/listen tests
 * @date 2022/3/10 16:58
 */
public class NacosTest {
/**
* <dependency>
* <groupId>com.alibaba.nacos</groupId>
* <artifactId>nacos-client</artifactId>
* <version>1.2.0</version>
* </dependency>
*/
private static Properties properties = new Properties();
/**
* config data id = config name
*/
private static final String DATA_ID = "dos_baseline.properties";
/**
* config group
*/
private static final String GROUP = "Galaxy";
private void getProperties() {
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
properties.setProperty(PropertyKeyConst.NAMESPACE, "test");
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
}
@Test
public void GetConfigurationTest() {
try {
getProperties();
ConfigService configService = NacosFactory.createConfigService(properties);
String content = configService.getConfig(DATA_ID, GROUP, 5000);
Properties nacosConfigMap = new Properties();
nacosConfigMap.load(new StringReader(content));
System.out.println(FlowWriteConfig.STATIC_SENSITIVITY_THRESHOLD);
} catch (Exception e) {
e.printStackTrace();
}
}
@Test
public void ListenerConfigurationTest() {
getProperties();
try {
//first get config
ConfigService configService = NacosFactory.createConfigService(properties);
String config = configService.getConfig(DATA_ID, GROUP, 5000);
// System.out.println(config);
//start listener
configService.addListener(DATA_ID, GROUP, new Listener() {
@Override
public Executor getExecutor() {
return null;
}
@Override
public void receiveConfigInfo(String configMsg) {
System.out.println(configMsg);
}
});
} catch (Exception e) {
e.printStackTrace();
}
//keep running; change the nacos config and the new value is printed
/*
while (true) {
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
*/
}
}

View File

@@ -1,17 +0,0 @@
package com.zdjizhi.common;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;
public class UdtfTest extends TableFunction<Row> {
public void eval(Row[] rows) {
for (Row row : rows) {
collect(row);
}
}
public static void main(String[] args) {
}
}

View File

@@ -0,0 +1,237 @@
package com.zdjizhi.etl;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.DosDetectionThreshold;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.utils.IpLookupUtils;
import com.zdjizhi.utils.SnowflakeId;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.HashSet;
public class DosDetectionTest {
private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
private final static int BASELINE_SIZE = 144;
private final static int STATIC_CONDITION_TYPE = 1;
private final static int BASELINE_CONDITION_TYPE = 2;
private final static int SENSITIVITY_CONDITION_TYPE = 3;
private final static String SESSIONS_TAG = "sessions";
private final static String PACKETS_TAG = "packets";
private final static String BITS_TAG = "bits";
@Test
public void dosDetectionTest(){
DosDetectionThreshold dosDetectionThreshold = new DosDetectionThreshold();
ArrayList<String> serverIpList = new ArrayList<>();
serverIpList.add("192.168.50.11");
serverIpList.add("192.168.50.1/24");
serverIpList.add("FC::12:0:0/54");
serverIpList.add("FC::12:0:0");
dosDetectionThreshold.setProfile_id(4437);
dosDetectionThreshold.setAttack_type("DNS Flood");
dosDetectionThreshold.setServer_ip_list(serverIpList);
dosDetectionThreshold.setSessions_per_sec(1);
dosDetectionThreshold.setPackets_per_sec(1);
dosDetectionThreshold.setBits_per_sec(100000);
dosDetectionThreshold.setIs_valid(1);
dosDetectionThreshold.setSuperior_ids(new Integer[]{5,4,12,27});
DosSketchLog dosSketchLog = new DosSketchLog ();
dosSketchLog.setSketch_sessions(68);
dosSketchLog.setSketch_packets(68);
dosSketchLog.setSketch_bytes(285820);//185.82
dosSketchLog.setVsys_id(1);
dosSketchLog.setAttack_type("ICMP Flood");
dosSketchLog.setSource_ip("45.170.244.25");
dosSketchLog.setDestination_ip("24.152.57.56");
//fetch the static thresholds
long sessionBase = dosDetectionThreshold.getSessions_per_sec();
long pktBase=dosDetectionThreshold.getPackets_per_sec();
long bitBase=dosDetectionThreshold.getBits_per_sec();
//rate-based comparison against the thresholds
long diffSession = dosSketchLog.getSketch_sessions() - sessionBase;
long diffPkt = dosSketchLog.getSketch_packets() - pktBase;
long diffByte = dosSketchLog.getSketch_bytes() - bitBase;
Double diffSessionPercent = getDiffPercent(diffSession, sessionBase)*100;
Double diffPktPercent = getDiffPercent(diffPkt, pktBase)*100;
Double diffBitPercent = getDiffPercent(diffByte, bitBase)*100;
long profileId = 0;
DosEventLog result =null;
if (diffSessionPercent >= diffPktPercent && diffSessionPercent >= diffBitPercent){
profileId = dosDetectionThreshold.getProfile_id();
result= getDosEventLog(dosSketchLog, sessionBase, diffSession, profileId, STATIC_CONDITION_TYPE, SESSIONS_TAG);
System.out.println(result);
}else if (diffPktPercent >= diffSessionPercent && diffPktPercent >= diffBitPercent){
profileId = dosDetectionThreshold.getProfile_id();
result = getDosEventLog(dosSketchLog, pktBase, diffPkt,profileId, STATIC_CONDITION_TYPE, PACKETS_TAG);
System.out.println(result);
}else if (diffBitPercent >= diffPktPercent && diffBitPercent >= diffSessionPercent){
profileId = dosDetectionThreshold.getProfile_id();
result = getDosEventLog(dosSketchLog, bitBase, diffByte, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
System.out.println(result);
}
}
private DosEventLog getDosEventLog(DosSketchLog value, long base, long diff, long profileId, int type, String tag) {
DosEventLog result = null;
String destinationIp = value.getDestination_ip();
String attackType = value.getAttack_type();
if (diff > 0 && base != 0) {
double percent = getDiffPercent(diff, base);
Severity severity = judgeSeverity(percent);
Integer staticSensitivityThreshold = 100;
if (severity != Severity.NORMAL) {
if (type == BASELINE_CONDITION_TYPE && percent < 0.2) {
// logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过基线敏感阈值,日志详情\n{}", destinationIp, attackType, base, percent, value);
}else if ((type == BASELINE_CONDITION_TYPE || type == SENSITIVITY_CONDITION_TYPE) && value.getSketch_sessions() < staticSensitivityThreshold){
// logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过静态敏感阈值,日志详情\n{}",destinationIp, attackType, base, percent, value);
}else {
result = getResult(value, base, profileId, severity, percent+1, type, tag);
if (type == SENSITIVITY_CONDITION_TYPE){
result.setSeverity(Severity.MAJOR.severity);
}
// logger.info("检测到当前server IP {} 存在 {} 异常,超出基线{} {}倍,基于{}:{}检测,日志详情\n {}", destinationIp,attackType,base,percent,type,tag,result);
}
}
// else {
// logger.debug("server IP:{} shows no {} anomaly, log detail {}", destinationIp, attackType, value);
// }
}
return result;
}
private DosEventLog getResult(DosSketchLog value, long base, long profileId, Severity severity, double percent, int type, String tag) {
DosEventLog dosEventLog = new DosEventLog();
// dosEventLog.setLog_id(SnowflakeId.generateId());
dosEventLog.setVsys_id(value.getVsys_id());
dosEventLog.setStart_time(value.getSketch_start_time());
dosEventLog.setEnd_time(value.getSketch_start_time() + value.getSketch_duration());
dosEventLog.setProfile_id(profileId);
dosEventLog.setAttack_type(value.getAttack_type());
dosEventLog.setSeverity(severity.severity);
// dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), base, value.getSketch_sessions(), type, tag));
dosEventLog.setConditions(getConditions(percent, base, value.getSketch_sessions(), type, tag,dosEventLog));
dosEventLog.setDestination_ip(value.getDestination_ip());
// dosEventLog.setDestination_country(IpUtils.ipLookup.countryLookup(value.getDestination_ip()));
String ipList = value.getSource_ip();
dosEventLog.setSource_ip_list(ipList);
dosEventLog.setSource_country_list(getSourceCountryList(ipList));
dosEventLog.setSession_rate(value.getSketch_sessions());
dosEventLog.setPacket_rate(value.getSketch_packets());
dosEventLog.setBit_rate(value.getSketch_bytes());
return dosEventLog;
}
public String getConditions(double percent, long base, long sessions, int type, String tag,DosEventLog dosEventLog) {
// map the severity back to its static-threshold percentage band
int condition = 0;
if ("Minor".equals(dosEventLog.getSeverity())){
condition=50;
}else if ("Warning".equals(dosEventLog.getSeverity())){
condition=100;
}else if ("Major".equals(dosEventLog.getSeverity())){
condition=250;
}else if ("Severe".equals(dosEventLog.getSeverity())){
condition=500;
}else if ("Critical".equals(dosEventLog.getSeverity())){
condition =800;
}
switch (type) {
case STATIC_CONDITION_TYPE:
return "Rate > " +
base + " " +
tag + "/s" + "(>"+condition+"%)";
case BASELINE_CONDITION_TYPE:
return tag + " > " +
PERCENT_INSTANCE.format(percent) + " of baseline";
case SENSITIVITY_CONDITION_TYPE:
return String.valueOf(sessions) + " " +
tag + "/s Unusually high " +
StringUtils.capitalize(tag);
default:
throw new IllegalArgumentException("Illegal Argument type:" + type + ", known types = [1,2,3]");
}
}
private String getSourceCountryList(String sourceIpList) {
if (StringUtil.isNotBlank(sourceIpList)) {
String countryList;
try {
String[] ipArr = sourceIpList.split(",");
HashSet<String> countrySet = new HashSet<>();
for (String ip : ipArr) {
String country = IpLookupUtils.getCountryLookup(ip);
if (StringUtil.isNotBlank(country)){
countrySet.add(country);
}
}
countryList = StringUtils.join(countrySet, ", ");
return countryList;
} catch (Exception e) {
// logger.error("{} source IP lists 获取国家失败", sourceIpList, e);
return StringUtil.EMPTY;
}
} else {
throw new IllegalArgumentException("Illegal Argument sourceIpList = null");
}
}
private Double getDiffPercent(long diff, long base) {
return BigDecimal.valueOf((float) diff / base).setScale(4, RoundingMode.HALF_UP).doubleValue();
}
private Severity judgeSeverity(double diffPercent) {
if (diffPercent >= 0.5 && diffPercent < 1) {
return Severity.MINOR;
} else if (diffPercent >= 1 && diffPercent < 2.5) {
return Severity.WARNING;
} else if (diffPercent >= 2.5 && diffPercent < 5) {
return Severity.MAJOR;
} else if (diffPercent >= 5 && diffPercent < 8) {
return Severity.SEVERE;
} else if (diffPercent >= 8) {
return Severity.CRITICAL;
} else {
return Severity.NORMAL;
}
}
private enum Severity {
/**
* Severity levels for detection results
*/
CRITICAL("Critical"),
SEVERE("Severe"),
MAJOR("Major"),
WARNING("Warning"),
MINOR("Minor"),
NORMAL("Normal");
private final String severity;
@Override
public String toString() {
return this.severity;
}
Severity(String severity) {
this.severity = severity;
}
}
}
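As a worked example of judgeSeverity above: with a static threshold of 100 sessions/s and an observed rate of 350 sessions/s, diff = 250 and diffPercent = 250/100 = 2.5, which lands in the [2.5, 5) band and yields Severity.MAJOR; at 140 sessions/s the ratio is 0.4, below the 0.5 MINOR floor, so the log is treated as NORMAL and no event is emitted.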

View File

@@ -0,0 +1,11 @@
package com.zdjizhi.etl;
import org.junit.Test;
public class EtlProcessFunctionTest {
@Test
public void EtlProcessFunctionTest(){
}
}