Compare commits
87 Commits
dos-detect...feature/24
| Author | SHA1 | Date |
|---|---|---|
|  | f49b8090a9 |  |
|  | 824d2c058f |  |
|  | 79e23c1000 |  |
|  | e4985618e1 |  |
|  | c08b94fec2 |  |
|  | 2d0dcf89e3 |  |
|  | 2d4f255a8d |  |
|  | dff537f411 |  |
|  | 49f1200787 |  |
|  | fae16e5804 |  |
|  | 046b7fea80 |  |
|  | 62bffa9731 |  |
|  | 77ec061844 |  |
|  | a17666abff |  |
|  | 322bb1e4cb |  |
|  | bdfe5f73db |  |
|  | 91cb1ce5d2 |  |
|  | 52336accbd |  |
|  | c8a2a6b627 |  |
|  | 24d70f690e |  |
|  | 77e982b22f |  |
|  | b3a23686a0 |  |
|  | b9a694ddb9 |  |
|  | 6fb37324ff |  |
|  | 315b638470 |  |
|  | bd48417eb8 |  |
|  | 72acc976e3 |  |
|  | 6be3ea7f1e |  |
|  | 04ee45f77d |  |
|  | d8b0a7637b |  |
|  | b56a2ec31e |  |
|  | 11747d9964 |  |
|  | ce15a27a1b |  |
|  | 01bbe562c9 |  |
|  | f07651cf14 |  |
|  | 7c201a8a3f |  |
|  | 78435d54ea |  |
|  | 76c9247bb9 |  |
|  | 488b7c6644 |  |
|  | 0662d265dd |  |
|  | 87fe11dc93 |  |
|  | 9a2a5b3957 |  |
|  | c58acdcfc9 |  |
|  | b409150532 |  |
|  | 7e6d5fcfc5 |  |
|  | 859cd379e5 |  |
|  | 47ddef9bca |  |
|  | 0a6f36393c |  |
|  | 84a1e6879a |  |
|  | ab8f6aba81 |  |
|  | 94e8fb807a |  |
|  | cead1d4d99 |  |
|  | 2d98c3b6e6 |  |
|  | 3dc29a07be |  |
|  | 1fcdb79739 |  |
|  | 3d974217d9 |  |
|  | db17064f73 |  |
|  | 065e5abb09 |  |
|  | 75bbdd2962 |  |
|  | c46a395d9b |  |
|  | cc3f0cf620 |  |
|  | 0617b1e614 |  |
|  | 0125b031dd |  |
|  | 177e7461cc |  |
|  | be916531fb |  |
|  | c692112445 |  |
|  | b03ab9642d |  |
|  | c44250bf73 |  |
|  | 77bc6a844e |  |
|  | e930fa23ed |  |
|  | 8cd4dea19e |  |
|  | 62f3c65d66 |  |
|  | 8cfb442c44 |  |
|  | 4f8807dfa1 |  |
|  | 81f6499458 |  |
|  | b4237bb4a9 |  |
|  | c5943298bd |  |
|  | b4f919647a |  |
|  | 55af33b508 |  |
|  | 28e7275674 |  |
|  | f744677021 |  |
|  | c957f3ec1c |  |
|  | 9bda526d48 |  |
|  | e89e1b08c9 |  |
|  | e0de04886b |  |
|  | 30a24683e3 |  |
|  | 5190654a8f |  |
pom.xml (267 changes)

```diff
@@ -6,19 +6,26 @@
     <groupId>com.zdjizhi</groupId>
     <artifactId>flink-dos-detection</artifactId>
-    <version>1.0-SNAPSHOT</version>
+    <version>24-09-29</version>

     <name>flink-dos-detection</name>
     <url>http://www.example.com</url>

     <properties>
         <galaxy.tools.version>2.0.2</galaxy.tools.version>
         <flink.version>1.13.1</flink.version>
         <hive.version>2.1.1</hive.version>
         <hadoop.version>2.7.1</hadoop.version>
         <scala.binary.version>2.12</scala.binary.version>
         <jsonpath.version>2.4.0</jsonpath.version>
     </properties>

     <repositories>
         <repository>
             <id>nexus</id>
             <name>Team Nexus Repository</name>
-            <url>http://192.168.40.125:8099/content/groups/public</url>
+            <url>http://192.168.40.153:8099/content/groups/public</url>
         </repository>

         <repository>
@@ -49,14 +56,15 @@
             <artifactId>maven-compiler-plugin</artifactId>
             <version>3.8.0</version>
             <configuration>
-                <source>1.8</source>
-                <target>1.8</target>
+                <source>11</source>
+                <target>11</target>
                 <!-- The semantics of this option are reversed, see MCOMPILER-209. -->
                 <useIncrementalCompilation>false</useIncrementalCompilation>
                 <compilerArgs>
                     <!-- Prevents recompilation due to missing package-info.class, see MCOMPILER-205 -->
                     <arg>-Xpkginfo:always</arg>
                 </compilerArgs>
             </configuration>
         </plugin>
@@ -72,7 +80,13 @@
             </goals>
             <configuration>
-                <finalName>flink-dos-detection</finalName>
+                <finalName>flink-dos-detection-24-09-29</finalName>
+                <relocations>
+                    <relocation>
+                        <pattern>org.apache.http</pattern>
+                        <shadedPattern>shade.org.apache.http</shadedPattern>
+                    </relocation>
+                </relocations>
                 <filters>
                     <filter>
                         <!-- Do not copy the signatures in the META-INF folder.
@@ -96,96 +110,90 @@
             </execution>
         </executions>
     </plugin>

     </plugins>

     <resources>
         <resource>
             <directory>properties</directory>
             <includes>
                 <include>**/*.properties</include>
                 <include>**/*.xml</include>
             </includes>
             <filtering>false</filtering>
         </resource>

         <resource>
             <directory>src\main</directory>
             <includes>
                 <include>log4j.properties</include>
             </includes>
             <filtering>false</filtering>
         </resource>
     </resources>

     </build>

     <dependencies>

         <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-api</artifactId>
-            <version>1.7.21</version>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-test-utils_2.12</artifactId>
+            <version>${flink.version}</version>
+            <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>org.jasypt</groupId>
             <artifactId>jasypt</artifactId>
             <version>1.9.3</version>
         </dependency>

         <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <version>1.7.21</version>
+            <groupId>com.jayway.jsonpath</groupId>
+            <artifactId>json-path</artifactId>
+            <version>${jsonpath.version}</version>
         </dependency>
         <dependency>
             <groupId>org.xerial.snappy</groupId>
             <artifactId>snappy-java</artifactId>
             <version>1.1.8.3</version>
         </dependency>

         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-sql-connector-kafka_2.11</artifactId>
+            <artifactId>flink-connector-kafka_2.12</artifactId>
             <version>${flink.version}</version>
             <!--<scope>provided</scope>-->
             <exclusions>
                 <exclusion>
                     <artifactId>slf4j-log4j12</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>snappy-java</artifactId>
                     <groupId>org.xerial.snappy</groupId>
                 </exclusion>
             </exclusions>
         </dependency>

-        <dependency>
+        <!-- <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka_2.11</artifactId>
+            <artifactId>flink-runtime-web_2.12</artifactId>
             <version>${flink.version}</version>
             <!--<scope>provided</scope>-->
-        </dependency>
+        </dependency>-->

         <!--<!– https://mvnrepository.com/artifact/org.apache.flink/flink-table –>-->
         <!--<dependency>-->
         <!--<groupId>org.apache.flink</groupId>-->
         <!--<artifactId>flink-table</artifactId>-->
         <!--<version>${flink.version}</version>-->
         <!--<!–<scope>provided</scope>–>-->
         <!--</dependency>-->

         <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-json -->
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-json</artifactId>
             <version>${flink.version}</version>
             <scope>provided</scope>
         </dependency>

         <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
         <dependency>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka-clients</artifactId>
             <version>1.0.0</version>
         </dependency>

         <!--Flink modules-->
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-table-api-java</artifactId>
             <version>${flink.version}</version>
             <!--<scope>provided</scope>-->
         </dependency>

         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-table-planner-blink_2.11</artifactId>
             <version>${flink.version}</version>
             <!--<scope>provided</scope>-->
         </dependency>

         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-table-planner_2.11</artifactId>
             <version>${flink.version}</version>
             <!--<scope>provided</scope>-->
         </dependency>

         <!-- CLI dependencies -->
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_2.11</artifactId>
+            <artifactId>flink-clients_2.12</artifactId>
             <version>${flink.version}</version>
-            <!--<scope>provided</scope>-->
+            <scope>provided</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
             <version>2.7.1</version>
             <scope>provided</scope>
             <exclusions>
                 <exclusion>
                     <artifactId>zookeeper</artifactId>
@@ -195,6 +203,44 @@
                 <exclusion>
                     <artifactId>jdk.tools</artifactId>
                     <groupId>jdk.tools</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>guava</artifactId>
                     <groupId>com.google.guava</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>slf4j-api</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-auth</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>

         <dependency>
             <groupId>org.apache.hbase</groupId>
             <artifactId>hbase-client</artifactId>
             <version>2.2.3</version>
             <!-- <scope>provided</scope>-->
             <exclusions>
                 <exclusion>
                     <artifactId>slf4j-log4j12</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>log4j-over-slf4j</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>slf4j-api</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>hadoop-common</artifactId>
                     <groupId>org.apache.hadoop</groupId>
                 </exclusion>
             </exclusions>
         </dependency>

@@ -215,12 +261,6 @@
         </dependency>

-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-hbase-2.2_2.11</artifactId>
-            <version>${flink.version}</version>
-        </dependency>

         <dependency>
             <groupId>cn.hutool</groupId>
             <artifactId>hutool-all</artifactId>
@@ -228,25 +268,72 @@
         </dependency>

         <dependency>
-            <groupId>com.zdjizhi</groupId>
+            <groupId>com.github.seancfoley</groupId>
             <artifactId>ipaddress</artifactId>
             <version>5.3.3</version>
         </dependency>

         <dependency>
             <groupId>com.alibaba.fastjson2</groupId>
             <artifactId>fastjson2</artifactId>
             <version>2.0.32</version>
         </dependency>

         <dependency>
             <groupId>commons-codec</groupId>
             <artifactId>commons-codec</artifactId>
             <version>1.11</version>
         </dependency>

         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <version>4.12</version>
         </dependency>
         <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind -->
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
             <version>2.9.10</version>
         </dependency>

         <!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
             <version>11.0.2</version>
         </dependency>

         <dependency>
             <groupId>org.projectlombok</groupId>
             <artifactId>lombok</artifactId>
             <version>1.18.2</version>
         </dependency>

         <dependency>
             <groupId>com.jayway.jsonpath</groupId>
             <artifactId>json-path</artifactId>
             <version>2.4.0</version>
         </dependency>

         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
             <version>${flink.version}</version>
             <scope>provided</scope>
         </dependency>

         <dependency>
             <groupId>com.geedgenetworks</groupId>
             <artifactId>galaxy</artifactId>
-            <version>1.0.4</version>
             <exclusions>
                 <exclusion>
                     <artifactId>slf4j-log4j12</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <artifactId>log4j-over-slf4j</artifactId>
                     <groupId>org.slf4j</groupId>
                 </exclusion>
                 <exclusion>
                     <groupId>com.google.guava</groupId>
                     <artifactId>guava</artifactId>
                 </exclusion>
             </exclusions>
+            <version>${galaxy.tools.version}</version>
         </dependency>
     </dependencies>

 </project>
```
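The new shade relocation rewrites `org.apache.http` classes into the `shade.org.apache.http` namespace inside the fat jar, avoiding clashes with whatever HttpClient version sits on the Flink classpath. A minimal sketch of what that means at runtime — the class names are from Apache HttpClient 4.x, and the check itself is illustrative, not part of this change:

```java
public class RelocationCheck {
    public static void main(String[] args) {
        // Inside the shaded flink-dos-detection jar, the relocated name resolves
        // while the original package name does not (assuming no other jar on the
        // classpath provides it).
        tryLoad("shade.org.apache.http.client.HttpClient"); // expected: found
        tryLoad("org.apache.http.client.HttpClient");       // expected: not found
    }

    private static void tryLoad(String className) {
        try {
            Class.forName(className);
            System.out.println("found: " + className);
        } catch (ClassNotFoundException e) {
            System.out.println("not found: " + className);
        }
    }
}
```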
src/main/java/com/zdjizhi/common/CommonConfig.java (deleted)

```diff
@@ -1,49 +0,0 @@
-package com.zdjizhi.common;
-
-import com.zdjizhi.utils.CommonConfigurations;
-
-/**
- * Created by wk on 2021/1/6.
- */
-public class CommonConfig {
-
-    public static final int STREAM_EXECUTION_ENVIRONMENT_PARALLELISM = CommonConfigurations.getIntProperty("stream.execution.environment.parallelism");
-    public static final String STREAM_EXECUTION_JOB_NAME = CommonConfigurations.getStringProperty("stream.execution.job.name");
-
-    public static final int KAFKA_INPUT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.input.parallelism");
-    public static final String KAFKA_INPUT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.input.topic.name");
-    public static final String KAFKA_INPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.input.bootstrap.servers");
-    public static final String KAFKA_SCAN_STARTUP_MODE = CommonConfigurations.getStringProperty("kafka.input.scan.startup.mode");
-    public static final String KAFKA_GROUP_ID = CommonConfigurations.getStringProperty("kafka.input.group.id");
-
-    public static final int KAFKA_OUTPUT_METRIC_PARALLELISM = CommonConfigurations.getIntProperty("kafka.output.metric.parallelism");
-    public static final String KAFKA_OUTPUT_METRIC_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.output.metric.topic.name");
-    public static final int KAFKA_OUTPUT_EVENT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.output.event.parallelism");
-    public static final String KAFKA_OUTPUT_EVENT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.output.event.topic.name");
-    public static final String KAFKA_OUTPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.output.bootstrap.servers");
-
-    public static final int HBASE_INPUT_PARALLELISM = CommonConfigurations.getIntProperty("hbase.input.parallelism");
-    public static final String HBASE_ZOOKEEPER_QUORUM = CommonConfigurations.getStringProperty("hbase.zookeeper.quorum");
-    public static final int HBASE_CLIENT_OPERATION_TIMEOUT = CommonConfigurations.getIntProperty("hbase.client.operation.timeout");
-    public static final int HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = CommonConfigurations.getIntProperty("hbase.client.scanner.timeout.period");
-
-    public static final String HBASE_BASELINE_TABLE_NAME = CommonConfigurations.getStringProperty("hbase.baseline.table.name");
-    public static final int HBASE_BASELINE_TOTAL_NUM = CommonConfigurations.getIntProperty("hbase.baseline.total.num");
-
-    public static final int FLINK_FIRST_AGG_PARALLELISM = CommonConfigurations.getIntProperty("flink.first.agg.parallelism");
-    public static final int FLINK_SECOND_AGG_PARALLELISM = CommonConfigurations.getIntProperty("flink.second.agg.parallelism");
-    public static final int FLINK_WATERMARK_MAX_ORDERNESS = CommonConfigurations.getIntProperty("flink.watermark.max.orderness");
-    public static final int FLINK_WINDOW_MAX_TIME = CommonConfigurations.getIntProperty("flink.window.max.time");
-
-    public static final int SOURCE_IP_LIST_LIMIT = CommonConfigurations.getIntProperty("source.ip.list.limit");
-    public static final int DATA_CENTER_ID_NUM = CommonConfigurations.getIntProperty("data.center.id.num");
-
-    public static final String IP_MMDB_PATH = CommonConfigurations.getStringProperty("ip.mmdb.path");
-
-    public static final double BASELINE_SESSIONS_MINOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.minor.threshold");
-    public static final double BASELINE_SESSIONS_WARNING_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.warning.threshold");
-    public static final double BASELINE_SESSIONS_MAJOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.major.threshold");
-    public static final double BASELINE_SESSIONS_SEVERE_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.severe.threshold");
-    public static final double BASELINE_SESSIONS_CRITICAL_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.critical.threshold");
-
-}
```
src/main/java/com/zdjizhi/common/DosBaselineThreshold.java (new file, 63 lines)

```diff
@@ -0,0 +1,63 @@
+package com.zdjizhi.common;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Objects;
+
+public class DosBaselineThreshold implements Serializable {
+    private ArrayList<Integer> session_rate;
+    private Integer session_rate_baseline_type;
+    private Integer session_rate_default_value;
+
+    public ArrayList<Integer> getSession_rate() {
+        return session_rate;
+    }
+
+    public void setSession_rate(ArrayList<Integer> session_rate) {
+        this.session_rate = session_rate;
+    }
+
+    public Integer getSession_rate_baseline_type() {
+        return session_rate_baseline_type;
+    }
+
+    public void setSession_rate_baseline_type(Integer session_rate_baseline_type) {
+        this.session_rate_baseline_type = session_rate_baseline_type;
+    }
+
+    public Integer getSession_rate_default_value() {
+        return session_rate_default_value;
+    }
+
+    public void setSession_rate_default_value(Integer session_rate_default_value) {
+        this.session_rate_default_value = session_rate_default_value;
+    }
+
+    @Override
+    public String toString() {
+        return "DosBaselineThreshold{" +
+                "session_rate=" + session_rate +
+                ", session_rate_baseline_type=" + session_rate_baseline_type +
+                ", session_rate_default_value=" + session_rate_default_value +
+                '}';
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof DosBaselineThreshold)) {
+            return false;
+        }
+        DosBaselineThreshold that = (DosBaselineThreshold) o;
+        return Objects.equals(getSession_rate(), that.getSession_rate()) &&
+                Objects.equals(getSession_rate_baseline_type(), that.getSession_rate_baseline_type()) &&
+                Objects.equals(getSession_rate_default_value(), that.getSession_rate_default_value());
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(getSession_rate(), getSession_rate_baseline_type(), getSession_rate_default_value());
+    }
+}
```
src/main/java/com/zdjizhi/common/DosDetectionThreshold.java (new file, 117 lines)

```diff
@@ -0,0 +1,117 @@
+package com.zdjizhi.common;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+/**
+ * @author wlh
+ */
+public class DosDetectionThreshold implements Serializable {
+    private long id;
+    private String attack_type;
+    private ArrayList<String> server_ip_list;
+    private String server_ip_addr;
+    private long packets_per_sec;
+    private long bits_per_sec;
+    private long sessions_per_sec;
+    private int is_enabled;
+    private int vsys_id;
+    private Integer[] superior_ids;
+
+    public long getId() {
+        return id;
+    }
+
+    public void setId(long id) {
+        this.id = id;
+    }
+
+    public String getAttack_type() {
+        return attack_type;
+    }
+
+    public void setAttack_type(String attack_type) {
+        this.attack_type = attack_type;
+    }
+
+    public ArrayList<String> getServer_ip_list() {
+        return server_ip_list;
+    }
+
+    public void setServer_ip_list(ArrayList<String> server_ip_list) {
+        this.server_ip_list = server_ip_list;
+    }
+
+    public String getServer_ip_addr() {
+        return server_ip_addr;
+    }
+
+    public void setServer_ip_addr(String server_ip_addr) {
+        this.server_ip_addr = server_ip_addr;
+    }
+
+    public long getPackets_per_sec() {
+        return packets_per_sec;
+    }
+
+    public void setPackets_per_sec(long packets_per_sec) {
+        this.packets_per_sec = packets_per_sec;
+    }
+
+    public long getBits_per_sec() {
+        return bits_per_sec;
+    }
+
+    public void setBits_per_sec(long bits_per_sec) {
+        this.bits_per_sec = bits_per_sec;
+    }
+
+    public long getSessions_per_sec() {
+        return sessions_per_sec;
+    }
+
+    public void setSessions_per_sec(long sessions_per_sec) {
+        this.sessions_per_sec = sessions_per_sec;
+    }
+
+    public int getIs_enabled() {
+        return is_enabled;
+    }
+
+    public void setIs_enabled(int is_enabled) {
+        this.is_enabled = is_enabled;
+    }
+
+    public int getVsys_id() {
+        return vsys_id;
+    }
+
+    public void setVsys_id(int vsys_id) {
+        this.vsys_id = vsys_id;
+    }
+
+    public Integer[] getSuperior_ids() {
+        return superior_ids;
+    }
+
+    public void setSuperior_ids(Integer[] superior_ids) {
+        this.superior_ids = superior_ids;
+    }
+
+    @Override
+    public String toString() {
+        return "DosDetectionThreshold{" +
+                "id=" + id +
+                ", attack_type='" + attack_type + '\'' +
+                ", server_ip_list=" + server_ip_list +
+                ", server_ip_addr='" + server_ip_addr + '\'' +
+                ", packets_per_sec=" + packets_per_sec +
+                ", bits_per_sec=" + bits_per_sec +
+                ", sessions_per_sec=" + sessions_per_sec +
+                ", is_enabled=" + is_enabled +
+                ", vsys_id=" + vsys_id +
+                ", superior_ids=" + Arrays.toString(superior_ids) +
+                '}';
+    }
+}
```
src/main/java/com/zdjizhi/common/DosEventLog.java

```diff
@@ -2,11 +2,13 @@ package com.zdjizhi.common;

 import java.io.Serializable;

-public class DosEventLog implements Serializable {
+public class DosEventLog implements Serializable, Cloneable {
+    private long recv_time;
     private long log_id;
+    private int vsys_id;
     private long start_time;
     private long end_time;
     private long profile_id;
     private String attack_type;
     private String severity;
     private String conditions;
@@ -17,6 +19,18 @@ public class DosEventLog implements Serializable {
     private long session_rate;
     private long packet_rate;
     private long bit_rate;
+    private long sessions;
+    private long packets;
+    private long bytes;
+
+    private int rule_id;
+    public long getRecv_time() {
+        return recv_time;
+    }
+
+    public void setRecv_time(long recv_time) {
+        this.recv_time = recv_time;
+    }
+
     public long getLog_id() {
         return log_id;
@@ -26,6 +40,14 @@ public class DosEventLog implements Serializable {
         this.log_id = log_id;
     }

+    public int getVsys_id() {
+        return vsys_id;
+    }
+
+    public void setVsys_id(int vsys_id) {
+        this.vsys_id = vsys_id;
+    }
+
     public long getStart_time() {
         return start_time;
     }
@@ -42,6 +64,14 @@ public class DosEventLog implements Serializable {
         this.end_time = end_time;
     }

+    public long getProfile_id() {
+        return profile_id;
+    }
+
+    public void setProfile_id(long profile_id) {
+        this.profile_id = profile_id;
+    }
+
     public String getAttack_type() {
         return attack_type;
     }
@@ -122,12 +152,47 @@ public class DosEventLog implements Serializable {
         this.bit_rate = bit_rate;
     }

+    public long getSessions() {
+        return sessions;
+    }
+
+    public void setSessions(long sessions) {
+        this.sessions = sessions;
+    }
+
+    public long getPackets() {
+        return packets;
+    }
+
+    public void setPackets(long packets) {
+        this.packets = packets;
+    }
+
+    public long getBytes() {
+        return bytes;
+    }
+
+    public void setBytes(long bytes) {
+        this.bytes = bytes;
+    }
+
+    public int getRule_id() {
+        return rule_id;
+    }
+
+    public void setRule_id(int rule_id) {
+        this.rule_id = rule_id;
+    }
+
     @Override
     public String toString() {
-        return "dosEventLog{" +
-                "log_id=" + log_id +
+        return "DosEventLog{" +
+                "recv_time=" + recv_time +
+                ", log_id=" + log_id +
+                ", vsys_id=" + vsys_id +
                 ", start_time=" + start_time +
                 ", end_time=" + end_time +
                 ", profile_id=" + profile_id +
                 ", attack_type='" + attack_type + '\'' +
                 ", severity='" + severity + '\'' +
                 ", conditions='" + conditions + '\'' +
@@ -140,4 +205,10 @@ public class DosEventLog implements Serializable {
                 ", bit_rate=" + bit_rate +
                 '}';
     }
+
+    @Override
+    public Object clone() throws CloneNotSupportedException {
+        return super.clone();
+    }
+
 }
```
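The new `Cloneable` implementation delegates to `Object.clone()`, i.e. a shallow field-for-field copy. Because every field of `DosEventLog` is a primitive or an immutable `String`, the shallow copy can be mutated without affecting the original. A minimal sketch, with hypothetical values:

```java
public class CloneSketch {
    public static void main(String[] args) throws CloneNotSupportedException {
        DosEventLog original = new DosEventLog();
        original.setLog_id(42L);
        original.setAttack_type("syn-flood"); // hypothetical attack-type label

        // Shallow copy via the clone() override added in this change.
        DosEventLog copy = (DosEventLog) original.clone();
        copy.setLog_id(43L); // leaves original.getLog_id() at 42

        System.out.println(original.getLog_id() + " vs " + copy.getLog_id());
    }
}
```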
src/main/java/com/zdjizhi/common/DosMetricsLog.java

```diff
@@ -1,17 +1,26 @@
 package com.zdjizhi.common;

 import java.io.Serializable;
 import java.util.Objects;

 public class DosMetricsLog implements Serializable {

     private long sketch_start_time;
     private String common_sled_ip;
     private String common_data_center;
     private String attack_type;
     private String destination_ip;
     private long session_rate;
     private long packet_rate;
     private long bit_rate;
+    private int partition_num;
+    private int vsys_id;
+
+    public int getPartition_num() {
+        return partition_num;
+    }
+
+    public void setPartition_num(int partition_num) {
+        this.partition_num = partition_num;
+    }
+
     public long getSketch_start_time() {
         return sketch_start_time;
@@ -21,22 +30,6 @@ public class DosMetricsLog implements Serializable {
         this.sketch_start_time = sketch_start_time;
     }

-    public String getCommon_sled_ip() {
-        return common_sled_ip;
-    }
-
-    public void setCommon_sled_ip(String common_sled_ip) {
-        this.common_sled_ip = common_sled_ip;
-    }
-
-    public String getCommon_data_center() {
-        return common_data_center;
-    }
-
-    public void setCommon_data_center(String common_data_center) {
-        this.common_data_center = common_data_center;
-    }
-
     public String getAttack_type() {
         return attack_type;
     }
@@ -77,17 +70,25 @@ public class DosMetricsLog implements Serializable {
         this.bit_rate = bit_rate;
     }

+    public int getVsys_id() {
+        return vsys_id;
+    }
+
+    public void setVsys_id(int vsys_id) {
+        this.vsys_id = vsys_id;
+    }
+
     @Override
     public String toString() {
         return "DosMetricsLog{" +
                 "sketch_start_time=" + sketch_start_time +
                 ", common_sled_ip='" + common_sled_ip + '\'' +
                 ", common_data_center='" + common_data_center + '\'' +
                 ", attack_type='" + attack_type + '\'' +
                 ", destination_ip='" + destination_ip + '\'' +
                 ", session_rate=" + session_rate +
                 ", packet_rate=" + packet_rate +
                 ", bit_rate=" + bit_rate +
+                ", partition_num=" + partition_num +
+                ", vsys_id=" + vsys_id +
                 '}';
     }
 }
```
src/main/java/com/zdjizhi/common/DosSketchLog.java

```diff
@@ -1,66 +1,238 @@
 package com.zdjizhi.common;

 import java.io.Serializable;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Objects;

 public class DosSketchLog implements Serializable {

     private String common_sled_ip;
     private String common_data_center;
     private long sketch_start_time;
     private long sketch_duration;
+    private String name;
+    private long recv_time;
+    private long timestamp_ms;
+    private String device_id;
+    private String data_center;
+    private String device_group;
+    private String decoded_as;
+    private int rule_id;
+    private String client_country;
+    private String server_country;
+    private String client_ip;
+    private String server_ip;
+    private long sessions;
+    private long pkts;
+    private long bytes;
+    private int vsys_id;
+    private long start_timestamp_ms;
+    private long end_timestamp_ms;
+    private long duration;
+    private HashSet<String> client_ips;
+    private HashSet<String> client_countrys;
+
+    private long session_rate;
+    private long packet_rate;
+    private long bit_rate;
     private String attack_type;
     private String source_ip;
     private String destination_ip;
     private long sketch_sessions;
     private long sketch_packets;
     private long sketch_bytes;

     @Override
     public String toString() {
         return "DosSketchLog{" +
                 "common_sled_ip='" + common_sled_ip + '\'' +
                 ", common_data_center='" + common_data_center + '\'' +
                 ", sketch_start_time=" + sketch_start_time +
                 ", sketch_duration=" + sketch_duration +
-                ", attack_type='" + attack_type + '\'' +
-                ", source_ip='" + source_ip + '\'' +
-                ", destination_ip='" + destination_ip + '\'' +
-                ", sketch_sessions=" + sketch_sessions +
-                ", sketch_packets=" + sketch_packets +
-                ", sketch_bytes=" + sketch_bytes +
+                "name=" + name +
+                ", timestamp_ms='" + timestamp_ms + '\'' +
+                ", device_id='" + device_id + '\'' +
+                ", data_center=" + data_center +
+                ", device_group=" + device_group +
+                ", decoded_as='" + decoded_as + '\'' +
+                ", client_country='" + client_country + '\'' +
+                ", server_country='" + server_country + '\'' +
+                ", client_ip=" + client_ip +
+                ", server_ip=" + server_ip +
+                ", vsys_id=" + vsys_id +
                 '}';
     }

-    public String getCommon_sled_ip() {
-        return common_sled_ip;
+    public HashSet<String> getClient_ips() {
+        return client_ips;
     }

-    public void setCommon_sled_ip(String common_sled_ip) {
-        this.common_sled_ip = common_sled_ip;
+    public void setClient_ips(HashSet<String> client_ips) {
+        this.client_ips = client_ips;
     }

-    public String getCommon_data_center() {
-        return common_data_center;
+    public HashSet<String> getClient_countrys() {
+        return client_countrys;
     }

-    public void setCommon_data_center(String common_data_center) {
-        this.common_data_center = common_data_center;
+    public void setClient_countrys(HashSet<String> client_countrys) {
+        this.client_countrys = client_countrys;
     }

-    public long getSketch_start_time() {
-        return sketch_start_time;
+    public long getRecv_time() {
+        return recv_time;
     }

-    public void setSketch_start_time(long sketch_start_time) {
-        this.sketch_start_time = sketch_start_time;
+    public void setRecv_time(long recv_time) {
+        this.recv_time = recv_time;
     }

-    public long getSketch_duration() {
-        return sketch_duration;
+    public String getName() {
+        return name;
     }

-    public void setSketch_duration(long sketch_duration) {
-        this.sketch_duration = sketch_duration;
+    public void setName(String name) {
+        this.name = name;
     }

+    public long getTimestamp_ms() {
+        return timestamp_ms;
+    }
+
+    public void setTimestamp_ms(long timestamp_ms) {
+        this.timestamp_ms = timestamp_ms;
+    }
+
+    public String getDevice_id() {
+        return device_id;
+    }
+
+    public void setDevice_id(String device_id) {
+        this.device_id = device_id;
+    }
+
+    public String getData_center() {
+        return data_center;
+    }
+
+    public void setData_center(String data_center) {
+        this.data_center = data_center;
+    }
+
+    public String getDevice_group() {
+        return device_group;
+    }
+
+    public void setDevice_group(String device_group) {
+        this.device_group = device_group;
+    }
+
+    public String getDecoded_as() {
+        return decoded_as;
+    }
+
+    public void setDecoded_as(String decoded_as) {
+        this.decoded_as = decoded_as;
+    }
+
+    public String getClient_country() {
+        return client_country;
+    }
+
+    public void setClient_country(String client_country) {
+        this.client_country = client_country;
+    }
+
+    public String getServer_country() {
+        return server_country;
+    }
+
+    public void setServer_country(String server_country) {
+        this.server_country = server_country;
+    }
+
+    public String getClient_ip() {
+        return client_ip;
+    }
+
+    public void setClient_ip(String client_ip) {
+        this.client_ip = client_ip;
+    }
+
+    public String getServer_ip() {
+        return server_ip;
+    }
+
+    public void setServer_ip(String server_ip) {
+        this.server_ip = server_ip;
+    }
+
+    public long getSessions() {
+        return sessions;
+    }
+
+    public void setSessions(long sessions) {
+        this.sessions = sessions;
+    }
+
+    public long getPkts() {
+        return pkts;
+    }
+
+    public void setPkts(long pkts) {
+        this.pkts = pkts;
+    }
+
+    public long getBytes() {
+        return bytes;
+    }
+
+    public void setBytes(long bytes) {
+        this.bytes = bytes;
+    }
+
+    public int getVsys_id() {
+        return vsys_id;
+    }
+
+    public void setVsys_id(int vsys_id) {
+        this.vsys_id = vsys_id;
+    }
+
+    public long getStart_timestamp_ms() {
+        return start_timestamp_ms;
+    }
+
+    public void setStart_timestamp_ms(long start_timestamp_ms) {
+        this.start_timestamp_ms = start_timestamp_ms;
+    }
+
+    public long getEnd_timestamp_ms() {
+        return end_timestamp_ms;
+    }
+
+    public void setEnd_timestamp_ms(long end_timestamp_ms) {
+        this.end_timestamp_ms = end_timestamp_ms;
+    }
+
+    public long getDuration() {
+        return duration;
+    }
+
+    public void setDuration(long duration) {
+        this.duration = duration;
+    }
+
+    public long getSession_rate() {
+        return session_rate;
+    }
+
+    public void setSession_rate(long session_rate) {
+        this.session_rate = session_rate;
+    }
+
+    public long getPacket_rate() {
+        return packet_rate;
+    }
+
+    public void setPacket_rate(long packet_rate) {
+        this.packet_rate = packet_rate;
+    }
+
+    public long getBit_rate() {
+        return bit_rate;
+    }
+
+    public void setBit_rate(long bit_rate) {
+        this.bit_rate = bit_rate;
+    }
+
     public String getAttack_type() {
@@ -71,43 +243,12 @@ public class DosSketchLog implements Serializable {
         this.attack_type = attack_type;
     }

-    public String getSource_ip() {
-        return source_ip;
+    public int getRule_id() {
+        return rule_id;
     }

-    public void setSource_ip(String source_ip) {
-        this.source_ip = source_ip;
-    }
-
-    public String getDestination_ip() {
-        return destination_ip;
-    }
-
-    public void setDestination_ip(String destination_ip) {
-        this.destination_ip = destination_ip;
-    }
-
-    public long getSketch_sessions() {
-        return sketch_sessions;
-    }
-
-    public void setSketch_sessions(long sketch_sessions) {
-        this.sketch_sessions = sketch_sessions;
-    }
-
-    public long getSketch_packets() {
-        return sketch_packets;
-    }
-
-    public void setSketch_packets(long sketch_packets) {
-        this.sketch_packets = sketch_packets;
-    }
-
-    public long getSketch_bytes() {
-        return sketch_bytes;
-    }
-
-    public void setSketch_bytes(long sketch_bytes) {
-        this.sketch_bytes = sketch_bytes;
+    public void setRule_id(int rule_id) {
+        this.rule_id = rule_id;
     }
 }
```
src/main/java/com/zdjizhi/common/pojo/DosSketchMetricsLog.java (new file, 42 lines)

```diff
@@ -0,0 +1,42 @@
+package com.zdjizhi.common.pojo;
+
+import java.util.Map;
+
+public class DosSketchMetricsLog {
+    private String name;
+    private Map<String,String> tags;
+    private Map<String,Long> fields;
+    private long timestamp_ms;
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public Map<String, String> getTags() {
+        return tags;
+    }
+
+    public void setTags(Map<String, String> tags) {
+        this.tags = tags;
+    }
+
+    public Map<String, Long> getFields() {
+        return fields;
+    }
+
+    public void setFields(Map<String, Long> fields) {
+        this.fields = fields;
+    }
+
+    public long getTimestamp_ms() {
+        return timestamp_ms;
+    }
+
+    public void setTimestamp_ms(long timestamp_ms) {
+        this.timestamp_ms = timestamp_ms;
+    }
+}
```
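`DosSketchMetricsLog` is a name/tags/fields/timestamp envelope, the usual shape for a time-series metrics record. A minimal usage sketch; the tag and field names below are hypothetical, chosen to mirror the `DosMetricsLog` fields elsewhere in this change:

```java
import java.util.Map;

public class MetricsEnvelopeSketch {
    public static void main(String[] args) {
        DosSketchMetricsLog m = new DosSketchMetricsLog();
        m.setName("dos_sketch_metrics");                     // hypothetical measurement name
        m.setTags(Map.of("attack_type", "syn-flood",
                         "destination_ip", "203.0.113.7"));  // dimensions
        m.setFields(Map.of("session_rate", 1200L,
                           "packet_rate", 56000L));          // measured values
        m.setTimestamp_ms(System.currentTimeMillis());
    }
}
```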
src/main/java/com/zdjizhi/conf/DosConfigs.java (new file, 218 lines)

```diff
@@ -0,0 +1,218 @@
+package com.zdjizhi.conf;
+
+import org.apache.flink.configuration.ConfigOption;
+import org.apache.flink.configuration.ConfigOptions;
+
+public class DosConfigs {
+
+    /**
+     * The prefix for Kafka properties used in the source.
+     */
+    public static final String SOURCE_KAFKA_PROPERTIES_PREFIX = "source.kafka.props.";
+
+    /**
+     * The prefix for Kafka properties used in the sink.
+     */
+    public static final String SINK_KAFKA_PROPERTIES_PREFIX = "sink.kafka.props.";
+
+    /**
+     * Configuration option for the Kafka topic used in the source.
+     */
+    public static final ConfigOption<String> SOURCE_KAFKA_TOPIC =
+            ConfigOptions.key("source.kafka.topic")
+                    .stringType()
+                    .noDefaultValue();
+
+    public static final ConfigOption<Long> FLINK_WINDOW_MAX_TIME =
+            ConfigOptions.key("flink.window.max.time")
+                    .longType()
+                    .noDefaultValue();
+
+    public static final ConfigOption<Long> FLINK_WATERMARK_MAX_ORDERNESS =
+            ConfigOptions.key("flink.watermark.max.orderness")
+                    .longType()
+                    .noDefaultValue();
+
+    /**
+     * Configuration option for the Kafka topic used in the sink.
+     */
+    public static final ConfigOption<String> KAFKA_SINK_EVENT_TOPIC =
+            ConfigOptions.key("kafka.sink.event.topic.name")
+                    .stringType()
+                    .noDefaultValue();
+
+    public static final ConfigOption<String> KAFKA_SINK_METRIC_TOPIC =
+            ConfigOptions.key("kafka.sink.metric.topic")
+                    .stringType()
+                    .noDefaultValue();
+
+    public static final ConfigOption<String> HBASE_ZOOKEEPER_QUORUM =
+            ConfigOptions.key("hbase.zookeeper.quorum")
+                    .stringType()
+                    .noDefaultValue();
+
+    //==============================The following variables have default values=====================================
+
+    /**
+     * Configuration option for the source parallelism used in the source.
+     */
+    public static final ConfigOption<Integer> SOURCE_PARALLELISM =
+            ConfigOptions.key("source.parallelism")
+                    .intType()
+                    .defaultValue(1);
+
+    public static final ConfigOption<Integer> Flink_FIRST_AGG_PATALLELISM =
+            ConfigOptions.key("flink.first.agg.parallelism")
+                    .intType()
+                    .defaultValue(1);
+
+    public static final ConfigOption<Integer> FLINK_DETECTION_MAP_PARALLELISM =
+            ConfigOptions.key("flink.detection.map.parallelism")
+                    .intType()
+                    .defaultValue(1);
+
+    public static final ConfigOption<Integer> KAFKA_SINK_EVENT_PARALLELISM =
+            ConfigOptions.key("kafka.sink.event.parallelism")
+                    .intType()
+                    .defaultValue(1);
+
+    public static final ConfigOption<Integer> KAFKA_SINK_METRIC_PARALLELISM =
+            ConfigOptions.key("kafka.sink.metric.parallelism")
+                    .intType()
+                    .defaultValue(1);
+
+    public static final ConfigOption<String> IP_BUILTIN_KD_ID =
+            ConfigOptions.key("ip.builtin.kd.id")
+                    .stringType()
+                    .defaultValue("64af7077-eb9b-4b8f-80cf-2ceebc89bea9");
+
+    public static final ConfigOption<String> IP_USER_DEFINED_KD_ID =
+            ConfigOptions.key("ip.user.defined.kd.id")
+                    .stringType()
+                    .defaultValue("004390bc-3135-4a6f-a492-3662ecb9e289");
+
+    public static final ConfigOption<Integer> HTTP_SOCKET_TIMEOUT =
+            ConfigOptions.key("http.socket.timeout")
+                    .intType()
+                    .defaultValue(90000);
+
+    public static final ConfigOption<Integer> BASELINE_THRESHOLD_SCHEDULE_DAYS =
+            ConfigOptions.key("baseline.threshold.schedule.days")
+                    .intType()
+                    .defaultValue(7);
+
+    public static final ConfigOption<Integer> STATIC_SENSITIVITY_THRESHOLD =
+            ConfigOptions.key("static.sensitivity.threshold")
+                    .intType()
+                    .defaultValue(1);
+
+    public static final ConfigOption<Double> BASELINE_SENSITIVITY_THRESHOLD =
+            ConfigOptions.key("baseline.sensitivity.threshold")
+                    .doubleType()
+                    .defaultValue(0.2);
+
+    public static final ConfigOption<Double> BASELINE_SESSIONS_MINOR_THRESHOLD =
+            ConfigOptions.key("baseline.sessions.minor.threshold")
+                    .doubleType()
+                    .defaultValue(0.2);
+
+    public static final ConfigOption<Double> BASELINE_SESSIONS_WARNING_THRESHOLD =
+            ConfigOptions.key("baseline.sessions.warning.threshold")
+                    .doubleType()
+                    .defaultValue(1.0);
+
+    public static final ConfigOption<Double> BASELINE_SESSIONS_MAJOR_THRESHOLD =
+            ConfigOptions.key("baseline.sessions.major.threshold")
+                    .doubleType()
+                    .defaultValue(2.5);
+
+    public static final ConfigOption<Double> BASELINE_SESSIONS_SEVERE_THRESHOLD =
+            ConfigOptions.key("baseline.sessions.severe.threshold")
+                    .doubleType()
+                    .defaultValue(5.0);
+
+    public static final ConfigOption<Double> BASELINE_SESSIONS_CRITICAL_THRESHOLD =
+            ConfigOptions.key("baseline.sessions.critical.threshold")
+                    .doubleType()
+                    .defaultValue(8.0);
+
+    public static final ConfigOption<Integer> HTTP_POOL_MAX_CONNECTION =
+            ConfigOptions.key("http.pool.max.connection")
+                    .intType()
+                    .defaultValue(400);
+
+    public static final ConfigOption<Integer> HTTP_POOL_MAX_PER_ROUTE =
+            ConfigOptions.key("http.pool.max.per.route")
+                    .intType()
+                    .defaultValue(80);
+
+    public static final ConfigOption<Integer> HTTP_POOL_REQUEST_TIMEOUT =
+            ConfigOptions.key("http.pool.request.timeout")
+                    .intType()
+                    .defaultValue(60000);
+
+    public static final ConfigOption<Integer> HTTP_POOL_CONNECT_TIMEOUT =
+            ConfigOptions.key("http.pool.connect.timeout")
+                    .intType()
+                    .defaultValue(60000);
+
+    public static final ConfigOption<Integer> DATA_CENTER_ID_NUM =
+            ConfigOptions.key("data.center.id.num")
+                    .intType()
+                    .defaultValue(15);
+
+    public static final ConfigOption<Integer> HBASE_CLIENT_OPERATION_TIMEOUT =
+            ConfigOptions.key("hbase.client.operation.timeout")
+                    .intType()
+                    .defaultValue(30000);
+
+    public static final ConfigOption<Integer> HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD =
+            ConfigOptions.key("hbase.client.scanner.timeout.period")
+                    .intType()
+                    .defaultValue(30000);
+
+    public static final ConfigOption<String> HBASE_BASELINE_TABLE_NAME =
+            ConfigOptions.key("hbase.baseline.table.name")
+                    .stringType()
+                    .defaultValue("dos:ddos_traffic_baselines");
+
+    public static final ConfigOption<Integer> HBASE_BASELINE_TTL =
+            ConfigOptions.key("hbase.baseline.ttl")
+                    .intType()
+                    .defaultValue(10);
+
+    public static final ConfigOption<Integer> HBASE_BASELINE_TOTAL_NUM =
+            ConfigOptions.key("hbase.baseline.total.num")
+                    .intType()
+                    .defaultValue(1000000);
+
+    public static final ConfigOption<Integer> DESTINATION_IP_PARTITION_NUM =
+            ConfigOptions.key("destination.ip.partition.num")
+                    .intType()
+                    .defaultValue(10000);
+
+    public static final ConfigOption<Integer> SOURCE_IP_LIST_LIMIT =
+            ConfigOptions.key("source.ip.list.limit")
+                    .intType()
+                    .defaultValue(10000);
+
+    /**
+     * Knowledge base scheduling cycle, in minutes
+     */
+    public static final ConfigOption<Long> KNOWLEDGE_BASE_SCHEDULE_MINUTES =
+            ConfigOptions.key("knowledge.base.schedule.minutes")
+                    .longType()
+                    .defaultValue(60L);
+
+    public static final ConfigOption<String> JOB_NAME =
+            ConfigOptions.key("job.name")
+                    .stringType()
+                    .defaultValue("detection_dos_attack")
+                    .withDescription("The flink job name.");
+
+}
```
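These `ConfigOption`s resolve against a Flink `Configuration`: options declared with `defaultValue(...)` fall back to the default when the key is absent, while `noDefaultValue()` options yield null. A minimal sketch (the topic name is hypothetical):

```java
import org.apache.flink.configuration.Configuration;

public class DosConfigsSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(DosConfigs.SOURCE_KAFKA_TOPIC, "dos-sketch-input"); // hypothetical topic

        String topic = conf.get(DosConfigs.SOURCE_KAFKA_TOPIC);     // "dos-sketch-input"
        int parallelism = conf.get(DosConfigs.SOURCE_PARALLELISM);  // 1, the declared default
        System.out.println(topic + " @ parallelism " + parallelism);
    }
}
```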
src/main/java/com/zdjizhi/conf/DosConfiguration.java (new file, 36 lines)

```diff
@@ -0,0 +1,36 @@
+package com.zdjizhi.conf;
+
+import org.apache.flink.configuration.Configuration;
+
+import java.util.Properties;
+
+public class DosConfiguration {
+    private final Configuration config;
+
+    public DosConfiguration(final Configuration config) {
+        this.config = config;
+    }
+
+    /**
+     * Retrieves properties from the underlying `Configuration` instance that start with the specified
+     * `prefix`. The properties are then converted into a `java.util.Properties` object and returned.
+     *
+     * @param prefix The prefix to filter properties.
+     * @return A `java.util.Properties` object containing the properties with the specified prefix.
+     */
+    public Properties getProperties(final String prefix) {
+        if (prefix == null) {
+            final Properties props = new Properties();
+            props.putAll(config.toMap());
+            return props;
+        }
+        return config.toMap()
+                .entrySet()
+                .stream()
+                .filter(entry -> entry.getKey().startsWith(prefix))
+                .collect(Properties::new, (props, e) ->
+                        props.setProperty(e.getKey().substring(prefix.length()), e.getValue()),
+                        Properties::putAll);
+    }
+}
```
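Combined with the prefixes in `DosConfigs`, `getProperties` strips the prefix so the remainder can be handed straight to a Kafka client. A sketch under assumed key values:

```java
import java.util.Properties;
import org.apache.flink.configuration.Configuration;

public class PrefixSketch {
    public static void main(String[] args) {
        Configuration flinkConf = new Configuration();
        flinkConf.setString("source.kafka.props.bootstrap.servers", "broker:9092"); // hypothetical
        flinkConf.setString("source.kafka.props.group.id", "dos-detect");           // hypothetical

        Properties kafkaProps = new DosConfiguration(flinkConf)
                .getProperties(DosConfigs.SOURCE_KAFKA_PROPERTIES_PREFIX);
        // kafkaProps now holds bootstrap.servers=broker:9092 and group.id=dos-detect,
        // with the "source.kafka.props." prefix removed.
        System.out.println(kafkaProps);
    }
}
```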
src/main/java/com/zdjizhi/etl/DosDetection.java (deleted)

```diff
@@ -1,191 +0,0 @@
-package com.zdjizhi.etl;
-
-import com.zdjizhi.common.CommonConfig;
-import com.zdjizhi.common.DosEventLog;
-import com.zdjizhi.common.DosSketchLog;
-import com.zdjizhi.sink.OutputStreamSink;
-import com.zdjizhi.utils.IpUtils;
-import com.zdjizhi.utils.SnowflakeId;
-import org.apache.commons.lang.StringUtils;
-import org.apache.flink.api.common.state.MapStateDescriptor;
-import org.apache.flink.api.common.typeinfo.Types;
-import org.apache.flink.api.java.typeutils.MapTypeInfo;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
-import org.apache.flink.util.Collector;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author wlh
- * DoS detection decision logic
- */
-public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<String, Map<String, List<Integer>>>, DosEventLog> {
-
-    private static final Logger logger = LoggerFactory.getLogger(DosDetection.class);
-
-    private final static int BASELINE_SIZE = 144;
-
-    private static MapStateDescriptor<String, Map<String, Map<String, List<Integer>>>> descriptor = new MapStateDescriptor<>("boradcast-state",
-            Types.STRING,
-            new MapTypeInfo<>(String.class, new MapTypeInfo<>(String.class, (Class<List<Integer>>) (Class<?>) List.class).getTypeClass()));
-
-    private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
-
-    @Override
-    public void open(Configuration parameters) {
-        PERCENT_INSTANCE.setMinimumFractionDigits(2);
-    }
-
-    @Override
-    public void processElement(DosSketchLog value, ReadOnlyContext ctx, Collector<DosEventLog> out) throws Exception {
-        try {
-            Map<String, Map<String, List<Integer>>> broadcast = ctx.getBroadcastState(descriptor).get("broadcast-state");
-            String destinationIp = value.getDestination_ip();
-            String attackType = value.getAttack_type();
-            logger.info("Evaluating IP: {}, type: {}", destinationIp, attackType);
-            if (broadcast.containsKey(destinationIp)) {
-                List<Integer> baseline = broadcast.get(destinationIp).get(attackType);
-                if (baseline != null && baseline.size() == BASELINE_SIZE) {
-                    int timeIndex = getCurrentTimeIndex(value.getSketch_start_time());
-                    Integer base = baseline.get(timeIndex);
-                    long sketchSessions = value.getSketch_sessions();
-                    long diff = sketchSessions - base;
-                    if (diff > 0) {
-                        String percent = getDiffPercent(diff, sketchSessions);
-                        double diffPercentDouble = getDiffPercentDouble(percent);
-                        Severity severity = judgeSeverity(diffPercentDouble);
-                        if (severity != Severity.NORMAL) {
-                            DosEventLog result = getResult(value, severity, percent);
-                            logger.info("Detected anomaly on server IP {} of type {}, log details\n {}", destinationIp, attackType, result.toString());
-                            out.collect(result);
-                        } else {
-                            logger.info("Server IP {} shows no {} anomaly, log details {}", destinationIp, attackType, value.toString());
-                        }
-                    }
-                }
-            } else {
-                logger.info("No baseline data found for server IP {}, type {}", destinationIp, attackType);
-            }
-        } catch (Exception e) {
-            logger.error("Detection decision failed\n {} \n{}", value, e);
-        }
-    }
-
-    @Override
-    public void processBroadcastElement(Map<String, Map<String, List<Integer>>> value, Context ctx, Collector<DosEventLog> out) {
-        try {
-            ctx.getBroadcastState(descriptor).put("broadcast-state", value);
-        } catch (Exception e) {
-            logger.error("Failed to update broadcast state {}", e);
-        }
-    }
-
-    public static void main(String[] args) {
-        DosDetection dosDetection = new DosDetection();
-        // HashSet<String> strings = new HashSet<>();
-        // strings.add("13.46.241.36");
-        // strings.add("25.46.241.45");
-        // strings.add("133.46.241.53");
-        // strings.add("219.46.242.74");
-        // strings.add("153.146.241.196");
-        // strings.add("132.46.241.21");
-        // String join = StringUtils.join(strings, ",");
-        // System.out.println(IpUtils.ipLookup.countryLookup("192.168.50.150"));
-        System.out.println(Severity.CRITICAL.severity);
-    }
-
-    private DosEventLog getResult(DosSketchLog value, Severity severity, String percent) {
-        DosEventLog dosEventLog = new DosEventLog();
-        dosEventLog.setLog_id(SnowflakeId.generateId());
-        dosEventLog.setStart_time(value.getSketch_start_time());
-        dosEventLog.setEnd_time(value.getSketch_start_time() + CommonConfig.FLINK_WINDOW_MAX_TIME);
-        dosEventLog.setAttack_type(value.getAttack_type());
-        dosEventLog.setSeverity(severity.toString());
-        dosEventLog.setConditions(getConditions(percent));
-        dosEventLog.setDestination_ip(value.getDestination_ip());
-        dosEventLog.setDestination_country(IpUtils.ipLookup.countryLookup(value.getDestination_ip()));
-        String ipList = value.getSource_ip();
-        dosEventLog.setSource_ip_list(ipList);
-        dosEventLog.setSource_country_list(getSourceCountryList(ipList));
-        dosEventLog.setSession_rate(value.getSketch_sessions());
-        dosEventLog.setPacket_rate(value.getSketch_packets());
-        dosEventLog.setBit_rate(value.getSketch_bytes());
-        return dosEventLog;
-    }
-
-    private String getConditions(String percent) {
-        return "sessions > " + percent + " of baseline";
-    }
-
-    private String getSourceCountryList(String sourceIpList) {
-        String[] ipArr = sourceIpList.split(",");
-        HashSet<String> countrySet = new HashSet<>();
-        for (String ip : ipArr) {
-            countrySet.add(IpUtils.ipLookup.countryLookup(ip));
-        }
-        return StringUtils.join(countrySet, ",");
-    }
-
-    private int getCurrentTimeIndex(long sketchStartTime) {
-        long currentDayTime = sketchStartTime / (60 * 60 * 24) * 60 * 60 * 24;
-        long indexLong = (sketchStartTime - currentDayTime) / 600;
-        return Integer.parseInt(Long.toString(indexLong));
-    }
-
-    private String getDiffPercent(long diff, long sketchSessions) {
-        double diffDou = Double.parseDouble(Long.toString(diff));
-        double sessDou = Double.parseDouble(Long.toString(sketchSessions));
-        return PERCENT_INSTANCE.format(diffDou / sessDou);
-    }
-
-    private double getDiffPercentDouble(String diffPercent) throws ParseException {
-        return PERCENT_INSTANCE.parse(diffPercent).doubleValue();
-    }
-
-    private Severity judgeSeverity(double diffPercent) {
-        if (diffPercent >= CommonConfig.BASELINE_SESSIONS_MINOR_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_WARNING_THRESHOLD) {
-            return Severity.MINOR;
-        } else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_WARNING_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD) {
-            return Severity.WARNING;
-        } else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD) {
-            return Severity.MAJOR;
-        } else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD) {
-            return Severity.SEVERE;
-        } else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD) {
-            return Severity.CRITICAL;
-        } else {
-            return Severity.NORMAL;
-        }
-    }
-
-    private enum Severity {
-        /**
-         * Severity level enum
-         */
-        CRITICAL("Critical"),
-        SEVERE("Severe"),
-        MAJOR("Major"),
-        WARNING("Warning"),
-        MINOR("Minor"),
-        NORMAL("Normal");
-
-        private final String severity;
-
-        @Override
-        public String toString() {
-            return this.severity;
-        }
-
-        Severity(String severity) {
-            this.severity = severity;
-        }
-    }
-}
```
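For reference, the removed `getCurrentTimeIndex` maps a sketch timestamp (epoch seconds) to one of `BASELINE_SIZE = 144` ten-minute slots per day (86400 / 600 = 144), and `judgeSeverity` then compares the session overshoot ratio against the `baseline.sessions.*` thresholds. A worked example with a hypothetical timestamp:

```java
public class BaselineIndexSketch {
    public static void main(String[] args) {
        long midnightUtc = 1_699_920_000L;                        // hypothetical day start, divisible by 86400
        long sketchStartTime = midnightUtc + 3 * 3600 + 25 * 60;  // 03:25 into the day

        long dayStart = sketchStartTime / 86_400 * 86_400;        // truncates back to midnightUtc
        int index = (int) ((sketchStartTime - dayStart) / 600);   // 12300 / 600 -> slot 20 of 144
        System.out.println("baseline slot: " + index);

        // With the defaults declared in DosConfigs (0.2 / 1.0 / 2.5 / 5.0 / 8.0),
        // an overshoot ratio of 0.35 falls in [0.2, 1.0) and would map to MINOR.
    }
}
```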
src/main/java/com/zdjizhi/etl/EtlProcessFunction.java (deleted)

```diff
@@ -1,92 +0,0 @@
-package com.zdjizhi.etl;
-
-import com.zdjizhi.common.CommonConfig;
-import com.zdjizhi.common.DosSketchLog;
-import org.apache.commons.lang.StringUtils;
-import org.apache.flink.api.java.tuple.Tuple4;
-import org.apache.flink.api.java.tuple.Tuple6;
-import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
-import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
-import org.apache.flink.util.Collector;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashSet;
-
-import static com.zdjizhi.sink.OutputStreamSink.outputTag;
-
-/**
- * @author 94976
- */
-public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosSketchLog, Tuple4<String,String,String,String>, TimeWindow> {
-
-    private static final Logger logger = LoggerFactory.getLogger(EtlProcessFunction.class);
-
-    @Override
-    public void process(Tuple4<String,String, String, String> keys,
-                        Context context, Iterable<DosSketchLog> elements,
-                        Collector<DosSketchLog> out) {
-        DosSketchLog middleResult = getMiddleResult(keys, elements);
-        try {
-            if (middleResult != null) {
-                out.collect(middleResult);
-                logger.info("Intermediate aggregation result: {}", middleResult.toString());
-                context.output(outputTag, TrafficServerIpMetrics.getOutputMetric(middleResult));
-            }
-        } catch (Exception e) {
-            logger.error("Failed to emit intermediate aggregation result, middleResult: {}\n{}", middleResult.toString(), e);
-        }
-    }
-
-    private DosSketchLog getMiddleResult(Tuple4<String,String, String, String> keys, Iterable<DosSketchLog> elements) {
-
-        DosSketchLog midResuleLog = new DosSketchLog();
-        Tuple6<Long, Long, Long, String, Long, Long> values = sketchAggregate(elements);
-        try {
-            if (values != null) {
-                midResuleLog.setCommon_sled_ip(keys.f0);
-                midResuleLog.setCommon_data_center(keys.f1);
-                midResuleLog.setDestination_ip(keys.f3);
-                midResuleLog.setAttack_type(keys.f2);
-                midResuleLog.setSketch_start_time(values.f4);
-                midResuleLog.setSketch_duration(values.f5);
-                midResuleLog.setSource_ip(values.f3);
-                midResuleLog.setSketch_sessions(values.f0);
-                midResuleLog.setSketch_packets(values.f1);
-                midResuleLog.setSketch_bytes(values.f2);
-                return midResuleLog;
-            }
-        } catch (Exception e) {
-            logger.error("Failed to load intermediate result set, keys: {} values: {}\n{}", keys, values, e);
-        }
-        return null;
-    }
-
-    private Tuple6<Long, Long, Long, String, Long, Long> sketchAggregate(Iterable<DosSketchLog> elements) {
-        int cnt = 1;
-        long sessions = 0;
-        long packets = 0;
-        long bytes = 0;
-        long startTime = 0;
-        long duration = 0;
-        HashSet<String> sourceIpSet = new HashSet<>();
-        try {
-            for (DosSketchLog newSketchLog : elements) {
-                sessions += newSketchLog.getSketch_sessions();
-                packets += newSketchLog.getSketch_packets();
-                bytes += newSketchLog.getSketch_bytes();
-                startTime = newSketchLog.getSketch_start_time();
-                duration = newSketchLog.getSketch_duration();
-                cnt += 1;
-                if (sourceIpSet.size() < CommonConfig.SOURCE_IP_LIST_LIMIT) {
-                    sourceIpSet.add(newSketchLog.getSource_ip());
-                }
-            }
-            String sourceIpList = StringUtils.join(sourceIpSet, ",");
-            return Tuple6.of(sessions / cnt, packets / cnt, bytes / cnt, sourceIpList, startTime, duration);
-        } catch (Exception e) {
-            logger.error("Failed to aggregate intermediate result set {}", e);
-        }
-        return null;
-    }
-}
```
@@ -1,83 +0,0 @@
package com.zdjizhi.etl;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.source.DosSketchSource;
import com.zdjizhi.utils.FlinkEnvironmentUtils;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;

public class ParseSketchLog {

    private static final Logger logger = LoggerFactory.getLogger(ParseSketchLog.class);

    public static SingleOutputStreamOperator<DosSketchLog> getSketchSource() {
        return flatSketchSource().assignTimestampsAndWatermarks(createWatermarkStrategy());
    }

    private static SingleOutputStreamOperator<DosSketchLog> flatSketchSource() {
        return DosSketchSource.createDosSketchSource().flatMap(new FlatSketchLog());
    }

    private static WatermarkStrategy<DosSketchLog> createWatermarkStrategy() {
        return WatermarkStrategy
                .<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(CommonConfig.FLINK_WATERMARK_MAX_ORDERNESS))
                .withTimestampAssigner((event, timestamp) -> event.getSketch_start_time() * 1000);
    }

    private static class FlatSketchLog implements FlatMapFunction<String, DosSketchLog> {
        @Override
        public void flatMap(String s, Collector<DosSketchLog> collector) throws Exception {
            try {
                if (StringUtil.isNotBlank(s)) {
                    HashMap<String, Object> sketchSource = (HashMap<String, Object>) JsonMapper.fromJsonString(s, Object.class);
                    String commonSledIp = sketchSource.get("common_sled_ip").toString();
                    String commonDataCenter = sketchSource.get("common_data_center").toString();
                    long sketchStartTime = Long.parseLong(sketchSource.get("sketch_start_time").toString());
                    long sketchDuration = Long.parseLong(sketchSource.get("sketch_duration").toString());
                    String attackType = sketchSource.get("attack_type").toString();
                    ArrayList<HashMap<String, Object>> reportIpList = (ArrayList<HashMap<String, Object>>) sketchSource.get("report_ip_list");
                    for (HashMap<String, Object> obj : reportIpList) {
                        DosSketchLog dosSketchLog = new DosSketchLog();
                        dosSketchLog.setCommon_sled_ip(commonSledIp);
                        dosSketchLog.setCommon_data_center(commonDataCenter);
                        dosSketchLog.setSketch_start_time(sketchStartTime);
                        dosSketchLog.setSketch_duration(sketchDuration);
                        dosSketchLog.setAttack_type(attackType);
                        String sourceIp = obj.get("source_ip").toString();
                        String destinationIp = obj.get("destination_ip").toString();
                        long sketchSessions = Long.parseLong(obj.get("sketch_sessions").toString());
                        long sketchPackets = Long.parseLong(obj.get("sketch_packets").toString());
                        long sketchBytes = Long.parseLong(obj.get("sketch_bytes").toString());
                        dosSketchLog.setSource_ip(sourceIp);
                        dosSketchLog.setDestination_ip(destinationIp);
                        // Normalize counters to per-second rates; bytes become bits (x8).
                        dosSketchLog.setSketch_sessions(sketchSessions / sketchDuration);
                        dosSketchLog.setSketch_packets(sketchPackets / sketchDuration);
                        dosSketchLog.setSketch_bytes(sketchBytes * 8 / sketchDuration);
                        collector.collect(dosSketchLog);
                        logger.info("Parsed sketch record: {}", dosSketchLog);
                    }
                }
            } catch (Exception e) {
                logger.error("Failed to parse record: {}", s, e);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        flatSketchSource().print();
        FlinkEnvironmentUtils.streamExeEnv.execute();
    }
}
@@ -1,32 +0,0 @@
package com.zdjizhi.etl;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class TrafficServerIpMetrics {

    private static final Logger logger = LoggerFactory.getLogger(TrafficServerIpMetrics.class);

    static DosMetricsLog getOutputMetric(DosSketchLog midResultLog) {
        DosMetricsLog dosMetricsLog = new DosMetricsLog();
        dosMetricsLog.setSketch_start_time(timeFloor(System.currentTimeMillis() / 1000));
        dosMetricsLog.setCommon_sled_ip(midResultLog.getCommon_sled_ip());
        dosMetricsLog.setCommon_data_center(midResultLog.getCommon_data_center());
        dosMetricsLog.setDestination_ip(midResultLog.getDestination_ip());
        dosMetricsLog.setAttack_type(midResultLog.getAttack_type());
        dosMetricsLog.setSession_rate(midResultLog.getSketch_sessions());
        dosMetricsLog.setPacket_rate(midResultLog.getSketch_packets());
        dosMetricsLog.setBit_rate(midResultLog.getSketch_bytes());
        logger.info("Metric record loaded: {}", dosMetricsLog);
        return dosMetricsLog;
    }

    private static long timeFloor(long sketchStartTime) {
        return sketchStartTime / CommonConfig.FLINK_WINDOW_MAX_TIME * CommonConfig.FLINK_WINDOW_MAX_TIME;
    }
}
295 src/main/java/com/zdjizhi/function/DosDetectionFunction.java Normal file
@@ -0,0 +1,295 @@
package com.zdjizhi.function;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.DateUtils;
import com.zdjizhi.common.*;
import com.zdjizhi.utils.Snowflakeld.SnowflakeId;
import com.zdjizhi.utils.Threshold.ParseBaselineThreshold;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.NumberFormat;
import java.util.*;
import java.util.stream.Collectors;

import static com.zdjizhi.conf.DosConfigs.*;

/**
 * @author wlh
 */
public class DosDetectionFunction extends ProcessFunction<DosSketchLog, DosEventLog> {
    private static final Log logger = LogFactory.get();
    private Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
    private final NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
    // private HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> thresholdRangeMap;
    private final int BASELINE_SIZE = 144;
    private final int STATIC_CONDITION_TYPE = 1;
    private final int BASELINE_CONDITION_TYPE = 2;
    private final int SENSITIVITY_CONDITION_TYPE = 3;
    private final String SESSIONS_TAG = "sessions";
    private final String PACKETS_TAG = "packets";
    private final String BITS_TAG = "bits";
    private final int OTHER_BASELINE_TYPE = 3;
    private SnowflakeId snowflakeId;
    private Configuration configuration;
    private ParseBaselineThreshold parseBaselineThreshold;

    @Override
    public void open(Configuration parameters) {
        configuration = (Configuration) getRuntimeContext()
                .getExecutionConfig().getGlobalJobParameters();

        snowflakeId = new SnowflakeId(configuration.get(DATA_CENTER_ID_NUM), getRuntimeContext().getIndexOfThisSubtask());

        try {
            parseBaselineThreshold = new ParseBaselineThreshold(configuration);
            baselineMap = parseBaselineThreshold.readFromHbase();
            // Periodically refresh the baselines from HBase.
            Timer timer = new Timer();
            timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    baselineMap = parseBaselineThreshold.readFromHbase();
                    logger.info("Loaded baselineMap from HBase: " + baselineMap);
                }
            }, configuration.get(BASELINE_THRESHOLD_SCHEDULE_DAYS) * 24 * 60 * 60 * 1000, configuration.get(BASELINE_THRESHOLD_SCHEDULE_DAYS) * 24 * 60 * 60 * 1000);
        } catch (Exception e) {
            logger.error("Failed to load baselineMap from HBase: " + e);
        }

        PERCENT_INSTANCE.setMinimumFractionDigits(2);
    }

    @Override
    public void processElement(DosSketchLog value, Context ctx, Collector<DosEventLog> out) throws Exception {
        DosEventLog finalResult = null;
        try {
            if (value.getRule_id() == 0) {
                String destinationIp = value.getServer_ip();
                int vsysId = value.getVsys_id();
                String key = destinationIp + "-" + vsysId;
                String attackType = value.getAttack_type();
                // Static per-IP threshold lookup is currently disabled, so threshold is always null here.
                DosDetectionThreshold threshold = null;
                logger.debug("Evaluating IP: {}, attack type: {}", key, attackType);
                if (threshold == null && baselineMap.containsKey(key)) {
                    finalResult = getDosEventLogByBaseline(value, key);
                } else if (threshold == null && !baselineMap.containsKey(key)) {
                    finalResult = getDosEventLogBySensitivityThreshold(value);
                } else {
                    logger.debug("No static threshold or baseline found for server IP {} and type {}", key, attackType);
                }
            } else {
                finalResult = getResult(value, 0, 0, Severity.MAJOR, 0.0, 0, "DoS Protection [" + value.getRule_id() + "]");
            }
        } catch (Exception e) {
            logger.error("Detection failed\n {} \n{}", value, e);
        }

        if (finalResult != null) {
            out.collect(finalResult);
        }
    }

    private DosEventLog getDosEventLogBySensitivityThreshold(DosSketchLog value) {
        long sketchSessionsRate = value.getSession_rate();
        Integer staticSensitivityThreshold = configuration.get(STATIC_SENSITIVITY_THRESHOLD);
        long diff = sketchSessionsRate - staticSensitivityThreshold;
        return getDosEventLog(value, staticSensitivityThreshold, diff, 0, SENSITIVITY_CONDITION_TYPE, SESSIONS_TAG);
    }

    private DosEventLog getDosEventLogByBaseline(DosSketchLog value, String key) {
        String attackType = value.getAttack_type();
        long sketchSessionsRate = value.getSession_rate();
        DosBaselineThreshold dosBaselineThreshold = baselineMap.get(key).get(attackType);
        Integer baseSessionRate = getBaseValue(dosBaselineThreshold, value);
        long diff = sketchSessionsRate - baseSessionRate;
        return getDosEventLog(value, baseSessionRate, diff, 0, BASELINE_CONDITION_TYPE, SESSIONS_TAG);
    }

    private DosEventLog getDosEventLog(DosSketchLog value, long base, long diff, long profileId, int type, String tag) {
        DosEventLog result = null;
        String destinationIp = value.getServer_ip();
        String attackType = value.getAttack_type();
        if (diff > 0 && base != 0) {
            double percent = getDiffPercent(diff, base);
            Severity severity = judgeSeverity(percent);
            Integer staticSensitivityThreshold = configuration.get(STATIC_SENSITIVITY_THRESHOLD);
            if (severity != Severity.NORMAL) {
                if (type == BASELINE_CONDITION_TYPE && percent < configuration.get(BASELINE_SENSITIVITY_THRESHOLD)) {
                    logger.debug("Server IP {}, type {}: ratio {} over baseline {} is below the baseline sensitivity threshold; record:\n{}", destinationIp, attackType, percent, base, value);
                } else if ((type == BASELINE_CONDITION_TYPE || type == SENSITIVITY_CONDITION_TYPE) && value.getSession_rate() < staticSensitivityThreshold) {
                    logger.debug("Server IP {}, type {}: session rate below the static sensitivity threshold (baseline {}, ratio {}); record:\n{}", destinationIp, attackType, base, percent, value);
                } else {
                    result = getResult(value, base, profileId, severity, percent, type, tag);
                    if (type == SENSITIVITY_CONDITION_TYPE) {
                        result.setSeverity(Severity.MAJOR.severity);
                    }
                    logger.info("Detected {} anomaly on server IP {}: {}x over baseline {} (condition type {}, tag {}); event:\n {}", attackType, destinationIp, percent, base, type, tag, result);
                }
            } else {
                logger.debug("No {} anomaly on server IP {}; record: {}", attackType, destinationIp, value);
            }
        }
        return result;
    }

    private DosEventLog getResult(DosSketchLog value, long base, long profileId, Severity severity, double percent, int type, String tag) {
        DosEventLog dosEventLog = new DosEventLog();
        dosEventLog.setRecv_time(value.getRecv_time());
        dosEventLog.setLog_id(snowflakeId.nextId());
        dosEventLog.setVsys_id(value.getVsys_id());
        dosEventLog.setStart_time(value.getStart_timestamp_ms() / 1000);
        dosEventLog.setEnd_time(value.getEnd_timestamp_ms() / 1000);
        dosEventLog.setProfile_id(profileId);
        dosEventLog.setRule_id(value.getRule_id());
        dosEventLog.setAttack_type(value.getAttack_type());
        if (base != 0) {
            dosEventLog.setSeverity(severity.severity);
            dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), base, value.getSession_rate(), type, tag, dosEventLog));
        } else {
            dosEventLog.setSeverity(severity.severity);
            dosEventLog.setConditions(tag);
        }
        dosEventLog.setDestination_ip(value.getServer_ip());
        dosEventLog.setDestination_country(value.getServer_country());
        dosEventLog.setSource_ip_list(value.getClient_ips().stream().filter(ip -> !ip.isEmpty()).collect(Collectors.joining(",")));
        dosEventLog.setSource_country_list(value.getClient_countrys().stream().filter(ip -> !ip.isEmpty()).collect(Collectors.joining(",")));
        dosEventLog.setSession_rate(value.getSession_rate());
        dosEventLog.setPacket_rate(value.getPacket_rate());
        dosEventLog.setBit_rate(value.getBit_rate());
        dosEventLog.setBytes(value.getBytes());
        dosEventLog.setSessions(value.getSessions());
        dosEventLog.setPackets(value.getPkts());
        return dosEventLog;
    }

    private Integer getBaseValue(DosBaselineThreshold dosBaselineThreshold, DosSketchLog value) {
        Integer base = 0;
        try {
            if (dosBaselineThreshold != null) {
                ArrayList<Integer> baselines = dosBaselineThreshold.getSession_rate();
                Integer defaultValue = dosBaselineThreshold.getSession_rate_default_value();
                Integer sessionRateBaselineType = dosBaselineThreshold.getSession_rate_baseline_type();
                if (baselines != null && baselines.size() == BASELINE_SIZE) {
                    // getCurrentTimeIndex expects epoch seconds; the sketch timestamp is in milliseconds.
                    int timeIndex = getCurrentTimeIndex(value.getStart_timestamp_ms() / 1000);
                    base = baselines.get(timeIndex);
                    if (base == 0) {
                        logger.debug("Baseline value for IP {} (type {}) is 0; falling back to the P95 observed value {}", value.getServer_ip(), value.getAttack_type(), defaultValue);
                        base = defaultValue;
                    }
                    if (sessionRateBaselineType == OTHER_BASELINE_TYPE && base < configuration.get(STATIC_SENSITIVITY_THRESHOLD)) {
                        base = configuration.get(STATIC_SENSITIVITY_THRESHOLD);
                    }
                }
            }
        } catch (Exception e) {
            logger.error("Failed to parse baseline data; returning default value 0", e);
        }
        return base;
    }

    private String getConditions(String percent, long base, long sessions, int type, String tag, DosEventLog dosEventLog) {
        int condition = 0;
        if ("Minor".equals(dosEventLog.getSeverity())) {
            condition = 50;
        } else if ("Warning".equals(dosEventLog.getSeverity())) {
            condition = 100;
        } else if ("Major".equals(dosEventLog.getSeverity())) {
            condition = 250;
        } else if ("Severe".equals(dosEventLog.getSeverity())) {
            condition = 500;
        } else if ("Critical".equals(dosEventLog.getSeverity())) {
            condition = 800;
        }
        switch (type) {
            case STATIC_CONDITION_TYPE:
                return "Rate > " + base + " " + tag + "/s" + "(>" + condition + "%)";
            case BASELINE_CONDITION_TYPE:
                return tag + " > " + percent + " of baseline";
            case SENSITIVITY_CONDITION_TYPE:
                return sessions + " " + tag + "/s Unusually high " + StringUtils.capitalize(tag);
            default:
                throw new IllegalArgumentException("Illegal Argument type:" + type + ", known types = [1,2,3]");
        }
    }

    private int getCurrentTimeIndex(long sketchStartTime) {
        int index = 0;
        try {
            // 144 buckets per day = one baseline value per 600 seconds (86400 / 144).
            long currentDayTime = DateUtils.getTimeFloor(new Date(sketchStartTime * 1000L), "P1D").getTime() / 1000;
            long indexLong = (sketchStartTime - currentDayTime) / (86400 / BASELINE_SIZE);
            index = (int) indexLong;
        } catch (Exception e) {
            logger.error("Failed to compute time index", e);
        }
        return index;
    }

    private Double getDiffPercent(long diff, long base) {
        try {
            return BigDecimal.valueOf((float) diff / base).setScale(4, RoundingMode.HALF_UP).doubleValue();
        } catch (Exception e) {
            // base == 0 yields an infinite float, which BigDecimal.valueOf rejects; fall through.
            logger.info("Current threshold is 0; proceeding to the next threshold condition", e);
            return 0.0;
        }
    }

    private Severity judgeSeverity(double diffPercent) {
        if (diffPercent >= configuration.get(BASELINE_SESSIONS_MINOR_THRESHOLD) && diffPercent < configuration.get(BASELINE_SESSIONS_WARNING_THRESHOLD)) {
            return Severity.MINOR;
        } else if (diffPercent >= configuration.get(BASELINE_SESSIONS_WARNING_THRESHOLD) && diffPercent < configuration.get(BASELINE_SESSIONS_MAJOR_THRESHOLD)) {
            return Severity.WARNING;
        } else if (diffPercent >= configuration.get(BASELINE_SESSIONS_MAJOR_THRESHOLD) && diffPercent < configuration.get(BASELINE_SESSIONS_SEVERE_THRESHOLD)) {
            return Severity.MAJOR;
        } else if (diffPercent >= configuration.get(BASELINE_SESSIONS_SEVERE_THRESHOLD) && diffPercent < configuration.get(BASELINE_SESSIONS_CRITICAL_THRESHOLD)) {
            return Severity.SEVERE;
        } else if (diffPercent >= configuration.get(BASELINE_SESSIONS_CRITICAL_THRESHOLD)) {
            return Severity.CRITICAL;
        } else {
            return Severity.NORMAL;
        }
    }

    private enum Severity {
        /**
         * Severity levels for classifying how far traffic exceeds the baseline.
         */
        CRITICAL("Critical"),
        SEVERE("Severe"),
        MAJOR("Major"),
        WARNING("Warning"),
        MINOR("Minor"),
        NORMAL("Normal");

        private final String severity;

        @Override
        public String toString() {
            return this.severity;
        }

        Severity(String severity) {
            this.severity = severity;
        }
    }
}
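To make the bucket arithmetic in getCurrentTimeIndex concrete, a standalone sketch (the timestamp is an arbitrary epoch-seconds value, and the day floor is computed directly rather than via DateUtils):

// Worked example of the 144-bucket baseline index, assuming epoch seconds as input.
long startSeconds = 1_695_945_600L + 9_000;        // 02:30:00 into some UTC day
long dayStart = startSeconds / 86_400 * 86_400;    // floor to midnight UTC
int index = (int) ((startSeconds - dayStart) / (86_400 / 144)); // 600-second buckets
// 9_000 s into the day / 600 s per bucket = bucket 15, so baselines.get(15)
// is the value compared against the observed session rate.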
@@ -0,0 +1,67 @@
package com.zdjizhi.function;

import com.alibaba.fastjson2.JSON;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.zdjizhi.conf.DosConfigs.DESTINATION_IP_PARTITION_NUM;
import static com.zdjizhi.conf.DosConfigs.FLINK_WINDOW_MAX_TIME;

public class DosMetricsRichFunction extends RichFlatMapFunction<DosSketchLog, String> {
    private static final Logger logger = LoggerFactory.getLogger(DosMetricsRichFunction.class);

    private Configuration configuration;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        configuration = (Configuration) getRuntimeContext()
                .getExecutionConfig().getGlobalJobParameters();
    }

    @Override
    public void flatMap(DosSketchLog dosSketchLog, Collector<String> out) throws Exception {
        try {
            if (dosSketchLog.getRule_id() == 0) {
                DosMetricsLog dosMetricsLog = new DosMetricsLog();
                dosMetricsLog.setSketch_start_time(dosSketchLog.getStart_timestamp_ms() / 1000);
                dosMetricsLog.setDestination_ip(dosSketchLog.getServer_ip());
                dosMetricsLog.setAttack_type(dosSketchLog.getAttack_type());
                dosMetricsLog.setSession_rate(dosSketchLog.getSession_rate());
                dosMetricsLog.setPacket_rate(dosSketchLog.getPacket_rate());
                dosMetricsLog.setBit_rate(dosSketchLog.getBit_rate());
                dosMetricsLog.setVsys_id(dosSketchLog.getVsys_id());
                dosMetricsLog.setPartition_num(getPartitionNumByIp(dosSketchLog.getServer_ip()));
                String jsonString = JSON.toJSONString(dosMetricsLog);
                logger.debug("Metric record loaded: {}", jsonString);
                out.collect(jsonString);
            }
        } catch (Exception e) {
            logger.error("Failed to build metric record:", e);
        }
    }

    private long timeFloor(long sketchStartTime) {
        return sketchStartTime / configuration.get(FLINK_WINDOW_MAX_TIME) * configuration.get(FLINK_WINDOW_MAX_TIME);
    }

    private int getPartitionNumByIp(String destinationIp) {
        // Stable hash partitioning: metrics for the same destination IP always land in the same partition.
        if (destinationIp != null) {
            return Math.abs(destinationIp.hashCode()) % configuration.get(DESTINATION_IP_PARTITION_NUM);
        }
        return 0;
    }
}
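A quick sanity check of the hash partitioning above; note that Math.abs(Integer.MIN_VALUE) is still negative, so (hashCode() & Integer.MAX_VALUE) % n would be the safer idiom if a string ever hashed to exactly Integer.MIN_VALUE. The partition count below is illustrative:

int partitions = 8;                      // illustrative DESTINATION_IP_PARTITION_NUM
String ip = "203.0.113.7";
int partition = Math.abs(ip.hashCode()) % partitions;
// String.hashCode() is specified by the Java language, so the same IP maps to the
// same partition on every JVM, keeping per-IP metric series together downstream.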
63 src/main/java/com/zdjizhi/function/FlatSketchFunction.java Normal file
@@ -0,0 +1,63 @@
package com.zdjizhi.function;

import com.alibaba.fastjson2.JSONObject;
import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.DosSketchLog;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashSet;

public class FlatSketchFunction implements FlatMapFunction<String, DosSketchLog> {
    private static final Logger logger = LoggerFactory.getLogger(FlatSketchFunction.class);

    @Override
    public void flatMap(String value, Collector<DosSketchLog> out) {
        try {
            if (StringUtil.isNotBlank(value)) {
                DosSketchLog dosSketchLog = JSONObject.parseObject(value, DosSketchLog.class);
                dosSketchLog.setRecv_time(System.currentTimeMillis() / 1000);
                dosSketchLog.setStart_timestamp_ms(dosSketchLog.getTimestamp_ms());
                if (dosSketchLog.getDuration() <= 0) {
                    // Guard against missing or bogus durations; default to one minute.
                    dosSketchLog.setDuration(60000);
                }
                dosSketchLog.setEnd_timestamp_ms(dosSketchLog.getTimestamp_ms() + dosSketchLog.getDuration());
                HashSet<String> client_ips = new HashSet<>();
                HashSet<String> client_countrys = new HashSet<>();
                dosSketchLog.setClient_ips(client_ips);
                dosSketchLog.setClient_countrys(client_countrys);

                if ("top_client_and_server_ip".equals(dosSketchLog.getName())) {
                    dosSketchLog.setDecoded_as("");
                    if (dosSketchLog.getClient_ip() != null) {
                        client_ips.add(dosSketchLog.getClient_ip());
                    }
                    if (dosSketchLog.getClient_country() != null && !dosSketchLog.getClient_country().isEmpty()) {
                        client_countrys.add(dosSketchLog.getClient_country());
                    }
                } else if ("top_client_ip_and_server_ip".equals(dosSketchLog.getName())) {
                    dosSketchLog.setPkts(0);
                    dosSketchLog.setBytes(0);
                    dosSketchLog.setSessions(0);
                    if (dosSketchLog.getClient_ip() != null) {
                        client_ips.add(dosSketchLog.getClient_ip());
                    }
                    if (dosSketchLog.getClient_country() != null && !dosSketchLog.getClient_country().isEmpty()) {
                        client_countrys.add(dosSketchLog.getClient_country());
                    }
                }
                out.collect(dosSketchLog);
            }
        } catch (Exception e) {
            logger.error("Failed to parse record: {}", value, e);
        }
    }
}
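For orientation, a record that would exercise the parser above might look like the following sketch; the field names are taken from the getters used in the code, while the values are made up:

// Hypothetical input; only the field names are grounded in the code above.
String sample = "{\"name\":\"top_client_and_server_ip\",\"timestamp_ms\":1695945600000,"
        + "\"duration\":60000,\"server_ip\":\"198.51.100.10\",\"client_ip\":\"203.0.113.7\","
        + "\"client_country\":\"US\",\"vsys_id\":1,\"rule_id\":0,"
        + "\"sessions\":1200,\"pkts\":50000,\"bytes\":6400000}";
DosSketchLog parsed = JSONObject.parseObject(sample, DosSketchLog.class);
// flatMap would stamp recv_time, keep the 60000 ms duration, and collect the record
// with client_ips = {"203.0.113.7"} and client_countrys = {"US"}.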
@@ -0,0 +1,26 @@
package com.zdjizhi.function;

import com.zdjizhi.common.DosSketchLog;
import org.apache.flink.api.common.functions.ReduceFunction;

public class MetricsAggregationReduce implements ReduceFunction<DosSketchLog> {

    @Override
    public DosSketchLog reduce(DosSketchLog value1, DosSketchLog value2) throws Exception {
        // Sum the counters and widen the time span to cover both records.
        value1.setPkts(value1.getPkts() + value2.getPkts());
        value1.setBytes(value1.getBytes() + value2.getBytes());
        value1.setSessions(value1.getSessions() + value2.getSessions());
        if (value1.getRecv_time() > value2.getRecv_time()) {
            value1.setRecv_time(value2.getRecv_time());
        }
        if (value1.getStart_timestamp_ms() > value2.getStart_timestamp_ms()) {
            value1.setStart_timestamp_ms(value2.getStart_timestamp_ms());
        }
        if (value1.getEnd_timestamp_ms() < value2.getEnd_timestamp_ms()) {
            value1.setEnd_timestamp_ms(value2.getEnd_timestamp_ms());
        }
        value1.getClient_ips().addAll(value2.getClient_ips());
        value1.getClient_countrys().addAll(value2.getClient_countrys());
        return value1;
    }
}
55 src/main/java/com/zdjizhi/function/MetricsCalculate.java Normal file
@@ -0,0 +1,55 @@
package com.zdjizhi.function;

import com.zdjizhi.common.DosSketchLog;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

public class MetricsCalculate extends ProcessWindowFunction<
        DosSketchLog,                             // input type
        DosSketchLog,                             // output type
        Tuple4<String, String, Integer, Integer>, // key type
        TimeWindow> {                             // window type
    private final Map<String, String> attackTypeMapping = new HashMap<>();
    private static final Logger logger = LoggerFactory.getLogger(MetricsCalculate.class);

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        attackTypeMapping.put("TCP SYN", "TCP SYN Flood");
        // The original code mapped "DNS" to "UDP Flood" and "UDP" to "DNS Flood",
        // which looks like a copy-paste swap; corrected here to match the other entries.
        attackTypeMapping.put("DNS", "DNS Flood");
        attackTypeMapping.put("UDP", "UDP Flood");
        attackTypeMapping.put("ICMP", "ICMP Flood");
        attackTypeMapping.put("NTP", "NTP Flood");
        attackTypeMapping.put("", "Custom Network Attack");
    }

    @Override
    public void process(Tuple4<String, String, Integer, Integer> key, Context context, Iterable<DosSketchLog> elements, Collector<DosSketchLog> out) throws Exception {
        for (DosSketchLog dosSketchLog : elements) {
            try {
                long duration = dosSketchLog.getEnd_timestamp_ms() - dosSketchLog.getStart_timestamp_ms();
                if (duration <= 0) {
                    duration = dosSketchLog.getDuration();
                    dosSketchLog.setEnd_timestamp_ms(dosSketchLog.getStart_timestamp_ms() + duration);
                }
                // Durations are in milliseconds; divide by 1000 to get per-second rates.
                dosSketchLog.setSession_rate(dosSketchLog.getSessions() / (duration / 1000));
                dosSketchLog.setPacket_rate(dosSketchLog.getPkts() / (duration / 1000));
                dosSketchLog.setBit_rate(dosSketchLog.getBytes() * 8 / (duration / 1000));
                dosSketchLog.setAttack_type(attackTypeMapping.get(dosSketchLog.getDecoded_as()));
            } catch (RuntimeException e) {
                logger.error(e.toString());
            }
            out.collect(dosSketchLog);
        }
    }
}
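A worked example of the rate normalization above (values are illustrative):

long sessions = 1200, pkts = 50000, bytes = 6400000;
long durationMs = 60000;                             // one 60-second window
long sessionRate = sessions / (durationMs / 1000);   // 1200 / 60  = 20 sessions/s
long packetRate  = pkts / (durationMs / 1000);       // 50000 / 60 = 833 pkts/s
long bitRate     = bytes * 8 / (durationMs / 1000);  // 51200000 / 60 = 853333 bits/s
// Note: a duration under 1000 ms makes (durationMs / 1000) zero and throws
// ArithmeticException, which the RuntimeException catch above absorbs.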
16 src/main/java/com/zdjizhi/function/SketchKeysSelector.java Normal file
@@ -0,0 +1,16 @@
package com.zdjizhi.function;

import com.zdjizhi.common.DosSketchLog;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;

public class SketchKeysSelector implements KeySelector<DosSketchLog, Tuple4<String, String, Integer, Integer>> {
    @Override
    public Tuple4<String, String, Integer, Integer> getKey(DosSketchLog dosSketchLog) {
        return Tuple4.of(
                dosSketchLog.getDecoded_as(),
                dosSketchLog.getServer_ip(),
                dosSketchLog.getVsys_id(),
                dosSketchLog.getRule_id());
    }
}
@@ -1,11 +1,84 @@
package com.zdjizhi.main;

import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.conf.DosConfiguration;
import com.zdjizhi.function.*;
import com.zdjizhi.utils.connections.kafka.KafkaConsumer;
import com.zdjizhi.utils.connections.kafka.KafkaProducer;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.time.Duration;
import java.util.Objects;

import static com.zdjizhi.conf.DosConfigs.*;

/**
 * @author wangchengcheng
 * Main entry point of the application.
 */
public class DosDetectionApplication {

    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // param check
        if (args.length < 1) {
            throw new IllegalArgumentException("Error: Not found properties path. " +
                    "\nUsage: flink -c xxx xxx.jar app.properties.");
        }
        final ParameterTool tool = ParameterTool.fromPropertiesFile(args[0]);
        final Configuration config = tool.getConfiguration();
        env.getConfig().setGlobalJobParameters(config);

        final DosConfiguration dosConfiguration = new DosConfiguration(config);

        // Source settings
        final DataStreamSource<String> dosStreamSource = env.addSource(KafkaConsumer.getKafkaConsumer(config.get(SOURCE_KAFKA_TOPIC), dosConfiguration
                .getProperties(SOURCE_KAFKA_PROPERTIES_PREFIX))).setParallelism(config.get(SOURCE_PARALLELISM));

        // Watermark settings
        WatermarkStrategy<DosSketchLog> dosSketchLogWatermarkStrategy = WatermarkStrategy
                .<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(config.get(FLINK_WATERMARK_MAX_ORDERNESS)))
                .withTimestampAssigner((event, timestamp) -> event.getTimestamp_ms());

        // Data preprocessing
        SingleOutputStreamOperator<DosSketchLog> sketchSource = dosStreamSource.flatMap(new FlatSketchFunction()).setParallelism(1)
                .assignTimestampsAndWatermarks(dosSketchLogWatermarkStrategy);

        SingleOutputStreamOperator<DosSketchLog> serverIpMetrics = sketchSource.keyBy(new SketchKeysSelector())
                .window(TumblingEventTimeWindows.of(Time.seconds(config.get(FLINK_WINDOW_MAX_TIME))))
                .reduce(new MetricsAggregationReduce(), new MetricsCalculate())
                .setParallelism(config.get(Flink_FIRST_AGG_PATALLELISM));

        // DoS detection
        SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = serverIpMetrics.process(new DosDetectionFunction())
                .setParallelism(config.get(FLINK_DETECTION_MAP_PARALLELISM));

        SingleOutputStreamOperator<String> dosMetricsLogOutputStream = serverIpMetrics.flatMap(new DosMetricsRichFunction())
                .setParallelism(config.get(FLINK_DETECTION_MAP_PARALLELISM));

        dosMetricsLogOutputStream.addSink(KafkaProducer.getKafkaProducer(config.get(KAFKA_SINK_METRIC_TOPIC), dosConfiguration.getProperties(SINK_KAFKA_PROPERTIES_PREFIX)))
                .setParallelism(config.get(KAFKA_SINK_METRIC_PARALLELISM));

        // DoS event output
        dosEventLogOutputStream.filter(Objects::nonNull)
                .map(JSONObject::toJSONString)
                .addSink(KafkaProducer.getKafkaProducer(config.get(KAFKA_SINK_EVENT_TOPIC), dosConfiguration.getProperties(SINK_KAFKA_PROPERTIES_PREFIX)))
                .setParallelism(config.get(KAFKA_SINK_EVENT_PARALLELISM));

        env.execute(config.get(JOB_NAME));
    }
}
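For intuition, the bounded-out-of-orderness strategy above holds the event-time clock back by the configured slack. A rough standalone sketch of the arithmetic (values are illustrative; the exact off-by-one follows Flink's BoundedOutOfOrdernessWatermarks):

// Assume a 10-second out-of-orderness bound and a 60s tumbling window covering [0, 60000) ms.
long outOfOrdernessMs = 10_000;                           // illustrative config value
long maxTimestampSeen = 69_499;                           // highest event timestamp observed so far
long watermark = maxTimestampSeen - outOfOrdernessMs - 1; // 59_498: window [0, 60000) is still open
// Only once an event with timestamp >= 70_001 arrives does the watermark reach 60_000
// and the [0, 60000) window fire; later events for that window are then dropped.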
@@ -1,16 +0,0 @@
package com.zdjizhi.sink;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

class DosEventSink {

    static void dosEventOutputSink(SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream) {
        dosEventLogOutputStream.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_EVENT_TOPIC_NAME))
                .setParallelism(CommonConfig.KAFKA_OUTPUT_EVENT_PARALLELISM);
    }
}
@@ -1,138 +0,0 @@
package com.zdjizhi.sink;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.etl.EtlProcessFunction;
import com.zdjizhi.etl.DosDetection;
import com.zdjizhi.etl.ParseSketchLog;
import com.zdjizhi.source.BaselineSource;
import com.zdjizhi.utils.FlinkEnvironmentUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.api.java.typeutils.MapTypeInfo;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * @author 94976
 */
public class OutputStreamSink {
    private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);

    public static OutputTag<DosMetricsLog> outputTag = new OutputTag<DosMetricsLog>("traffic server ip metrics"){};

    private static MapStateDescriptor<String, Map<String, Map<String, List<Integer>>>> descriptor = new MapStateDescriptor<>("broadcast-state",
            Types.STRING,
            new MapTypeInfo<>(String.class, new MapTypeInfo<>(String.class, (Class<List<Integer>>) (Class<?>) List.class).getTypeClass()));

    public static void finalOutputSink() {
        try {
            SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
            SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream(middleStream);
            DosEventSink.dosEventOutputSink(dosEventLogOutputStream);
            TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
            FlinkEnvironmentUtils.streamExeEnv.execute(CommonConfig.STREAM_EXECUTION_JOB_NAME);
        } catch (Exception e) {
            logger.error("Failed to start job", e);
        }
    }

    public static void main(String[] args) throws Exception {
        SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
        SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream(middleStream);
        DosEventSink.dosEventOutputSink(dosEventLogOutputStream);
        TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
        dosEventLogOutputStream.print();
        FlinkEnvironmentUtils.streamExeEnv.execute();
    }

    private static SingleOutputStreamOperator<DosEventLog> getOutputSinkStream(SingleOutputStreamOperator<DosSketchLog> middleStream) {
        BroadcastStream<Map<String, Map<String, List<Integer>>>> broadcast = FlinkEnvironmentUtils.streamExeEnv
                .addSource(new BaselineSource())
                .setParallelism(CommonConfig.HBASE_INPUT_PARALLELISM)
                .broadcast(descriptor);
        logger.info("Broadcast state loaded successfully");

        return middleStream.keyBy(new SecondKeySelector())
                // .window(TumblingEventTimeWindows.of(Time.seconds(CommonConfig.FLINK_WINDOW_MAX_TIME)))
                .reduce(new SecondReduceFunc())
                .connect(broadcast)
                .process(new DosDetection())
                .setParallelism(CommonConfig.FLINK_SECOND_AGG_PARALLELISM);
    }

    private static SingleOutputStreamOperator<DosSketchLog> getMiddleStream() {
        return ParseSketchLog.getSketchSource()
                .keyBy(new FirstKeySelector())
                .window(TumblingEventTimeWindows.of(Time.seconds(CommonConfig.FLINK_WINDOW_MAX_TIME)))
                .process(new EtlProcessFunction())
                .setParallelism(CommonConfig.FLINK_FIRST_AGG_PARALLELISM);
    }

    private static String groupUniqSourceIp(String sourceIp1, String sourceIp2) {
        HashSet<String> sourceIpSet = new HashSet<>();
        Collections.addAll(sourceIpSet, (sourceIp1 + "," + sourceIp2).split(","));
        if (sourceIpSet.size() > CommonConfig.SOURCE_IP_LIST_LIMIT) {
            return StringUtils.join(takeUniqLimit(sourceIpSet, CommonConfig.SOURCE_IP_LIST_LIMIT), ",");
        }
        return StringUtils.join(sourceIpSet, ",");
    }

    private static <T> Collection<T> takeUniqLimit(Collection<T> collection, int limit) {
        int i = 0;
        Collection<T> newSet = new HashSet<>();
        for (T t : collection) {
            if (i < limit) {
                newSet.add(t);
                i += 1;
            }
        }
        return newSet;
    }

    private static class FirstKeySelector implements KeySelector<DosSketchLog, Tuple4<String, String, String, String>> {
        @Override
        public Tuple4<String, String, String, String> getKey(DosSketchLog dosSketchLog) throws Exception {
            return Tuple4.of(
                    dosSketchLog.getCommon_sled_ip(),
                    dosSketchLog.getCommon_data_center(),
                    dosSketchLog.getAttack_type(),
                    dosSketchLog.getDestination_ip());
        }
    }

    private static class SecondKeySelector implements KeySelector<DosSketchLog, Tuple2<String, String>> {
        @Override
        public Tuple2<String, String> getKey(DosSketchLog dosSketchLog) throws Exception {
            return Tuple2.of(
                    dosSketchLog.getAttack_type(),
                    dosSketchLog.getDestination_ip());
        }
    }

    private static class SecondReduceFunc implements ReduceFunction<DosSketchLog> {
        @Override
        public DosSketchLog reduce(DosSketchLog value1, DosSketchLog value2) throws Exception {
            // Running average of the rates and union of the (capped) source IP lists.
            value1.setSketch_sessions((value1.getSketch_sessions() + value2.getSketch_sessions()) / 2);
            value1.setSketch_bytes((value1.getSketch_bytes() + value2.getSketch_bytes()) / 2);
            value1.setSketch_packets((value1.getSketch_packets() + value2.getSketch_packets()) / 2);
            value1.setSource_ip(groupUniqSourceIp(value1.getSource_ip(), value2.getSource_ip()));
            return value1;
        }
    }
}
@@ -1,21 +0,0 @@
package com.zdjizhi.sink;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.DosMetricsLog;
import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

import static com.zdjizhi.sink.OutputStreamSink.outputTag;

class TrafficServerIpMetricsSink {

    static void sideOutputMetricsSink(SingleOutputStreamOperator<DosSketchLog> outputStream) {
        DataStream<DosMetricsLog> sideOutput = outputStream.getSideOutput(outputTag);
        sideOutput.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
                .setParallelism(CommonConfig.KAFKA_OUTPUT_METRIC_PARALLELISM);
    }
}
@@ -1,128 +0,0 @@
package com.zdjizhi.source;

import com.zdjizhi.common.CommonConfig;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author wlh
 */
public class BaselineSource extends RichSourceFunction<Map<String, Map<String, List<Integer>>>> {

    private static final Logger logger = LoggerFactory.getLogger(BaselineSource.class);
    private Connection conn = null;
    private Table table = null;
    private Scan scan = null;

    @Override
    public void open(Configuration parameters) throws Exception {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();

        config.set("hbase.zookeeper.quorum", CommonConfig.HBASE_ZOOKEEPER_QUORUM);
        config.set("hbase.client.retries.number", "3");
        config.set("hbase.bulkload.retries.number", "3");
        config.set("zookeeper.recovery.retry", "3");
        config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, CommonConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
        config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, CommonConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);

        TableName tableName = TableName.valueOf(CommonConfig.HBASE_BASELINE_TABLE_NAME);
        conn = ConnectionFactory.createConnection(config);
        table = conn.getTable(tableName);
        scan = new Scan().setAllowPartialResults(true).setLimit(CommonConfig.HBASE_BASELINE_TOTAL_NUM);
        logger.info("Connected to HBase; reading baseline data");

        // .addFamily(Bytes.toBytes(CommonConfig.HBASE_BASELINE_FAMLIY_NAME));
    }

    @Override
    public void close() throws Exception {
        super.close();
    }

    @Override
    public void run(SourceContext<Map<String, Map<String, List<Integer>>>> sourceContext) throws Exception {
        logger.info("Starting to read baseline data");
        ResultScanner rs = table.getScanner(scan);
        Map<String, Map<String, List<Integer>>> baselineMap = new HashMap<>();
        for (Result result : rs) {
            Map<String, List<Integer>> floodTypeMap = new HashMap<>();
            String rowkey = Bytes.toString(result.getRow());
            ArrayList<Integer> tcp = getArraylist(result, "TCP SYN Flood", "session_num");
            ArrayList<Integer> udp = getArraylist(result, "UDP Flood", "session_num");
            ArrayList<Integer> icmp = getArraylist(result, "ICMP Flood", "session_num");
            ArrayList<Integer> dns = getArraylist(result, "DNS Amplification", "session_num");
            floodTypeMap.put("TCP SYN Flood", tcp);
            floodTypeMap.put("UDP Flood", udp);
            floodTypeMap.put("ICMP Flood", icmp);
            floodTypeMap.put("DNS Amplification", dns);
            baselineMap.put(rowkey, floodTypeMap);
        }
        sourceContext.collect(baselineMap);
        logger.info("Formatted baseline data; IPs read: {}", baselineMap.size());
    }

    private static ArrayList<Integer> getArraylist(Result result, String family, String qualifier) throws IOException {
        if (!result.containsColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier))) {
            return null;
        }
        // Baselines are stored as a Hadoop ArrayWritable of IntWritable values.
        ArrayWritable w = new ArrayWritable(IntWritable.class);
        w.readFields(new DataInputStream(new ByteArrayInputStream(result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier)))));
        return fromWritable(w);
    }

    private static ArrayList<Integer> fromWritable(ArrayWritable writable) {
        Writable[] writables = writable.get();
        ArrayList<Integer> list = new ArrayList<>(writables.length);
        for (Writable wrt : writables) {
            list.add(((IntWritable) wrt).get());
        }
        return list;
    }

    @Override
    public void cancel() {
        try {
            if (table != null) {
                table.close();
            }
            if (conn != null) {
                conn.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000);
        DataStreamSource<Map<String, Map<String, List<Integer>>>> mapDataStreamSource = env.addSource(new BaselineSource());
        DataStream<Map<String, Map<String, List<Integer>>>> broadcast = mapDataStreamSource.broadcast();
        mapDataStreamSource.print();
        env.execute();
    }
}
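To illustrate the storage format read back by getArraylist above, here is a minimal round-trip sketch using the same Hadoop Writable types. It assumes the writer job uses the same ArrayWritable-of-IntWritable layout; the sample values are made up:

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

public class BaselineRoundTrip {
    public static void main(String[] args) throws Exception {
        // Serialize 144 baseline values the way a writer job would store them in HBase.
        IntWritable[] values = new IntWritable[144];
        for (int i = 0; i < values.length; i++) {
            values[i] = new IntWritable(i * 10); // made-up session rates
        }
        ArrayWritable out = new ArrayWritable(IntWritable.class, values);
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        out.write(new DataOutputStream(buf));

        // Read back exactly as BaselineSource.getArraylist does.
        ArrayWritable in = new ArrayWritable(IntWritable.class);
        in.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        System.out.println(((IntWritable) in.get()[15]).get()); // prints 150
    }
}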
@@ -1,26 +0,0 @@
package com.zdjizhi.source;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.utils.FlinkEnvironmentUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

public class DosSketchSource {

    private static StreamExecutionEnvironment streamExeEnv = FlinkEnvironmentUtils.streamExeEnv;

    public static DataStreamSource<String> createDosSketchSource() {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", CommonConfig.KAFKA_INPUT_BOOTSTRAP_SERVERS);
        properties.setProperty("group.id", CommonConfig.KAFKA_GROUP_ID);

        return streamExeEnv.addSource(new FlinkKafkaConsumer<String>(
                CommonConfig.KAFKA_INPUT_TOPIC_NAME,
                new SimpleStringSchema(), properties))
                .setParallelism(CommonConfig.KAFKA_INPUT_PARALLELISM);
    }
}
@@ -1,45 +0,0 @@
package com.zdjizhi.utils;

import java.util.Properties;

public final class CommonConfigurations {

    private static Properties propService = new Properties();

    public static String getStringProperty(String key) {
        return propService.getProperty(key);
    }

    public static Integer getIntProperty(String key) {
        return Integer.parseInt(propService.getProperty(key));
    }

    public static Double getDoubleProperty(String key) {
        return Double.parseDouble(propService.getProperty(key));
    }

    public static Long getLongProperty(String key) {
        return Long.parseLong(propService.getProperty(key));
    }

    public static Boolean getBooleanProperty(Integer type, String key) {
        // Note: the "type" parameter is unused.
        return "true".equals(propService.getProperty(key).toLowerCase().trim());
    }

    static {
        try {
            propService.load(CommonConfigurations.class.getClassLoader().getResourceAsStream("common.properties"));
        } catch (Exception e) {
            propService = null;
        }
    }
}
@@ -1,199 +0,0 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import org.apache.zookeeper.*;
|
||||
import org.apache.zookeeper.data.Stat;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.locks.Condition;
|
||||
import java.util.concurrent.locks.Lock;
|
||||
|
||||
|
||||
|
||||
public class DistributedLock implements Lock, Watcher {
|
||||
private static final Logger logger = LoggerFactory.getLogger(DistributedLock.class);
|
||||
|
||||
private ZooKeeper zk = null;
|
||||
/**
|
||||
* 根节点
|
||||
*/
|
||||
private final String ROOT_LOCK = "/locks";
|
||||
/**
|
||||
* 竞争的资源
|
||||
*/
|
||||
    private String lockName;

    /**
     * The previous lock node to wait on
     */
    private String waitLock;

    /**
     * The current lock node
     */
    private String currentLock;

    /**
     * Countdown latch used to wait for the previous node to be released
     */
    private CountDownLatch countDownLatch;

    private int sessionTimeout = 2000;

    private List<Exception> exceptionList = new ArrayList<Exception>();

    /**
     * Configure the distributed lock
     *
     * @param config   connection URL
     * @param lockName resource being contended for
     */
    public DistributedLock(String config, String lockName) {
        this.lockName = lockName;
        try {
            // Connect to ZooKeeper
            zk = new ZooKeeper(config, sessionTimeout, this);
            Stat stat = zk.exists(ROOT_LOCK, false);
            if (stat == null) {
                // Create the root node if it does not exist yet
                zk.create(ROOT_LOCK, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
            }
        } catch (IOException | InterruptedException | KeeperException e) {
            logger.error("Failed to connect to ZooKeeper or create the root node: " + e);
        }
    }

    /**
     * Node watcher
     */
    @Override
    public void process(WatchedEvent event) {
        if (this.countDownLatch != null) {
            this.countDownLatch.countDown();
        }
    }

    @Override
    public void lock() {
        if (exceptionList.size() > 0) {
            throw new LockException(exceptionList.get(0));
        }
        try {
            if (this.tryLock()) {
                logger.info(Thread.currentThread().getName() + " " + lockName + " acquired the lock");
            } else {
                // Wait for the lock
                waitForLock(waitLock, sessionTimeout);
            }
        } catch (InterruptedException | KeeperException e) {
            logger.error("Exception while acquiring the lock: " + e);
        }
    }

    @Override
    public boolean tryLock() {
        try {
            String splitStr = "_lock_";
            if (lockName.contains(splitStr)) {
                throw new LockException("Invalid lock name");
            }
            // Create an ephemeral sequential node
            currentLock = zk.create(ROOT_LOCK + "/" + lockName + splitStr, new byte[0],
                    ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
            // Fetch all child nodes
            List<String> subNodes = zk.getChildren(ROOT_LOCK, false);
            // Collect all locks belonging to lockName
            List<String> lockObjects = new ArrayList<String>();
            for (String node : subNodes) {
                String tmpNode = node.split(splitStr)[0];
                if (tmpNode.equals(lockName)) {
                    lockObjects.add(node);
                }
            }
            Collections.sort(lockObjects);
            // If the current node is the smallest one, the lock is acquired
            if (currentLock.equals(ROOT_LOCK + "/" + lockObjects.get(0))) {
                return true;
            }
            // Otherwise, find the node immediately before our own
            String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
            waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
        } catch (InterruptedException | KeeperException e) {
            logger.error("Exception during lock acquisition: " + e);
        }
        return false;
    }

    @Override
    public boolean tryLock(long timeout, TimeUnit unit) {
        try {
            if (this.tryLock()) {
                return true;
            }
            return waitForLock(waitLock, timeout);
        } catch (KeeperException | InterruptedException | RuntimeException e) {
            logger.error("Exception in tryLock with timeout: " + e);
        }
        return false;
    }

    /**
     * Wait for the lock
     *
     * @param prev     name of the previous lock node
     * @param waitTime wait time in milliseconds
     * @return true once the previous node is gone or the wait times out
     * @throws KeeperException
     * @throws InterruptedException
     */
    private boolean waitForLock(String prev, long waitTime) throws KeeperException, InterruptedException {
        Stat stat = zk.exists(ROOT_LOCK + "/" + prev, true);

        if (stat != null) {
            this.countDownLatch = new CountDownLatch(1);
            // Wait on the latch; when the previous node disappears, process() counts down,
            // the wait ends, and the lock can be acquired
            this.countDownLatch.await(waitTime, TimeUnit.MILLISECONDS);
            this.countDownLatch = null;
        }
        return true;
    }

    @Override
    public void unlock() {
        try {
            zk.delete(currentLock, -1);
            currentLock = null;
            zk.close();
        } catch (InterruptedException | KeeperException e) {
            logger.error("Exception while releasing the lock: " + e);
        }
    }

    @Override
    public Condition newCondition() {
        return null;
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
        this.lock();
    }

    public class LockException extends RuntimeException {
        private static final long serialVersionUID = 1L;

        public LockException(String e) {
            super(e);
        }

        public LockException(Exception e) {
            super(e);
        }
    }

}
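For orientation, a minimal usage sketch of this lock; the quorum address is a placeholder, and "disLocks1" is the resource name SnowflakeId below uses. Note that unlock() also closes the ZooKeeper session, so each instance is single-use:

DistributedLock lock = new DistributedLock("192.168.44.12:2181", "disLocks1"); // placeholder quorum
try {
    lock.lock();   // blocks until our ephemeral sequential node is the smallest
    // ... critical section, e.g. a read-modify-write of a shared ZooKeeper node ...
} finally {
    lock.unlock(); // deletes the node and closes the ZooKeeper session
}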
@@ -1,28 +0,0 @@
package com.zdjizhi.utils;

import com.zdjizhi.common.CommonConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * @author wlh
 */
public class FlinkEnvironmentUtils {
    public static StreamExecutionEnvironment streamExeEnv = StreamExecutionEnvironment.getExecutionEnvironment();

    public static StreamTableEnvironment getStreamTableEnv() {
        streamExeEnv.setParallelism(CommonConfig.STREAM_EXECUTION_ENVIRONMENT_PARALLELISM);

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();

        return StreamTableEnvironment.create(streamExeEnv, settings);
    }

}
@@ -1,5 +0,0 @@
package com.zdjizhi.utils;

public class HbaseUtils {

}
@@ -1,23 +0,0 @@
package com.zdjizhi.utils;

import com.zdjizhi.common.CommonConfig;

public class IpUtils {

    /**
     * IP geolocation library helper
     */
    public static IpLookup ipLookup = new IpLookup.Builder(false)
            // .loadDataFileV4(CommonConfig.IP_MMDB_PATH + "ip_v4.mmdb")
            // .loadDataFileV6(CommonConfig.IP_MMDB_PATH + "ip_v6.mmdb")
            .loadDataFilePrivateV4(CommonConfig.IP_MMDB_PATH + "ip_private_v4.mmdb")
            // .loadDataFilePrivateV6(CommonConfig.IP_MMDB_PATH + "ip_private_v6.mmdb")
            .build();

    public static void main(String[] args) {
        System.out.println(ipLookup.countryLookup("61.128.159.186"));
    }

}
@@ -1,26 +0,0 @@
package com.zdjizhi.utils;

import com.zdjizhi.common.CommonConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Properties;

public class KafkaUtils {

    private static Properties getKafkaSinkProperty() {
        Properties propertiesproducer = new Properties();
        propertiesproducer.setProperty("bootstrap.servers", CommonConfig.KAFKA_OUTPUT_BOOTSTRAP_SERVERS);

        return propertiesproducer;
    }

    public static FlinkKafkaProducer<String> getKafkaSink(String topic) {
        return new FlinkKafkaProducer<String>(
                topic,
                new SimpleStringSchema(),
                getKafkaSinkProperty()
        );
    }

}
@@ -1,11 +1,17 @@
package com.zdjizhi.utils;
package com.zdjizhi.utils.Snowflakeld;

import com.zdjizhi.common.CommonConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.flink.configuration.Configuration;

import static com.zdjizhi.conf.DosConfigs.DATA_CENTER_ID_NUM;
import static com.zdjizhi.conf.DosConfigs.HBASE_ZOOKEEPER_QUORUM;

public class SnowflakeId {
    private static final Logger logger = LoggerFactory.getLogger(SnowflakeId.class);

    private static final Log logger = LogFactory.get();

    private Configuration configuration;

    /**
     * 64 bits in total; the first bit is the sign bit, 0 by default
@@ -86,46 +92,32 @@ public class SnowflakeId {
     */
    private long lastTimestamp = -1L;

    /**
     * Maximum tolerated clock rollback: 10s
     */
    private static final long ROLL_BACK_TIME = 10000L;
    private static final long rollBackTime = 10000L;

    private static SnowflakeId idWorker;

    private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();

    static {
        idWorker = new SnowflakeId(CommonConfig.HBASE_ZOOKEEPER_QUORUM, CommonConfig.DATA_CENTER_ID_NUM);
    }

    //==============================Constructors=====================================

    /**
     * Constructor: initializes the snowflake ID generator
     *
     * @param dataCenterIdNum data center number
     * @param tmpWorkerId     worker number
     */
    private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
        DistributedLock lock = new DistributedLock(CommonConfig.HBASE_ZOOKEEPER_QUORUM, "disLocks1");
        try {
            lock.lock();
            int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
            if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
                throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
            }
            if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
                throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDataCenterId));
            }
            this.workerId = tmpWorkerId;
            this.dataCenterId = dataCenterIdNum;
        } catch (RuntimeException e) {
            logger.error("This is not a usual error!!!===>>>" + e + "<<<===");
        } finally {
            lock.unlock();
    public SnowflakeId(long dataCenterIdNum, long tmpWorkerId) {
        if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
            throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
        }
    }
        if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
            throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDataCenterId));
        }
        this.workerId = tmpWorkerId;
        this.dataCenterId = dataCenterIdNum;

    }
    // ==============================Methods==========================================

    /**
@@ -133,10 +125,10 @@ public class SnowflakeId {
     *
     * @return SnowflakeId
     */
    private synchronized long nextId() {
    public synchronized long nextId() {
        long timestamp = timeGen();
        // Allow a bounded rollback window: if the system clock rolled back by less than rollBackTime, wait for it to catch up
        if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < ROLL_BACK_TIME) {
        if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
            timestamp = tilNextMillis(lastTimestamp);
        }
        // If the current time is still earlier than the last ID's timestamp, the clock has rolled back and an exception should be thrown
@@ -193,12 +185,4 @@ public class SnowflakeId {
    }

    /**
     * Static factory method
     */
    public static Long generateId() {
        return idWorker.nextId();
    }

}
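A hedged sketch of how the generator is consumed downstream (DosDetectionTest later in this diff references it in a commented-out line); it assumes the class has been initialized with a valid dataCenterId/workerId pair:

// Assumption: the worker ID has been allocated (via ZooKeeper in the removed
// constructor, or passed explicitly in the new public constructor).
long logId = SnowflakeId.generateId();   // e.g. used as DosEventLog.log_id
long next = SnowflakeId.generateId();
// IDs from one worker are strictly increasing, so they double as a time order
assert next > logId;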
@@ -0,0 +1,90 @@
package com.zdjizhi.utils.Threshold;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.DateUtils;
import com.zdjizhi.common.DosBaselineThreshold;
import com.zdjizhi.utils.connections.hbase.HbaseUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.*;

import static com.zdjizhi.conf.DosConfigs.*;

public class ParseBaselineThreshold {
    private static final Log logger = LogFactory.get();
    private ArrayList<String> floodTypeList = new ArrayList<>();
    private Configuration configuration;
    private Table table = null;
    private Scan scan = null;

    public ParseBaselineThreshold(Configuration configuration) {
        this.configuration = configuration;
        this.floodTypeList.add("TCP SYN Flood");
        this.floodTypeList.add("UDP Flood");
        this.floodTypeList.add("ICMP Flood");
        this.floodTypeList.add("DNS Flood");
    }

    private void prepareHbaseEnv() throws IOException {
        final org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", configuration.get(HBASE_ZOOKEEPER_QUORUM));
        config.set("hbase.client.retries.number", "3");
        config.set("hbase.bulkload.retries.number", "3");
        config.set("zookeeper.recovery.retry", "3");
        config.set("hbase.defaults.for.version", "2.2.3");
        config.set("hbase.defaults.for.version.skip", "true");
        config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, configuration.get(HBASE_CLIENT_OPERATION_TIMEOUT));
        config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, configuration.get(HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD));

        TableName tableName = TableName.valueOf(configuration.get(HBASE_BASELINE_TABLE_NAME));
        Connection conn = ConnectionFactory.createConnection(config);
        table = conn.getTable(tableName);
        long currentTimeMillis = System.currentTimeMillis();
        // Scan only rows written within the baseline TTL window, capped at the configured total
        scan = new Scan()
                .setAllowPartialResults(true)
                .setTimeRange(DateUtils.getSomeDate(new Date(currentTimeMillis), Math.negateExact(configuration.get(HBASE_BASELINE_TTL))).getTime(), currentTimeMillis)
                .setLimit(configuration.get(HBASE_BASELINE_TOTAL_NUM));
        logger.info("Connected to HBase; reading baseline data");
    }

    public Map<String, Map<String, DosBaselineThreshold>> readFromHbase() {
        Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
        try {
            prepareHbaseEnv();
            logger.info("Start reading baseline data");
            ResultScanner rs = table.getScanner(scan);
            for (Result result : rs) {
                Map<String, DosBaselineThreshold> floodTypeMap = new HashMap<>();
                String rowkey = Bytes.toString(result.getRow());
                for (String type : floodTypeList) {
                    DosBaselineThreshold baselineThreshold = new DosBaselineThreshold();
                    ArrayList<Integer> sessionRate = HbaseUtils.getArraylist(result, type, "session_rate");
                    if (sessionRate != null && !sessionRate.isEmpty()) {
                        Integer defaultValue = HbaseUtils.getIntegerValue(result, type, "session_rate_default_value");
                        Integer rateBaselineType = HbaseUtils.getIntegerValue(result, type, "session_rate_baseline_type");
                        baselineThreshold.setSession_rate(sessionRate);
                        baselineThreshold.setSession_rate_default_value(defaultValue);
                        baselineThreshold.setSession_rate_baseline_type(rateBaselineType);
                        floodTypeMap.put(type, baselineThreshold);
                    }
                }
                baselineMap.put(rowkey, floodTypeMap);
            }
            logger.info("Formatted baseline data successfully; total IPs read: {}", baselineMap.size());
        } catch (Exception e) {
            logger.error("Failed to read data from HBase", e);
        }
        return baselineMap;
    }
}
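A minimal usage sketch, assuming a Flink Configuration populated from the detection_dos_attack.properties file shown later in this diff. The outer map key is the HBase rowkey (a server IP, per the log message above), and the inner map is keyed by flood type:

Configuration conf = ParameterTool
        .fromPropertiesFile("src/main/resources/detection_dos_attack.properties")
        .getConfiguration();
Map<String, Map<String, DosBaselineThreshold>> baselines =
        new ParseBaselineThreshold(conf).readFromHbase();
// Look up the TCP SYN Flood baseline for one server IP, if present
DosBaselineThreshold syn = baselines
        .getOrDefault("192.168.50.11", Collections.emptyMap())  // IP is illustrative
        .get("TCP SYN Flood");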
@@ -1,135 +0,0 @@
package com.zdjizhi.utils;

import cn.hutool.core.util.StrUtil;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;


public class ZookeeperUtils implements Watcher {
    private static final Logger logger = LoggerFactory.getLogger(ZookeeperUtils.class);

    private ZooKeeper zookeeper;

    private static final int SESSION_TIME_OUT = 20000;

    private CountDownLatch countDownLatch = new CountDownLatch(1);

    @Override
    public void process(WatchedEvent event) {
        if (event.getState() == Event.KeeperState.SyncConnected) {
            countDownLatch.countDown();
        }
    }

    /**
     * Modify node data
     *
     * @param path node path
     */
    int modifyNode(String path, String zookeeperIp) {
        createNode(path, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, zookeeperIp);
        int workerId = 0;
        try {
            connectZookeeper(zookeeperIp);
            Stat stat = zookeeper.exists(path, true);
            workerId = Integer.parseInt(getNodeDate(path));
            if (workerId > 63) {
                // Wrap around once the worker ID space (0..63) is exhausted
                workerId = 0;
                zookeeper.setData(path, "1".getBytes(), stat.getVersion());
            } else {
                String result = String.valueOf(workerId + 1);
                if (stat != null) {
                    zookeeper.setData(path, result.getBytes(), stat.getVersion());
                } else {
                    logger.error("Node does not exist! Can't modify");
                }
            }
        } catch (KeeperException | InterruptedException e) {
            logger.error("Modify error, can't modify: " + e);
        } finally {
            closeConn();
        }
        logger.warn("workerID is: " + workerId);
        return workerId;
    }

    /**
     * Connect to ZooKeeper
     *
     * @param host address
     */
    private void connectZookeeper(String host) {
        try {
            zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
            countDownLatch.await();
        } catch (IOException | InterruptedException e) {
            logger.error("Connection to the Zookeeper Exception! message:" + e);
        }
    }

    /**
     * Close the connection
     */
    private void closeConn() {
        try {
            if (zookeeper != null) {
                zookeeper.close();
            }
        } catch (InterruptedException e) {
            logger.error("Close the Zookeeper connection Exception! message:" + e);
        }
    }

    /**
     * Get node data
     *
     * @param path node path
     * @return the data, or null on exception
     */
    private String getNodeDate(String path) {
        String result = null;
        Stat stat = new Stat();
        try {
            byte[] resByte = zookeeper.getData(path, true, stat);

            result = StrUtil.str(resByte, "UTF-8");
        } catch (KeeperException | InterruptedException e) {
            logger.error("Get node information exception" + e);
        }
        return result;
    }

    /**
     * @param path node path to create
     * @param date byte[] payload stored on the node
     * @param acls access control policy
     */
    private void createNode(String path, byte[] date, List<ACL> acls, String zookeeperIp) {
        try {
            connectZookeeper(zookeeperIp);
            Stat exists = zookeeper.exists(path, true);
            if (exists == null) {
                Stat existsSnowflakeld = zookeeper.exists("/Snowflake", true);
                if (existsSnowflakeld == null) {
                    zookeeper.create("/Snowflake", null, acls, CreateMode.PERSISTENT);
                }
                zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
            } else {
                logger.warn("Node already exists ! Don't need to create");
            }
        } catch (KeeperException | InterruptedException e) {
            logger.error(e.toString());
        } finally {
            closeConn();
        }
    }

}
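For context, a sketch of how SnowflakeId's removed constructor used this class: the DistributedLock serializes concurrent jobs while the counter stored at /Snowflake/worker&lt;dataCenterId&gt; is read and bumped, wrapping back to 0 after 63. The quorum address is a placeholder; modifyNode is package-private, so this pattern only worked from the same package:

String quorum = "192.168.44.12:2181";  // placeholder quorum address
DistributedLock lock = new DistributedLock(quorum, "disLocks1");
lock.lock();
try {
    // Read-increment-write of the shared counter; safe because the lock
    // guarantees only one job mutates the node at a time
    int workerId = new ZookeeperUtils().modifyNode("/Snowflake/worker15", quorum);
} finally {
    lock.unlock();
}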
@@ -0,0 +1,49 @@
package com.zdjizhi.utils.connections.hbase;

import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.*;

/**
 * @author wlh
 */
public class HbaseUtils {

    public static Integer getIntegerValue(Result result, String family, String qualifier) {
        byte[] value = result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier));
        if (value != null) {
            return Bytes.toInt(value);
        }
        // Fall back to 1 when the cell is absent
        return 1;
    }

    public static ArrayList<Integer> getArraylist(Result result, String family, String qualifier) throws IOException {
        if (containsColumn(result, family, qualifier)) {
            // The cell stores a Hadoop ArrayWritable of IntWritable, serialized via Writable
            ArrayWritable w = new ArrayWritable(IntWritable.class);
            w.readFields(new DataInputStream(new ByteArrayInputStream(result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier)))));
            return fromWritable(w);
        }
        return null;
    }

    private static ArrayList<Integer> fromWritable(ArrayWritable writable) {
        Writable[] writables = writable.get();
        ArrayList<Integer> list = new ArrayList<>(writables.length);
        for (Writable wrt : writables) {
            list.add(((IntWritable) wrt).get());
        }
        return list;
    }

    private static boolean containsColumn(Result result, String family, String qualifier) {
        return result.containsColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier));
    }

}
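For reference, a hedged sketch of the matching write path: how a session_rate cell could be encoded so getArraylist() can decode it. The Put is illustrative; the family and qualifier names follow ParseBaselineThreshold above, and the sample values are made up:

ByteArrayOutputStream bos = new ByteArrayOutputStream();
IntWritable[] rates = {new IntWritable(120), new IntWritable(95), new IntWritable(300)};  // sample values
ArrayWritable aw = new ArrayWritable(IntWritable.class, rates);
aw.write(new DataOutputStream(bos));  // Writable serialization, symmetric to readFields() above
Put put = new Put(Bytes.toBytes("192.168.50.11"))  // rowkey = server IP (illustrative)
        .addColumn(Bytes.toBytes("TCP SYN Flood"), Bytes.toBytes("session_rate"), bos.toByteArray());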
@@ -0,0 +1,28 @@
package com.zdjizhi.utils.connections.kafka;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;


public class KafkaConsumer {

    public static FlinkKafkaConsumer<String> getKafkaConsumer(String topics, Properties properties) {
        // Accept either a single topic or a comma-separated list
        List<String> topicList = new ArrayList<>();
        if (topics.contains(",")) {
            String[] words = topics.split(",");
            topicList.addAll(Arrays.asList(words));
        } else {
            topicList.add(topics);
        }
        return new FlinkKafkaConsumer<>(topicList, new SimpleStringSchema(), properties);
    }

}
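A minimal wiring sketch, using the source.* values from detection_dos_attack.properties below (broker and group ID are copied from that file, not requirements):

Properties props = new Properties();
props.setProperty("bootstrap.servers", "192.168.44.11:9094");
props.setProperty("group.id", "dos-detection-job-20240402-t1");
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.addSource(KafkaConsumer.getKafkaConsumer("DOS-SKETCH-METRIC,DOS-PROTECTION-RULE-METRIC", props))
        .print();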
@@ -0,0 +1,23 @@
package com.zdjizhi.utils.connections.kafka;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Optional;
import java.util.Properties;


public class KafkaProducer {

    public static FlinkKafkaProducer<String> getKafkaProducer(String topic, Properties properties) {
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(
                topic,
                new SimpleStringSchema(),
                properties,
                // No custom FlinkKafkaPartitioner: Kafka's default partitioning is used
                Optional.empty()
        );
        // Log serialization/send failures instead of failing the job
        kafkaProducer.setLogFailuresOnly(true);

        return kafkaProducer;
    }
}
@@ -0,0 +1,11 @@
package com.zdjizhi.utils.exception;

public class FlowWriteException extends RuntimeException {
    public FlowWriteException() {
    }

    public FlowWriteException(String message) {
        super(message);
    }

}
@@ -1,41 +0,0 @@
stream.execution.environment.parallelism=1
stream.execution.job.name=dos-detection-job

kafka.input.parallelism=1
kafka.input.topic.name=DOS-SKETCH-LOG
kafka.input.bootstrap.servers=192.168.44.12:9092
kafka.input.scan.startup.mode=latest-offset
kafka.input.group.id=2108041426
#kafka.input.group.id=test

kafka.output.metric.parallelism=1
kafka.output.metric.topic.name=TRAFFIC-TOP-DESTINATION-IP-METRICS-LOG
kafka.output.event.parallelism=1
kafka.output.event.topic.name=DOS-EVENT-LOG
kafka.output.bootstrap.servers=192.168.44.12:9092

hbase.input.parallelism=1
hbase.zookeeper.quorum=192.168.44.12:2181
hbase.client.operation.timeout=30000
hbase.client.scanner.timeout.period=30000

hbase.baseline.table.name=ddos_traffic_baselines
hbase.baseline.total.num=1000000

flink.first.agg.parallelism=1
flink.second.agg.parallelism=1
flink.watermark.max.orderness=1
flink.window.max.time=600

source.ip.list.limit=10000

data.center.id.num=15

ip.mmdb.path=D:\\data\\dat_test\\
#ip.mmdb.path=/home/bigdata/wlh/topology/dos-detection/dat/

baseline.sessions.minor.threshold=0.1
baseline.sessions.warning.threshold=0.5
baseline.sessions.major.threshold=1
baseline.sessions.severe.threshold=3
baseline.sessions.critical.threshold=8
45  src/main/resources/detection_dos_attack.properties  Normal file
@@ -0,0 +1,45 @@
|
||||
source.kafka.topic=DOS-SKETCH-METRIC,DOS-PROTECTION-RULE-METRIC
|
||||
source.kafka.props.bootstrap.servers=192.168.44.11:9094
|
||||
source.kafka.props.group.id=dos-detection-job-20240402-t1
|
||||
source.kafka.props.session.timeout.ms=60000
|
||||
source.kafka.props.max.poll.records=5000
|
||||
source.kafka.props.max.partition.fetch.bytes=31457280
|
||||
source.kafka.props.security.protocol=SASL_PLAINTEXT
|
||||
source.kafka.props.sasl.mechanism=PLAIN
|
||||
source.kafka.props.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin"password="galaxy2019";
|
||||
|
||||
#kafka sink
|
||||
kafka.sink.event.topic.name=DOS-EVENT
|
||||
kafka.sink.metric.topic=DOS-SKETCH-TOP-SERVER-IP
|
||||
sink.kafka.props.bootstrap.servers=192.168.44.12:9094
|
||||
sink.kafka.props.security.protocol=SASL_PLAINTEXT
|
||||
sink.kafka.props.sasl.mechanism=PLAIN
|
||||
sink.kafka.props.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="galaxy2019";
|
||||
sink.kafka.props.acks=1
|
||||
sink.kafka.props.retries=0
|
||||
sink.kafka.props.linger.ms=10
|
||||
sink.kafka.props.request.timeout.ms=30000
|
||||
sink.kafka.props.batch.size=262144
|
||||
sink.kafka.props.buffer.memory=134217728
|
||||
sink.kafka.props.max.request.size=10485760
|
||||
sink.kafka.props.compression.type=snappy
|
||||
|
||||
ip.user.defined.kd.id=dasdasdsad
|
||||
|
||||
#zookeeper地址
|
||||
hbase.zookeeper.quorum=192.168.44.12:2181
|
||||
|
||||
flink.watermark.max.orderness=30
|
||||
|
||||
#计算窗口大小,默认600s
|
||||
flink.window.max.time=600
|
||||
|
||||
static.sensitivity.threshold=1
|
||||
#基线敏感阈值
|
||||
baseline.sensitivity.threshold=0.2
|
||||
#基于baseline判定dos攻击的上下限
|
||||
baseline.sessions.minor.threshold=0.2
|
||||
baseline.sessions.warning.threshold=1
|
||||
baseline.sessions.major.threshold=2.5
|
||||
baseline.sessions.severe.threshold=5
|
||||
baseline.sessions.critical.threshold=8
|
||||
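For reference, this is how the job picks these keys up, mirroring the pattern in EtlProcessFunctionTest later in this diff; the file path is an assumption:

ParameterTool serviceConfig = ParameterTool.fromPropertiesFile("src/main/resources/detection_dos_attack.properties");
Configuration configurationService = serviceConfig.getConfiguration();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// Makes the keys visible to operators through the global job parameters
env.getConfig().setGlobalJobParameters(configurationService);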
23  src/main/resources/log4j.properties  Normal file
@@ -0,0 +1,23 @@
|
||||
#Log4j
|
||||
log4j.rootLogger=info,console,file
|
||||
# 控制台日志设置
|
||||
log4j.appender.console=org.apache.log4j.ConsoleAppender
|
||||
log4j.appender.console.Threshold=info
|
||||
log4j.appender.console.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
|
||||
|
||||
# 文件日志设置
|
||||
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
|
||||
log4j.appender.file.Threshold=info
|
||||
log4j.appender.file.encoding=UTF-8
|
||||
log4j.appender.file.Append=true
|
||||
#路径请用相对路径,做好相关测试输出到应用目下
|
||||
log4j.appender.file.file=${nis.root}/log/flink-dos-detection.log
|
||||
log4j.appender.file.DatePattern='.'yyyy-MM-dd
|
||||
log4j.appender.file.layout=org.apache.log4j.PatternLayout
|
||||
#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
|
||||
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
|
||||
#MyBatis 配置,com.nis.web.dao是mybatis接口所在包
|
||||
log4j.logger.com.nis.web.dao=debug
|
||||
#bonecp数据源配置
|
||||
log4j.category.com.jolbox=debug,console
|
||||
8  src/test/java/com/zdjizhi/ThresholdTestData/DNS-Flood  Normal file
@@ -0,0 +1,8 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277141, sketch_duration=59, attack_type='DNS Flood', source_ip='23.91.128.115', destination_ip='102.219.30.33', sketch_sessions=945, sketch_packets=945, sketch_bytes=446370, vsys_id=23}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277205, sketch_duration=86, attack_type='DNS Flood', source_ip='172.217.160.68', destination_ip='10.113.83.88', sketch_sessions=730, sketch_packets=730, sketch_bytes=344575, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277244, sketch_duration=47, attack_type='DNS Flood', source_ip='45.135.144.112', destination_ip='42.62.192.132', sketch_sessions=0, sketch_packets=0, sketch_bytes=47, vsys_id=1}

--DosDetectionThreshold
{profileId='6091', attackType='DNS Flood', serverIpList=[113.113.83.213, 42.62.192.132/28, 10.113.83.1/25, 102.219.30.33/29], serverIpAddr='null', packetsPerSec=1, bitsPerSec=1, sessionsPerSec=1, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}
{profileId='5679', attackType='DNS Flood', serverIpList=[102.219.30.33], serverIpAddr='null', packetsPerSec=500, bitsPerSec=1000000, sessionsPerSec=100000, isValid=1, vsysId=23, superiorIds=[4, 5]}
6  src/test/java/com/zdjizhi/ThresholdTestData/ICMP-Flood  Normal file
@@ -0,0 +1,6 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277232, sketch_duration=59, attack_type='ICMP Flood', source_ip='45.170.244.25', destination_ip='24.152.57.56', sketch_sessions=499, sketch_packets=499, sketch_bytes=111970, vsys_id=1}


--DosDetectionThreshold
{profileId='6093', attackType='ICMP Flood', serverIpList=[31.131.80.88/29, 24.152.57.56/29, 47.93.59.1/25], serverIpAddr='null', packetsPerSec=210, bitsPerSec=0, sessionsPerSec=0, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}
@@ -0,0 +1,7 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1685003938, sketch_duration=63714, attack_type='TCP SYN Flood', source_ip='5.32.144.55', destination_ip='45.188.134.11', sketch_sessions=0, sketch_packets=0, sketch_bytes=4195, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277234, sketch_duration=57, attack_type='TCP SYN Flood', source_ip='18.65.148.128', destination_ip='23.200.74.224', sketch_sessions=54, sketch_packets=54, sketch_bytes=73427, vsys_id=1}


--DosDetectionThreshold
{profileId='6095', attackType='TCP SYN Flood', serverIpList=[23.200.74.224, 45.188.134.11/29, 41.183.0.15/29, 41.183.0.16/30], serverIpAddr='null', packetsPerSec=1, bitsPerSec=1, sessionsPerSec=1, isValid=1, vsysId=1, superiorIds=[5, 4, 12, 27]}
8  src/test/java/com/zdjizhi/ThresholdTestData/UDP-Flood  Normal file
@@ -0,0 +1,8 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277291, sketch_duration=0, attack_type='UDP Flood', source_ip='121.14.89.209', destination_ip='192.168.50.11', sketch_sessions=0, sketch_packets=0, sketch_bytes=0, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277233, sketch_duration=58, attack_type='UDP Flood', source_ip='192.168.50.56,192.168.50.34,192.168.50.11,192.168.50.33,192.168.50.55,192.168.50.58,192.168.50.36,192.168.50.14,192.168.50.35,192.168.50.13,192.168.50.57,192.168.50.30,192.168.50.51,192.168.50.54,192.168.50.10,192.168.50.32,192.168.50.53,192.168.50.31,192.168.50.16,192.168.50.38,192.168.50.15,192.168.50.37,192.168.50.18,192.168.50.17,192.168.50.50,192.168.50.45,192.168.50.23,192.168.50.22,192.168.50.44,192.168.50.25,192.168.50.47,192.168.50.46,192.168.50.24,192.168.50.63,192.168.50.41,192.168.50.40,192.168.50.62,192.168.50.43,192.168.50.21,192.168.50.20,192.168.50.42,192.168.50.27,192.168.50.26,192.168.50.48,192.168.50.28,192.168.50.61,192.168.50.60', destination_ip='121.14.89.209', sketch_sessions=297, sketch_packets=297, sketch_bytes=371404, vsys_id=1}


--DosDetectionThreshold
{profileId='5333', attackType='UDP Flood', serverIpList=[192.168.50.11, 192.168.50.12], serverIpAddr='null', packetsPerSec=50, bitsPerSec=50, sessionsPerSec=50, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}
@@ -19,12 +19,12 @@ public class HbaseTest {
    public static void main(String[] args) throws IOException {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();

        config.set("hbase.zookeeper.quorum", CommonConfig.HBASE_ZOOKEEPER_QUORUM);
        config.set("hbase.client.retries.number", "3");
        config.set("hbase.bulkload.retries.number", "3");
        config.set("zookeeper.recovery.retry", "3");
        config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, CommonConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
        config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, CommonConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
        // config.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM);
        // config.set("hbase.client.retries.number", "3");
        // config.set("hbase.bulkload.retries.number", "3");
        // config.set("zookeeper.recovery.retry", "3");
        // config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, FlowWriteConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
        // config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, FlowWriteConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);

        TableName tableName = TableName.valueOf("dos_test");
        Connection conn = ConnectionFactory.createConnection(config);
7  src/test/java/com/zdjizhi/common/HttpTest.java  Normal file
@@ -0,0 +1,7 @@
package com.zdjizhi.common;

public class HttpTest {
    public static void main(String[] args) throws Exception {

    }
}
106  src/test/java/com/zdjizhi/common/IpTest.java  Normal file
@@ -0,0 +1,106 @@
package com.zdjizhi.common;

import inet.ipaddr.Address;
import inet.ipaddr.AddressStringException;
import inet.ipaddr.IPAddress;
import inet.ipaddr.IPAddressString;
import inet.ipaddr.format.util.AddressTrieMap;
import inet.ipaddr.format.util.AssociativeAddressTrie;
import inet.ipaddr.ipv4.IPv4Address;
import inet.ipaddr.ipv4.IPv4AddressAssociativeTrie;
import org.apache.flink.shaded.guava18.com.google.common.collect.Range;
import org.apache.flink.shaded.guava18.com.google.common.collect.TreeRangeMap;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class IpTest {
    public static void main(String[] args) throws Exception {
        IPv4AddressAssociativeTrie<Integer> trie = new IPv4AddressAssociativeTrie<>();

        IPAddress str1 = new IPAddressString("1.2.3.4").getAddress();
        IPAddress str2 = new IPAddressString("10.0.0.0/15").getAddress();
        IPAddress str3 = new IPAddressString("25.4.2.0/23").getAddress();
        IPAddress str4 = new IPAddressString("192.168.8.0/21").getAddress();
        IPAddress str5 = new IPAddressString("240.0.0.0/4").getAddress();
        IPAddress str6 = new IPAddressString("fc00::0/64").getAddress();
        IPAddress str7 = new IPAddressString("fc00::10:1").getAddress();

        TreeRangeMap<IPAddress, Object> rangeMap = TreeRangeMap.create();
        rangeMap.put(Range.closed(str1.getLower(), str1.getUpper()), 1);
        rangeMap.put(Range.closed(str2.getLower(), str2.getUpper()), 2);
        rangeMap.put(Range.closed(str3.getLower(), str3.getUpper()), 3);
        rangeMap.put(Range.closed(str4.getLower(), str4.getUpper()), 4);
        rangeMap.put(Range.closed(str5.getLower(), str5.getUpper()), 5);
        rangeMap.put(Range.closed(str6.getLower(), str6.getUpper()), 6);
        rangeMap.put(Range.closed(str7.getLower(), str7.getUpper()), 7);

        IPAddress pv4 = new IPAddressString("255.255.14.255").getAddress();
        IPAddress pv42 = new IPAddressString("1.2.3.4").getAddress();
        IPAddress pv43 = new IPAddressString("fc00::").getAddress();
        IPAddress pv44 = new IPAddressString("fc00::10:1").getAddress();

        IPAddress pv45 = new IPAddressString("192.168.42.1").getAddress();
        IPAddress pv46 = new IPAddressString("192.168.42.1/32").getAddress();
        IPAddress pv47 = new IPAddressString("12.56.4.0").getAddress();

        IPAddress mask = pv45.getNetwork().getNetworkMask(24, false);

        System.out.println(pv45.isMultiple());
        System.out.println(pv46.isMultiple());
        System.out.println(pv46.isPrefixed());
        System.out.println(pv47.isPrefixed());
        System.out.println(pv45 + "---" + pv45.toMaxHost().withoutPrefixLength() + "---" + pv45.adjustPrefixLength(pv45.getBitCount()));
        System.out.println(pv45 + "---mask:" + pv45.mask(mask).toString());

        System.out.println(pv45.adjustPrefixLength(pv45.getBitCount()) + "---" + pv45.toMaxHost().withoutPrefixLength());

        /*
        System.out.println(str5.getUpper() + "---" + str5.getLower());

        System.out.println(rangeMap.span().contains(pv4));

        System.out.println(rangeMap.get(pv4));
        System.out.println(rangeMap.get(pv42));
        System.out.println(rangeMap.get(pv43));
        System.out.println(rangeMap.get(pv44));
        */

        /*
        System.out.println(str5.toSequentialRange());
//        System.out.println(str2.contains(new IPAddressString("10.0.0.2")));
//        System.out.println(str5.toAddress().toIPv4().toSequentialRange());


        trie.put(str1, 1);
        trie.put(str2, 2);
        trie.put(str3, 3);
        trie.put(str4, 4);
        trie.put(str5, 5);

        AddressTrieMap<IPv4Address, Integer> trieMap = new AddressTrieMap<>(trie);


        trieMap.forEach((k, v) -> {
            System.out.println(k.toString() + "--" + v);
        });

        System.out.println("-----------------");

        trie.forEach((k) -> System.out.println(k.toString()));

        System.out.println(str5.contains(pv4));
        System.out.println(trie.contains(pv4));
        System.out.println(trieMap.get(pv4));
        System.out.println(trieMap.containsKey(pv4));
//        System.out.println(trieMap.getRange());
//        IPAddress str3 = new IPAddressString("fc00::10:1").getAddress();
//        IPAddress str4 = new IPAddressString("fc00::10:2/64").getAddress();

//        System.out.println(Arrays.toString(str1.mergeToPrefixBlocks(str2, str3, str4)));

        */
    }
}
@@ -1,17 +0,0 @@
package com.zdjizhi.common;

import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

public class UdtfTest extends TableFunction<Row> {

    public void eval(Row[] rows) {
        for (Row row : rows) {
            collect(row);
        }
    }

    public static void main(String[] args) {

    }
}
235  src/test/java/com/zdjizhi/etl/DosDetectionTest.java  Normal file
@@ -0,0 +1,235 @@
package com.zdjizhi.etl;

import com.geedgenetworks.utils.StringUtil;
import com.zdjizhi.common.DosDetectionThreshold;
import com.zdjizhi.common.DosEventLog;
import com.zdjizhi.common.DosSketchLog;

import org.apache.commons.lang3.StringUtils;
import org.junit.Test;

import java.math.BigDecimal;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.HashSet;

public class DosDetectionTest {

    private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
    private final static int BASELINE_SIZE = 144;
    private final static int STATIC_CONDITION_TYPE = 1;
    private final static int BASELINE_CONDITION_TYPE = 2;
    private final static int SENSITIVITY_CONDITION_TYPE = 3;

    private final static String SESSIONS_TAG = "sessions";
    private final static String PACKETS_TAG = "packets";
    private final static String BITS_TAG = "bits";

    @Test
    public void dosDetectionTest() {
        DosDetectionThreshold dosDetectionThreshold = new DosDetectionThreshold();
        ArrayList<String> serverIpList = new ArrayList<>();
        serverIpList.add("192.168.50.11");
        serverIpList.add("192.168.50.1/24");
        serverIpList.add("FC::12:0:0/54");
        serverIpList.add("FC::12:0:0");
        dosDetectionThreshold.setId(4437);
        dosDetectionThreshold.setAttack_type("DNS Flood");
        dosDetectionThreshold.setServer_ip_list(serverIpList);
        dosDetectionThreshold.setSessions_per_sec(1);
        dosDetectionThreshold.setPackets_per_sec(1);
        dosDetectionThreshold.setBits_per_sec(100000);
        dosDetectionThreshold.setIs_enabled(1);
        dosDetectionThreshold.setSuperior_ids(new Integer[]{5, 4, 12, 27});

        DosSketchLog dosSketchLog = new DosSketchLog();

        dosSketchLog.setSessions(68);
        dosSketchLog.setPkts(68);
        dosSketchLog.setBytes(285820); //185.82
        dosSketchLog.setVsys_id(1);
        dosSketchLog.setAttack_type("ICMP Flood");
        dosSketchLog.setServer_ip("45.170.244.25");
        dosSketchLog.setClient_ip("24.152.57.56");
        // Fetch the static thresholds
        long sessionBase = dosDetectionThreshold.getSessions_per_sec();
        long pktBase = dosDetectionThreshold.getPackets_per_sec();
        long bitBase = dosDetectionThreshold.getBits_per_sec();
        // Compute based on rates
        long diffSession = dosSketchLog.getSession_rate() - sessionBase;
        long diffPkt = dosSketchLog.getPkts() - pktBase;
        long diffByte = dosSketchLog.getBytes() - bitBase;

        Double diffSessionPercent = getDiffPercent(diffSession, sessionBase) * 100;
        Double diffPktPercent = getDiffPercent(diffPkt, pktBase) * 100;
        Double diffBitPercent = getDiffPercent(diffByte, bitBase) * 100;
        long profileId = 0;
        DosEventLog result = null;
        if (diffSessionPercent >= diffPktPercent && diffSessionPercent >= diffBitPercent) {
            profileId = dosDetectionThreshold.getId();
            result = getDosEventLog(dosSketchLog, sessionBase, diffSession, profileId, STATIC_CONDITION_TYPE, SESSIONS_TAG);
            System.out.println(result);
        } else if (diffPktPercent >= diffSessionPercent && diffPktPercent >= diffBitPercent) {
            profileId = dosDetectionThreshold.getId();
            result = getDosEventLog(dosSketchLog, pktBase, diffPkt, profileId, STATIC_CONDITION_TYPE, PACKETS_TAG);
            System.out.println(result);
        } else if (diffBitPercent >= diffPktPercent && diffBitPercent >= diffSessionPercent) {
            profileId = dosDetectionThreshold.getId();
            result = getDosEventLog(dosSketchLog, bitBase, diffByte, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
            System.out.println(result);
        }

    }

    private DosEventLog getDosEventLog(DosSketchLog value, long base, long diff, long profileId, int type, String tag) {
        DosEventLog result = null;
        String destinationIp = value.getServer_ip();
        String attackType = value.getAttack_type();
        if (diff > 0 && base != 0) {
            double percent = getDiffPercent(diff, base);
            Severity severity = judgeSeverity(percent);
            Integer staticSensitivityThreshold = 100;
            if (severity != Severity.NORMAL) {
                if (type == BASELINE_CONDITION_TYPE && percent < 0.2) {
                    // logger.debug("Server IP: {}, type: {}, baseline {} ratio {} did not exceed the baseline sensitivity threshold; log details\n{}", destinationIp, attackType, base, percent, value);
                } else if ((type == BASELINE_CONDITION_TYPE || type == SENSITIVITY_CONDITION_TYPE) && value.getSession_rate() < staticSensitivityThreshold) {
                    // logger.debug("Server IP: {}, type: {}, baseline {} ratio {} did not exceed the static sensitivity threshold; log details\n{}", destinationIp, attackType, base, percent, value);
                } else {
                    result = getResult(value, base, profileId, severity, percent + 1, type, tag);
                    if (type == SENSITIVITY_CONDITION_TYPE) {
                        result.setSeverity(Severity.MAJOR.severity);
                    }
                    // logger.info("Server IP {} shows a {} anomaly: exceeds baseline {} by {}x, detected via {}:{}; log details\n {}", destinationIp, attackType, base, percent, type, tag, result);
                }
            }
            // else {
            //     logger.debug("Server IP: {} shows no {} anomaly; log details {}", destinationIp, attackType, value);
            // }
        }
        return result;
    }

    private DosEventLog getResult(DosSketchLog value, long base, long profileId, Severity severity, double percent, int type, String tag) {
        DosEventLog dosEventLog = new DosEventLog();
        // dosEventLog.setLog_id(SnowflakeId.generateId());
        dosEventLog.setVsys_id(value.getVsys_id());
        dosEventLog.setStart_time(value.getStart_timestamp_ms());
        dosEventLog.setEnd_time(value.getStart_timestamp_ms() + value.getDuration());
        dosEventLog.setProfile_id(profileId);
        dosEventLog.setAttack_type(value.getAttack_type());
        dosEventLog.setSeverity(severity.severity);
        // dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), base, value.getSketch_sessions(), type, tag));
        dosEventLog.setConditions(getConditions(percent, base, value.getSession_rate(), type, tag, dosEventLog));
        dosEventLog.setDestination_ip(value.getServer_ip());
        // dosEventLog.setDestination_country(IpUtils.ipLookup.countryLookup(value.getDestination_ip()));
        String ipList = value.getClient_ip();
        dosEventLog.setSource_ip_list(ipList);
        dosEventLog.setSource_country_list(getSourceCountryList(ipList));
        dosEventLog.setSession_rate(value.getSession_rate());
        dosEventLog.setPacket_rate(value.getPacket_rate());
        dosEventLog.setBit_rate(value.getBit_rate());
        return dosEventLog;
    }

    public String getConditions(double percent, long base, long sessions, int type, String tag, DosEventLog dosEventLog) {
        int condition = 0;
        if ("Minor".equals(dosEventLog.getSeverity())) {
            condition = 50;
        } else if ("Warning".equals(dosEventLog.getSeverity())) {
            condition = 100;
        } else if ("Major".equals(dosEventLog.getSeverity())) {
            condition = 250;
        } else if ("Severe".equals(dosEventLog.getSeverity())) {
            condition = 500;
        } else if ("Critical".equals(dosEventLog.getSeverity())) {
            condition = 800;
        }
        switch (type) {
            case STATIC_CONDITION_TYPE:
                return "Rate > " +
                        base + " " +
                        tag + "/s" + "(>" + condition + "%)";
            case BASELINE_CONDITION_TYPE:
                return tag + " > " +
                        PERCENT_INSTANCE.format(percent) + " of baseline";
            case SENSITIVITY_CONDITION_TYPE:
                return String.valueOf(sessions) + " " +
                        tag + "/s Unusually high " +
                        StringUtils.capitalize(tag);
            default:
                throw new IllegalArgumentException("Illegal Argument type:" + type + ", known types = [1,2,3]");
        }
    }

    private String getSourceCountryList(String sourceIpList) {
        if (StringUtil.isNotBlank(sourceIpList)) {
            String countryList;
            try {
                String[] ipArr = sourceIpList.split(",");
                HashSet<String> countrySet = new HashSet<>();
                for (String ip : ipArr) {
                    // String country = IpLookupUtils.getCountryLookup(ip);
                    // if (StringUtil.isNotBlank(country)) {
                    //     countrySet.add(country);
                    // }
                }
                countryList = StringUtils.join(countrySet, ", ");
                return countryList;
            } catch (Exception e) {
                // logger.error("{} source IP lists: country lookup failed", sourceIpList, e);
                return StringUtil.EMPTY;
            }
        } else {
            throw new IllegalArgumentException("Illegal Argument sourceIpList = null");
        }
    }

    private Double getDiffPercent(long diff, long base) {
        return BigDecimal.valueOf((float) diff / base).setScale(4, BigDecimal.ROUND_HALF_UP).doubleValue();
    }

    private Severity judgeSeverity(double diffPercent) {
        if (diffPercent >= 0.5 && diffPercent < 1) {
            return Severity.MINOR;
        } else if (diffPercent >= 1 && diffPercent < 2.5) {
            return Severity.WARNING;
        } else if (diffPercent >= 2.5 && diffPercent < 5) {
            return Severity.MAJOR;
        } else if (diffPercent >= 5 && diffPercent < 8) {
            return Severity.SEVERE;
        } else if (diffPercent >= 8) {
            return Severity.CRITICAL;
        } else {
            return Severity.NORMAL;
        }
    }

    private enum Severity {
        /**
         * Severity level enum
         */
        CRITICAL("Critical"),
        SEVERE("Severe"),
        MAJOR("Major"),
        WARNING("Warning"),
        MINOR("Minor"),
        NORMAL("Normal");

        private final String severity;

        @Override
        public String toString() {
            return this.severity;
        }

        Severity(String severity) {
            this.severity = severity;
        }

    }
}
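A hedged worked example of the severity math above, using the ThresholdTestData samples for illustration: with a static threshold (base) of 50 sessions/s and an observed rate of 297 (the UDP-Flood sample), diff = 297 - 50 = 247 and percent = 247 / 50 = 4.94, which judgeSeverity() maps to MAJOR (2.5 <= 4.94 < 5). The ICMP sample, 499 packets against base 210, gives (499 - 210) / 210 ≈ 1.3762, which falls in the WARNING band (1 <= x < 2.5):

long base = 50, observed = 297;                        // UDP-Flood sample values
double percent = (double) (observed - base) / base;    // 4.94 -> Severity.MAJOR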
97  src/test/java/com/zdjizhi/etl/EtlProcessFunctionTest.java  Normal file
@@ -0,0 +1,97 @@
package com.zdjizhi.etl;

import com.zdjizhi.common.DosSketchLog;
import com.zdjizhi.function.FlatSketchFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.junit.ClassRule;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.junit.Assert.*;

public class EtlProcessFunctionTest {
    @ClassRule
    public static MiniClusterWithClientResource flinkCluster =
            new MiniClusterWithClientResource(
                    new MiniClusterResourceConfiguration.Builder()
                            .setNumberSlotsPerTaskManager(1)
                            .setNumberTaskManagers(1)
                            .build());

    @Test
    public void testIncrementPipeline() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        String jsonString1 = "{\"rule_id\":9055,\"server_ip\":\"192.64.30.9\",\"client_country\":\"US\",\"server_country\":\"CA\",\"name\":\"top_client_and_server_ip\",\"vsys_id\":1,\"device_id\":\"9800165603247024\",\"device_group\":\"group-xxg-tsgx\",\"data_center\":\"center-xxg-tsgx\",\"sessions\":89,\"pkts\":89,\"bytes\":5874,\"timestamp_ms\":1723014831175}";
        String jsonString2 = "{\"server_ip\":\"192.168.41.32\",\"decoded_as\":\"UDP\",\"name\":\"top_server_ip\",\"device_id\":\"21426003\",\"device_group\":\"group-xxg-9140\",\"data_center\":\"center-xxg-9140\",\"vsys_id\":1,\"duration\":60000,\"sessions\":5,\"bytes\":350,\"pkts\":5,\"timestamp_ms\":1723014960000}";
        String jsonString3 = "{\"client_ip\":\"10.64.23.157\",\"server_ip\":\"10.64.127.184\",\"decoded_as\":\"TCP SYN\",\"name\":\"top_client_ip_and_server_ip\",\"device_id\":\"9800165603191151\",\"device_group\":\"group-xxg-tsgx\",\"data_center\":\"center-xxg-tsgx\",\"vsys_id\":1,\"duration\":60000,\"sessions\":4,\"bytes\":264,\"pkts\":4,\"timestamp_ms\":1723014900010}";

        env.setParallelism(1);
        CollectSink.values.clear();

        ParameterTool serviceConfig = ParameterTool.fromPropertiesFile("src\\main\\resources\\detection_dos_attack.properties");
        Configuration configurationService = serviceConfig.getConfiguration();
        // global register
        env.getConfig().setGlobalJobParameters(configurationService);

        env.fromElements(jsonString1, jsonString2, jsonString3)
                .flatMap(new FlatSketchFunction())
                .addSink(new CollectSink());

        // execute
        env.execute();
        assertEquals("", CollectSink.values.get(0).getDecoded_as());
        assertEquals("192.64.30.9", CollectSink.values.get(0).getServer_ip());
        assertEquals(9055, CollectSink.values.get(0).getRule_id());
        assertTrue(CollectSink.values.get(0).getClient_countrys().contains("US"));
        assertEquals("CA", CollectSink.values.get(0).getServer_country());
        assertEquals("top_client_and_server_ip", CollectSink.values.get(0).getName());
        assertEquals(60000, CollectSink.values.get(0).getDuration());
        assertEquals(1, CollectSink.values.get(0).getVsys_id());
        assertEquals("9800165603247024", CollectSink.values.get(0).getDevice_id());
        assertEquals("group-xxg-tsgx", CollectSink.values.get(0).getDevice_group());
        assertEquals("center-xxg-tsgx", CollectSink.values.get(0).getData_center());
        assertEquals(89, CollectSink.values.get(0).getSessions());
        assertEquals(89, CollectSink.values.get(0).getPkts());
        assertEquals(5874, CollectSink.values.get(0).getBytes());
        assertEquals(1723014831175L, CollectSink.values.get(0).getStart_timestamp_ms());
        assertEquals("UDP", CollectSink.values.get(1).getDecoded_as());
        assertEquals("TCP SYN", CollectSink.values.get(2).getDecoded_as());
        assertNull(CollectSink.values.get(0).getClient_ip());
        assertEquals(5, CollectSink.values.get(1).getSessions());
        assertEquals(5, CollectSink.values.get(1).getPkts());
        assertEquals(350, CollectSink.values.get(1).getBytes());
        assertEquals(0, CollectSink.values.get(1).getClient_countrys().size());
        assertEquals(0, CollectSink.values.get(2).getSessions());
        assertEquals(0, CollectSink.values.get(2).getPkts());
        assertEquals(0, CollectSink.values.get(2).getBytes());
    }

    // create a testing sink
    private static class CollectSink implements SinkFunction<DosSketchLog> {

        // must be static
        public static final List<DosSketchLog> values = Collections.synchronizedList(new ArrayList<>());

        @Override
        public void invoke(DosSketchLog value, Context context) throws Exception {
            values.add(value);
        }
    }

}