Compare commits
56 Commits
tsg-v09-A
...
feature/ts
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
52336accbd | ||
|
|
c8a2a6b627 | ||
|
|
24d70f690e | ||
|
|
77e982b22f | ||
|
|
b3a23686a0 | ||
|
|
b9a694ddb9 | ||
|
|
6fb37324ff | ||
|
|
315b638470 | ||
|
|
bd48417eb8 | ||
|
|
72acc976e3 | ||
|
|
6be3ea7f1e | ||
|
|
04ee45f77d | ||
|
|
d8b0a7637b | ||
|
|
b56a2ec31e | ||
|
|
11747d9964 | ||
|
|
ce15a27a1b | ||
|
|
01bbe562c9 | ||
|
|
f07651cf14 | ||
|
|
7c201a8a3f | ||
|
|
78435d54ea | ||
|
|
76c9247bb9 | ||
|
|
488b7c6644 | ||
|
|
0662d265dd | ||
|
|
87fe11dc93 | ||
|
|
9a2a5b3957 | ||
|
|
c58acdcfc9 | ||
|
|
b409150532 | ||
|
|
7e6d5fcfc5 | ||
|
|
859cd379e5 | ||
|
|
47ddef9bca | ||
|
|
0a6f36393c | ||
|
|
84a1e6879a | ||
|
|
ab8f6aba81 | ||
|
|
94e8fb807a | ||
|
|
cead1d4d99 | ||
|
|
2d98c3b6e6 | ||
|
|
3dc29a07be | ||
|
|
1fcdb79739 | ||
|
|
3d974217d9 | ||
|
|
db17064f73 | ||
|
|
065e5abb09 | ||
|
|
75bbdd2962 | ||
|
|
c46a395d9b | ||
|
|
cc3f0cf620 | ||
|
|
0617b1e614 | ||
|
|
0125b031dd | ||
|
|
177e7461cc | ||
|
|
be916531fb | ||
|
|
c692112445 | ||
|
|
b03ab9642d | ||
|
|
c44250bf73 | ||
|
|
77bc6a844e | ||
|
|
e930fa23ed | ||
|
|
8cd4dea19e | ||
|
|
62f3c65d66 | ||
|
|
8cfb442c44 |
170
pom.xml
170
pom.xml
@@ -6,19 +6,26 @@
|
||||
|
||||
<groupId>com.zdjizhi</groupId>
|
||||
<artifactId>flink-dos-detection</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<version>23.11</version>
|
||||
|
||||
<name>flink-dos-detection</name>
|
||||
<url>http://www.example.com</url>
|
||||
|
||||
|
||||
<properties>
|
||||
<galaxy.tools.version>1.2.1</galaxy.tools.version>
|
||||
<flink.version>1.13.1</flink.version>
|
||||
<hive.version>2.1.1</hive.version>
|
||||
<hadoop.version>2.7.1</hadoop.version>
|
||||
<scala.binary.version>2.11</scala.binary.version>
|
||||
<jsonpath.version>2.4.0</jsonpath.version>
|
||||
</properties>
|
||||
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>nexus</id>
|
||||
<name>Team Nexus Repository</name>
|
||||
<url>http://192.168.40.125:8099/content/groups/public</url>
|
||||
<url>http://192.168.40.153:8099/content/groups/public</url>
|
||||
</repository>
|
||||
|
||||
<repository>
|
||||
@@ -96,29 +103,60 @@
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
</plugins>
|
||||
|
||||
<resources>
|
||||
<resource>
|
||||
<directory>properties</directory>
|
||||
<includes>
|
||||
<include>**/*.properties</include>
|
||||
<include>**/*.xml</include>
|
||||
</includes>
|
||||
<filtering>false</filtering>
|
||||
</resource>
|
||||
|
||||
<resource>
|
||||
<directory>src\main</directory>
|
||||
<includes>
|
||||
<include>log4j.properties</include>
|
||||
</includes>
|
||||
<filtering>false</filtering>
|
||||
</resource>
|
||||
</resources>
|
||||
|
||||
</build>
|
||||
|
||||
<dependencies>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.jasypt/jasypt -->
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<version>1.7.21</version>
|
||||
<groupId>org.jasypt</groupId>
|
||||
<artifactId>jasypt</artifactId>
|
||||
<version>1.9.3</version>
|
||||
</dependency>
|
||||
|
||||
<!--<dependency>-->
|
||||
<!--<groupId>org.slf4j</groupId>-->
|
||||
<!--<artifactId>slf4j-api</artifactId>-->
|
||||
<!--<version>1.7.21</version>-->
|
||||
<!--</dependency>-->
|
||||
|
||||
<!--<dependency>-->
|
||||
<!--<groupId>org.slf4j</groupId>-->
|
||||
<!--<artifactId>slf4j-log4j12</artifactId>-->
|
||||
<!--<version>1.7.21</version>-->
|
||||
<!--</dependency>-->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
<version>1.7.21</version>
|
||||
<groupId>com.jayway.jsonpath</groupId>
|
||||
<artifactId>json-path</artifactId>
|
||||
<version>${jsonpath.version}</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.flink</groupId>
|
||||
<artifactId>flink-connector-kafka_2.12</artifactId>
|
||||
<version>${flink.version}</version>
|
||||
<!--<scope>provided</scope>-->
|
||||
</dependency>
|
||||
|
||||
<!-- CLI dependencies -->
|
||||
@@ -126,12 +164,13 @@
|
||||
<groupId>org.apache.flink</groupId>
|
||||
<artifactId>flink-clients_2.12</artifactId>
|
||||
<version>${flink.version}</version>
|
||||
<!--<scope>provided</scope>-->
|
||||
<scope>provided</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
<version>2.7.1</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>zookeeper</artifactId>
|
||||
@@ -141,14 +180,34 @@
|
||||
<artifactId>jdk.tools</artifactId>
|
||||
<groupId>jdk.tools</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>guava</artifactId>
|
||||
<groupId>com.google.guava</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-auth</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- <dependency>-->
|
||||
<!-- <groupId>org.apache.hadoop</groupId>-->
|
||||
<!-- <artifactId>hadoop-hdfs</artifactId>-->
|
||||
<!-- <version>${hadoop.version}</version>-->
|
||||
<!-- </dependency>-->
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hbase</groupId>
|
||||
<artifactId>hbase-client</artifactId>
|
||||
<version>2.2.3</version>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
@@ -158,6 +217,14 @@
|
||||
<artifactId>log4j-over-slf4j</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
@@ -195,25 +262,88 @@
|
||||
<version>5.3.3</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>com.zdjizhi</groupId>
|
||||
<artifactId>galaxy</artifactId>
|
||||
<version>1.0.6</version>
|
||||
<groupId>com.alibaba.fastjson2</groupId>
|
||||
<artifactId>fastjson2</artifactId>
|
||||
<version>2.0.32</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.alibaba.nacos</groupId>
|
||||
<artifactId>nacos-client</artifactId>
|
||||
<version>1.2.0</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>log4j-over-slf4j</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
<groupId>org.slf4j</groupId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- https://mvnrepository.com/artifact/commons-codec/commons-codec -->
|
||||
<dependency>
|
||||
<groupId>commons-codec</groupId>
|
||||
<artifactId>commons-codec</artifactId>
|
||||
<version>1.11</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.12</version>
|
||||
</dependency>
|
||||
<!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>2.9.10</version>
|
||||
</dependency>
|
||||
|
||||
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>22.0</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<version>1.18.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.jayway.jsonpath</groupId>
|
||||
<artifactId>json-path</artifactId>
|
||||
<version>2.4.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.flink</groupId>
|
||||
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
|
||||
<version>${flink.version}</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>com.geedgenetworks</groupId>
|
||||
<artifactId>galaxy</artifactId>
|
||||
<version>${galaxy.tools.version}</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
||||
|
||||
63
src/main/java/com/zdjizhi/common/DosBaselineThreshold.java
Normal file
63
src/main/java/com/zdjizhi/common/DosBaselineThreshold.java
Normal file
@@ -0,0 +1,63 @@
|
||||
package com.zdjizhi.common;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DosBaselineThreshold implements Serializable {
|
||||
private ArrayList<Integer> session_rate;
|
||||
private Integer session_rate_baseline_type;
|
||||
private Integer session_rate_default_value;
|
||||
|
||||
public ArrayList<Integer> getSession_rate() {
|
||||
return session_rate;
|
||||
}
|
||||
|
||||
public void setSession_rate(ArrayList<Integer> session_rate) {
|
||||
this.session_rate = session_rate;
|
||||
}
|
||||
|
||||
public Integer getSession_rate_baseline_type() {
|
||||
return session_rate_baseline_type;
|
||||
}
|
||||
|
||||
public void setSession_rate_baseline_type(Integer session_rate_baseline_type) {
|
||||
this.session_rate_baseline_type = session_rate_baseline_type;
|
||||
}
|
||||
|
||||
public Integer getSession_rate_default_value() {
|
||||
return session_rate_default_value;
|
||||
}
|
||||
|
||||
public void setSession_rate_default_value(Integer session_rate_default_value) {
|
||||
this.session_rate_default_value = session_rate_default_value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "DosBaselineThreshold{" +
|
||||
"session_rate=" + session_rate +
|
||||
", session_rate_baseline_type=" + session_rate_baseline_type +
|
||||
", session_rate_default_value=" + session_rate_default_value +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (!(o instanceof DosBaselineThreshold)) {
|
||||
return false;
|
||||
}
|
||||
DosBaselineThreshold that = (DosBaselineThreshold) o;
|
||||
return Objects.equals(getSession_rate(), that.getSession_rate()) &&
|
||||
Objects.equals(getSession_rate_baseline_type(), that.getSession_rate_baseline_type()) &&
|
||||
Objects.equals(getSession_rate_default_value(), that.getSession_rate_default_value());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(getSession_rate(), getSession_rate_baseline_type(), getSession_rate_default_value());
|
||||
}
|
||||
}
|
||||
@@ -2,92 +2,117 @@ package com.zdjizhi.common;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* @author wlh
|
||||
*/
|
||||
public class DosDetectionThreshold implements Serializable {
|
||||
private String profileId;
|
||||
private String attackType;
|
||||
private ArrayList<String> serverIpList;
|
||||
private String serverIpAddr;
|
||||
private long packetsPerSec;
|
||||
private long bitsPerSec;
|
||||
private long sessionsPerSec;
|
||||
private int isValid;
|
||||
private long profile_id;
|
||||
private String attack_type;
|
||||
private ArrayList<String> server_ip_list;
|
||||
private String server_ip_addr;
|
||||
private long packets_per_sec;
|
||||
private long bits_per_sec;
|
||||
private long sessions_per_sec;
|
||||
private int is_valid;
|
||||
private int vsys_id;
|
||||
private Integer[] superior_ids;
|
||||
|
||||
public long getProfile_id() {
|
||||
return profile_id;
|
||||
}
|
||||
|
||||
public void setProfile_id(long profile_id) {
|
||||
this.profile_id = profile_id;
|
||||
}
|
||||
|
||||
public String getAttack_type() {
|
||||
return attack_type;
|
||||
}
|
||||
|
||||
public void setAttack_type(String attack_type) {
|
||||
this.attack_type = attack_type;
|
||||
}
|
||||
|
||||
public ArrayList<String> getServer_ip_list() {
|
||||
return server_ip_list;
|
||||
}
|
||||
|
||||
public void setServer_ip_list(ArrayList<String> server_ip_list) {
|
||||
this.server_ip_list = server_ip_list;
|
||||
}
|
||||
|
||||
public String getServer_ip_addr() {
|
||||
return server_ip_addr;
|
||||
}
|
||||
|
||||
public void setServer_ip_addr(String server_ip_addr) {
|
||||
this.server_ip_addr = server_ip_addr;
|
||||
}
|
||||
|
||||
public long getPackets_per_sec() {
|
||||
return packets_per_sec;
|
||||
}
|
||||
|
||||
public void setPackets_per_sec(long packets_per_sec) {
|
||||
this.packets_per_sec = packets_per_sec;
|
||||
}
|
||||
|
||||
public long getBits_per_sec() {
|
||||
return bits_per_sec;
|
||||
}
|
||||
|
||||
public void setBits_per_sec(long bits_per_sec) {
|
||||
this.bits_per_sec = bits_per_sec;
|
||||
}
|
||||
|
||||
public long getSessions_per_sec() {
|
||||
return sessions_per_sec;
|
||||
}
|
||||
|
||||
public void setSessions_per_sec(long sessions_per_sec) {
|
||||
this.sessions_per_sec = sessions_per_sec;
|
||||
}
|
||||
|
||||
public int getIs_valid() {
|
||||
return is_valid;
|
||||
}
|
||||
|
||||
public void setIs_valid(int is_valid) {
|
||||
this.is_valid = is_valid;
|
||||
}
|
||||
|
||||
public int getVsys_id() {
|
||||
return vsys_id;
|
||||
}
|
||||
|
||||
public void setVsys_id(int vsys_id) {
|
||||
this.vsys_id = vsys_id;
|
||||
}
|
||||
|
||||
public Integer[] getSuperior_ids() {
|
||||
return superior_ids;
|
||||
}
|
||||
|
||||
public void setSuperior_ids(Integer[] superior_ids) {
|
||||
this.superior_ids = superior_ids;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "DosDetectionThreshold{" +
|
||||
"profileId='" + profileId + '\'' +
|
||||
", attackType='" + attackType + '\'' +
|
||||
", serverIpList=" + serverIpList +
|
||||
", serverIpAddr='" + serverIpAddr + '\'' +
|
||||
", packetsPerSec=" + packetsPerSec +
|
||||
", bitsPerSec=" + bitsPerSec +
|
||||
", sessionsPerSec=" + sessionsPerSec +
|
||||
", isValid=" + isValid +
|
||||
"profile_id=" + profile_id +
|
||||
", attack_type='" + attack_type + '\'' +
|
||||
", server_ip_list=" + server_ip_list +
|
||||
", server_ip_addr='" + server_ip_addr + '\'' +
|
||||
", packets_per_sec=" + packets_per_sec +
|
||||
", bits_per_sec=" + bits_per_sec +
|
||||
", sessions_per_sec=" + sessions_per_sec +
|
||||
", is_valid=" + is_valid +
|
||||
", vsys_id=" + vsys_id +
|
||||
", superior_ids=" + Arrays.toString(superior_ids) +
|
||||
'}';
|
||||
}
|
||||
|
||||
public String getProfileId() {
|
||||
return profileId;
|
||||
}
|
||||
|
||||
public void setProfileId(String profileId) {
|
||||
this.profileId = profileId;
|
||||
}
|
||||
|
||||
public String getAttackType() {
|
||||
return attackType;
|
||||
}
|
||||
|
||||
public void setAttackType(String attackType) {
|
||||
this.attackType = attackType;
|
||||
}
|
||||
|
||||
public ArrayList<String> getServerIpList() {
|
||||
return serverIpList;
|
||||
}
|
||||
|
||||
public void setServerIpList(ArrayList<String> serverIpList) {
|
||||
this.serverIpList = serverIpList;
|
||||
}
|
||||
|
||||
public String getServerIpAddr() {
|
||||
return serverIpAddr;
|
||||
}
|
||||
|
||||
public void setServerIpAddr(String serverIpAddr) {
|
||||
this.serverIpAddr = serverIpAddr;
|
||||
}
|
||||
|
||||
public long getPacketsPerSec() {
|
||||
return packetsPerSec;
|
||||
}
|
||||
|
||||
public void setPacketsPerSec(long packetsPerSec) {
|
||||
this.packetsPerSec = packetsPerSec;
|
||||
}
|
||||
|
||||
public long getBitsPerSec() {
|
||||
return bitsPerSec;
|
||||
}
|
||||
|
||||
public void setBitsPerSec(long bitsPerSec) {
|
||||
this.bitsPerSec = bitsPerSec;
|
||||
}
|
||||
|
||||
public long getSessionsPerSec() {
|
||||
return sessionsPerSec;
|
||||
}
|
||||
|
||||
public void setSessionsPerSec(long sessionsPerSec) {
|
||||
this.sessionsPerSec = sessionsPerSec;
|
||||
}
|
||||
|
||||
public int getIsValid() {
|
||||
return isValid;
|
||||
}
|
||||
|
||||
public void setIsValid(int isValid) {
|
||||
this.isValid = isValid;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,11 +2,13 @@ package com.zdjizhi.common;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
public class DosEventLog implements Serializable {
|
||||
public class DosEventLog implements Serializable,Cloneable {
|
||||
|
||||
private long log_id;
|
||||
private int vsys_id;
|
||||
private long start_time;
|
||||
private long end_time;
|
||||
private long profile_id;
|
||||
private String attack_type;
|
||||
private String severity;
|
||||
private String conditions;
|
||||
@@ -26,6 +28,14 @@ public class DosEventLog implements Serializable {
|
||||
this.log_id = log_id;
|
||||
}
|
||||
|
||||
public int getVsys_id() {
|
||||
return vsys_id;
|
||||
}
|
||||
|
||||
public void setVsys_id(int vsys_id) {
|
||||
this.vsys_id = vsys_id;
|
||||
}
|
||||
|
||||
public long getStart_time() {
|
||||
return start_time;
|
||||
}
|
||||
@@ -42,6 +52,14 @@ public class DosEventLog implements Serializable {
|
||||
this.end_time = end_time;
|
||||
}
|
||||
|
||||
public long getProfile_id() {
|
||||
return profile_id;
|
||||
}
|
||||
|
||||
public void setProfile_id(long profile_id) {
|
||||
this.profile_id = profile_id;
|
||||
}
|
||||
|
||||
public String getAttack_type() {
|
||||
return attack_type;
|
||||
}
|
||||
@@ -124,10 +142,12 @@ public class DosEventLog implements Serializable {
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "dosEventLog{" +
|
||||
return "DosEventLog{" +
|
||||
"log_id=" + log_id +
|
||||
", vsys_id=" + vsys_id +
|
||||
", start_time=" + start_time +
|
||||
", end_time=" + end_time +
|
||||
", profile_id=" + profile_id +
|
||||
", attack_type='" + attack_type + '\'' +
|
||||
", severity='" + severity + '\'' +
|
||||
", conditions='" + conditions + '\'' +
|
||||
@@ -140,4 +160,9 @@ public class DosEventLog implements Serializable {
|
||||
", bit_rate=" + bit_rate +
|
||||
'}';
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object clone() throws CloneNotSupportedException {
|
||||
return super.clone();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.zdjizhi.common;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DosMetricsLog implements Serializable {
|
||||
|
||||
@@ -11,6 +12,7 @@ public class DosMetricsLog implements Serializable {
|
||||
private long packet_rate;
|
||||
private long bit_rate;
|
||||
private int partition_num;
|
||||
private int vsys_id;
|
||||
|
||||
public int getPartition_num() {
|
||||
return partition_num;
|
||||
@@ -68,6 +70,14 @@ public class DosMetricsLog implements Serializable {
|
||||
this.bit_rate = bit_rate;
|
||||
}
|
||||
|
||||
public int getVsys_id() {
|
||||
return vsys_id;
|
||||
}
|
||||
|
||||
public void setVsys_id(int vsys_id) {
|
||||
this.vsys_id = vsys_id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "DosMetricsLog{" +
|
||||
@@ -78,6 +88,7 @@ public class DosMetricsLog implements Serializable {
|
||||
", packet_rate=" + packet_rate +
|
||||
", bit_rate=" + bit_rate +
|
||||
", partition_num=" + partition_num +
|
||||
", vsys_id=" + vsys_id +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.zdjizhi.common;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Objects;
|
||||
|
||||
public class DosSketchLog implements Serializable {
|
||||
|
||||
@@ -14,6 +15,7 @@ public class DosSketchLog implements Serializable {
|
||||
private long sketch_sessions;
|
||||
private long sketch_packets;
|
||||
private long sketch_bytes;
|
||||
private int vsys_id;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
@@ -28,6 +30,7 @@ public class DosSketchLog implements Serializable {
|
||||
", sketch_sessions=" + sketch_sessions +
|
||||
", sketch_packets=" + sketch_packets +
|
||||
", sketch_bytes=" + sketch_bytes +
|
||||
", vsys_id=" + vsys_id +
|
||||
'}';
|
||||
}
|
||||
|
||||
@@ -110,4 +113,12 @@ public class DosSketchLog implements Serializable {
|
||||
public void setSketch_bytes(long sketch_bytes) {
|
||||
this.sketch_bytes = sketch_bytes;
|
||||
}
|
||||
|
||||
public int getVsys_id() {
|
||||
return vsys_id;
|
||||
}
|
||||
|
||||
public void setVsys_id(int vsys_id) {
|
||||
this.vsys_id = vsys_id;
|
||||
}
|
||||
}
|
||||
|
||||
32
src/main/java/com/zdjizhi/common/DosVsysId.java
Normal file
32
src/main/java/com/zdjizhi/common/DosVsysId.java
Normal file
@@ -0,0 +1,32 @@
|
||||
package com.zdjizhi.common;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
public class DosVsysId {
|
||||
private Integer id;
|
||||
private Integer[] superior_ids;
|
||||
|
||||
public Integer getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(Integer id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Integer[] getSuperior_ids() {
|
||||
return superior_ids;
|
||||
}
|
||||
|
||||
public void setSuperior_ids(Integer[] superior_ids) {
|
||||
this.superior_ids = superior_ids;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "DosVsysId{" +
|
||||
"id=" + id +
|
||||
", superior_ids=" + Arrays.toString(superior_ids) +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -1,11 +1,23 @@
|
||||
package com.zdjizhi.common;
|
||||
|
||||
import com.zdjizhi.utils.CommonConfigurations;
|
||||
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
|
||||
|
||||
/**
|
||||
* Created by wk on 2021/1/6.
|
||||
* @author wlh
|
||||
* @date 2021/1/6
|
||||
*/
|
||||
public class CommonConfig {
|
||||
public class FlowWriteConfig {
|
||||
|
||||
/**
|
||||
* 定位库默认分隔符
|
||||
*/
|
||||
|
||||
private static StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
|
||||
|
||||
static {
|
||||
encryptor.setPassword("galaxy");
|
||||
}
|
||||
|
||||
public static final int STREAM_EXECUTION_ENVIRONMENT_PARALLELISM = CommonConfigurations.getIntProperty("stream.execution.environment.parallelism");
|
||||
public static final String STREAM_EXECUTION_JOB_NAME = CommonConfigurations.getStringProperty("stream.execution.job.name");
|
||||
@@ -27,6 +39,7 @@ public class CommonConfig {
|
||||
|
||||
public static final String HBASE_BASELINE_TABLE_NAME = CommonConfigurations.getStringProperty("hbase.baseline.table.name");
|
||||
public static final int HBASE_BASELINE_TOTAL_NUM = CommonConfigurations.getIntProperty("hbase.baseline.total.num");
|
||||
public static final int HBASE_BASELINE_TTL = CommonConfigurations.getIntProperty("hbase.baseline.ttl");
|
||||
|
||||
public static final int FLINK_FIRST_AGG_PARALLELISM = CommonConfigurations.getIntProperty("flink.first.agg.parallelism");
|
||||
public static final int FLINK_DETECTION_MAP_PARALLELISM = CommonConfigurations.getIntProperty("flink.detection.map.parallelism");
|
||||
@@ -37,22 +50,14 @@ public class CommonConfig {
|
||||
public static final int DESTINATION_IP_PARTITION_NUM = CommonConfigurations.getIntProperty("destination.ip.partition.num");
|
||||
public static final int DATA_CENTER_ID_NUM = CommonConfigurations.getIntProperty("data.center.id.num");
|
||||
|
||||
public static final String IP_MMDB_PATH = CommonConfigurations.getStringProperty("ip.mmdb.path");
|
||||
|
||||
public static final int SENSITIVITY_THRESHOLD = CommonConfigurations.getIntProperty("sensitivity.threshold");
|
||||
|
||||
public static final double BASELINE_SESSIONS_MINOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.minor.threshold");
|
||||
public static final double BASELINE_SESSIONS_WARNING_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.warning.threshold");
|
||||
public static final double BASELINE_SESSIONS_MAJOR_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.major.threshold");
|
||||
public static final double BASELINE_SESSIONS_SEVERE_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.severe.threshold");
|
||||
public static final double BASELINE_SESSIONS_CRITICAL_THRESHOLD = CommonConfigurations.getDoubleProperty("baseline.sessions.critical.threshold");
|
||||
|
||||
public static final String BIFANG_SERVER_URI = CommonConfigurations.getStringProperty("bifang.server.uri");
|
||||
public static final String BIFANG_SERVER_TOKEN = CommonConfigurations.getStringProperty("bifang.server.token");
|
||||
public static final String BIFANG_SERVER_ENCRYPTPWD_PATH = CommonConfigurations.getStringProperty("bifang.server.encryptpwd.path");
|
||||
public static final String BIFANG_SERVER_LOGIN_PATH = CommonConfigurations.getStringProperty("bifang.server.login.path");
|
||||
public static final String BIFANG_SERVER_POLICY_THRESHOLD_PATH = CommonConfigurations.getStringProperty("bifang.server.policy.threshold.path");
|
||||
|
||||
public static final String BIFANG_SERVER_POLICY_VSYSID_PATH = CommonConfigurations.getStringProperty("bifang.server.policy.vaysid.path");
|
||||
|
||||
public static final int HTTP_POOL_MAX_CONNECTION = CommonConfigurations.getIntProperty("http.pool.max.connection");
|
||||
public static final int HTTP_POOL_MAX_PER_ROUTE = CommonConfigurations.getIntProperty("http.pool.max.per.route");
|
||||
public static final int HTTP_POOL_REQUEST_TIMEOUT = CommonConfigurations.getIntProperty("http.pool.request.timeout");
|
||||
@@ -63,7 +68,24 @@ public class CommonConfig {
|
||||
public static final int BASELINE_THRESHOLD_SCHEDULE_DAYS = CommonConfigurations.getIntProperty("baseline.threshold.schedule.days");
|
||||
|
||||
public static final String SASL_JAAS_CONFIG_USER = CommonConfigurations.getStringProperty("sasl.jaas.config.user");
|
||||
public static final String SASL_JAAS_CONFIG_PASSWORD = CommonConfigurations.getStringProperty("sasl.jaas.config.password");
|
||||
public static final String SASL_JAAS_CONFIG_PASSWORD = encryptor.decrypt(CommonConfigurations.getStringProperty("sasl.jaas.config.password"));
|
||||
|
||||
public static final int SASL_JAAS_CONFIG_FLAG = CommonConfigurations.getIntProperty("sasl.jaas.config.flag");
|
||||
|
||||
public static final String NACOS_SERVER = CommonConfigurations.getStringProperty("nacos.server.addr");
|
||||
public static final String NACOS_USERNAME = CommonConfigurations.getStringProperty("nacos.username");
|
||||
public static final String NACOS_PIN = CommonConfigurations.getStringProperty("nacos.password");
|
||||
public static final String NACOS_PUBLIC_NAMESPACE = CommonConfigurations.getStringProperty("nacos.namespace");
|
||||
public static final String NACOS_KNOWLEDGEBASE_DATA_ID = CommonConfigurations.getStringProperty("nacos.data.id");
|
||||
public static final String NACOS_PUBLIC_GROUP = CommonConfigurations.getStringProperty("nacos.group");
|
||||
public static final Integer NACOS_CONNECTION_TIMEOUT = CommonConfigurations.getIntProperty("nacos.connection.timeout");
|
||||
|
||||
|
||||
|
||||
public static final String NACOS_DOS_NAMESPACE = CommonConfigurations.getStringProperty("nacos.dos.namespace");
|
||||
public static final String NACOS_DOS_DATA_ID = CommonConfigurations.getStringProperty("nacos.dos.data.id");
|
||||
public static final String NACOS_DOS_GROUP = CommonConfigurations.getStringProperty("nacos.dos.group");
|
||||
|
||||
public static final Integer HTTP_SOCKET_TIMEOUT = CommonConfigurations.getIntProperty("http.socket.timeout");
|
||||
|
||||
}
|
||||
74
src/main/java/com/zdjizhi/common/pojo/KnowlegeBaseMeta.java
Normal file
74
src/main/java/com/zdjizhi/common/pojo/KnowlegeBaseMeta.java
Normal file
@@ -0,0 +1,74 @@
|
||||
package com.zdjizhi.common.pojo;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class KnowlegeBaseMeta implements Serializable {
|
||||
private String id;
|
||||
private String name;
|
||||
private String sha256;
|
||||
private String format;
|
||||
private String path;
|
||||
|
||||
public KnowlegeBaseMeta(String id, String name, String sha256, String format, String path) {
|
||||
this.id = id;
|
||||
this.name = name;
|
||||
this.sha256 = sha256;
|
||||
this.format = format;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getSha256() {
|
||||
return sha256;
|
||||
}
|
||||
|
||||
public void setSha256(String sha256) {
|
||||
this.sha256 = sha256;
|
||||
}
|
||||
|
||||
public String getFormat() {
|
||||
return format;
|
||||
}
|
||||
|
||||
public void setFormat(String format) {
|
||||
this.format = format;
|
||||
}
|
||||
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public void setPath(String path) {
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "KnowlegeBaseMeta{" +
|
||||
"id='" + id + '\'' +
|
||||
", name='" + name + '\'' +
|
||||
", sha256='" + sha256 + '\'' +
|
||||
", format='" + format + '\'' +
|
||||
", path='" + path + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +1,20 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import com.zdjizhi.common.DosDetectionThreshold;
|
||||
import com.zdjizhi.common.DosEventLog;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
import com.zdjizhi.utils.DateUtils;
|
||||
import com.zdjizhi.utils.HbaseUtils;
|
||||
import com.zdjizhi.utils.IpUtils;
|
||||
import com.zdjizhi.utils.SnowflakeId;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.geedgenetworks.utils.DateUtils;
|
||||
import com.geedgenetworks.utils.StringUtil;
|
||||
import com.zdjizhi.common.*;
|
||||
import com.zdjizhi.utils.*;
|
||||
import com.zdjizhi.utils.connections.nacos.NacosUtils;
|
||||
import inet.ipaddr.IPAddress;
|
||||
import inet.ipaddr.IPAddressString;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
|
||||
import org.apache.flink.api.common.functions.RichMapFunction;
|
||||
import org.apache.flink.api.java.tuple.Tuple2;
|
||||
import org.apache.flink.configuration.Configuration;
|
||||
import org.apache.flink.shaded.guava18.com.google.common.collect.TreeRangeMap;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.apache.flink.streaming.api.functions.ProcessFunction;
|
||||
import org.apache.flink.util.Collector;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.text.NumberFormat;
|
||||
@@ -29,28 +26,38 @@ import java.util.concurrent.TimeUnit;
|
||||
/**
|
||||
* @author wlh
|
||||
*/
|
||||
public class DosDetection extends RichMapFunction<DosSketchLog, DosEventLog> {
|
||||
public class DosDetection extends ProcessFunction<DosSketchLog, DosEventLog> {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(DosDetection.class);
|
||||
private static Map<String, Map<String, Tuple2<ArrayList<Integer>, Integer>>> baselineMap = new HashMap<>();
|
||||
private final static int BASELINE_SIZE = 144;
|
||||
private static final Log logger = LogFactory.get();
|
||||
private static Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
|
||||
private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
|
||||
private TreeRangeMap<IPAddress, Map<String, DosDetectionThreshold>> thresholdRangeMap;
|
||||
private HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> thresholdRangeMap;
|
||||
|
||||
private final static int BASELINE_SIZE = 144;
|
||||
private final static int STATIC_CONDITION_TYPE = 1;
|
||||
private final static int BASELINE_CONDITION_TYPE = 2;
|
||||
private final static int SENSITIVITY_CONDITION_TYPE = 3;
|
||||
|
||||
private final static String SESSIONS_TAG = "sessions";
|
||||
private final static String PACKETS_TAG = "packets";
|
||||
private final static String BITS_TAG = "bits";
|
||||
|
||||
private final static int OTHER_BASELINE_TYPE = 3;
|
||||
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public void open(Configuration parameters) {
|
||||
ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(2,
|
||||
new BasicThreadFactory.Builder().namingPattern("Dos-Detection-%d").daemon(true).build());
|
||||
try {
|
||||
executorService.scheduleAtFixedRate(() -> {
|
||||
//do something
|
||||
thresholdRangeMap = ParseStaticThreshold.createStaticThreshold();
|
||||
}, 0, CommonConfig.STATIC_THRESHOLD_SCHEDULE_MINUTES, TimeUnit.MINUTES);
|
||||
super.open(parameters);
|
||||
executorService.scheduleAtFixedRate(() -> thresholdRangeMap = ParseStaticThreshold.createStaticThreshold(), 0,
|
||||
FlowWriteConfig.STATIC_THRESHOLD_SCHEDULE_MINUTES, TimeUnit.MINUTES);
|
||||
|
||||
executorService.scheduleAtFixedRate(() -> {
|
||||
//do something
|
||||
baselineMap = HbaseUtils.readFromHbase();
|
||||
}, 0, CommonConfig.BASELINE_THRESHOLD_SCHEDULE_DAYS, TimeUnit.DAYS);
|
||||
executorService.scheduleAtFixedRate(() -> baselineMap = ParseBaselineThreshold.readFromHbase(), 0,
|
||||
FlowWriteConfig.BASELINE_THRESHOLD_SCHEDULE_DAYS, TimeUnit.DAYS);
|
||||
} catch (Exception e) {
|
||||
logger.error("定时器任务执行失败", e);
|
||||
}
|
||||
@@ -58,100 +65,137 @@ public class DosDetection extends RichMapFunction<DosSketchLog, DosEventLog> {
|
||||
}
|
||||
|
||||
@Override
|
||||
public DosEventLog map(DosSketchLog value) {
|
||||
public void processElement(DosSketchLog value, Context ctx, Collector<DosEventLog> out) throws Exception {
|
||||
DosEventLog finalResult = null;
|
||||
try {
|
||||
String destinationIp = value.getDestination_ip();
|
||||
int vsysId = value.getVsys_id();
|
||||
String key = destinationIp + "-" + vsysId;
|
||||
String attackType = value.getAttack_type();
|
||||
IPAddress destinationIpAddress = new IPAddressString(destinationIp).getAddress();
|
||||
Map<String, DosDetectionThreshold> thresholdMap = thresholdRangeMap.get(destinationIpAddress);
|
||||
logger.debug("当前判断IP:{}, 类型: {}", destinationIp, attackType);
|
||||
if (baselineMap != null && baselineMap.containsKey(destinationIp) && thresholdMap == null) {
|
||||
finalResult = getDosEventLogByBaseline(value, destinationIp, attackType).f1;
|
||||
} else if (baselineMap != null && !baselineMap.containsKey(destinationIp) && thresholdMap != null) {
|
||||
finalResult = getDosEventLogByStaticThreshold(value, thresholdMap).f1;
|
||||
} else if (baselineMap != null && baselineMap.containsKey(destinationIp) && thresholdMap != null) {
|
||||
Tuple2<Severity, DosEventLog> eventLogByBaseline = getDosEventLogByBaseline(value, destinationIp, attackType);
|
||||
Tuple2<Severity, DosEventLog> eventLogByStaticThreshold = getDosEventLogByStaticThreshold(value, thresholdMap);
|
||||
finalResult = mergeFinalResult(eventLogByBaseline, eventLogByStaticThreshold);
|
||||
|
||||
DosDetectionThreshold threshold = null;
|
||||
if (thresholdRangeMap.containsKey(vsysId)) {
|
||||
threshold = thresholdRangeMap.get(vsysId).getOrDefault(attackType, TreeRangeMap.create()).get(destinationIpAddress);
|
||||
}
|
||||
|
||||
logger.debug("当前判断IP:{}, 类型: {}", key, attackType);
|
||||
if (threshold == null && baselineMap.containsKey(key)) {
|
||||
finalResult = getDosEventLogByBaseline(value, key);
|
||||
} else if (threshold == null && !baselineMap.containsKey(key)) {
|
||||
finalResult = getDosEventLogBySensitivityThreshold(value);
|
||||
} else if (threshold != null) {
|
||||
finalResult = getDosEventLogByStaticThreshold(value, threshold);
|
||||
} else {
|
||||
logger.debug("未获取到当前server IP:{} 类型 {} 静态阈值 和 baseline", destinationIp, attackType);
|
||||
logger.debug("未获取到当前server IP:{} 类型 {} 静态阈值 和 baseline", key, attackType);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("判定失败\n {} \n{}", value, e);
|
||||
}
|
||||
return finalResult;
|
||||
}
|
||||
|
||||
private DosEventLog mergeFinalResult(Tuple2<Severity, DosEventLog> eventLogByBaseline, Tuple2<Severity, DosEventLog> eventLogByStaticThreshold) {
|
||||
if (eventLogByBaseline.f0.score > eventLogByStaticThreshold.f0.score) {
|
||||
logger.info("merge eventLogByBaseline {} \neventLogByStaticThreshold {}",eventLogByBaseline,eventLogByStaticThreshold);
|
||||
return mergeCondition(eventLogByBaseline.f1, eventLogByStaticThreshold.f1);
|
||||
} else {
|
||||
logger.info("merge eventLogByStaticThreshold {} \neventLogByBaseline {}",eventLogByStaticThreshold,eventLogByBaseline);
|
||||
return mergeCondition(eventLogByStaticThreshold.f1, eventLogByBaseline.f1);
|
||||
if (finalResult != null) {
|
||||
out.collect(finalResult);
|
||||
}
|
||||
}
|
||||
|
||||
private DosEventLog mergeCondition(DosEventLog log1, DosEventLog log2) {
|
||||
if (log1 != null && log2 != null) {
|
||||
String conditions1 = log1.getConditions();
|
||||
String conditions2 = log2.getConditions();
|
||||
log1.setConditions(conditions1 + " and " + conditions2);
|
||||
}else if (log1 == null && log2 != null){
|
||||
log1 = log2;
|
||||
}
|
||||
return log1;
|
||||
}
|
||||
|
||||
private Tuple2<Severity, DosEventLog> getDosEventLogByBaseline(DosSketchLog value, String destinationIp, String attackType) {
|
||||
Tuple2<ArrayList<Integer>, Integer> floodTypeTup = baselineMap.get(destinationIp).get(attackType);
|
||||
Integer base = getBaseValue(floodTypeTup, value);
|
||||
private DosEventLog getDosEventLogBySensitivityThreshold(DosSketchLog value) {
|
||||
long sketchSessions = value.getSketch_sessions();
|
||||
return sketchSessions > CommonConfig.SENSITIVITY_THRESHOLD ? getDosEventLog(value, base, sketchSessions - base, "baseline"):Tuple2.of(Severity.NORMAL, null);
|
||||
Integer staticSensitivityThreshold = NacosUtils.getIntProperty("static.sensitivity.threshold");
|
||||
long diff = sketchSessions - staticSensitivityThreshold;
|
||||
return getDosEventLog(value, staticSensitivityThreshold, diff, 0, SENSITIVITY_CONDITION_TYPE, SESSIONS_TAG);
|
||||
}
|
||||
|
||||
private Tuple2<Severity, DosEventLog> getDosEventLogByStaticThreshold(DosSketchLog value, Map<String, DosDetectionThreshold> thresholdMap) {
|
||||
Tuple2<Severity, DosEventLog> result = Tuple2.of(Severity.NORMAL, null);
|
||||
private DosEventLog getDosEventLogByBaseline(DosSketchLog value, String key) {
|
||||
String attackType = value.getAttack_type();
|
||||
if (thresholdMap.containsKey(attackType)) {
|
||||
DosDetectionThreshold threshold = thresholdMap.get(attackType);
|
||||
long base = threshold.getSessionsPerSec();
|
||||
long diff = value.getSketch_sessions() - base;
|
||||
result = getDosEventLog(value, base, diff, "static");
|
||||
long sketchSessions = value.getSketch_sessions();
|
||||
DosBaselineThreshold dosBaselineThreshold = baselineMap.get(key).get(attackType);
|
||||
Integer base = getBaseValue(dosBaselineThreshold, value);
|
||||
long diff = sketchSessions - base;
|
||||
return getDosEventLog(value, base, diff, 0, BASELINE_CONDITION_TYPE, SESSIONS_TAG);
|
||||
}
|
||||
|
||||
private DosEventLog getDosEventLogByStaticThreshold(DosSketchLog value, DosDetectionThreshold threshold) throws CloneNotSupportedException {
|
||||
long sessionBase = threshold.getSessions_per_sec();
|
||||
long pktBase = threshold.getPackets_per_sec();
|
||||
long bitBase = threshold.getBits_per_sec();
|
||||
|
||||
long diffSession = value.getSketch_sessions() - sessionBase;
|
||||
long diffPkt = value.getSketch_packets() - pktBase;
|
||||
long diffByte = value.getSketch_bytes() - bitBase;
|
||||
|
||||
double diffSessionPercent = 0.0;
|
||||
double diffPktPercent = 0.0;
|
||||
double diffBitPercent = 0.0;
|
||||
//todo 代码Review发现该部分存在bug,23.11版本做修复,需测试。
|
||||
if (sessionBase > 0) {
|
||||
diffSessionPercent = getDiffPercent(diffSession, sessionBase) * 100;
|
||||
}
|
||||
if (pktBase > 0) {
|
||||
diffPktPercent = getDiffPercent(diffPkt, pktBase) * 100;
|
||||
}
|
||||
if (bitBase > 0) {
|
||||
diffBitPercent = getDiffPercent(diffByte, bitBase) * 100;
|
||||
}
|
||||
|
||||
long profileId = 0;
|
||||
DosEventLog result = null;
|
||||
|
||||
if (diffSessionPercent >= diffPktPercent && diffSessionPercent >= diffBitPercent) {
|
||||
profileId = threshold.getProfile_id();
|
||||
result = getDosEventLog(value, sessionBase, diffSession, profileId, STATIC_CONDITION_TYPE, SESSIONS_TAG);
|
||||
} else if (diffPktPercent >= diffSessionPercent && diffPktPercent >= diffBitPercent) {
|
||||
profileId = threshold.getProfile_id();
|
||||
result = getDosEventLog(value, pktBase, diffPkt, profileId, STATIC_CONDITION_TYPE, PACKETS_TAG);
|
||||
} else if (diffBitPercent >= diffPktPercent && diffBitPercent >= diffSessionPercent) {
|
||||
profileId = threshold.getProfile_id();
|
||||
result = getDosEventLog(value, bitBase, diffByte, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private Tuple2<Severity, DosEventLog> getDosEventLog(DosSketchLog value, long base, long diff, String tag) {
|
||||
private DosEventLog getDosEventLog(DosSketchLog value, long base, long diff, long profileId, int type, String tag) {
|
||||
DosEventLog result = null;
|
||||
String destinationIp = value.getDestination_ip();
|
||||
String attackType = value.getAttack_type();
|
||||
Severity severity = Severity.NORMAL;
|
||||
if (diff > 0 && base != 0) {
|
||||
double percent = getDiffPercent(diff, base);
|
||||
severity = judgeSeverity(percent);
|
||||
Severity severity = judgeSeverity(percent);
|
||||
Integer staticSensitivityThreshold = NacosUtils.getIntProperty("static.sensitivity.threshold");
|
||||
if (severity != Severity.NORMAL) {
|
||||
result = getResult(value, severity, percent, tag);
|
||||
logger.info("检测到当前server IP {} 存在 {} 异常,超出基线{} {}倍,日志详情\n {}", destinationIp,attackType,base,percent,result);
|
||||
if (type == BASELINE_CONDITION_TYPE && percent < NacosUtils.getDoubleProperty("baseline.sensitivity.threshold")) {
|
||||
logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过基线敏感阈值,日志详情\n{}", destinationIp, attackType, base, percent, value);
|
||||
} else if ((type == BASELINE_CONDITION_TYPE || type == SENSITIVITY_CONDITION_TYPE) && value.getSketch_sessions() < staticSensitivityThreshold) {
|
||||
logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过静态敏感阈值,日志详情\n{}", destinationIp, attackType, base, percent, value);
|
||||
} else {
|
||||
// result = getResult(value, base, profileId, severity, percent+1, type, tag);
|
||||
result = getResult(value, base, profileId, severity, percent, type, tag);
|
||||
if (type == SENSITIVITY_CONDITION_TYPE) {
|
||||
result.setSeverity(Severity.MAJOR.severity);
|
||||
}
|
||||
logger.info("检测到当前server IP {} 存在 {} 异常,超出基线{} {}倍,基于{}:{}检测,日志详情\n {}", destinationIp, attackType, base, percent, type, tag, result);
|
||||
}
|
||||
} else {
|
||||
logger.debug("当前server IP:{} 未出现 {} 异常,日志详情 {}", destinationIp, attackType, value.toString());
|
||||
logger.debug("当前server IP:{} 未出现 {} 异常,日志详情 {}", destinationIp, attackType, value);
|
||||
}
|
||||
}
|
||||
return Tuple2.of(severity, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
private DosEventLog getResult(DosSketchLog value, Severity severity, double percent, String tag) {
|
||||
private DosEventLog getResult(DosSketchLog value, long base, long profileId, Severity severity, double percent, int type, String tag) {
|
||||
DosEventLog dosEventLog = new DosEventLog();
|
||||
dosEventLog.setLog_id(SnowflakeId.generateId());
|
||||
dosEventLog.setVsys_id(value.getVsys_id());
|
||||
dosEventLog.setStart_time(value.getSketch_start_time());
|
||||
dosEventLog.setEnd_time(value.getSketch_start_time() + CommonConfig.FLINK_WINDOW_MAX_TIME);
|
||||
dosEventLog.setEnd_time(value.getSketch_start_time() + value.getSketch_duration());
|
||||
dosEventLog.setProfile_id(profileId);
|
||||
dosEventLog.setAttack_type(value.getAttack_type());
|
||||
dosEventLog.setSeverity(severity.severity);
|
||||
dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), value.getSketch_sessions(), tag));
|
||||
dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), base, value.getSketch_sessions(), type, tag, dosEventLog));
|
||||
// dosEventLog.setConditions(getConditions(percent, base, value.getSketch_sessions(), type, tag,dosEventLog));
|
||||
dosEventLog.setDestination_ip(value.getDestination_ip());
|
||||
dosEventLog.setDestination_country(IpUtils.ipLookup.countryLookup(value.getDestination_ip()));
|
||||
dosEventLog.setDestination_country(IpLookupUtils.getCountryLookup(value.getDestination_ip()));
|
||||
String ipList = value.getSource_ip();
|
||||
dosEventLog.setSource_ip_list(ipList);
|
||||
dosEventLog.setSource_country_list(getSourceCountryList(ipList));
|
||||
@@ -161,12 +205,13 @@ public class DosDetection extends RichMapFunction<DosSketchLog, DosEventLog> {
|
||||
return dosEventLog;
|
||||
}
|
||||
|
||||
private Integer getBaseValue(Tuple2<ArrayList<Integer>, Integer> floodTypeTup, DosSketchLog value) {
|
||||
private Integer getBaseValue(DosBaselineThreshold dosBaselineThreshold, DosSketchLog value) {
|
||||
Integer base = 0;
|
||||
try {
|
||||
if (floodTypeTup != null) {
|
||||
ArrayList<Integer> baselines = floodTypeTup.f0;
|
||||
Integer defaultVaule = floodTypeTup.f1;
|
||||
if (dosBaselineThreshold != null) {
|
||||
ArrayList<Integer> baselines = dosBaselineThreshold.getSession_rate();
|
||||
Integer defaultVaule = dosBaselineThreshold.getSession_rate_default_value();
|
||||
Integer sessionRateBaselineType = dosBaselineThreshold.getSession_rate_baseline_type();
|
||||
if (baselines != null && baselines.size() == BASELINE_SIZE) {
|
||||
int timeIndex = getCurrentTimeIndex(value.getSketch_start_time());
|
||||
base = baselines.get(timeIndex);
|
||||
@@ -174,6 +219,9 @@ public class DosDetection extends RichMapFunction<DosSketchLog, DosEventLog> {
|
||||
logger.debug("获取到当前IP: {},类型: {} baseline值为0,替换为P95观测值{}", value.getDestination_ip(), value.getAttack_type(), defaultVaule);
|
||||
base = defaultVaule;
|
||||
}
|
||||
if (sessionRateBaselineType == OTHER_BASELINE_TYPE && base < NacosUtils.getIntProperty("static.sensitivity.threshold")) {
|
||||
base = NacosUtils.getIntProperty("static.sensitivity.threshold");
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
@@ -182,46 +230,93 @@ public class DosDetection extends RichMapFunction<DosSketchLog, DosEventLog> {
|
||||
return base;
|
||||
}
|
||||
|
||||
private String getConditions(String percent, long sessions, String tag) {
|
||||
switch (tag) {
|
||||
case "baseline":
|
||||
return "sessions > " + percent + " of baseline";
|
||||
case "static":
|
||||
return "sessions > " + sessions + " sessions/s";
|
||||
private String getConditions(String percent, long base, long sessions, int type, String tag, DosEventLog dosEventLog) {
|
||||
int condition = 0;
|
||||
if ("Minor".equals(dosEventLog.getSeverity())) {
|
||||
condition = 50;
|
||||
} else if ("Warning".equals(dosEventLog.getSeverity())) {
|
||||
condition = 100;
|
||||
} else if ("Major".equals(dosEventLog.getSeverity())) {
|
||||
condition = 250;
|
||||
} else if ("Severe".equals(dosEventLog.getSeverity())) {
|
||||
condition = 500;
|
||||
} else if ("Critical".equals(dosEventLog.getSeverity())) {
|
||||
condition = 800;
|
||||
}
|
||||
switch (type) {
|
||||
case STATIC_CONDITION_TYPE:
|
||||
return "Rate > " +
|
||||
base + " " +
|
||||
tag + "/s" + "(>" + condition + "%)";
|
||||
case BASELINE_CONDITION_TYPE:
|
||||
return tag + " > " +
|
||||
percent + " of baseline";
|
||||
case SENSITIVITY_CONDITION_TYPE:
|
||||
return String.valueOf(sessions) + " " +
|
||||
tag + "/s Unusually high " +
|
||||
StringUtils.capitalize(tag);
|
||||
default:
|
||||
return null;
|
||||
throw new IllegalArgumentException("Illegal Argument type:" + type + ", known types = [1,2,3]");
|
||||
}
|
||||
}
|
||||
|
||||
private String getSourceCountryList(String sourceIpList) {
|
||||
String[] ipArr = sourceIpList.split(",");
|
||||
HashSet<String> countrySet = new HashSet<>();
|
||||
for (String ip : ipArr) {
|
||||
countrySet.add(IpUtils.ipLookup.countryLookup(ip));
|
||||
if (StringUtil.isNotBlank(sourceIpList)) {
|
||||
String countryList;
|
||||
try {
|
||||
String[] ipArr = sourceIpList.split(",");
|
||||
HashSet<String> countrySet = new HashSet<>();
|
||||
for (String ip : ipArr) {
|
||||
String country = IpLookupUtils.getCountryLookup(ip);
|
||||
if (StringUtil.isNotBlank(country)) {
|
||||
countrySet.add(country);
|
||||
}
|
||||
}
|
||||
countryList = StringUtils.join(countrySet, ", ");
|
||||
return countryList;
|
||||
} catch (Exception e) {
|
||||
logger.error("{} source IP lists 获取国家失败", sourceIpList, e);
|
||||
return StringUtil.EMPTY;
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("Illegal Argument sourceIpList = null");
|
||||
}
|
||||
return StringUtils.join(countrySet, ",");
|
||||
}
|
||||
|
||||
private int getCurrentTimeIndex(long sketchStartTime) {
|
||||
long currentDayTime = DateUtils.getTimeFloor(new Date(sketchStartTime * 1000L), "P1D").getTime()/1000;
|
||||
long indexLong = (sketchStartTime - currentDayTime) / 600;
|
||||
return Integer.parseInt(Long.toString(indexLong));
|
||||
int index = 0;
|
||||
try {
|
||||
long currentDayTime = DateUtils.getTimeFloor(new Date(sketchStartTime * 1000L), "P1D").getTime() / 1000;
|
||||
long indexLong = (sketchStartTime - currentDayTime) / (86400 / BASELINE_SIZE);
|
||||
index = Integer.parseInt(Long.toString(indexLong));
|
||||
} catch (Exception e) {
|
||||
logger.error("获取time index失败", e);
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private Double getDiffPercent(long diff, long base) {
|
||||
return BigDecimal.valueOf((float)diff/base).setScale(4, BigDecimal.ROUND_HALF_UP).doubleValue();
|
||||
try {
|
||||
return BigDecimal.valueOf((float) diff / base).setScale(4, BigDecimal.ROUND_HALF_UP).doubleValue();
|
||||
} catch (Exception e) {
|
||||
logger.info("当前阈值为0,进行下一阈值条件判断", e);
|
||||
return 0.0;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private Severity judgeSeverity(double diffPercent) {
|
||||
if (diffPercent >= CommonConfig.BASELINE_SESSIONS_MINOR_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_WARNING_THRESHOLD) {
|
||||
if (diffPercent >= NacosUtils.getDoubleProperty("baseline.sessions.minor.threshold") && diffPercent < NacosUtils.getDoubleProperty("baseline.sessions.warning.threshold")) {
|
||||
return Severity.MINOR;
|
||||
} else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_WARNING_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD) {
|
||||
} else if (diffPercent >= NacosUtils.getDoubleProperty("baseline.sessions.warning.threshold") && diffPercent < NacosUtils.getDoubleProperty("baseline.sessions.major.threshold")) {
|
||||
return Severity.WARNING;
|
||||
} else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_MAJOR_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD) {
|
||||
} else if (diffPercent >= NacosUtils.getDoubleProperty("baseline.sessions.major.threshold") && diffPercent < NacosUtils.getDoubleProperty("baseline.sessions.severe.threshold")) {
|
||||
return Severity.MAJOR;
|
||||
} else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_SEVERE_THRESHOLD && diffPercent < CommonConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD) {
|
||||
} else if (diffPercent >= NacosUtils.getDoubleProperty("baseline.sessions.severe.threshold") && diffPercent < NacosUtils.getDoubleProperty("baseline.sessions.critical.threshold")) {
|
||||
return Severity.SEVERE;
|
||||
} else if (diffPercent >= CommonConfig.BASELINE_SESSIONS_CRITICAL_THRESHOLD) {
|
||||
} else if (diffPercent >= NacosUtils.getDoubleProperty("baseline.sessions.critical.threshold")) {
|
||||
return Severity.CRITICAL;
|
||||
} else {
|
||||
return Severity.NORMAL;
|
||||
@@ -232,24 +327,22 @@ public class DosDetection extends RichMapFunction<DosSketchLog, DosEventLog> {
|
||||
/**
|
||||
* 判断严重程度枚举类型
|
||||
*/
|
||||
CRITICAL("Critical", 5),
|
||||
SEVERE("Severe", 4),
|
||||
MAJOR("Major", 3),
|
||||
WARNING("Warning", 2),
|
||||
MINOR("Minor", 1),
|
||||
NORMAL("Normal", 0);
|
||||
CRITICAL("Critical"),
|
||||
SEVERE("Severe"),
|
||||
MAJOR("Major"),
|
||||
WARNING("Warning"),
|
||||
MINOR("Minor"),
|
||||
NORMAL("Normal");
|
||||
|
||||
private final String severity;
|
||||
private final int score;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.severity;
|
||||
}
|
||||
|
||||
Severity(String severity, int score) {
|
||||
Severity(String severity) {
|
||||
this.severity = severity;
|
||||
this.score = score;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.flink.api.java.tuple.Tuple2;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.flink.api.java.tuple.Tuple3;
|
||||
import org.apache.flink.api.java.tuple.Tuple6;
|
||||
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
|
||||
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
|
||||
import org.apache.flink.util.Collector;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.HashSet;
|
||||
|
||||
@@ -18,11 +18,15 @@ import static com.zdjizhi.sink.OutputStreamSink.outputTag;
|
||||
/**
|
||||
* @author 94976
|
||||
*/
|
||||
public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosSketchLog, Tuple2<String,String>, TimeWindow> {
|
||||
public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosSketchLog, Tuple3<String,String,Integer>, TimeWindow> {
|
||||
|
||||
// private static final Logger logger = LoggerFactory.getLogger(EtlProcessFunction.class);
|
||||
private static final Log logger = LogFactory.get();
|
||||
private static final String EMPTY_SOURCE_IP_IPV4 = "0.0.0.0";
|
||||
private static final String EMPTY_SOURCE_IP_IPV6 = "::";
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(EtlProcessFunction.class);
|
||||
@Override
|
||||
public void process(Tuple2<String, String> keys,
|
||||
public void process(Tuple3<String,String,Integer> keys,
|
||||
Context context, Iterable<DosSketchLog> elements,
|
||||
Collector<DosSketchLog> out) {
|
||||
DosSketchLog middleResult = getMiddleResult(keys, elements);
|
||||
@@ -37,7 +41,7 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
|
||||
}
|
||||
}
|
||||
|
||||
private DosSketchLog getMiddleResult(Tuple2<String, String> keys,Iterable<DosSketchLog> elements){
|
||||
private DosSketchLog getMiddleResult(Tuple3<String,String,Integer> keys,Iterable<DosSketchLog> elements){
|
||||
|
||||
DosSketchLog midResuleLog = new DosSketchLog();
|
||||
Tuple6<Long, Long, Long,String,Long,Long> values = sketchAggregate(elements);
|
||||
@@ -45,6 +49,7 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
|
||||
if (values != null){
|
||||
midResuleLog.setAttack_type(keys.f0);
|
||||
midResuleLog.setDestination_ip(keys.f1);
|
||||
midResuleLog.setVsys_id(keys.f2);
|
||||
midResuleLog.setSketch_start_time(values.f4);
|
||||
midResuleLog.setSketch_duration(values.f5);
|
||||
midResuleLog.setSource_ip(values.f3);
|
||||
@@ -60,29 +65,32 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
|
||||
}
|
||||
|
||||
private Tuple6<Long, Long, Long,String,Long,Long> sketchAggregate(Iterable<DosSketchLog> elements){
|
||||
int cnt = 1;
|
||||
long sessions = 0;
|
||||
long packets = 0 ;
|
||||
long bytes = 0;
|
||||
long startTime = 0;
|
||||
long startTime = System.currentTimeMillis()/1000;
|
||||
long endTime = System.currentTimeMillis()/1000;
|
||||
long duration = 0;
|
||||
HashSet<String> sourceIpSet = new HashSet<>();
|
||||
try {
|
||||
for (DosSketchLog newSketchLog : elements){
|
||||
sessions += newSketchLog.getSketch_sessions();
|
||||
packets += newSketchLog.getSketch_packets();
|
||||
bytes += newSketchLog.getSketch_bytes();
|
||||
startTime = newSketchLog.getSketch_start_time();
|
||||
duration = newSketchLog.getSketch_duration();
|
||||
cnt += 1;
|
||||
if (sourceIpSet.size() < CommonConfig.SOURCE_IP_LIST_LIMIT){
|
||||
sourceIpSet.add(newSketchLog.getSource_ip());
|
||||
String sourceIp = newSketchLog.getSource_ip();
|
||||
if (StringUtils.equals(sourceIp,EMPTY_SOURCE_IP_IPV4) || StringUtils.equals(sourceIp,EMPTY_SOURCE_IP_IPV6)){
|
||||
sessions += newSketchLog.getSketch_sessions();
|
||||
packets += newSketchLog.getSketch_packets();
|
||||
bytes += newSketchLog.getSketch_bytes();
|
||||
startTime = newSketchLog.getSketch_start_time() > startTime ? startTime : newSketchLog.getSketch_start_time();
|
||||
endTime = newSketchLog.getSketch_start_time() > endTime ? newSketchLog.getSketch_start_time() : endTime;
|
||||
duration = endTime - startTime == 0 ? 5 : endTime - startTime;
|
||||
}else {
|
||||
if (sourceIpSet.size() < FlowWriteConfig.SOURCE_IP_LIST_LIMIT){
|
||||
sourceIpSet.add(sourceIp);
|
||||
}
|
||||
}
|
||||
}
|
||||
String sourceIpList = StringUtils.join(sourceIpSet, ",");
|
||||
// return Tuple6.of(sessions/cnt/duration,packets/cnt/duration,bytes/cnt/duration,sourceIpList,startTime,duration);
|
||||
return Tuple6.of(sessions/CommonConfig.FLINK_WINDOW_MAX_TIME,packets/CommonConfig.FLINK_WINDOW_MAX_TIME,
|
||||
bytes*8/CommonConfig.FLINK_WINDOW_MAX_TIME,sourceIpList,startTime,duration);
|
||||
return Tuple6.of(sessions/ FlowWriteConfig.FLINK_WINDOW_MAX_TIME,packets/ FlowWriteConfig.FLINK_WINDOW_MAX_TIME,
|
||||
bytes*8/ FlowWriteConfig.FLINK_WINDOW_MAX_TIME,sourceIpList,startTime,duration);
|
||||
}catch (Exception e){
|
||||
logger.error("聚合中间结果集失败 {}",e);
|
||||
}
|
||||
|
||||
87
src/main/java/com/zdjizhi/etl/ParseBaselineThreshold.java
Normal file
87
src/main/java/com/zdjizhi/etl/ParseBaselineThreshold.java
Normal file
@@ -0,0 +1,87 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.geedgenetworks.utils.DateUtils;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosBaselineThreshold;
|
||||
import com.zdjizhi.utils.HbaseUtils;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.client.*;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
|
||||
public class ParseBaselineThreshold {
|
||||
|
||||
|
||||
private static final Log logger = LogFactory.get();
|
||||
private static ArrayList<String> floodTypeList = new ArrayList<>();
|
||||
|
||||
private static Table table = null;
|
||||
private static Scan scan = null;
|
||||
|
||||
static {
|
||||
floodTypeList.add("TCP SYN Flood");
|
||||
floodTypeList.add("UDP Flood");
|
||||
floodTypeList.add("ICMP Flood");
|
||||
floodTypeList.add("DNS Flood");
|
||||
}
|
||||
|
||||
private static void prepareHbaseEnv() throws IOException {
|
||||
org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
|
||||
|
||||
config.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM);
|
||||
config.set("hbase.client.retries.number", "3");
|
||||
config.set("hbase.bulkload.retries.number", "3");
|
||||
config.set("zookeeper.recovery.retry", "3");
|
||||
config.set("hbase.defaults.for.version", "2.2.3");
|
||||
config.set("hbase.defaults.for.version.skip", "true");
|
||||
config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, FlowWriteConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
|
||||
config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, FlowWriteConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
|
||||
|
||||
TableName tableName = TableName.valueOf(FlowWriteConfig.HBASE_BASELINE_TABLE_NAME);
|
||||
Connection conn = ConnectionFactory.createConnection(config);
|
||||
table = conn.getTable(tableName);
|
||||
long currentTimeMillis = System.currentTimeMillis();
|
||||
scan = new Scan()
|
||||
.setAllowPartialResults(true)
|
||||
.setTimeRange(DateUtils.getSomeDate(new Date(currentTimeMillis), Math.negateExact(FlowWriteConfig.HBASE_BASELINE_TTL)).getTime(), currentTimeMillis)
|
||||
.setLimit(FlowWriteConfig.HBASE_BASELINE_TOTAL_NUM);
|
||||
logger.info("连接hbase成功,正在读取baseline数据");
|
||||
}
|
||||
|
||||
|
||||
static Map<String, Map<String, DosBaselineThreshold>> readFromHbase() {
|
||||
Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
|
||||
try {
|
||||
prepareHbaseEnv();
|
||||
logger.info("开始读取baseline数据");
|
||||
ResultScanner rs = table.getScanner(scan);
|
||||
for (Result result : rs) {
|
||||
Map<String, DosBaselineThreshold> floodTypeMap = new HashMap<>();
|
||||
String rowkey = Bytes.toString(result.getRow());
|
||||
for (String type:floodTypeList){
|
||||
DosBaselineThreshold baselineThreshold = new DosBaselineThreshold();
|
||||
ArrayList<Integer> sessionRate = HbaseUtils.getArraylist(result, type, "session_rate");
|
||||
if (sessionRate != null && !sessionRate.isEmpty()){
|
||||
Integer defaultValue = HbaseUtils.getIntegerValue(result, type, "session_rate_default_value");
|
||||
Integer rateBaselineType = HbaseUtils.getIntegerValue(result, type, "session_rate_baseline_type");
|
||||
baselineThreshold.setSession_rate(sessionRate);
|
||||
baselineThreshold.setSession_rate_default_value(defaultValue);
|
||||
baselineThreshold.setSession_rate_baseline_type(rateBaselineType);
|
||||
floodTypeMap.put(type,baselineThreshold);
|
||||
}
|
||||
}
|
||||
baselineMap.put(rowkey, floodTypeMap);
|
||||
}
|
||||
logger.info("格式化baseline数据成功,读取IP共:{}", baselineMap.size());
|
||||
} catch (Exception e) {
|
||||
logger.error("读取hbase数据失败", e);
|
||||
}
|
||||
return baselineMap;
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,11 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import com.fasterxml.jackson.databind.JavaType;
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import com.alibaba.fastjson2.JSONObject;
|
||||
import com.geedgenetworks.utils.StringUtil;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
import com.zdjizhi.source.DosSketchSource;
|
||||
import com.zdjizhi.utils.FlinkEnvironmentUtils;
|
||||
import com.zdjizhi.utils.JsonMapper;
|
||||
import com.zdjizhi.utils.StringUtil;
|
||||
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
|
||||
import org.apache.flink.api.common.functions.FlatMapFunction;
|
||||
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
|
||||
@@ -15,8 +14,7 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* @author wlh
|
||||
@@ -24,22 +22,19 @@ import java.util.HashMap;
|
||||
public class ParseSketchLog {
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(ParseSketchLog.class);
|
||||
private static JsonMapper jsonMapperInstance = JsonMapper.getInstance();
|
||||
private static JavaType hashmapJsonType = jsonMapperInstance.createCollectionType(HashMap.class, String.class, Object.class);
|
||||
private static JavaType listType = jsonMapperInstance.createCollectionType(ArrayList.class, HashMap.class);
|
||||
|
||||
|
||||
public static SingleOutputStreamOperator<DosSketchLog> getSketchSource(){
|
||||
public static SingleOutputStreamOperator<DosSketchLog> getSketchSource() {
|
||||
return flatSketchSource().assignTimestampsAndWatermarks(createWatermarkStrategy());
|
||||
}
|
||||
|
||||
private static SingleOutputStreamOperator<DosSketchLog> flatSketchSource(){
|
||||
private static SingleOutputStreamOperator<DosSketchLog> flatSketchSource() {
|
||||
return DosSketchSource.createDosSketchSource().flatMap(new FlatSketchLog());
|
||||
}
|
||||
|
||||
private static WatermarkStrategy<DosSketchLog> createWatermarkStrategy(){
|
||||
private static WatermarkStrategy<DosSketchLog> createWatermarkStrategy() {
|
||||
return WatermarkStrategy
|
||||
.<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(CommonConfig.FLINK_WATERMARK_MAX_ORDERNESS))
|
||||
.<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(FlowWriteConfig.FLINK_WATERMARK_MAX_ORDERNESS))
|
||||
.withTimestampAssigner((event, timestamp) -> event.getSketch_start_time() * 1000);
|
||||
}
|
||||
|
||||
@@ -47,17 +42,20 @@ public class ParseSketchLog {
|
||||
@Override
|
||||
public void flatMap(String s, Collector<DosSketchLog> collector) {
|
||||
try {
|
||||
if (StringUtil.isNotBlank(s)){
|
||||
HashMap<String, Object> sketchSource = jsonMapperInstance.fromJson(s, hashmapJsonType);
|
||||
if (StringUtil.isNotBlank(s)) {
|
||||
HashMap<String, Object> sketchSource = JSONObject.parseObject(s, HashMap.class);
|
||||
long sketchStartTime = Long.parseLong(sketchSource.get("sketch_start_time").toString());
|
||||
long sketchDuration = Long.parseLong(sketchSource.get("sketch_duration").toString());
|
||||
String attackType = sketchSource.get("attack_type").toString();
|
||||
ArrayList<HashMap<String, Object>> reportIpList = jsonMapperInstance.fromJson(jsonMapperInstance.toJson(sketchSource.get("report_ip_list")), listType);
|
||||
for (HashMap<String, Object> obj : reportIpList) {
|
||||
int vsysId = Integer.parseInt(sketchSource.getOrDefault("common_vsys_id", 1).toString());
|
||||
String report_ip_list = JSONObject.toJSONString(sketchSource.get("report_ip_list"));
|
||||
ArrayList<HashMap<String, Object>> reportIpList = JSONObject.parseObject(report_ip_list, ArrayList.class);
|
||||
for (HashMap<String, Object> obj : reportIpList) {
|
||||
DosSketchLog dosSketchLog = new DosSketchLog();
|
||||
dosSketchLog.setSketch_start_time(sketchStartTime);
|
||||
dosSketchLog.setSketch_duration(sketchDuration);
|
||||
dosSketchLog.setAttack_type(attackType);
|
||||
dosSketchLog.setVsys_id(vsysId);
|
||||
String sourceIp = obj.get("source_ip").toString();
|
||||
String destinationIp = obj.get("destination_ip").toString();
|
||||
long sketchSessions = Long.parseLong(obj.get("sketch_sessions").toString());
|
||||
@@ -69,19 +67,12 @@ public class ParseSketchLog {
|
||||
dosSketchLog.setSketch_packets(sketchPackets);
|
||||
dosSketchLog.setSketch_bytes(sketchBytes);
|
||||
collector.collect(dosSketchLog);
|
||||
logger.debug("数据解析成功:{}",dosSketchLog.toString());
|
||||
logger.debug("数据解析成功:{}", dosSketchLog.toString());
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("数据解析错误:{} \n{}",s,e);
|
||||
logger.error("数据解析错误:{} \n{}", s, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
flatSketchSource().print();
|
||||
FlinkEnvironmentUtils.streamExeEnv.execute();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,35 +1,38 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import com.fasterxml.jackson.databind.JavaType;
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.alibaba.fastjson2.JSON;
|
||||
import com.alibaba.fastjson2.JSONObject;
|
||||
//import com.fasterxml.jackson.databind.JavaType;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosDetectionThreshold;
|
||||
import com.zdjizhi.common.DosVsysId;
|
||||
import com.zdjizhi.utils.HttpClientUtils;
|
||||
import com.zdjizhi.utils.JsonMapper;
|
||||
//import com.zdjizhi.utils.JsonMapper;
|
||||
|
||||
import com.zdjizhi.utils.connections.nacos.NacosUtils;
|
||||
import inet.ipaddr.IPAddress;
|
||||
import inet.ipaddr.IPAddressString;
|
||||
import org.apache.flink.shaded.guava18.com.google.common.collect.Range;
|
||||
import org.apache.flink.shaded.guava18.com.google.common.collect.TreeRangeMap;
|
||||
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.message.BasicHeader;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* @author wlh
|
||||
*/
|
||||
public class ParseStaticThreshold {
|
||||
private static Logger logger = LoggerFactory.getLogger(ParseStaticThreshold.class);
|
||||
private static final Log logger = LogFactory.get();
|
||||
private static String encryptpwd;
|
||||
|
||||
private static JsonMapper jsonMapperInstance = JsonMapper.getInstance();
|
||||
private static JavaType hashmapJsonType = jsonMapperInstance.createCollectionType(HashMap.class, String.class, Object.class);
|
||||
private static JavaType thresholdType = jsonMapperInstance.createCollectionType(ArrayList.class, DosDetectionThreshold.class);
|
||||
|
||||
static {
|
||||
//加载加密登录密码
|
||||
encryptpwd = getEncryptpwd();
|
||||
@@ -41,17 +44,18 @@ public class ParseStaticThreshold {
|
||||
private static String getEncryptpwd() {
|
||||
String psw = HttpClientUtils.ERROR_MESSAGE;
|
||||
try {
|
||||
URIBuilder uriBuilder = new URIBuilder(CommonConfig.BIFANG_SERVER_URI);
|
||||
HashMap<String, String> parms = new HashMap<>();
|
||||
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
|
||||
HashMap<String, Object> parms = new HashMap<>();
|
||||
parms.put("password", "admin");
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, CommonConfig.BIFANG_SERVER_ENCRYPTPWD_PATH, parms);
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_ENCRYPTPWD_PATH, parms);
|
||||
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build());
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
|
||||
HashMap<String, Object> resposeMap = jsonMapperInstance.fromJson(resposeJsonStr, hashmapJsonType);
|
||||
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr, HashMap.class);
|
||||
boolean success = (boolean) resposeMap.get("success");
|
||||
String msg = resposeMap.get("msg").toString();
|
||||
|
||||
if (success) {
|
||||
HashMap<String, Object> data = jsonMapperInstance.fromJson(jsonMapperInstance.toJson(resposeMap.get("data")), hashmapJsonType);
|
||||
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
|
||||
psw = data.get("encryptpwd").toString();
|
||||
} else {
|
||||
logger.error(msg);
|
||||
@@ -65,6 +69,178 @@ public class ParseStaticThreshold {
|
||||
return psw;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* 获取vsysId配置列表
|
||||
*
|
||||
* @return vsysIdList
|
||||
*/
|
||||
private static ArrayList<DosVsysId> getVsysId() {
|
||||
ArrayList<DosVsysId> vsysIdList = null;
|
||||
try {
|
||||
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
|
||||
HashMap<String, Object> parms = new HashMap<>();
|
||||
parms.put("page_size", -1);
|
||||
// parms.put("orderBy", "vsysId desc");
|
||||
parms.put("type", 1);
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_POLICY_VSYSID_PATH, parms);
|
||||
String token = NacosUtils.getStringProperty("bifang.server.token");
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(token)) {
|
||||
BasicHeader authorization = new BasicHeader("Authorization", token);
|
||||
BasicHeader authorization1 = new BasicHeader("Content-Type", "application/x-www-form-urlencoded");
|
||||
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build(), authorization, authorization1);
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
|
||||
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr, HashMap.class);
|
||||
boolean success = (boolean) resposeMap.get("success");
|
||||
String msg = resposeMap.get("msg").toString();
|
||||
if (success) {
|
||||
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
|
||||
Object list = data.get("list");
|
||||
if (list != null) {
|
||||
List<DosVsysId> dosVsysIds = JSON.parseArray(JSONObject.toJSONString(list), DosVsysId.class);
|
||||
vsysIdList= (ArrayList)dosVsysIds;
|
||||
logger.info("获取到vsysId {}条", vsysIdList.size());
|
||||
} else {
|
||||
logger.warn("vsysIdList为空");
|
||||
}
|
||||
} else {
|
||||
logger.error(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("获取vsysId失败,请检查bifang服务或登录配置信息 ", e);
|
||||
}
|
||||
return vsysIdList;
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据vsysId获取静态阈值配置列表
|
||||
*
|
||||
* @return thresholds
|
||||
*/
|
||||
private static ArrayList<DosDetectionThreshold> getDosDetectionThreshold() {
|
||||
ArrayList<DosDetectionThreshold> vsysThresholds = new ArrayList<>();
|
||||
ArrayList<DosVsysId> vsysIds = getVsysId();
|
||||
try {
|
||||
if (vsysIds != null) {
|
||||
for (DosVsysId dosVsysId : vsysIds) {
|
||||
Integer vsysId = dosVsysId.getId() == null ? 1 : dosVsysId.getId();
|
||||
Integer[] superiorIds = dosVsysId.getSuperior_ids();
|
||||
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
|
||||
HashMap<String, Object> parms = new HashMap<>();
|
||||
parms.put("page_size", -1);
|
||||
parms.put("order_by", "profileId asc");
|
||||
parms.put("is_valid", 1);
|
||||
parms.put("vsys_id", vsysId);
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_POLICY_THRESHOLD_PATH, parms);
|
||||
String token = NacosUtils.getStringProperty("bifang.server.token");
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(token)) {
|
||||
BasicHeader authorization = new BasicHeader("Authorization", token);
|
||||
BasicHeader authorization1 = new BasicHeader("Content-Type", "application/x-www-form-urlencoded");
|
||||
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build(), authorization, authorization1);
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
|
||||
|
||||
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr,HashMap.class);
|
||||
|
||||
|
||||
boolean success = (boolean) resposeMap.get("success");
|
||||
String msg = resposeMap.get("msg").toString();
|
||||
if (success) {
|
||||
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
|
||||
Object list = data.get("list");
|
||||
|
||||
if (list != null) {
|
||||
System.out.println(list);
|
||||
List<DosDetectionThreshold> dosDetectionThresholds = JSON.parseArray(JSONObject.toJSONString(list), DosDetectionThreshold.class);
|
||||
System.out.println(dosDetectionThresholds);
|
||||
ArrayList<DosDetectionThreshold> thresholds = (ArrayList)dosDetectionThresholds;
|
||||
for (DosDetectionThreshold dosDetectionThreshold : thresholds) {
|
||||
dosDetectionThreshold.setSuperior_ids(superiorIds);
|
||||
vsysThresholds.add(dosDetectionThreshold);
|
||||
}
|
||||
logger.info("获取到vsys id是{}静态阈值配置{}条", vsysId, thresholds.size());
|
||||
} else {
|
||||
logger.warn("静态阈值配置为空");
|
||||
}
|
||||
} else {
|
||||
logger.error(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("获取静态阈值配置失败,请检查bifang服务或登录配置信息 ", e);
|
||||
}
|
||||
return vsysThresholds;
|
||||
}
|
||||
|
||||
/**
|
||||
* 基于静态阈值构建threshold RangeMap,k:IP段或具体IP,v:配置信息
|
||||
*
|
||||
* @return threshold RangeMap
|
||||
*/
|
||||
static HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> createStaticThreshold() {
|
||||
HashMap<Integer, HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>>> thresholdRangeMap = new HashMap<>(4);
|
||||
try {
|
||||
ArrayList<DosDetectionThreshold> dosDetectionThreshold = getDosDetectionThreshold();
|
||||
|
||||
if (dosDetectionThreshold != null && !dosDetectionThreshold.isEmpty()) {
|
||||
for (DosDetectionThreshold threshold : dosDetectionThreshold) {
|
||||
|
||||
String attackType = threshold.getAttack_type();
|
||||
int vsysId = threshold.getVsys_id();
|
||||
HashMap<String, TreeRangeMap<IPAddress, DosDetectionThreshold>> rangeMap = thresholdRangeMap.getOrDefault(vsysId, new HashMap<>());
|
||||
|
||||
TreeRangeMap<IPAddress, DosDetectionThreshold> treeRangeMap = rangeMap.getOrDefault(attackType, TreeRangeMap.create());
|
||||
|
||||
|
||||
ArrayList<String> serverIpList = threshold.getServer_ip_list();
|
||||
|
||||
for (String sip : serverIpList) {
|
||||
IPAddressString ipAddressString = new IPAddressString(sip);
|
||||
if (ipAddressString.isIPAddress()) {
|
||||
IPAddress address = ipAddressString.getAddress();
|
||||
if (address.isPrefixed()) {
|
||||
IPAddress lower = address.getLower();
|
||||
IPAddress upper = address.getUpper();
|
||||
if (!address.isMultiple()) {
|
||||
lower = address.adjustPrefixLength(address.getBitCount());
|
||||
upper = address.toMaxHost().withoutPrefixLength();
|
||||
}
|
||||
Map.Entry<Range<IPAddress>, DosDetectionThreshold> lowerEntry = treeRangeMap.getEntry(lower);
|
||||
Map.Entry<Range<IPAddress>, DosDetectionThreshold> upperEntry = treeRangeMap.getEntry(upper);
|
||||
if (lowerEntry != null && upperEntry == null) {
|
||||
Range<IPAddress> lowerEntryKey = lowerEntry.getKey();
|
||||
DosDetectionThreshold lowerEntryValue = lowerEntry.getValue();
|
||||
treeRangeMap.put(Range.closedOpen(lowerEntryKey.lowerEndpoint(), lower), lowerEntryValue);
|
||||
treeRangeMap.put(Range.closed(lower, upper), threshold);
|
||||
} else if (lowerEntry == null && upperEntry != null) {
|
||||
Range<IPAddress> upperEntryKey = upperEntry.getKey();
|
||||
DosDetectionThreshold upperEntryValue = upperEntry.getValue();
|
||||
treeRangeMap.put(Range.openClosed(upper, upperEntryKey.upperEndpoint()), upperEntryValue);
|
||||
treeRangeMap.put(Range.closed(lower, upper), threshold);
|
||||
} else {
|
||||
treeRangeMap.put(Range.closed(lower, upper), threshold);
|
||||
}
|
||||
} else {
|
||||
treeRangeMap.put(Range.closed(address, address), threshold);
|
||||
}
|
||||
}
|
||||
}
|
||||
rangeMap.put(attackType, treeRangeMap);
|
||||
thresholdRangeMap.put(vsysId, rangeMap);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("构建threshold RangeMap失败", e);
|
||||
}
|
||||
return thresholdRangeMap;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* 登录bifang服务,获取token
|
||||
*
|
||||
@@ -74,18 +250,18 @@ public class ParseStaticThreshold {
|
||||
String token = HttpClientUtils.ERROR_MESSAGE;
|
||||
try {
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(encryptpwd)) {
|
||||
URIBuilder uriBuilder = new URIBuilder(CommonConfig.BIFANG_SERVER_URI);
|
||||
HashMap<String, String> parms = new HashMap<>();
|
||||
URIBuilder uriBuilder = new URIBuilder(FlowWriteConfig.BIFANG_SERVER_URI);
|
||||
HashMap<String, Object> parms = new HashMap<>();
|
||||
parms.put("username", "admin");
|
||||
parms.put("password", encryptpwd);
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, CommonConfig.BIFANG_SERVER_LOGIN_PATH, parms);
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, FlowWriteConfig.BIFANG_SERVER_LOGIN_PATH, parms);
|
||||
String resposeJsonStr = HttpClientUtils.httpPost(uriBuilder.build(), null);
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
|
||||
HashMap<String, Object> resposeMap = jsonMapperInstance.fromJson(resposeJsonStr, hashmapJsonType);
|
||||
HashMap<String, Object> resposeMap = JSONObject.parseObject(resposeJsonStr, HashMap.class);
|
||||
boolean success = (boolean) resposeMap.get("success");
|
||||
String msg = resposeMap.get("msg").toString();
|
||||
if (success) {
|
||||
HashMap<String, Object> data = jsonMapperInstance.fromJson(jsonMapperInstance.toJson(resposeMap.get("data")), hashmapJsonType);
|
||||
HashMap<String, Object> data = JSONObject.parseObject(JSONObject.toJSONString(resposeMap.get("data")), HashMap.class);
|
||||
token = data.get("token").toString();
|
||||
} else {
|
||||
logger.error(msg);
|
||||
@@ -97,83 +273,4 @@ public class ParseStaticThreshold {
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取静态阈值配置列表
|
||||
*
|
||||
* @return thresholds
|
||||
*/
|
||||
private static ArrayList<DosDetectionThreshold> getDosDetectionThreshold() {
|
||||
ArrayList<DosDetectionThreshold> thresholds = null;
|
||||
try {
|
||||
URIBuilder uriBuilder = new URIBuilder(CommonConfig.BIFANG_SERVER_URI);
|
||||
HttpClientUtils.setUrlWithParams(uriBuilder, CommonConfig.BIFANG_SERVER_POLICY_THRESHOLD_PATH, null);
|
||||
String token = CommonConfig.BIFANG_SERVER_TOKEN;
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(token)) {
|
||||
BasicHeader authorization = new BasicHeader("Authorization", token);
|
||||
String resposeJsonStr = HttpClientUtils.httpGet(uriBuilder.build(), authorization);
|
||||
if (!HttpClientUtils.ERROR_MESSAGE.equals(resposeJsonStr)) {
|
||||
HashMap<String, Object> resposeMap = jsonMapperInstance.fromJson(resposeJsonStr, hashmapJsonType);
|
||||
boolean success = (boolean) resposeMap.get("success");
|
||||
String msg = resposeMap.get("msg").toString();
|
||||
if (success) {
|
||||
HashMap<String, Object> data = jsonMapperInstance.fromJson(jsonMapperInstance.toJson(resposeMap.get("data")), hashmapJsonType);
|
||||
thresholds = jsonMapperInstance.fromJson(jsonMapperInstance.toJson(data.get("list")), thresholdType);
|
||||
logger.info("获取到静态阈值配置{}条", thresholds.size());
|
||||
} else {
|
||||
logger.error(msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("获取静态阈值配置失败,请检查bifang服务或登录配置信息 ", e);
|
||||
}
|
||||
return thresholds;
|
||||
}
|
||||
|
||||
/**
|
||||
* 基于静态阈值构建threshold RangeMap,k:IP段或具体IP,v:配置信息
|
||||
*
|
||||
* @return threshold RangeMap
|
||||
*/
|
||||
static TreeRangeMap<IPAddress, Map<String, DosDetectionThreshold>> createStaticThreshold() {
|
||||
TreeRangeMap<IPAddress, Map<String, DosDetectionThreshold>> thresholdRangeMap = TreeRangeMap.create();
|
||||
try {
|
||||
ArrayList<DosDetectionThreshold> dosDetectionThreshold = getDosDetectionThreshold();
|
||||
if (dosDetectionThreshold != null && !dosDetectionThreshold.isEmpty()) {
|
||||
for (DosDetectionThreshold threshold : dosDetectionThreshold) {
|
||||
ArrayList<String> serverIpList = threshold.getServerIpList();
|
||||
for (String sip : serverIpList) {
|
||||
IPAddressString ipAddressString = new IPAddressString(sip);
|
||||
if (ipAddressString.isIPAddress()) {
|
||||
IPAddress address = ipAddressString.getAddress();
|
||||
Map<String, DosDetectionThreshold> floodTypeThresholdMap = thresholdRangeMap.get(address);
|
||||
if (floodTypeThresholdMap == null) {
|
||||
floodTypeThresholdMap = new HashMap<>();
|
||||
}
|
||||
floodTypeThresholdMap.put(threshold.getAttackType(), threshold);
|
||||
thresholdRangeMap.put(Range.closed(address.getLower(), address.getUpper()), floodTypeThresholdMap);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("构建threshold RangeMap失败", e);
|
||||
}
|
||||
return thresholdRangeMap;
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
TreeRangeMap<IPAddress, Map<String, DosDetectionThreshold>> staticThreshold = createStaticThreshold();
|
||||
Map<Range<IPAddress>, Map<String, DosDetectionThreshold>> rangeMapMap = staticThreshold.asMapOfRanges();
|
||||
for (Range<IPAddress> range : rangeMapMap.keySet()) {
|
||||
Map<String, DosDetectionThreshold> thresholdMap = rangeMapMap.get(range);
|
||||
for (String type : thresholdMap.keySet()) {
|
||||
DosDetectionThreshold threshold = thresholdMap.get(type);
|
||||
System.out.println(range + "---" + type + "---" + threshold);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosMetricsLog;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
|
||||
class TrafficServerIpMetrics {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TrafficServerIpMetrics.class);
|
||||
// private static final Logger logger = LoggerFactory.getLogger(TrafficServerIpMetrics.class);
|
||||
private static final Log logger = LogFactory.get();
|
||||
|
||||
static DosMetricsLog getOutputMetric(DosSketchLog midResuleLog) {
|
||||
DosMetricsLog dosMetricsLog = new DosMetricsLog();
|
||||
@@ -19,22 +19,18 @@ class TrafficServerIpMetrics {
|
||||
dosMetricsLog.setSession_rate(midResuleLog.getSketch_sessions());
|
||||
dosMetricsLog.setPacket_rate(midResuleLog.getSketch_packets());
|
||||
dosMetricsLog.setBit_rate(midResuleLog.getSketch_bytes());
|
||||
dosMetricsLog.setVsys_id(midResuleLog.getVsys_id());
|
||||
dosMetricsLog.setPartition_num(getPartitionNumByIp(midResuleLog.getDestination_ip()));
|
||||
logger.debug("metric 结果已加载:{}",dosMetricsLog.toString());
|
||||
return dosMetricsLog;
|
||||
}
|
||||
|
||||
private static long timeFloor(long sketchStartTime){
|
||||
return sketchStartTime / CommonConfig.FLINK_WINDOW_MAX_TIME * CommonConfig.FLINK_WINDOW_MAX_TIME;
|
||||
return sketchStartTime / FlowWriteConfig.FLINK_WINDOW_MAX_TIME * FlowWriteConfig.FLINK_WINDOW_MAX_TIME;
|
||||
}
|
||||
|
||||
private static int getPartitionNumByIp(String destinationIp){
|
||||
return Math.abs(destinationIp.hashCode()) % CommonConfig.DESTINATION_IP_PARTITION_NUM;
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
System.out.println(getPartitionNumByIp("146.177.223.43"));
|
||||
System.out.println("146.177.223.43".hashCode());
|
||||
return Math.abs(destinationIp.hashCode()) % FlowWriteConfig.DESTINATION_IP_PARTITION_NUM;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
package com.zdjizhi.sink;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import com.alibaba.fastjson2.JSONObject;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosEventLog;
|
||||
import com.zdjizhi.utils.JsonMapper;
|
||||
//import com.zdjizhi.utils.JsonMapper;
|
||||
import com.zdjizhi.utils.KafkaUtils;
|
||||
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
|
||||
|
||||
@@ -13,9 +14,10 @@ class DosEventSink {
|
||||
static void dosEventOutputSink(SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream){
|
||||
dosEventLogOutputStream
|
||||
.filter(Objects::nonNull)
|
||||
.map(JsonMapper::toJsonString)
|
||||
.addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_EVENT_TOPIC_NAME))
|
||||
.setParallelism(CommonConfig.KAFKA_OUTPUT_EVENT_PARALLELISM);
|
||||
// .map(JsonMapper::toJsonString)
|
||||
.map(JSONObject::toJSONString)
|
||||
.addSink(KafkaUtils.getKafkaSink(FlowWriteConfig.KAFKA_OUTPUT_EVENT_TOPIC_NAME))
|
||||
.setParallelism(FlowWriteConfig.KAFKA_OUTPUT_EVENT_PARALLELISM);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
package com.zdjizhi.sink;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.alibaba.nacos.api.PropertyKeyConst;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosEventLog;
|
||||
import com.zdjizhi.common.DosMetricsLog;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
@@ -9,19 +12,20 @@ import com.zdjizhi.etl.EtlProcessFunction;
|
||||
import com.zdjizhi.etl.ParseSketchLog;
|
||||
import com.zdjizhi.utils.FlinkEnvironmentUtils;
|
||||
import org.apache.flink.api.java.functions.KeySelector;
|
||||
import org.apache.flink.api.java.tuple.Tuple2;
|
||||
import org.apache.flink.api.java.tuple.Tuple3;
|
||||
import org.apache.flink.streaming.api.datastream.*;
|
||||
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
|
||||
import org.apache.flink.streaming.api.windowing.time.Time;
|
||||
import org.apache.flink.util.OutputTag;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* @author 94976
|
||||
*/
|
||||
public class OutputStreamSink {
|
||||
private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);
|
||||
// private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);
|
||||
private static final Log logger = LogFactory.get();
|
||||
|
||||
public static OutputTag<DosMetricsLog> outputTag = new OutputTag<DosMetricsLog>("traffic server ip metrics"){};
|
||||
|
||||
@@ -30,30 +34,33 @@ public class OutputStreamSink {
|
||||
SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
|
||||
DosEventSink.dosEventOutputSink(getEventSinkStream(middleStream));
|
||||
TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
|
||||
FlinkEnvironmentUtils.streamExeEnv.execute(CommonConfig.STREAM_EXECUTION_JOB_NAME);
|
||||
FlinkEnvironmentUtils.streamExeEnv.execute(FlowWriteConfig.STREAM_EXECUTION_JOB_NAME);
|
||||
} catch (Exception e) {
|
||||
logger.error("任务启动失败 {}",e);
|
||||
}
|
||||
}
|
||||
|
||||
private static SingleOutputStreamOperator<DosEventLog> getEventSinkStream(SingleOutputStreamOperator<DosSketchLog> middleStream){
|
||||
return middleStream.map(new DosDetection()).setParallelism(CommonConfig.FLINK_DETECTION_MAP_PARALLELISM);
|
||||
return middleStream
|
||||
.process(new DosDetection()).setParallelism(FlowWriteConfig.FLINK_DETECTION_MAP_PARALLELISM);
|
||||
|
||||
}
|
||||
|
||||
private static SingleOutputStreamOperator<DosSketchLog> getMiddleStream(){
|
||||
return ParseSketchLog.getSketchSource()
|
||||
.keyBy(new KeysSelector())
|
||||
.window(TumblingEventTimeWindows.of(Time.seconds(CommonConfig.FLINK_WINDOW_MAX_TIME)))
|
||||
.window(TumblingEventTimeWindows.of(Time.seconds(FlowWriteConfig.FLINK_WINDOW_MAX_TIME)))
|
||||
.process(new EtlProcessFunction())
|
||||
.setParallelism(CommonConfig.FLINK_FIRST_AGG_PARALLELISM);
|
||||
.setParallelism(FlowWriteConfig.FLINK_FIRST_AGG_PARALLELISM);
|
||||
}
|
||||
|
||||
private static class KeysSelector implements KeySelector<DosSketchLog, Tuple2<String, String>>{
|
||||
private static class KeysSelector implements KeySelector<DosSketchLog, Tuple3<String, String, Integer>>{
|
||||
@Override
|
||||
public Tuple2<String, String> getKey(DosSketchLog dosSketchLog){
|
||||
return Tuple2.of(
|
||||
public Tuple3<String, String, Integer> getKey(DosSketchLog dosSketchLog){
|
||||
return Tuple3.of(
|
||||
dosSketchLog.getAttack_type(),
|
||||
dosSketchLog.getDestination_ip());
|
||||
dosSketchLog.getDestination_ip(),
|
||||
dosSketchLog.getVsys_id());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
package com.zdjizhi.sink;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import com.alibaba.fastjson2.JSONObject;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.DosMetricsLog;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
import com.zdjizhi.utils.JsonMapper;
|
||||
|
||||
import com.zdjizhi.utils.KafkaUtils;
|
||||
import org.apache.flink.streaming.api.datastream.DataStream;
|
||||
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
|
||||
@@ -14,8 +15,11 @@ class TrafficServerIpMetricsSink {
|
||||
|
||||
static void sideOutputMetricsSink(SingleOutputStreamOperator<DosSketchLog> outputStream){
|
||||
DataStream<DosMetricsLog> sideOutput = outputStream.getSideOutput(outputTag);
|
||||
sideOutput.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
|
||||
.setParallelism(CommonConfig.KAFKA_OUTPUT_METRIC_PARALLELISM);
|
||||
// sideOutput.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
|
||||
sideOutput.map(JSONObject::toJSONString).addSink(KafkaUtils.getKafkaSink(FlowWriteConfig.KAFKA_OUTPUT_METRIC_TOPIC_NAME))
|
||||
.setParallelism(FlowWriteConfig.KAFKA_OUTPUT_METRIC_PARALLELISM);
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
package com.zdjizhi.source;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
|
||||
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.utils.FlinkEnvironmentUtils;
|
||||
import org.apache.flink.api.common.serialization.SimpleStringSchema;
|
||||
import org.apache.flink.streaming.api.datastream.DataStreamSource;
|
||||
@@ -18,16 +20,18 @@ public class DosSketchSource {
|
||||
|
||||
public static DataStreamSource<String> createDosSketchSource(){
|
||||
Properties properties = new Properties();
|
||||
properties.setProperty("bootstrap.servers", CommonConfig.KAFKA_INPUT_BOOTSTRAP_SERVERS);
|
||||
properties.setProperty("group.id", CommonConfig.KAFKA_GROUP_ID);
|
||||
if (CommonConfig.SASL_JAAS_CONFIG_FLAG == 1){
|
||||
properties.setProperty("bootstrap.servers", FlowWriteConfig.KAFKA_INPUT_BOOTSTRAP_SERVERS);
|
||||
properties.setProperty("group.id", FlowWriteConfig.KAFKA_GROUP_ID);
|
||||
if (FlowWriteConfig.SASL_JAAS_CONFIG_FLAG == 1){
|
||||
properties.put("security.protocol", "SASL_PLAINTEXT");
|
||||
properties.put("sasl.mechanism", "PLAIN");
|
||||
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+CommonConfig.SASL_JAAS_CONFIG_USER+"\" password=\""+CommonConfig.SASL_JAAS_CONFIG_PASSWORD+"\";");
|
||||
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_USER+"\" password=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_PASSWORD+"\";");
|
||||
}
|
||||
|
||||
return streamExeEnv.addSource(new FlinkKafkaConsumer<String>(
|
||||
CommonConfig.KAFKA_INPUT_TOPIC_NAME,
|
||||
FlowWriteConfig.KAFKA_INPUT_TOPIC_NAME,
|
||||
new SimpleStringSchema(), properties))
|
||||
.setParallelism(CommonConfig.KAFKA_INPUT_PARALLELISM);
|
||||
.setParallelism(FlowWriteConfig.KAFKA_INPUT_PARALLELISM);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
|
||||
/**
|
||||
* @author wlh
|
||||
* 扩展集合处理工具
|
||||
*/
|
||||
public class CollectionUtils {
|
||||
|
||||
public static<T> Collection<T> takeUniqueLimit(Collection<T> collection, int limit){
|
||||
int i =0;
|
||||
Collection<T> newSet = new HashSet<>();
|
||||
for (T t:collection){
|
||||
if (i < limit){
|
||||
newSet.add(t);
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
return newSet;
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,9 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import org.apache.zookeeper.*;
|
||||
import org.apache.zookeeper.data.Stat;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
@@ -17,7 +17,8 @@ import java.util.concurrent.locks.Lock;
|
||||
|
||||
|
||||
public class DistributedLock implements Lock, Watcher {
|
||||
private static final Logger logger = LoggerFactory.getLogger(DistributedLock.class);
|
||||
// private static final Logger logger = LoggerFactory.getLogger(DistributedLock.class);
|
||||
private static final Log logger = LogFactory.get();
|
||||
|
||||
private ZooKeeper zk = null;
|
||||
/**
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
|
||||
|
||||
|
||||
@@ -11,7 +11,34 @@ public class FlinkEnvironmentUtils {
|
||||
public static StreamExecutionEnvironment streamExeEnv = StreamExecutionEnvironment.getExecutionEnvironment();
|
||||
|
||||
static {
|
||||
streamExeEnv.setParallelism(CommonConfig.STREAM_EXECUTION_ENVIRONMENT_PARALLELISM);
|
||||
streamExeEnv.setParallelism(FlowWriteConfig.STREAM_EXECUTION_ENVIRONMENT_PARALLELISM);
|
||||
|
||||
/*
|
||||
// 每 1000ms 开始一次 checkpoint
|
||||
streamExeEnv.enableCheckpointing(CommonConfig.FLINK_WINDOW_MAX_TIME * 1000);
|
||||
|
||||
// 设置模式为精确一次 (这是默认值)
|
||||
streamExeEnv.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
|
||||
|
||||
// 确认 checkpoints 之间的时间会进行 500 ms
|
||||
streamExeEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
|
||||
|
||||
// Checkpoint 必须在一分钟内完成,否则就会被抛弃
|
||||
streamExeEnv.getCheckpointConfig().setCheckpointTimeout(60000);
|
||||
|
||||
// 允许两个连续的 checkpoint 错误
|
||||
streamExeEnv.getCheckpointConfig().setTolerableCheckpointFailureNumber(2);
|
||||
|
||||
// 同一时间只允许一个 checkpoint 进行
|
||||
streamExeEnv.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
|
||||
|
||||
// 使用 externalized checkpoints,这样 checkpoint 在作业取消后仍就会被保留
|
||||
streamExeEnv.getCheckpointConfig().enableExternalizedCheckpoints(
|
||||
CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
|
||||
|
||||
// 开启实验性的 unaligned checkpoints
|
||||
streamExeEnv.getCheckpointConfig().enableUnalignedCheckpoints();
|
||||
*/
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -1,17 +1,10 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import org.apache.flink.api.java.tuple.Tuple2;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.TableName;
|
||||
import org.apache.hadoop.hbase.client.*;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
import org.apache.hadoop.io.ArrayWritable;
|
||||
import org.apache.hadoop.io.IntWritable;
|
||||
import org.apache.hadoop.io.Writable;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.DataInputStream;
|
||||
@@ -22,75 +15,8 @@ import java.util.*;
|
||||
* @author wlh
|
||||
*/
|
||||
public class HbaseUtils {
|
||||
private static final Logger logger = LoggerFactory.getLogger(HbaseUtils.class);
|
||||
private static Table table = null;
|
||||
private static Scan scan = null;
|
||||
private static ArrayList<String> floodTypeList = new ArrayList<>();
|
||||
|
||||
static {
|
||||
floodTypeList.add("TCP SYN Flood");
|
||||
floodTypeList.add("UDP Flood");
|
||||
floodTypeList.add("ICMP Flood");
|
||||
floodTypeList.add("DNS Amplification");
|
||||
}
|
||||
|
||||
private static void prepareHbaseEnv() throws IOException {
|
||||
org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
|
||||
|
||||
config.set("hbase.zookeeper.quorum", CommonConfig.HBASE_ZOOKEEPER_QUORUM);
|
||||
config.set("hbase.client.retries.number", "3");
|
||||
config.set("hbase.bulkload.retries.number", "3");
|
||||
config.set("zookeeper.recovery.retry", "3");
|
||||
config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, CommonConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
|
||||
config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, CommonConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
|
||||
|
||||
TableName tableName = TableName.valueOf(CommonConfig.HBASE_BASELINE_TABLE_NAME);
|
||||
Connection conn = ConnectionFactory.createConnection(config);
|
||||
table = conn.getTable(tableName);
|
||||
scan = new Scan().setAllowPartialResults(true).setLimit(CommonConfig.HBASE_BASELINE_TOTAL_NUM);
|
||||
logger.info("连接hbase成功,正在读取baseline数据");
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
Map<String, Map<String, Tuple2<ArrayList<Integer>, Integer>>> baselineMap = readFromHbase();
|
||||
Set<String> keySet = baselineMap.keySet();
|
||||
for (String key : keySet) {
|
||||
Map<String, Tuple2<ArrayList<Integer>, Integer>> stringTuple2Map = baselineMap.get(key);
|
||||
Set<String> strings = stringTuple2Map.keySet();
|
||||
for (String s:strings){
|
||||
Tuple2<ArrayList<Integer>, Integer> arrayListIntegerTuple2 = stringTuple2Map.get(s);
|
||||
System.out.println(key+"---"+s+"---"+arrayListIntegerTuple2.f0+"---"+arrayListIntegerTuple2.f1);
|
||||
}
|
||||
}
|
||||
System.out.println(baselineMap.size());
|
||||
}
|
||||
|
||||
public static Map<String, Map<String, Tuple2<ArrayList<Integer>, Integer>>> readFromHbase() {
|
||||
Map<String, Map<String, Tuple2<ArrayList<Integer>, Integer>>> baselineMap = new HashMap<>();
|
||||
try {
|
||||
prepareHbaseEnv();
|
||||
logger.info("开始读取baseline数据");
|
||||
ResultScanner rs = table.getScanner(scan);
|
||||
for (Result result : rs) {
|
||||
Map<String, Tuple2<ArrayList<Integer>, Integer>> floodTypeMap = new HashMap<>();
|
||||
String rowkey = Bytes.toString(result.getRow());
|
||||
for (String type:floodTypeList){
|
||||
ArrayList<Integer> sessionRate = getArraylist(result, type, "session_rate");
|
||||
if (sessionRate != null && !sessionRate.isEmpty()){
|
||||
Integer defaultValue = getDefaultValue(result, type, "session_rate_default_value");
|
||||
floodTypeMap.put(type,Tuple2.of(sessionRate, defaultValue));
|
||||
}
|
||||
}
|
||||
baselineMap.put(rowkey, floodTypeMap);
|
||||
}
|
||||
logger.info("格式化baseline数据成功,读取IP共:{}", baselineMap.size());
|
||||
} catch (Exception e) {
|
||||
logger.error("读取hbase数据失败", e);
|
||||
}
|
||||
return baselineMap;
|
||||
}
|
||||
|
||||
private static Integer getDefaultValue(Result result, String family, String qualifier) {
|
||||
public static Integer getIntegerValue(Result result, String family, String qualifier) {
|
||||
byte[] value = result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier));
|
||||
if (value != null){
|
||||
return Bytes.toInt(value);
|
||||
@@ -98,7 +24,7 @@ public class HbaseUtils {
|
||||
return 1;
|
||||
}
|
||||
|
||||
private static ArrayList<Integer> getArraylist(Result result, String family, String qualifier) throws IOException {
|
||||
public static ArrayList<Integer> getArraylist(Result result, String family, String qualifier) throws IOException {
|
||||
if (containsColumn(result, family, qualifier)) {
|
||||
ArrayWritable w = new ArrayWritable(IntWritable.class);
|
||||
w.readFields(new DataInputStream(new ByteArrayInputStream(result.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier)))));
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import com.zdjizhi.common.CommonConfig;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.geedgenetworks.utils.StringUtil;
|
||||
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import org.apache.http.*;
|
||||
import org.apache.http.client.ClientProtocolException;
|
||||
import org.apache.http.client.HttpRequestRetryHandler;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.*;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.protocol.HttpClientContext;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.conn.ConnectTimeoutException;
|
||||
@@ -18,8 +24,6 @@ import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
|
||||
import org.apache.http.message.BasicHeaderElementIterator;
|
||||
import org.apache.http.protocol.HTTP;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.net.ssl.SSLException;
|
||||
import javax.net.ssl.SSLHandshakeException;
|
||||
@@ -38,7 +42,8 @@ public class HttpClientUtils {
|
||||
/** 全局连接池对象 */
|
||||
private static final PoolingHttpClientConnectionManager CONN_MANAGER = new PoolingHttpClientConnectionManager();
|
||||
|
||||
private static Logger logger = LoggerFactory.getLogger(HttpClientUtils.class);
|
||||
// private static Logger logger = LoggerFactory.getLogger(HttpClientUtils.class);
|
||||
private static final Log logger = LogFactory.get();
|
||||
public static final String ERROR_MESSAGE = "-1";
|
||||
|
||||
/*
|
||||
@@ -47,9 +52,9 @@ public class HttpClientUtils {
|
||||
static {
|
||||
|
||||
// 设置最大连接数
|
||||
CONN_MANAGER.setMaxTotal(CommonConfig.HTTP_POOL_MAX_CONNECTION);
|
||||
CONN_MANAGER.setMaxTotal(FlowWriteConfig.HTTP_POOL_MAX_CONNECTION);
|
||||
// 设置每个连接的路由数
|
||||
CONN_MANAGER.setDefaultMaxPerRoute(CommonConfig.HTTP_POOL_MAX_PER_ROUTE);
|
||||
CONN_MANAGER.setDefaultMaxPerRoute(FlowWriteConfig.HTTP_POOL_MAX_PER_ROUTE);
|
||||
|
||||
}
|
||||
|
||||
@@ -61,11 +66,11 @@ public class HttpClientUtils {
|
||||
// 创建Http请求配置参数
|
||||
RequestConfig requestConfig = RequestConfig.custom()
|
||||
// 获取连接超时时间
|
||||
.setConnectionRequestTimeout(CommonConfig.HTTP_POOL_REQUEST_TIMEOUT)
|
||||
.setConnectionRequestTimeout(FlowWriteConfig.HTTP_POOL_REQUEST_TIMEOUT)
|
||||
// 请求超时时间
|
||||
.setConnectTimeout(CommonConfig.HTTP_POOL_CONNECT_TIMEOUT)
|
||||
.setConnectTimeout(FlowWriteConfig.HTTP_POOL_CONNECT_TIMEOUT)
|
||||
// 响应超时时间
|
||||
.setSocketTimeout(CommonConfig.HTTP_POOL_RESPONSE_TIMEOUT)
|
||||
.setSocketTimeout(FlowWriteConfig.HTTP_POOL_RESPONSE_TIMEOUT)
|
||||
.build();
|
||||
|
||||
/*
|
||||
@@ -253,12 +258,12 @@ public class HttpClientUtils {
|
||||
* 拼装url
|
||||
* url ,参数map
|
||||
*/
|
||||
public static void setUrlWithParams(URIBuilder uriBuilder,String path, Map<String, String> params) {
|
||||
public static void setUrlWithParams(URIBuilder uriBuilder,String path, Map<String, Object> params) {
|
||||
try {
|
||||
uriBuilder.setPath(path);
|
||||
if (params != null && !params.isEmpty()){
|
||||
for (Map.Entry<String, String> kv : params.entrySet()) {
|
||||
uriBuilder.setParameter(kv.getKey(),kv.getValue());
|
||||
for (Map.Entry<String, Object> kv : params.entrySet()) {
|
||||
uriBuilder.setParameter(kv.getKey(),kv.getValue().toString());
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
|
||||
160
src/main/java/com/zdjizhi/utils/IpLookupUtils.java
Normal file
160
src/main/java/com/zdjizhi/utils/IpLookupUtils.java
Normal file
@@ -0,0 +1,160 @@
|
||||
package com.zdjizhi.utils;
|
||||
|
||||
import cn.hutool.crypto.digest.DigestUtil;
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
import com.alibaba.fastjson2.*;
|
||||
import com.alibaba.nacos.api.config.ConfigService;
|
||||
import com.alibaba.nacos.api.config.listener.Listener;
|
||||
import com.alibaba.nacos.api.exception.NacosException;
|
||||
import com.geedgenetworks.utils.IpLookupV2;
|
||||
import com.geedgenetworks.utils.StringUtil;
|
||||
import com.google.common.base.Joiner;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.common.pojo.KnowlegeBaseMeta;
|
||||
import com.zdjizhi.utils.connections.http.HttpClientService;
|
||||
import com.zdjizhi.utils.connections.nacos.NacosConnection;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.concurrent.Executor;
|
||||
|
||||
/**
|
||||
* @author wangchengcheng
|
||||
* @version 2023/11/10 15:23
|
||||
*/
|
||||
public class IpLookupUtils {
|
||||
private static final Log logger = LogFactory.get();
|
||||
private static final String ipBuiltInName = "ip_builtin.mmdb";
|
||||
private static final String ipUserDefinedName = "ip_user_defined.mmdb";
|
||||
|
||||
/**
|
||||
* ip定位库
|
||||
*/
|
||||
private static IpLookupV2 ipLookup;
|
||||
|
||||
/**
|
||||
* 定位库默认分隔符
|
||||
*/
|
||||
private static final String LOCATION_SEPARATOR = ".";
|
||||
|
||||
/**
|
||||
* 最大重试次数
|
||||
*/
|
||||
private static final int TRY_TIMES = 5;
|
||||
|
||||
/**
|
||||
* http connections
|
||||
*/
|
||||
private static final HttpClientService httpClientService;
|
||||
|
||||
/**
|
||||
* 定位库元数据缓存
|
||||
*/
|
||||
private static final HashMap<String, KnowlegeBaseMeta> knowledgeMetaCache = new HashMap<>(16);
|
||||
|
||||
static {
|
||||
JSONPath jsonPath = JSONPath.of(getFilterParameter());
|
||||
httpClientService = new HttpClientService();
|
||||
|
||||
NacosConnection nacosConnection = new NacosConnection();
|
||||
ConfigService schemaService = nacosConnection.getPublicService();
|
||||
try {
|
||||
String configInfo = schemaService.getConfigAndSignListener(FlowWriteConfig.NACOS_KNOWLEDGEBASE_DATA_ID, FlowWriteConfig.NACOS_PUBLIC_GROUP, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT, new Listener() {
|
||||
@Override
|
||||
public Executor getExecutor() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void receiveConfigInfo(String configInfo) {
|
||||
if (StringUtil.isNotBlank(configInfo)) {
|
||||
updateIpLookup(jsonPath, configInfo);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (StringUtil.isNotBlank(configInfo)) {
|
||||
updateIpLookup(jsonPath, configInfo);
|
||||
}
|
||||
} catch (NacosException e) {
|
||||
logger.error("Get Schema config from Nacos error,The exception message is :" + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private static void updateIpLookup(JSONPath jsonPath, String configInfo) {
|
||||
String extract = jsonPath.extract(JSONReader.of(configInfo)).toString();
|
||||
if (StringUtil.isNotBlank(extract)) {
|
||||
JSONArray jsonArray = JSON.parseArray(extract);
|
||||
if (jsonArray.size() > 0) {
|
||||
for (int i = 0; i < jsonArray.size(); i++) {
|
||||
KnowlegeBaseMeta knowlegeBaseMeta = JSONObject.parseObject(jsonArray.getString(i), KnowlegeBaseMeta.class);
|
||||
String fileName = Joiner.on(LOCATION_SEPARATOR).useForNull("").join(knowlegeBaseMeta.getName(), knowlegeBaseMeta.getFormat());
|
||||
knowledgeMetaCache.put(fileName, knowlegeBaseMeta);
|
||||
}
|
||||
reloadIpLookup();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 从HDFS下载文件更新IpLookup
|
||||
*/
|
||||
private static void reloadIpLookup() {
|
||||
IpLookupV2.Builder builder = new IpLookupV2.Builder(false);
|
||||
for (String fileName : knowledgeMetaCache.keySet()) {
|
||||
int retryNum = 0;
|
||||
KnowlegeBaseMeta knowlegeBaseMeta = knowledgeMetaCache.get(fileName);
|
||||
String metaSha256 = knowlegeBaseMeta.getSha256();
|
||||
while (retryNum < TRY_TIMES) {
|
||||
System.out.println("download file :" + fileName + ",HOS path :" + knowlegeBaseMeta.getPath());
|
||||
Long startTime = System.currentTimeMillis();
|
||||
byte[] httpGetByte = httpClientService.httpGetByte(knowlegeBaseMeta.getPath(), FlowWriteConfig.HTTP_SOCKET_TIMEOUT);
|
||||
if (httpGetByte != null && httpGetByte.length > 0) {
|
||||
String downloadFileSha256 = DigestUtil.sha256Hex(httpGetByte);
|
||||
if (metaSha256.equals(downloadFileSha256)) {
|
||||
ByteArrayInputStream inputStream = new ByteArrayInputStream(httpGetByte);
|
||||
switch (fileName) {
|
||||
case ipBuiltInName:
|
||||
builder.loadDataFile(inputStream);
|
||||
break;
|
||||
case ipUserDefinedName:
|
||||
builder.loadDataFilePrivate(inputStream);
|
||||
break;
|
||||
default:
|
||||
}
|
||||
System.out.println("update " + fileName + " finished, speed :" + (System.currentTimeMillis() - startTime) + "ms");
|
||||
retryNum = TRY_TIMES;
|
||||
} else {
|
||||
logger.error("通过HOS下载{}的sha256为:{} ,Nacos内记录为:{} ,sha256不相等 开始第{}次重试下载文件", fileName, downloadFileSha256, metaSha256, retryNum);
|
||||
retryNum++;
|
||||
}
|
||||
} else {
|
||||
logger.error("通过HOS下载{}的流为空 ,开始第{}次重试下载文件", fileName, retryNum);
|
||||
retryNum++;
|
||||
}
|
||||
}
|
||||
}
|
||||
ipLookup = builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据配置组合生成知识库元数据过滤参数
|
||||
*
|
||||
* @return 过滤参数
|
||||
*/
|
||||
private static String getFilterParameter() {
|
||||
// String expr = "$.[?(@.version=='latest' && @.name in ['ip_built_in','ip_user_defined'])].['name','sha256','format','path']";
|
||||
|
||||
|
||||
String expr = "[?(@.version=='latest')][?(@.name in ('ip_builtin','ip_user_defined'))]";
|
||||
|
||||
return expr;
|
||||
}
|
||||
|
||||
public static String getCountryLookup(String ip) {
|
||||
return ipLookup.countryLookup(ip);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -1,29 +0,0 @@
package com.zdjizhi.utils;

import com.zdjizhi.common.CommonConfig;

public class IpUtils {

    /**
     * IP geolocation lookup utility class
     */
    public static IpLookup ipLookup = new IpLookup.Builder(false)
            .loadDataFileV4(CommonConfig.IP_MMDB_PATH + "ip_v4.mmdb")
            .loadDataFileV6(CommonConfig.IP_MMDB_PATH + "ip_v6.mmdb")
            .loadDataFilePrivateV4(CommonConfig.IP_MMDB_PATH + "ip_private_v4.mmdb")
            .loadDataFilePrivateV6(CommonConfig.IP_MMDB_PATH + "ip_private_v6.mmdb")
            .build();

    public static void main(String[] args) {
        System.out.println(ipLookup.countryLookup("49.7.115.37"));

        // String ips = "192.168.50.23,192.168.50.45,192.168.56.9,192.168.56.8,192.168.50.58,192.168.56.7,192.168.56.6,192.168.50.40,192.168.50.19,192.168.50.6,192.168.50.4,192.168.56.17,192.168.50.27,192.168.50.26,192.168.50.18,192.168.56.3,192.168.56.10";
        // for (String ip:ips.split(",")){
        //     System.out.println(ip+"--"+ipLookup.countryLookup(ip));
        // }
    }

}
@@ -1,31 +1,35 @@
package com.zdjizhi.utils;

import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.util.Optional;
import java.util.Properties;

public class KafkaUtils {

    private static Properties getKafkaSinkProperty(){
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", CommonConfig.KAFKA_OUTPUT_BOOTSTRAP_SERVERS);
        if (CommonConfig.SASL_JAAS_CONFIG_FLAG == 1){
        properties.setProperty("bootstrap.servers", FlowWriteConfig.KAFKA_OUTPUT_BOOTSTRAP_SERVERS);
        if (FlowWriteConfig.SASL_JAAS_CONFIG_FLAG == 1){
            properties.put("security.protocol", "SASL_PLAINTEXT");
            properties.put("sasl.mechanism", "PLAIN");
            properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+CommonConfig.SASL_JAAS_CONFIG_USER+"\" password=\""+CommonConfig.SASL_JAAS_CONFIG_PASSWORD+"\";");
            properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_USER+"\" password=\""+ FlowWriteConfig.SASL_JAAS_CONFIG_PASSWORD+"\";");
        }

        return properties;
    }

    public static FlinkKafkaProducer<String> getKafkaSink(String topic){
        return new FlinkKafkaProducer<String>(
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(
                topic,
                new SimpleStringSchema(),
                getKafkaSinkProperty()
                getKafkaSinkProperty(),
                Optional.empty()
        );
        kafkaProducer.setLogFailuresOnly(true);
        return kafkaProducer;
    }

}
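A minimal usage sketch for the sink above (assumed job wiring; the actual pipeline is defined elsewhere in this project). Passing Optional.empty() instead of the default FlinkFixedPartitioner lets Kafka's own partitioner spread records across partitions rather than pinning each subtask to one, and setLogFailuresOnly(true) means send failures are logged instead of failing the job.

    // Sketch: attach the producer returned by KafkaUtils.getKafkaSink to a string stream (Flink 1.13 API).
    import com.zdjizhi.utils.KafkaUtils;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class KafkaSinkSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
            DataStream<String> events = env.fromElements("{\"attack_type\":\"UDP Flood\"}");
            // "DOS-EVENT" is the event topic named in the job properties below.
            events.addSink(KafkaUtils.getKafkaSink("DOS-EVENT")).name("dos-event-sink");
            env.execute("kafka-sink-sketch");
        }
    }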
@@ -1,11 +1,12 @@
package com.zdjizhi.utils;

import com.zdjizhi.common.CommonConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;

public class SnowflakeId {
    private static final Logger logger = LoggerFactory.getLogger(SnowflakeId.class);
    // private static final Logger logger = LoggerFactory.getLogger(SnowflakeId.class);
    private static final Log logger = LogFactory.get();

    /**
     * 64 bits in total; the first bit is the sign bit and defaults to 0
@@ -98,7 +99,7 @@ public class SnowflakeId {
    private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();

    static {
        idWorker = new SnowflakeId(CommonConfig.HBASE_ZOOKEEPER_QUORUM, CommonConfig.DATA_CENTER_ID_NUM);
        idWorker = new SnowflakeId(FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM, FlowWriteConfig.DATA_CENTER_ID_NUM);
    }

    //==============================Constructors=====================================
@@ -107,7 +108,7 @@ public class SnowflakeId {
     * Constructor
     */
    private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
        DistributedLock lock = new DistributedLock(CommonConfig.HBASE_ZOOKEEPER_QUORUM, "disLocks1");
        DistributedLock lock = new DistributedLock(FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM, "disLocks1");
        try {
            lock.lock();
            int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);

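For orientation, a conventional Snowflake ID packs a timestamp, data-center id, worker id and per-millisecond sequence into those 64 bits. The class's actual bit widths and epoch are defined further down and are not shown in this hunk, so the constants below are placeholders illustrating the usual 41/5/5/12 split, not this project's exact layout.

    // Hedged sketch of a conventional Snowflake layout (placeholder constants only).
    public class SnowflakeLayoutSketch {
        public static void main(String[] args) {
            long sinceEpoch   = System.currentTimeMillis() - 1288834974657L; // hypothetical custom epoch
            long dataCenterId = 15L; // e.g. data.center.id.num from the job properties
            long workerId     = 3L;  // value obtained from the ZooKeeper node shown above
            long sequence     = 0L;  // per-millisecond counter
            long id = (sinceEpoch << 22) | (dataCenterId << 17) | (workerId << 12) | sequence;
            System.out.println(id);
        }
    }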
@@ -1,11 +1,11 @@
package com.zdjizhi.utils;

import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.List;
@@ -13,7 +13,8 @@ import java.util.concurrent.CountDownLatch;

public class ZookeeperUtils implements Watcher {
    private static final Logger logger = LoggerFactory.getLogger(ZookeeperUtils.class);
    // private static final Logger logger = LoggerFactory.getLogger(ZookeeperUtils.class);
    private static final Log logger = LogFactory.get();

    private ZooKeeper zookeeper;

@@ -0,0 +1,261 @@
|
||||
package com.zdjizhi.utils.connections.http;
|
||||
|
||||
import cn.hutool.log.Log;
|
||||
import cn.hutool.log.LogFactory;
|
||||
|
||||
import com.geedgenetworks.utils.StringUtil;
|
||||
import com.zdjizhi.common.FlowWriteConfig;
|
||||
import com.zdjizhi.utils.exception.FlowWriteException;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.*;
|
||||
import org.apache.http.client.ClientProtocolException;
|
||||
import org.apache.http.client.HttpRequestRetryHandler;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.protocol.HttpClientContext;
|
||||
import org.apache.http.config.Registry;
|
||||
import org.apache.http.config.RegistryBuilder;
|
||||
import org.apache.http.conn.ConnectTimeoutException;
|
||||
import org.apache.http.conn.ConnectionKeepAliveStrategy;
|
||||
import org.apache.http.conn.socket.ConnectionSocketFactory;
|
||||
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
|
||||
import org.apache.http.conn.ssl.NoopHostnameVerifier;
|
||||
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
|
||||
import org.apache.http.message.BasicHeaderElementIterator;
|
||||
import org.apache.http.protocol.HTTP;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
|
||||
import javax.net.ssl.*;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InterruptedIOException;
|
||||
import java.net.SocketTimeoutException;
|
||||
import java.net.UnknownHostException;
|
||||
import java.security.KeyManagementException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.cert.X509Certificate;
|
||||
|
||||
public class HttpClientService {
|
||||
|
||||
private static final Log log = LogFactory.get();
|
||||
|
||||
/**
|
||||
* 在调用SSL之前需要重写验证方法,取消检测SSL
|
||||
* 创建ConnectionManager,添加Connection配置信息
|
||||
*
|
||||
* @return HttpClient 支持https
|
||||
*/
|
||||
private PoolingHttpClientConnectionManager getSslClientManager() {
|
||||
try {
|
||||
// 在调用SSL之前需要重写验证方法,取消检测SSL
|
||||
X509TrustManager trustManager = new X509TrustManager() {
|
||||
@Override
|
||||
public X509Certificate[] getAcceptedIssuers() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkClientTrusted(X509Certificate[] xcs, String str) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkServerTrusted(X509Certificate[] xcs, String str) {
|
||||
}
|
||||
};
|
||||
SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
|
||||
ctx.init(null, new TrustManager[]{trustManager}, null);
|
||||
SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
|
||||
Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
|
||||
.register("http", PlainConnectionSocketFactory.INSTANCE)
|
||||
.register("https", socketFactory).build();
|
||||
// 创建ConnectionManager,添加Connection配置信息
|
||||
PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
|
||||
// 设置最大连接数
|
||||
connManager.setMaxTotal(FlowWriteConfig.HTTP_POOL_MAX_CONNECTION);
|
||||
// 设置每个连接的路由数
|
||||
connManager.setDefaultMaxPerRoute(FlowWriteConfig.HTTP_POOL_MAX_PER_ROUTE);
|
||||
return connManager;
|
||||
} catch (KeyManagementException | NoSuchAlgorithmException e) {
|
||||
throw new FlowWriteException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取Http客户端连接对象
|
||||
*
|
||||
* @param socketTimeOut 响应超时时间
|
||||
* @return Http客户端连接对象
|
||||
*/
|
||||
private CloseableHttpClient getHttpClient(int socketTimeOut) {
|
||||
// 创建Http请求配置参数
|
||||
RequestConfig requestConfig = RequestConfig.custom()
|
||||
// 获取连接超时时间
|
||||
.setConnectionRequestTimeout(FlowWriteConfig.HTTP_POOL_REQUEST_TIMEOUT)
|
||||
// 请求超时时间
|
||||
.setConnectTimeout(FlowWriteConfig.HTTP_POOL_CONNECT_TIMEOUT)
|
||||
// 响应超时时间
|
||||
.setSocketTimeout(socketTimeOut)
|
||||
.build();
|
||||
|
||||
/**
|
||||
* 测出超时重试机制为了防止超时不生效而设置
|
||||
* 如果直接放回false,不重试
|
||||
* 这里会根据情况进行判断是否重试
|
||||
*/
|
||||
HttpRequestRetryHandler retry = (exception, executionCount, context) -> {
|
||||
if (executionCount >= 3) {// 如果已经重试了3次,就放弃
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof NoHttpResponseException) {// 如果服务器丢掉了连接,那么就重试
|
||||
return true;
|
||||
}
|
||||
if (exception instanceof SSLHandshakeException) {// 不要重试SSL握手异常
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof SocketTimeoutException) {
|
||||
if (exception.getMessage().contains("Read timed out")) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (exception instanceof UnknownHostException) {// 目标服务器不可达
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof ConnectTimeoutException) {// 连接被拒绝
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof SSLException) {// ssl握手异常
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof InterruptedIOException) {// 超时
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
HttpClientContext clientContext = HttpClientContext.adapt(context);
|
||||
HttpRequest request = clientContext.getRequest();
|
||||
// 如果请求是幂等的,就再次尝试
|
||||
if (!(request instanceof HttpEntityEnclosingRequest)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
|
||||
ConnectionKeepAliveStrategy myStrategy = (response, context) -> {
|
||||
HeaderElementIterator it = new BasicHeaderElementIterator
|
||||
(response.headerIterator(HTTP.CONN_KEEP_ALIVE));
|
||||
while (it.hasNext()) {
|
||||
HeaderElement he = it.nextElement();
|
||||
String param = he.getName();
|
||||
String value = he.getValue();
|
||||
if (value != null && param.equalsIgnoreCase("timeout")) {
|
||||
return Long.parseLong(value) * 1000;
|
||||
}
|
||||
}
|
||||
return 60 * 1000;//如果没有约定,则默认定义时长为60s
|
||||
};
|
||||
|
||||
// 创建httpClient
|
||||
return HttpClients.custom()
|
||||
// 把请求相关的超时信息设置到连接客户端
|
||||
.setDefaultRequestConfig(requestConfig)
|
||||
// 把请求重试设置到连接客户端
|
||||
.setRetryHandler(retry)
|
||||
.setKeepAliveStrategy(myStrategy)
|
||||
// 配置连接池管理对象
|
||||
.setConnectionManager(getSslClientManager())
|
||||
.build();
|
||||
}
|
||||
|
||||
public InputStream httpGetInputStream(String url, int socketTimeout, Header... headers) {
|
||||
InputStream result = null;
|
||||
// 获取客户端连接对象
|
||||
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
|
||||
// 创建GET请求对象
|
||||
HttpGet httpGet = new HttpGet(url);
|
||||
if (StringUtil.isNotEmpty(headers)) {
|
||||
for (Header h : headers) {
|
||||
httpGet.addHeader(h);
|
||||
}
|
||||
}
|
||||
CloseableHttpResponse response = null;
|
||||
|
||||
try {
|
||||
// 执行请求
|
||||
response = httpClient.execute(httpGet);
|
||||
// 获取响应实体
|
||||
result = IOUtils.toBufferedInputStream(response.getEntity().getContent());
|
||||
// 获取响应信息
|
||||
EntityUtils.consume(response.getEntity());
|
||||
} catch (ClientProtocolException e) {
|
||||
log.error("current file: {},Protocol error:{}", url, e.getMessage());
|
||||
|
||||
} catch (ParseException e) {
|
||||
log.error("current file: {}, Parser error:{}", url, e.getMessage());
|
||||
|
||||
} catch (IOException e) {
|
||||
log.error("current file: {},IO error:{}", url, e.getMessage());
|
||||
|
||||
} finally {
|
||||
if (null != response) {
|
||||
try {
|
||||
EntityUtils.consume(response.getEntity());
|
||||
response.close();
|
||||
} catch (IOException e) {
|
||||
log.error("Release Connection error:{}", e.getMessage());
|
||||
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public byte[] httpGetByte(String url, int socketTimeout, Header... headers) {
|
||||
byte[] result = null;
|
||||
// 获取客户端连接对象
|
||||
CloseableHttpClient httpClient = getHttpClient(socketTimeout);
|
||||
// 创建GET请求对象
|
||||
HttpGet httpGet = new HttpGet(url);
|
||||
if (StringUtil.isNotEmpty(headers)) {
|
||||
for (Header h : headers) {
|
||||
httpGet.addHeader(h);
|
||||
}
|
||||
}
|
||||
CloseableHttpResponse response = null;
|
||||
|
||||
try {
|
||||
// 执行请求
|
||||
response = httpClient.execute(httpGet);
|
||||
// 获取响应实体
|
||||
result = IOUtils.toByteArray(response.getEntity().getContent());
|
||||
// 获取响应信息
|
||||
EntityUtils.consume(response.getEntity());
|
||||
} catch (ClientProtocolException e) {
|
||||
log.error("current file: {},Protocol error:{}", url, e.getMessage());
|
||||
|
||||
} catch (ParseException e) {
|
||||
log.error("current file: {}, Parser error:{}", url, e.getMessage());
|
||||
|
||||
} catch (IOException e) {
|
||||
log.error("current file: {},IO error:{}", url, e.getMessage());
|
||||
|
||||
} finally {
|
||||
if (null != response) {
|
||||
try {
|
||||
EntityUtils.consume(response.getEntity());
|
||||
response.close();
|
||||
} catch (IOException e) {
|
||||
log.error("Release Connection error:{}", e.getMessage());
|
||||
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,54 @@
package com.zdjizhi.utils.connections.nacos;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;

import java.util.Properties;

/**
 * @author qidaijie
 * @Package com.zdjizhi.tools.connections.nacos
 * @Description:
 * @date 2023/7/27 14:49
 */
public class NacosConnection {
    private static final Log logger = LogFactory.get();

    private ConfigService configService;

    public ConfigService getDosService() {
        Properties properties = new Properties();
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
        properties.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_DOS_NAMESPACE);
        properties.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
        properties.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
        try {
            configService = NacosFactory.createConfigService(properties);
        } catch (NacosException e) {
            logger.error("NacosException:{}", e);
        }
        return configService;
    }

    public ConfigService getPublicService() {
        Properties properties = new Properties();
        properties.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
        properties.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_PUBLIC_NAMESPACE);
        properties.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
        properties.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
        try {
            configService = NacosFactory.createConfigService(properties);
        } catch (NacosException e) {
            logger.error("NacosException:{}", e);
        }
        return configService;
    }
}
@@ -0,0 +1,69 @@
package com.zdjizhi.utils.connections.nacos;

import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.zdjizhi.common.FlowWriteConfig;

import java.io.IOException;
import java.io.StringReader;
import java.util.Properties;
import java.util.concurrent.Executor;

public class NacosUtils {
    private static final Log logger = LogFactory.get();
    private static Properties commonProperties = new Properties();

    static {
        NacosConnection nacosConnection = new NacosConnection();
        ConfigService dosService = nacosConnection.getDosService();
        try {
            String config = dosService.getConfig(FlowWriteConfig.NACOS_DOS_DATA_ID, FlowWriteConfig.NACOS_DOS_GROUP, FlowWriteConfig.NACOS_CONNECTION_TIMEOUT);

            commonProperties.load(new StringReader(config));

            dosService.addListener(FlowWriteConfig.NACOS_DOS_DATA_ID, FlowWriteConfig.NACOS_DOS_GROUP, new Listener() {
                @Override
                public Executor getExecutor() {
                    return null;
                }

                @Override
                public void receiveConfigInfo(String configMsg) {
                    try {
                        commonProperties.clear();
                        commonProperties.load(new StringReader(configMsg));
                    } catch (IOException e) {
                        logger.error("Failed to apply updated Nacos config", e);
                    }
                    System.out.println(configMsg);
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Failed to fetch Nacos config", e);
        }
    }

    public static String getStringProperty(String key) {
        return commonProperties.getProperty(key);
    }

    public static Integer getIntProperty(String key) {
        return Integer.parseInt(commonProperties.getProperty(key));
    }

    public static Double getDoubleProperty(String key) {
        return Double.parseDouble(commonProperties.getProperty(key));
    }

    public static Long getLongProperty(String key) {
        return Long.parseLong(commonProperties.getProperty(key));
    }

    public static Boolean getBooleanProperty(String key) {
        return "true".equals(commonProperties.getProperty(key).toLowerCase().trim());
    }

}
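Because the listener above reloads commonProperties in place, callers should read values through these getters at the point of use rather than caching them. A small usage sketch; the key names come from the job properties file in this change set and are assumed to live in dos_detection.properties on Nacos.

    // Sketch: read dynamically-updatable settings at use time (same package as NacosUtils for brevity).
    public class NacosUtilsUsageSketch {
        public static void main(String[] args) {
            double minorThreshold = NacosUtils.getDoubleProperty("baseline.sessions.minor.threshold");
            int sensitivity       = NacosUtils.getIntProperty("sensitivity.threshold");
            System.out.println(minorThreshold + " / " + sensitivity);
        }
    }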
@@ -0,0 +1,13 @@
package com.zdjizhi.utils.exception;

public class FlowWriteException extends RuntimeException {

    public FlowWriteException() {
    }

    public FlowWriteException(String message) {
        super(message);
    }

}
@@ -5,51 +5,55 @@ stream.execution.environment.parallelism=1
stream.execution.job.name=DOS-DETECTION-APPLICATION

#Input Kafka source parallelism
kafka.input.parallelism=1
kafka.input.parallelism=3

#Input Kafka topic name
kafka.input.topic.name=DOS-SKETCH-RECORD

#Input Kafka bootstrap servers
kafka.input.bootstrap.servers=192.168.44.12:9092
#kafka.input.bootstrap.servers=192.168.44.11:9092,192.168.44.14:9092,192.168.44.15:9092
#kafka.input.bootstrap.servers=192.168.44.12:9094
kafka.input.bootstrap.servers=192.168.44.11:9094,192.168.44.14:9094,192.168.44.15:9094

#Kafka consumer group id
kafka.input.group.id=2108231709
kafka.input.group.id=dos-detection-job-221125-1
#kafka.input.group.id=dos-detection-job-210813-1

#Kafka metrics sink parallelism
kafka.output.metric.parallelism=1
kafka.output.metric.parallelism=3

#Kafka metrics output topic name
kafka.output.metric.topic.name=TRAFFIC-TOP-DESTINATION-IP-METRICS
#kafka.output.metric.topic.name=test
#kafka.output.metric.topic.name=TRAFFIC-TOP-DESTINATION-IP-METRICS
kafka.output.metric.topic.name=test

#Kafka event sink parallelism
kafka.output.event.parallelism=1
kafka.output.event.parallelism=3

#Kafka event output topic name
kafka.output.event.topic.name=DOS-EVENT
#kafka.output.event.topic.name=test
#kafka.output.event.topic.name=DOS-EVENT
kafka.output.event.topic.name=abcd

#Output Kafka bootstrap servers
kafka.output.bootstrap.servers=192.168.44.12:9092
kafka.output.bootstrap.servers=192.168.44.12:9094
#kafka.output.bootstrap.servers=192.168.44.11:9092,192.168.44.14:9092,192.168.44.15:9092

#ZooKeeper quorum address
#hbase.zookeeper.quorum=192.168.44.12:2181
hbase.zookeeper.quorum=192.168.44.11:2181,192.168.44.14:2181,192.168.44.15:2181
hbase.zookeeper.quorum=192.168.44.12:2181
#hbase.zookeeper.quorum=192.168.40.151:2181,192.168.40.152:2181,192.168.40.203:2181
#hbase.zookeeper.quorum=192.168.44.11:2181,192.168.44.14:2181,192.168.44.15:2181

#HBase client timeouts (ms)
hbase.client.operation.timeout=30000
hbase.client.scanner.timeout.period=30000

##HBase baseline table name
hbase.baseline.table.name=ddos_traffic_baselines
hbase.baseline.table.name=dos:ddos_traffic_baselines

#Limit on the number of baselines read
hbase.baseline.total.num=1000000

#Baseline TTL, in days
hbase.baseline.ttl=10

#Aggregation parallelism, 2 keys
flink.first.agg.parallelism=1

@@ -57,10 +61,10 @@ flink.first.agg.parallelism=1
flink.detection.map.parallelism=1

#Watermark max out-of-orderness
flink.watermark.max.orderness=1
flink.watermark.max.orderness=300

#Computation window size, default 600s
flink.window.max.time=10
flink.window.max.time=600

#Limit on distinct source IPs in a DoS event result
source.ip.list.limit=10000
@@ -70,26 +74,10 @@ destination.ip.partition.num=10000

data.center.id.num=15

#IP mmdb database path
ip.mmdb.path=D:\\data\\dat\\
#ip.mmdb.path=/home/bigdata/topology/dat/
#ip.mmdb.path=/home/bigdata/wlh/topology/dos-detection/dat/

#Sensitivity threshold; no alert when the rate is below this value
sensitivity.threshold=100

#Upper/lower bounds for judging DoS attacks against the baseline
baseline.sessions.minor.threshold=0.1
baseline.sessions.warning.threshold=0.5
baseline.sessions.major.threshold=1
baseline.sessions.severe.threshold=3
baseline.sessions.critical.threshold=8

#bifang service address
bifang.server.uri=http://192.168.44.3:80

#Read-only token for accessing bifang; built into bifang, no need to change
bifang.server.token=ed04b942-7df4-4e3d-b9a9-a881ca98a867
bifang.server.uri=http://192.168.44.72:80
#bifang.server.uri=http://192.168.44.3:80

#Path for password encryption
bifang.server.encryptpwd.path=/v1/user/encryptpwd
@@ -97,8 +85,11 @@ bifang.server.encryptpwd.path=/v1/user/encryptpwd
#Path for logging in to the bifang service
bifang.server.login.path=/v1/user/login

#Path for fetching vsysId
bifang.server.policy.vaysid.path=/v1/admin/vsys

#Path for fetching static thresholds
bifang.server.policy.threshold.path=/v1/policy/profile/DoS/detection/threshold
bifang.server.policy.threshold.path=/v1/policy/profile/dos/detection/threshold

#HTTP request parameters
#Maximum number of connections
@@ -120,11 +111,29 @@ http.pool.response.timeout=60000
static.threshold.schedule.minutes=10

#Baseline fetch period, default 7 days
baseline.threshold.schedule.days=7
baseline.threshold.schedule.days=1

#Kafka authentication parameters
sasl.jaas.config.user=admin
sasl.jaas.config.password=galaxy2019
#sasl.jaas.config.password=galaxy2019
sasl.jaas.config.password=6MleDyA3Z73HSaXiKsDJ2k7Ys8YWLhEJ

#Whether to enable Kafka authentication, 1: yes; 0: no
sasl.jaas.config.flag=1
sasl.jaas.config.flag=1

############################## Nacos configuration ######################################
nacos.server.addr=192.168.44.12:8848
nacos.username=nacos
nacos.password=nacos
############################## Nacos --- knowledge base configuration ######################################
nacos.namespace=public
nacos.data.id=knowledge_base.json
nacos.group=DEFAULT_GROUP
nacos.connection.timeout=60000

############################## Nacos --- static threshold configuration ######################################
nacos.dos.namespace=test
nacos.dos.data.id=dos_detection.properties
nacos.dos.group=Galaxy

http.socket.timeout=90000
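flink.watermark.max.orderness and flink.window.max.time correspond to Flink's bounded-out-of-orderness watermarks and tumbling event-time windows. The sketch below shows that mapping under assumptions: the job's real wiring is not part of this diff, and the DosSketchLog getters are inferred from the tests further down.

    // Sketch: applying the two settings above with the Flink 1.13 APIs.
    import java.time.Duration;
    import com.zdjizhi.common.DosSketchLog;
    import org.apache.flink.api.common.eventtime.WatermarkStrategy;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
    import org.apache.flink.streaming.api.windowing.time.Time;

    public class WindowingSketch {
        public static void apply(DataStream<DosSketchLog> sketches) {
            WatermarkStrategy<DosSketchLog> wm = WatermarkStrategy
                    .<DosSketchLog>forBoundedOutOfOrderness(Duration.ofSeconds(300)) // flink.watermark.max.orderness
                    .withTimestampAssigner((log, ts) -> log.getSketch_start_time() * 1000L);
            sketches.assignTimestampsAndWatermarks(wm)
                    .keyBy(log -> log.getDestination_ip())
                    .window(TumblingEventTimeWindows.of(Time.seconds(600)));         // flink.window.max.time
        }
    }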
23
src/main/resources/log4j.properties
Normal file
23
src/main/resources/log4j.properties
Normal file
@@ -0,0 +1,23 @@
#Log4j
log4j.rootLogger=info,console,file
# Console appender settings
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n

# File appender settings
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
#Use a relative path and verify that logs are written under the application directory
log4j.appender.file.file=${nis.root}/log/flink-dos-detection.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
#MyBatis configuration; com.nis.web.dao is the package containing the MyBatis interfaces
log4j.logger.com.nis.web.dao=debug
#BoneCP data source configuration
log4j.category.com.jolbox=debug,console
8
src/test/java/com/zdjizhi/ThresholdTestData/DNS-Flood
Normal file
8
src/test/java/com/zdjizhi/ThresholdTestData/DNS-Flood
Normal file
@@ -0,0 +1,8 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277141, sketch_duration=59, attack_type='DNS Flood', source_ip='23.91.128.115', destination_ip='102.219.30.33', sketch_sessions=945, sketch_packets=945, sketch_bytes=446370, vsys_id=23}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277205, sketch_duration=86, attack_type='DNS Flood', source_ip='172.217.160.68', destination_ip='10.113.83.88', sketch_sessions=730, sketch_packets=730, sketch_bytes=344575, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277244, sketch_duration=47, attack_type='DNS Flood', source_ip='45.135.144.112', destination_ip='42.62.192.132', sketch_sessions=0, sketch_packets=0, sketch_bytes=47, vsys_id=1}

--DosDetectionThreshold
{profileId='6091', attackType='DNS Flood', serverIpList=[113.113.83.213, 42.62.192.132/28, 10.113.83.1/25, 102.219.30.33/29], serverIpAddr='null', packetsPerSec=1, bitsPerSec=1, sessionsPerSec=1, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}
{profileId='5679', attackType='DNS Flood', serverIpList=[102.219.30.33], serverIpAddr='null', packetsPerSec=500, bitsPerSec=1000000, sessionsPerSec=100000, isValid=1, vsysId=23, superiorIds=[4, 5]}
6
src/test/java/com/zdjizhi/ThresholdTestData/ICMP-Flood
Normal file
6
src/test/java/com/zdjizhi/ThresholdTestData/ICMP-Flood
Normal file
@@ -0,0 +1,6 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277232, sketch_duration=59, attack_type='ICMP Flood', source_ip='45.170.244.25', destination_ip='24.152.57.56', sketch_sessions=499, sketch_packets=499, sketch_bytes=111970, vsys_id=1}


--DosDetectionThreshold
{profileId='6093', attackType='ICMP Flood', serverIpList=[31.131.80.88/29, 24.152.57.56/29, 47.93.59.1/25], serverIpAddr='null', packetsPerSec=210, bitsPerSec=0, sessionsPerSec=0, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}
@@ -0,0 +1,7 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1685003938, sketch_duration=63714, attack_type='TCP SYN Flood', source_ip='5.32.144.55', destination_ip='45.188.134.11', sketch_sessions=0, sketch_packets=0, sketch_bytes=4195, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277234, sketch_duration=57, attack_type='TCP SYN Flood', source_ip='18.65.148.128', destination_ip='23.200.74.224', sketch_sessions=54, sketch_packets=54, sketch_bytes=73427, vsys_id=1}


--DosDetectionThreshold
{profileId='6095', attackType='TCP SYN Flood', serverIpList=[23.200.74.224, 45.188.134.11/29, 41.183.0.15/29, 41.183.0.16/30], serverIpAddr='null', packetsPerSec=1, bitsPerSec=1, sessionsPerSec=1, isValid=1, vsysId=1, superiorIds=[5, 4, 12, 27]}
8
src/test/java/com/zdjizhi/ThresholdTestData/UDP-Flood
Normal file
8
src/test/java/com/zdjizhi/ThresholdTestData/UDP-Flood
Normal file
@@ -0,0 +1,8 @@
--DosSketchLog
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277291, sketch_duration=0, attack_type='UDP Flood', source_ip='121.14.89.209', destination_ip='192.168.50.11', sketch_sessions=0, sketch_packets=0, sketch_bytes=0, vsys_id=1}
{common_sled_ip='null', common_data_center='null', sketch_start_time=1686277233, sketch_duration=58, attack_type='UDP Flood', source_ip='192.168.50.56,192.168.50.34,192.168.50.11,192.168.50.33,192.168.50.55,192.168.50.58,192.168.50.36,192.168.50.14,192.168.50.35,192.168.50.13,192.168.50.57,192.168.50.30,192.168.50.51,192.168.50.54,192.168.50.10,192.168.50.32,192.168.50.53,192.168.50.31,192.168.50.16,192.168.50.38,192.168.50.15,192.168.50.37,192.168.50.18,192.168.50.17,192.168.50.50,192.168.50.45,192.168.50.23,192.168.50.22,192.168.50.44,192.168.50.25,192.168.50.47,192.168.50.46,192.168.50.24,192.168.50.63,192.168.50.41,192.168.50.40,192.168.50.62,192.168.50.43,192.168.50.21,192.168.50.20,192.168.50.42,192.168.50.27,192.168.50.26,192.168.50.48,192.168.50.28,192.168.50.61,192.168.50.60', destination_ip='121.14.89.209', sketch_sessions=297, sketch_packets=297, sketch_bytes=371404, vsys_id=1}


--DosDetectionThreshold
{profileId='5333', attackType='UDP Flood', serverIpList=[192.168.50.11, 192.168.50.12], serverIpAddr='null', packetsPerSec=50, bitsPerSec=50, sessionsPerSec=50, isValid=1, vsysId=1, superiorIds=[4, 12, 5, 27]}

@@ -19,12 +19,12 @@ public class HbaseTest {
    public static void main(String[] args) throws IOException {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();

        config.set("hbase.zookeeper.quorum", CommonConfig.HBASE_ZOOKEEPER_QUORUM);
        config.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_QUORUM);
        config.set("hbase.client.retries.number", "3");
        config.set("hbase.bulkload.retries.number", "3");
        config.set("zookeeper.recovery.retry", "3");
        config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, CommonConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
        config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, CommonConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);
        config.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, FlowWriteConfig.HBASE_CLIENT_OPERATION_TIMEOUT);
        config.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, FlowWriteConfig.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD);

        TableName tableName = TableName.valueOf("dos_test");
        Connection conn = ConnectionFactory.createConnection(config);

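For reference, reading the baseline table named by hbase.baseline.table.name (dos:ddos_traffic_baselines) with the connection configured above would look roughly like the fragment below. This is a hedged sketch, not the project's actual baseline loader.

    // Sketch, continuing the main(...) above. Extra imports needed:
    // org.apache.hadoop.hbase.client.Table, org.apache.hadoop.hbase.client.Scan,
    // org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Result,
    // org.apache.hadoop.hbase.util.Bytes.
    Table baselineTable = conn.getTable(TableName.valueOf("dos:ddos_traffic_baselines"));
    try (ResultScanner scanner = baselineTable.getScanner(new Scan())) {
        for (Result row : scanner) {
            System.out.println(Bytes.toString(row.getRow())); // print each baseline row key
        }
    }
    baselineTable.close();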
@@ -41,7 +41,22 @@ public class IpTest {
        IPAddress pv43 = new IPAddressString("fc00::").getAddress();
        IPAddress pv44 = new IPAddressString("fc00::10:1").getAddress();

        IPAddress pv45 = new IPAddressString("12.56.4.0/32").getAddress();
        IPAddress pv45 = new IPAddressString("192.168.42.1").getAddress();
        IPAddress pv46 = new IPAddressString("192.168.42.1/32").getAddress();
        IPAddress pv47 = new IPAddressString("12.56.4.0").getAddress();

        IPAddress mask = pv45.getNetwork().getNetworkMask(24, false);

        System.out.println(pv45.isMultiple());
        System.out.println(pv46.isMultiple());
        System.out.println(pv46.isPrefixed());
        System.out.println(pv47.isPrefixed());
        System.out.println(pv45+"---"+pv45.toMaxHost().withoutPrefixLength()+"---"+pv45.adjustPrefixLength(pv45.getBitCount()));
        System.out.println(pv45+"---mask:"+pv45.mask(mask).toString());

        System.out.println(pv45.adjustPrefixLength(pv45.getBitCount())+"---"+pv45.toMaxHost().withoutPrefixLength());

        /*
        System.out.println(str5.getUpper()+"---"+str5.getLower());

        System.out.println(rangeMap.span().contains(pv4));
@@ -50,6 +65,7 @@ public class IpTest {
        System.out.println(rangeMap.get(pv42));
        System.out.println(rangeMap.get(pv43));
        System.out.println(rangeMap.get(pv44));
        */

        /*
        System.out.println(str5.toSequentialRange());

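The same IPAddressString/IPAddress classes exercised in IpTest can answer the question the threshold data actually needs: whether a destination IP falls inside one of the serverIpList CIDR blocks. A small sketch using values from the DNS-Flood test data; the inet.ipaddr package names assume the seancfoley IPAddress library that these classes come from.

    // Sketch: CIDR membership check with the ipaddress library.
    import inet.ipaddr.IPAddress;
    import inet.ipaddr.IPAddressString;

    public class CidrContainsSketch {
        public static void main(String[] args) {
            IPAddress block = new IPAddressString("102.219.30.33/29").getAddress().toPrefixBlock();
            IPAddress dst   = new IPAddressString("102.219.30.33").getAddress();
            System.out.println(block.contains(dst)); // true for this sample
        }
    }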
101
src/test/java/com/zdjizhi/common/NacosTest.java
Normal file
101
src/test/java/com/zdjizhi/common/NacosTest.java
Normal file
@@ -0,0 +1,101 @@
|
||||
package com.zdjizhi.common;
|
||||
|
||||
import com.alibaba.nacos.api.NacosFactory;
|
||||
import com.alibaba.nacos.api.PropertyKeyConst;
|
||||
import com.alibaba.nacos.api.config.ConfigService;
|
||||
import com.alibaba.nacos.api.config.listener.Listener;
|
||||
import com.alibaba.nacos.api.exception.NacosException;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringReader;
|
||||
import java.util.Properties;
|
||||
import java.util.concurrent.Executor;
|
||||
|
||||
|
||||
/**
|
||||
* @author qidaijie
|
||||
* @Package com.zdjizhi
|
||||
* @Description:
|
||||
* @date 2022/3/1016:58
|
||||
*/
|
||||
public class NacosTest {
|
||||
|
||||
/**
|
||||
* <dependency>
|
||||
* <groupId>com.alibaba.nacos</groupId>
|
||||
* <artifactId>nacos-client</artifactId>
|
||||
* <version>1.2.0</version>
|
||||
* </dependency>
|
||||
*/
|
||||
|
||||
private static Properties properties = new Properties();
|
||||
/**
|
||||
* config data id = config name
|
||||
*/
|
||||
private static final String DATA_ID = "dos_baseline.properties";
|
||||
/**
|
||||
* config group
|
||||
*/
|
||||
private static final String GROUP = "Galaxy";
|
||||
|
||||
private void getProperties() {
|
||||
properties.setProperty(PropertyKeyConst.SERVER_ADDR, "192.168.44.12:8848");
|
||||
properties.setProperty(PropertyKeyConst.NAMESPACE, "test");
|
||||
properties.setProperty(PropertyKeyConst.USERNAME, "nacos");
|
||||
properties.setProperty(PropertyKeyConst.PASSWORD, "nacos");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void GetConfigurationTest() {
|
||||
try {
|
||||
getProperties();
|
||||
ConfigService configService = NacosFactory.createConfigService(properties);
|
||||
String content = configService.getConfig(DATA_ID, GROUP, 5000);
|
||||
Properties nacosConfigMap = new Properties();
|
||||
nacosConfigMap.load(new StringReader(content));
|
||||
System.out.println(nacosConfigMap.getProperty("static.sensitivity.threshold"));
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void ListenerConfigurationTest() {
|
||||
getProperties();
|
||||
try {
|
||||
//first get config
|
||||
ConfigService configService = NacosFactory.createConfigService(properties);
|
||||
String config = configService.getConfig(DATA_ID, GROUP, 5000);
|
||||
// System.out.println(config);
|
||||
|
||||
//start listenner
|
||||
configService.addListener(DATA_ID, GROUP, new Listener() {
|
||||
@Override
|
||||
public Executor getExecutor() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void receiveConfigInfo(String configMsg) {
|
||||
System.out.println(configMsg);
|
||||
}
|
||||
});
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
//keep running,change nacos config,print new config
|
||||
/*
|
||||
while (true) {
|
||||
try {
|
||||
Thread.sleep(5000);
|
||||
} catch (InterruptedException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
*/
|
||||
}
|
||||
}
|
||||
237
src/test/java/com/zdjizhi/etl/DosDetectionTest.java
Normal file
237
src/test/java/com/zdjizhi/etl/DosDetectionTest.java
Normal file
@@ -0,0 +1,237 @@
|
||||
package com.zdjizhi.etl;
|
||||
|
||||
import com.geedgenetworks.utils.StringUtil;
|
||||
import com.zdjizhi.common.DosDetectionThreshold;
|
||||
import com.zdjizhi.common.DosEventLog;
|
||||
import com.zdjizhi.common.DosSketchLog;
|
||||
|
||||
import com.zdjizhi.utils.IpLookupUtils;
|
||||
import com.zdjizhi.utils.SnowflakeId;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.text.NumberFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
|
||||
public class DosDetectionTest {
|
||||
|
||||
private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
|
||||
private final static int BASELINE_SIZE = 144;
|
||||
private final static int STATIC_CONDITION_TYPE = 1;
|
||||
private final static int BASELINE_CONDITION_TYPE = 2;
|
||||
private final static int SENSITIVITY_CONDITION_TYPE = 3;
|
||||
|
||||
private final static String SESSIONS_TAG = "sessions";
|
||||
private final static String PACKETS_TAG = "packets";
|
||||
private final static String BITS_TAG = "bits";
|
||||
@Test
|
||||
public void dosDetectionTest(){
|
||||
DosDetectionThreshold dosDetectionThreshold = new DosDetectionThreshold();
|
||||
ArrayList<String> serverIpList = new ArrayList<>();
|
||||
serverIpList.add("192.168.50.11");
|
||||
serverIpList.add("192.168.50.1/24");
|
||||
serverIpList.add("FC::12:0:0/54");
|
||||
serverIpList.add("FC::12:0:0");
|
||||
dosDetectionThreshold.setProfile_id(4437);
|
||||
dosDetectionThreshold.setAttack_type("DNS Flood");
|
||||
dosDetectionThreshold.setServer_ip_list(serverIpList);
|
||||
dosDetectionThreshold.setSessions_per_sec(1);
|
||||
dosDetectionThreshold.setPackets_per_sec(1);
|
||||
dosDetectionThreshold.setBits_per_sec(100000);
|
||||
dosDetectionThreshold.setIs_valid(1);
|
||||
dosDetectionThreshold.setSuperior_ids(new Integer[]{5,4,12,27});
|
||||
|
||||
|
||||
DosSketchLog dosSketchLog = new DosSketchLog ();
|
||||
|
||||
dosSketchLog.setSketch_sessions(68);
|
||||
dosSketchLog.setSketch_packets(68);
|
||||
dosSketchLog.setSketch_bytes(285820);//185.82
|
||||
dosSketchLog.setVsys_id(1);
|
||||
dosSketchLog.setAttack_type("ICMP Flood");
|
||||
dosSketchLog.setSource_ip("45.170.244.25");
|
||||
dosSketchLog.setDestination_ip("24.152.57.56");
|
||||
//静态阈值获取
|
||||
long sessionBase = dosDetectionThreshold.getSessions_per_sec();
|
||||
long pktBase=dosDetectionThreshold.getPackets_per_sec();
|
||||
long bitBase=dosDetectionThreshold.getBits_per_sec();
|
||||
//基于速率进行计算
|
||||
long diffSession = dosSketchLog.getSketch_sessions() - sessionBase;
|
||||
long diffPkt = dosSketchLog.getSketch_packets() - pktBase;
|
||||
long diffByte = dosSketchLog.getSketch_bytes() - bitBase;
|
||||
|
||||
|
||||
Double diffSessionPercent = getDiffPercent(diffSession, sessionBase)*100;
|
||||
Double diffPktPercent = getDiffPercent(diffPkt, pktBase)*100;
|
||||
Double diffBitPercent = getDiffPercent(diffByte, bitBase)*100;
|
||||
long profileId = 0;
|
||||
DosEventLog result =null;
|
||||
if (diffSessionPercent >= diffPktPercent && diffSessionPercent >= diffBitPercent){
|
||||
profileId = dosDetectionThreshold.getProfile_id();
|
||||
result= getDosEventLog(dosSketchLog, sessionBase, diffSession, profileId, STATIC_CONDITION_TYPE, SESSIONS_TAG);
|
||||
System.out.println(result);
|
||||
}else if (diffPktPercent >= diffSessionPercent && diffPktPercent >= diffBitPercent){
|
||||
profileId = dosDetectionThreshold.getProfile_id();
|
||||
result = getDosEventLog(dosSketchLog, pktBase, diffPkt,profileId, STATIC_CONDITION_TYPE, PACKETS_TAG);
|
||||
System.out.println(result);
|
||||
}else if (diffBitPercent >= diffPktPercent && diffBitPercent >= diffSessionPercent){
|
||||
profileId = dosDetectionThreshold.getProfile_id();
|
||||
result = getDosEventLog(dosSketchLog, bitBase, diffByte, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
|
||||
System.out.println(result);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
private DosEventLog getDosEventLog(DosSketchLog value, long base, long diff, long profileId, int type, String tag) {
|
||||
DosEventLog result = null;
|
||||
String destinationIp = value.getDestination_ip();
|
||||
String attackType = value.getAttack_type();
|
||||
if (diff > 0 && base != 0) {
|
||||
double percent = getDiffPercent(diff, base);
|
||||
Severity severity = judgeSeverity(percent);
|
||||
Integer staticSensitivityThreshold = 100;
|
||||
if (severity != Severity.NORMAL) {
|
||||
if (type == BASELINE_CONDITION_TYPE && percent < 0.2) {
|
||||
// logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过基线敏感阈值,日志详情\n{}", destinationIp, attackType, base, percent, value);
|
||||
}else if ((type == BASELINE_CONDITION_TYPE || type == SENSITIVITY_CONDITION_TYPE) && value.getSketch_sessions() < staticSensitivityThreshold){
|
||||
// logger.debug("当前server IP:{},类型:{},基线值{}百分比{}未超过静态敏感阈值,日志详情\n{}",destinationIp, attackType, base, percent, value);
|
||||
}else {
|
||||
result = getResult(value, base, profileId, severity, percent+1, type, tag);
|
||||
if (type == SENSITIVITY_CONDITION_TYPE){
|
||||
result.setSeverity(Severity.MAJOR.severity);
|
||||
}
|
||||
// logger.info("检测到当前server IP {} 存在 {} 异常,超出基线{} {}倍,基于{}:{}检测,日志详情\n {}", destinationIp,attackType,base,percent,type,tag,result);
|
||||
}
|
||||
}
|
||||
// else {
|
||||
// logger.debug("当前server IP:{} 未出现 {} 异常,日志详情 {}", destinationIp, attackType, value);
|
||||
// }
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private DosEventLog getResult(DosSketchLog value, long base, long profileId, Severity severity, double percent, int type, String tag) {
|
||||
DosEventLog dosEventLog = new DosEventLog();
|
||||
// dosEventLog.setLog_id(SnowflakeId.generateId());
|
||||
dosEventLog.setVsys_id(value.getVsys_id());
|
||||
dosEventLog.setStart_time(value.getSketch_start_time());
|
||||
dosEventLog.setEnd_time(value.getSketch_start_time() + value.getSketch_duration());
|
||||
dosEventLog.setProfile_id(profileId);
|
||||
dosEventLog.setAttack_type(value.getAttack_type());
|
||||
dosEventLog.setSeverity(severity.severity);
|
||||
// dosEventLog.setConditions(getConditions(PERCENT_INSTANCE.format(percent), base, value.getSketch_sessions(), type, tag));
|
||||
dosEventLog.setConditions(getConditions(percent, base, value.getSketch_sessions(), type, tag,dosEventLog));
|
||||
dosEventLog.setDestination_ip(value.getDestination_ip());
|
||||
// dosEventLog.setDestination_country(IpUtils.ipLookup.countryLookup(value.getDestination_ip()));
|
||||
String ipList = value.getSource_ip();
|
||||
dosEventLog.setSource_ip_list(ipList);
|
||||
dosEventLog.setSource_country_list(getSourceCountryList(ipList));
|
||||
dosEventLog.setSession_rate(value.getSketch_sessions());
|
||||
dosEventLog.setPacket_rate(value.getSketch_packets());
|
||||
dosEventLog.setBit_rate(value.getSketch_bytes());
|
||||
return dosEventLog;
|
||||
}
|
||||
|
||||
public String getConditions(double percent, long base, long sessions, int type, String tag,DosEventLog dosEventLog) {
|
||||
int condition =0;
|
||||
if ("Minor".equals(dosEventLog.getSeverity())){
|
||||
condition=50;
|
||||
}else if ("Warning".equals(dosEventLog.getSeverity())){
|
||||
condition=100;
|
||||
}else if ("Major".equals(dosEventLog.getSeverity())){
|
||||
condition=250;
|
||||
}else if ("Severe".equals(dosEventLog.getSeverity())){
|
||||
condition=500;
|
||||
}else if ("Critical".equals(dosEventLog.getSeverity())){
|
||||
condition =800;
|
||||
}
|
||||
switch (type) {
|
||||
case STATIC_CONDITION_TYPE:
|
||||
return "Rate > " +
|
||||
base + " " +
|
||||
tag + "/s" + "(>"+condition+"%)";
|
||||
case BASELINE_CONDITION_TYPE:
|
||||
return tag + " > " +
|
||||
PERCENT_INSTANCE.format(percent) + " of baseline";
|
||||
case SENSITIVITY_CONDITION_TYPE:
|
||||
return String.valueOf(sessions) + " " +
|
||||
tag + "/s Unusually high " +
|
||||
StringUtils.capitalize(tag);
|
||||
default:
|
||||
throw new IllegalArgumentException("Illegal Argument type:" + type + ", known types = [1,2,3]");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
private String getSourceCountryList(String sourceIpList) {
|
||||
if (StringUtil.isNotBlank(sourceIpList)) {
|
||||
String countryList;
|
||||
try {
|
||||
String[] ipArr = sourceIpList.split(",");
|
||||
HashSet<String> countrySet = new HashSet<>();
|
||||
for (String ip : ipArr) {
|
||||
String country = IpLookupUtils.getCountryLookup(ip);
|
||||
if (StringUtil.isNotBlank(country)){
|
||||
countrySet.add(country);
|
||||
}
|
||||
}
|
||||
countryList = StringUtils.join(countrySet, ", ");
|
||||
return countryList;
|
||||
} catch (Exception e) {
|
||||
// logger.error("{} source IP lists 获取国家失败", sourceIpList, e);
|
||||
return StringUtil.EMPTY;
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("Illegal Argument sourceIpList = null");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private Double getDiffPercent(long diff, long base) {
|
||||
return BigDecimal.valueOf((float) diff / base).setScale(4, BigDecimal.ROUND_HALF_UP).doubleValue();
|
||||
}
|
||||
|
||||
private Severity judgeSeverity(double diffPercent) {
|
||||
if (diffPercent >= 0.5 && diffPercent < 1) {
|
||||
return Severity.MINOR;
|
||||
} else if (diffPercent >= 1 && diffPercent < 2.5) {
|
||||
return Severity.WARNING;
|
||||
} else if (diffPercent >= 2.5 && diffPercent < 5) {
|
||||
return Severity.MAJOR;
|
||||
} else if (diffPercent >= 5 && diffPercent < 8) {
|
||||
return Severity.SEVERE;
|
||||
} else if (diffPercent >= 8) {
|
||||
return Severity.CRITICAL;
|
||||
} else {
|
||||
return Severity.NORMAL;
|
||||
}
|
||||
}
|
||||
|
||||
private enum Severity {
|
||||
/**
|
||||
* 判断严重程度枚举类型
|
||||
*/
|
||||
CRITICAL("Critical"),
|
||||
SEVERE("Severe"),
|
||||
MAJOR("Major"),
|
||||
WARNING("Warning"),
|
||||
MINOR("Minor"),
|
||||
NORMAL("Normal");
|
||||
|
||||
private final String severity;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.severity;
|
||||
}
|
||||
|
||||
Severity(String severity) {
|
||||
this.severity = severity;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
11
src/test/java/com/zdjizhi/etl/EtlProcessFunctionTest.java
Normal file
11
src/test/java/com/zdjizhi/etl/EtlProcessFunctionTest.java
Normal file
@@ -0,0 +1,11 @@
package com.zdjizhi.etl;

import org.junit.Test;

public class EtlProcessFunctionTest {

    @Test
    public void EtlProcessFunctionTest(){

    }
}