Add anomaly-detection documentation in a new README.md file
Fix Maven packaging so the packaged jar no longer carries META-INF signature entries
README.md | 10 (new file)
@@ -0,0 +1,10 @@
+# flink-dos-detection
+## Introduction
+Parses and aggregates the suspected DDoS attack logs emitted by the function side, and decides whether they constitute a DoS attack based on static thresholds and baselines.
+
+### Input:
+DosSketchLog, the TopN log emitted by the function side
+### Output:
+DosMetricsLog: traffic statistics log per destination IP;
+DosEventLog: DoS event log, i.e. the final detection result
+
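The detection rule summarized in this README (aggregate the sketch logs, compare each aggregate against a per-IP, per-attack-type baseline, grade the excess with static thresholds) is what the DosDetection changes below implement. For orientation, here is a minimal stand-alone sketch of that comparison; the threshold values mirror baseline.sessions.minor.threshold and baseline.sessions.warning.threshold from the properties file at the end of this diff, while the class name, the enum constants other than NORMAL, and the helper methods are simplified stand-ins rather than the project's actual API.

```java
public class SeveritySketch {

    enum Severity { NORMAL, MINOR, WARNING }   // NORMAL appears in the diff; the other names are assumed

    // Assumed to correspond to baseline.sessions.minor.threshold=0.1
    // and baseline.sessions.warning.threshold=0.5 in the properties file.
    private static final double MINOR_THRESHOLD = 0.1;
    private static final double WARNING_THRESHOLD = 0.5;

    /** Fraction of the observed sessions that exceeds the baseline value for this time slot. */
    static double diffPercent(long sketchSessions, long baselineSessions) {
        long diff = sketchSessions - baselineSessions;
        return diff <= 0 ? 0.0 : (double) diff / (double) sketchSessions;
    }

    /** Grade the relative excess; anything above NORMAL becomes a DosEventLog in the real job. */
    static Severity judgeSeverity(double diffPercent) {
        if (diffPercent > WARNING_THRESHOLD) return Severity.WARNING;
        if (diffPercent > MINOR_THRESHOLD)   return Severity.MINOR;
        return Severity.NORMAL;
    }

    public static void main(String[] args) {
        double d = diffPercent(1500, 900);     // 600 excess sessions out of 1500 observed
        System.out.println(d);                 // 0.4
        System.out.println(judgeSeverity(d));  // MINOR (above 0.1, below 0.5)
    }
}
```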
pom.xml | 30
@@ -80,6 +80,9 @@
 <artifact>*:*</artifact>
 <excludes>
 <exclude>META-INF</exclude>
+<exclude>META-INF/*.SF</exclude>
+<exclude>META-INF/*.DSA</exclude>
+<exclude>META-INF/*.RSA</exclude>
 </excludes>
 </filter>
 </filters>
@@ -125,13 +128,13 @@
 <!--<scope>provided</scope>-->
 </dependency>
 
-<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table -->
-<dependency>
-<groupId>org.apache.flink</groupId>
-<artifactId>flink-table</artifactId>
-<version>${flink.version}</version>
-<!--<scope>provided</scope>-->
-</dependency>
+<!--<!– https://mvnrepository.com/artifact/org.apache.flink/flink-table –>-->
+<!--<dependency>-->
+<!--<groupId>org.apache.flink</groupId>-->
+<!--<artifactId>flink-table</artifactId>-->
+<!--<version>${flink.version}</version>-->
+<!--<!–<scope>provided</scope>–>-->
+<!--</dependency>-->
 
 <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-json -->
 <dependency>
@@ -187,16 +190,13 @@
 <artifactId>zookeeper</artifactId>
 <groupId>org.apache.zookeeper</groupId>
 </exclusion>
+<exclusion>
+<artifactId>jdk.tools</artifactId>
+<groupId>jdk.tools</groupId>
+</exclusion>
 </exclusions>
 </dependency>
 
-<!--<dependency>-->
-<!--<groupId>org.apache.flink</groupId>-->
-<!--<artifactId>flink-table-api-java-bridge_2.11</artifactId>-->
-<!--<version>${flink.version}</version>-->
-<!--<!–<scope>provided</scope>–>-->
-<!--</dependency>-->
-
 <dependency>
 <groupId>org.apache.zookeeper</groupId>
 <artifactId>zookeeper</artifactId>
@@ -245,8 +245,6 @@
 </exclusion>
 </exclusions>
 </dependency>
-
-
 </dependencies>
 
 
CommonConfig.java
@@ -8,6 +8,7 @@ import com.zdjizhi.utils.CommonConfigurations;
 public class CommonConfig {
 
 public static final int STREAM_EXECUTION_ENVIRONMENT_PARALLELISM = CommonConfigurations.getIntProperty("stream.execution.environment.parallelism");
+public static final String STREAM_EXECUTION_JOB_NAME = CommonConfigurations.getStringProperty("stream.execution.job.name");
 
 public static final int KAFKA_INPUT_PARALLELISM = CommonConfigurations.getIntProperty("kafka.input.parallelism");
 public static final String KAFKA_INPUT_TOPIC_NAME = CommonConfigurations.getStringProperty("kafka.input.topic.name");
@@ -22,12 +23,10 @@ public class CommonConfig {
 public static final String KAFKA_OUTPUT_BOOTSTRAP_SERVERS = CommonConfigurations.getStringProperty("kafka.output.bootstrap.servers");
 
 public static final String HBASE_ZOOKEEPER_QUORUM = CommonConfigurations.getStringProperty("hbase.zookeeper.quorum");
-public static final String HBASE_ZOOKEEPER_CLIENT_PORT = CommonConfigurations.getStringProperty("hbase.zookeeper.client.port");
 public static final int HBASE_CLIENT_OPERATION_TIMEOUT = CommonConfigurations.getIntProperty("hbase.client.operation.timeout");
 public static final int HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD = CommonConfigurations.getIntProperty("hbase.client.scanner.timeout.period");
 
 public static final String HBASE_BASELINE_TABLE_NAME = CommonConfigurations.getStringProperty("hbase.baseline.table.name");
-public static final String HBASE_BASELINE_FAMLIY_NAME = CommonConfigurations.getStringProperty("hbase.baseline.famliy.name");
 public static final int HBASE_BASELINE_TOTAL_NUM = CommonConfigurations.getIntProperty("hbase.baseline.total.num");
 
 public static final int FLINK_WATERMARK_MAX_ORDERNESS = CommonConfigurations.getIntProperty("flink.watermark.max.orderness");
DosDetection.java
@@ -27,24 +27,25 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<Str
 
 private static final Logger logger = LoggerFactory.getLogger(DosDetection.class);
 
-private final static int BASELINESIZE = 144;
+private final static int BASELINE_SIZE = 144;
 
-private final static NumberFormat PERCENTINSTANCE = NumberFormat.getPercentInstance();
+private final static NumberFormat PERCENT_INSTANCE = NumberFormat.getPercentInstance();
 
 @Override
 public void open(Configuration parameters) {
-PERCENTINSTANCE.setMinimumFractionDigits(2);
+PERCENT_INSTANCE.setMinimumFractionDigits(2);
 }
 
 @Override
 public void processElement(DosSketchLog value, ReadOnlyContext ctx, Collector<DosEventLog> out) throws Exception {
+try {
 Map<String, Map<String, List<Integer>>> broadcast = ctx.getBroadcastState(OutputStreamSink.descriptor).get("broadcast-state");
 String destinationIp = value.getDestination_ip();
 String attackType = value.getAttack_type();
-logger.info("当前判断数据:{}",value.toString());
+logger.info("当前判断IP:{}, 类型: {}",destinationIp,attackType);
 if (broadcast.containsKey(destinationIp)){
 List<Integer> baseline = broadcast.get(destinationIp).get(attackType);
-if (baseline != null && baseline.size() == BASELINESIZE){
+if (baseline != null && baseline.size() == BASELINE_SIZE){
 int timeIndex = getCurrentTimeIndex(value.getSketch_start_time());
 Integer base = baseline.get(timeIndex);
 long sketchSessions = value.getSketch_sessions();
@@ -55,15 +56,18 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<Str
 Severity severity = judgeSeverity(diffPercentDouble);
 if (severity != Severity.NORMAL){
 DosEventLog result = getResult(value, severity, percent);
-logger.info("检测到当前server IP {} 存在异常,日志详情 {}",destinationIp,result.toString());
+logger.info("检测到当前server IP {} 存在 {} 异常,日志详情\n {}",destinationIp,attackType,result.toString());
 out.collect(result);
 }else {
-logger.info("当前server IP:{} 未出现异常,日志详情 {}",destinationIp,value.toString());
+logger.info("当前server IP:{} 未出现 {} 异常,日志详情 {}",destinationIp,attackType,value.toString());
 }
 }
 }
 }else {
-logger.info("未获取到当前server IP:{} baseline数据",destinationIp);
+logger.info("未获取到当前server IP:{} 类型 {} baseline数据",destinationIp,attackType);
+}
+}catch (Exception e){
+logger.error("判定失败\n {} \n{}",value,e);
 }
 }
 
@@ -82,9 +86,8 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<Str
 // strings.add("153.146.241.196");
 // strings.add("132.46.241.21");
 // String join = StringUtils.join(strings, ",");
-System.out.println(dosDetection.getCurrentTimeIndex(1627378879));
+System.out.println(IpUtils.ipLookup.countryLookup("192.168.50.150"));
 
-System.out.println();
 }
 
 private DosEventLog getResult(DosSketchLog value,Severity severity,String percent){
@@ -128,11 +131,11 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog, Map<Str
 private String getDiffPercent(long diff,long sketchSessions){
 double diffDou = Double.parseDouble(Long.toString(diff));
 double sessDou = Double.parseDouble(Long.toString(sketchSessions));
-return PERCENTINSTANCE.format(diffDou / sessDou);
+return PERCENT_INSTANCE.format(diffDou / sessDou);
 }
 
 private double getDiffPercentDouble(String diffPercent) throws ParseException {
-return PERCENTINSTANCE.parse(diffPercent).doubleValue();
+return PERCENT_INSTANCE.parse(diffPercent).doubleValue();
 }
 
 private Severity judgeSeverity(double diffPercent){
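The getDiffPercent/getDiffPercentDouble pair above formats the session excess with a shared percent formatter and then parses the formatted string back into a double before grading it. A small self-contained illustration of that round trip, using only the JDK's NumberFormat; the numbers are made up, and the exact output depends on the default locale (shown here for an English one).

```java
import java.text.NumberFormat;
import java.text.ParseException;

public class PercentRoundTrip {
    public static void main(String[] args) throws ParseException {
        NumberFormat percent = NumberFormat.getPercentInstance();
        percent.setMinimumFractionDigits(2);      // same setting applied in open()

        double ratio = 123.0 / 456.0;             // stands in for diff / sketchSessions
        String formatted = percent.format(ratio); // multiplies by 100 and appends the percent sign
        System.out.println(formatted);            // 26.97%

        // parse() reverses format(), returning the value on the 0-1 scale again
        double parsed = percent.parse(formatted).doubleValue();
        System.out.println(parsed);               // roughly 0.2697
    }
}
```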
EtlProcessFunction.java
@@ -4,6 +4,7 @@ import com.zdjizhi.common.CommonConfig;
 import com.zdjizhi.common.DosSketchLog;
 import org.apache.commons.lang.StringUtils;
 import org.apache.flink.api.java.tuple.Tuple4;
+import org.apache.flink.api.java.tuple.Tuple6;
 import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
 import org.apache.flink.util.Collector;
@@ -28,7 +29,7 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
 try {
 if (middleResult != null){
 out.collect(middleResult);
-logger.debug("获取中间聚合结果:{}",middleResult.toString());
+logger.info("获取中间聚合结果:{}",middleResult.toString());
 context.output(outputTag,TrafficServerIpMetrics.getOutputMetric(keys, middleResult));
 }
 }catch (Exception e){
@@ -39,13 +40,15 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
 private DosSketchLog getMiddleResult(Tuple4<String,String, String, String> keys,Iterable<DosSketchLog> elements){
 
 DosSketchLog midResuleLog = new DosSketchLog();
-Tuple4<Long, Long, Long,String> values = sketchAggregate(elements);
+Tuple6<Long, Long, Long,String,Long,Long> values = sketchAggregate(elements);
 try {
 if (values != null){
 midResuleLog.setCommon_sled_ip(keys.f0);
 midResuleLog.setCommon_data_center(keys.f1);
 midResuleLog.setDestination_ip(keys.f3);
 midResuleLog.setAttack_type(keys.f2);
+midResuleLog.setSketch_start_time(values.f4);
+midResuleLog.setSketch_duration(values.f5);
 midResuleLog.setSource_ip(values.f3);
 midResuleLog.setSketch_sessions(values.f0);
 midResuleLog.setSketch_packets(values.f1);
@@ -58,24 +61,28 @@ public class EtlProcessFunction extends ProcessWindowFunction<DosSketchLog, DosS
 return null;
 }
 
-private Tuple4<Long, Long, Long,String> sketchAggregate(Iterable<DosSketchLog> elements){
+private Tuple6<Long, Long, Long,String,Long,Long> sketchAggregate(Iterable<DosSketchLog> elements){
 int cnt = 1;
 long sessions = 0;
 long packets = 0 ;
 long bytes = 0;
+long startTime = 0;
+long duration = 0;
 HashSet<String> sourceIpSet = new HashSet<>();
 try {
 for (DosSketchLog newSketchLog : elements){
 sessions += newSketchLog.getSketch_sessions();
 packets += newSketchLog.getSketch_packets();
 bytes += newSketchLog.getSketch_bytes();
+startTime = newSketchLog.getSketch_start_time();
+duration = newSketchLog.getSketch_duration();
 cnt += 1;
 if (sourceIpSet.size() < CommonConfig.SOURCE_IP_LIST_LIMIT){
 sourceIpSet.add(newSketchLog.getSource_ip());
 }
 }
 String sourceIpList = StringUtils.join(sourceIpSet, ",");
-return Tuple4.of(sessions/cnt,packets/cnt,bytes/cnt,sourceIpList);
+return Tuple6.of(sessions/cnt,packets/cnt,bytes/cnt,sourceIpList,startTime,duration);
 }catch (Exception e){
 logger.error("聚合中间结果集失败 {}",e);
 }
TrafficServerIpMetrics.java
@@ -22,7 +22,7 @@ class TrafficServerIpMetrics {
 dosMetricsLog.setSession_rate(midResuleLog.getSketch_sessions());
 dosMetricsLog.setPacket_rate(midResuleLog.getSketch_packets());
 dosMetricsLog.setBit_rate(midResuleLog.getSketch_bytes());
-logger.debug("metric 结果已加载:{}",dosMetricsLog.toString());
+logger.info("metric 结果已加载:{}",dosMetricsLog.toString());
 return dosMetricsLog;
 }
 
DosEventSink.java
@@ -6,7 +6,7 @@ import com.zdjizhi.utils.JsonMapper;
 import com.zdjizhi.utils.KafkaUtils;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 
-public class DosEventSink {
+class DosEventSink {
 
 static void dosEventOutputSink(SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream){
 dosEventLogOutputStream.map(JsonMapper::toJsonString).addSink(KafkaUtils.getKafkaSink(CommonConfig.KAFKA_OUTPUT_EVENT_TOPIC_NAME))
OutputStreamSink.java
@@ -39,26 +39,35 @@ public class OutputStreamSink {
 new MapTypeInfo<>(String.class, new MapTypeInfo<>(String.class, (Class<List<Integer>>) (Class<?>) List.class).getTypeClass()));
 
 public static void finalOutputSink(){
-DosEventSink.dosEventOutputSink(getOutputSinkStream());
-TrafficServerIpMetricsSink.sideOutputMetricsSink(getMiddleStream());
+try {
+SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
+SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream(middleStream);
+DosEventSink.dosEventOutputSink(dosEventLogOutputStream);
+TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
+FlinkEnvironmentUtils.streamExeEnv.execute(CommonConfig.STREAM_EXECUTION_JOB_NAME);
+} catch (Exception e) {
+logger.error("");
+}
 }
 
 public static void main(String[] args) throws Exception {
-SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream();
+SingleOutputStreamOperator<DosSketchLog> middleStream = getMiddleStream();
+SingleOutputStreamOperator<DosEventLog> dosEventLogOutputStream = getOutputSinkStream(middleStream);
 DosEventSink.dosEventOutputSink(dosEventLogOutputStream);
-TrafficServerIpMetricsSink.sideOutputMetricsSink(getMiddleStream());
+TrafficServerIpMetricsSink.sideOutputMetricsSink(middleStream);
 dosEventLogOutputStream.print();
 FlinkEnvironmentUtils.streamExeEnv.execute();
 }
 
-private static SingleOutputStreamOperator<DosEventLog> getOutputSinkStream(){
+private static SingleOutputStreamOperator<DosEventLog> getOutputSinkStream(SingleOutputStreamOperator<DosSketchLog> middleStream){
 
 BroadcastStream<Map<String, Map<String,List<Integer>>>> broadcast = FlinkEnvironmentUtils.streamExeEnv
 .addSource(new BaselineSource())
 .broadcast(descriptor);
 logger.info("广播变量加载成功!!");
 
-return getMiddleStream().keyBy(new SecondKeySelector())
+return middleStream.keyBy(new SecondKeySelector())
+// .window(TumblingEventTimeWindows.of(Time.seconds(CommonConfig.FLINK_WINDOW_MAX_TIME)))
 .reduce(new SecondReduceFunc())
 .connect(broadcast)
 .process(new DosDetection());
Job configuration (.properties)
@@ -1,9 +1,11 @@
 stream.execution.environment.parallelism=1
+stream.execution.job.name=dos-detection-job
 
 kafka.input.parallelism=1
 kafka.input.topic.name=DOS-SKETCH-LOG
 kafka.input.bootstrap.servers=192.168.44.12:9092
 kafka.input.scan.startup.mode=latest-offset
+#kafka.input.group.id=2107291738
 kafka.input.group.id=test
 
 kafka.output.metric.parallelism=1
@@ -13,22 +15,21 @@ kafka.output.event.topic.name=DOS-EVENT-LOG
 kafka.output.bootstrap.servers=192.168.44.12:9092
 
 hbase.zookeeper.quorum=192.168.44.12:2181
-hbase.zookeeper.client.port=2181
 hbase.client.operation.timeout=30000
 hbase.client.scanner.timeout.period=30000
 
 hbase.baseline.table.name=ddos_traffic_baselines
-hbase.baseline.famliy.name=TCP SYN Flood
 hbase.baseline.total.num=1000000
 
 flink.watermark.max.orderness=1
-flink.window.max.time=600
+flink.window.max.time=10
 
 source.ip.list.limit=10000
 
 data.center.id.num=15
 
 ip.mmdb.path=D:\\data\\dat\\
+#ip.mmdb.path=/home/bigdata/topology/dat/
 
 baseline.sessions.minor.threshold=0.1
 baseline.sessions.warning.threshold=0.5