diff --git a/src/main/java/cn/mesalab/dao/DruidData.java b/src/main/java/cn/mesalab/dao/DruidData.java
index 94bdf6d..27d6dc4 100644
--- a/src/main/java/cn/mesalab/dao/DruidData.java
+++ b/src/main/java/cn/mesalab/dao/DruidData.java
@@ -29,6 +29,7 @@ public class DruidData {
         Map<String, List<String>> rsList = null;
         try{
             ResultSet resultSet = DruidUtils.executeQuery(statement, sql);
+            LOG.info("Data read complete; processing data...");
             rsList = selectAll(resultSet);
         } catch (Exception e){
             e.printStackTrace();
diff --git a/src/main/java/cn/mesalab/service/BaselineSingleThread.java b/src/main/java/cn/mesalab/service/BaselineSingleThread.java
index 1df95cd..3fd1698 100644
--- a/src/main/java/cn/mesalab/service/BaselineSingleThread.java
+++ b/src/main/java/cn/mesalab/service/BaselineSingleThread.java
@@ -85,12 +85,8 @@ public class BaselineSingleThread extends Thread {
             batchDruidData = new HashMap<>();
         }
 
-        try {
-            LOG.info("Data read complete: server IPs fetched: " + batchDruidData.size() +
-                    " elapsed(ms): " + (System.currentTimeMillis() - start));
-        } catch (Exception e){
-            e.printStackTrace();
-        }
+        LOG.info("Data processing complete: server IPs fetched: " + batchDruidData.size() +
+                " elapsed(ms): " + (System.currentTimeMillis() - start));
 
         // Baseline generation
 
@@ -136,6 +132,7 @@ public class BaselineSingleThread extends Thread {
             AvaticaConnection connection = DruidUtils.getConn();
             AvaticaStatement stat = connection.createStatement();
             String sql = DruidData.getBatchDruidQuerySql(attackTypeList, currentBatch, batchPartitionRange);
+            LOG.debug("Read Druid SQL: " + sql);
             readFromDruid = DruidData.readFromDruid(sql, stat);
             connection.close();
             stat.close();
@@ -188,14 +185,17 @@ public class BaselineSingleThread extends Thread {
             baselineGenerationType = 1;
             double exceptionPercentile = SeriesUtils.percentile(series, ApplicationConfig.BASELINE_EXECEPTION_PERCENTILE);
             double exceptionFillPercentile = SeriesUtils.percentile(series, ApplicationConfig.BASELINE_EXCECPTION_FILL_PERCENTILE);
+            LOG.debug(ip + ": series-" + series);
             for(int i=0; i<series.size(); i++){
                 if (series.get(i) > exceptionPercentile){
                     series.set(i, (int) exceptionFillPercentile);
                 }
             }
+            LOG.debug(ip + ": exceptionSeries-" + series);
             // KF
             baselineArr = baselineFunction(series);
             // System.out.println("type-01:" + ipPercentile + " " + ip + " " + Arrays.toString(baselineArr));
+            LOG.debug(ip + ": baseline-" + Arrays.toString(baselineArr));
         } else {
             // Check for periodicity
             if (SeriesUtils.isPeriod(series)){
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 58ee25f..66d55db 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -18,10 +18,8 @@ hbase.zookeeper.client.port=2181
 # 0: read the default range of days (read.historical.days);
 # 1: use the time range specified below
 read.druid.time.limit.type=0
-#07-05
-read.druid.min.time=1625414400000
-#07-08
-read.druid.max.time=1625673600000
+read.druid.min.time=1630771200000
+read.druid.max.time=1631030400000
 
 #Druid field mappings
 druid.attacktype.tcpsynflood=TCP SYN Flood
@@ -34,7 +32,7 @@ druid.columnname.recvtime=__time
 druid.columnname.partition.num=partition_num
 baseline.metric.type=session_rate
 #baseline storage level: 1-type1; 2-type1 and type2; 3-all
-baseline.save.level=1
+baseline.save.level=3
 
 #HBase field mappings
 hbase.baseline.generation.type.suffix=baseline_type
@@ -71,10 +69,11 @@ monitor.frequency.bin.num=100
 ################ Concurrency parameters #################
 ##########################################
 #druid.read.batch.time.grad.hour=4
-thread.pool.num=10
+thread.pool.num=500
 #the Druid partition field partition_num has a maximum value of 9999
 druid.partition.num.max=10000
-druid.connection.retry.time.max=10
+druid.connection.retry.time.max=10000
+#Druid reconnect wait: roughly the time one thread needs to finish processing
 druid.connection.retry.sleep.time=1000
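Editor's note, not part of the patch: the removed read.druid.min.time/read.druid.max.time values were annotated "#07-05" and "#07-08", and the new values 1630771200000 and 1631030400000 are epoch milliseconds for 2021-09-05 00:00 and 2021-09-08 00:00 at UTC+8, so the query window moves forward two months while keeping its three-day width. A minimal sketch to verify this; the class name TimeBoundsCheck is hypothetical, and the UTC+8 interpretation is an assumption carried over from the removed date comments:

import java.time.Instant;
import java.time.ZoneOffset;

public class TimeBoundsCheck {
    public static void main(String[] args) {
        ZoneOffset utc8 = ZoneOffset.ofHours(8);  // assumed zone, per the old #07-05/#07-08 comments
        long min = 1630771200000L;                // read.druid.min.time
        long max = 1631030400000L;                // read.druid.max.time
        System.out.println(Instant.ofEpochMilli(min).atOffset(utc8)); // 2021-09-05T00:00+08:00
        System.out.println(Instant.ofEpochMilli(max).atOffset(utc8)); // 2021-09-08T00:00+08:00
    }
}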
diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties
index ac2c528..4973875 100644
--- a/src/main/resources/log4j.properties
+++ b/src/main/resources/log4j.properties
@@ -10,7 +10,7 @@ log4j.appender.console.layout=org.apache.log4j.PatternLayout
 log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
 
 log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.file.Threshold=info
+log4j.appender.file.Threshold=DEBUG
 log4j.appender.file.encoding=UTF-8
 log4j.appender.file.Append=true
 log4j.appender.file.file=./logs/ddos_baselines.log
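Editor's note, not part of the patch: lowering the file appender threshold to DEBUG is what routes the newly added LOG.debug statements into ./logs/ddos_baselines.log. Since the debug arguments (the per-IP series strings and the rendered SQL) are concatenated before log4j checks the level, a guard keeps that work off the now up-to-500 worker threads if the threshold is ever raised back to info. A hedged sketch, assuming LOG is an org.apache.log4j.Logger and that series/baselineArr have the types suggested by BaselineSingleThread; the helper class and method are hypothetical:

import java.util.Arrays;
import java.util.List;
import org.apache.log4j.Logger;

class DebugLogGuard {  // hypothetical helper, for illustration only
    private static final Logger LOG = Logger.getLogger(DebugLogGuard.class);

    static void logSeries(String ip, List<Integer> series, double[] baselineArr) {
        if (LOG.isDebugEnabled()) {  // skip the string building entirely when DEBUG is off
            LOG.debug(ip + ": series-" + series);
            LOG.debug(ip + ": baseline-" + Arrays.toString(baselineArr));
        }
    }
}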