Initial commit: single-threaded run succeeds, parallel run fails with errors

This commit is contained in:
yinjiangyi
2021-08-01 17:28:31 +08:00
commit bff209ac5a
46 changed files with 3994 additions and 0 deletions

3
.idea/.gitignore generated vendored Normal file

@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml

12
.idea/codeStyles/Project.xml generated Normal file

@@ -0,0 +1,12 @@
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<option name="OTHER_INDENT_OPTIONS">
<value>
<option name="USE_TAB_CHARACTER" value="true" />
</value>
</option>
<ScalaCodeStyleSettings>
<option name="MULTILINE_STRING_CLOSING_QUOTES_ON_NEW_LINE" value="true" />
</ScalaCodeStyleSettings>
</code_scheme>
</component>

5
.idea/codeStyles/codeStyleConfig.xml generated Normal file

@@ -0,0 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
<state>
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
</state>
</component>

13
.idea/compiler.xml generated Normal file

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<annotationProcessing>
<profile name="Maven default annotation processors profile" enabled="true">
<sourceOutputDir name="target/generated-sources/annotations" />
<sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
<outputRelativeToContentRoot value="true" />
<module name="generate-baselines" />
</profile>
</annotationProcessing>
</component>
</project>

36
.idea/inspectionProfiles/Project_Default.xml generated Normal file

@@ -0,0 +1,36 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="JavaDoc" enabled="true" level="WARNING" enabled_by_default="true">
<option name="TOP_LEVEL_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="INNER_CLASS_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="METHOD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="@return@param@throws or @exception" />
</value>
</option>
<option name="FIELD_OPTIONS">
<value>
<option name="ACCESS_JAVADOC_REQUIRED_FOR" value="none" />
<option name="REQUIRED_TAGS" value="" />
</value>
</option>
<option name="IGNORE_DEPRECATED" value="false" />
<option name="IGNORE_JAVADOC_PERIOD" value="true" />
<option name="IGNORE_DUPLICATED_THROWS" value="false" />
<option name="IGNORE_POINT_TO_ITSELF" value="false" />
<option name="myAdditionalJavadocTags" value="date" />
</inspection_tool>
</profile>
</component>

20
.idea/jarRepositories.xml generated Normal file

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Central Repository" />
<option name="url" value="https://repo.maven.apache.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
</component>
</project>

7
.idea/junitgenerator-prj-settings.xml generated Normal file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JUnitGeneratorProjectSettings">
<option name="outputFilePattern" value="${SOURCEPATH}/test/java/${PACKAGE}/${FILENAME}" />
<option name="selectedTemplateKey" value="JUnit 4" />
</component>
</project>

14
.idea/misc.xml generated Normal file

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="MavenProjectsManager">
<option name="originalFiles">
<list>
<option value="$PROJECT_DIR$/pom.xml" />
</list>
</option>
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
</project>

2
generate-baselines.iml Normal file

@@ -0,0 +1,2 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4" />

1220
logs/ddos_baselines.log Normal file

File diff suppressed because one or more lines are too long

75
pom.xml Normal file

@@ -0,0 +1,75 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>cn.mesalab</groupId>
<artifactId>generate-baselines</artifactId>
<version>1.0-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>2.2.3</version>
</dependency>
<dependency>
<groupId>org.jfree</groupId>
<artifactId>jfreechart</artifactId>
<version>1.0.18</version>
</dependency>
<dependency>
<groupId>org.apache.calcite.avatica</groupId>
<artifactId>avatica-core</artifactId>
<version>1.15.0</version>
</dependency>
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>config</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>3.5.1</version>
</dependency>
<dependency>
<groupId>io.vavr</groupId>
<artifactId>vavr</artifactId>
<version>0.10.2</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>5.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.26</version>
</dependency>
</dependencies>
</project>

53
src/main/java/cn/mesalab/config/ApplicationConfig.java Normal file

@@ -0,0 +1,53 @@
package cn.mesalab.config;
import cn.mesalab.utils.ConfigUtils;
/**
* @author yjy
* @version 1.0
* @date 2021/7/24 10:23 AM
*/
public class ApplicationConfig {
public static final String DRUID_URL= ConfigUtils.getStringProperty("druid.url");
public static final String DRUID_DRIVER = ConfigUtils.getStringProperty("druid.driver");
public static final String DRUID_TABLE = ConfigUtils.getStringProperty("druid.table");
public static final Integer DRUID_TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("read.druid.time.limit.type");
public static final Long READ_DRUID_MAX_TIME = ConfigUtils.getLongProperty("read.druid.max.time");
public static final Long READ_DRUID_MIN_TIME = ConfigUtils.getLongProperty("read.druid.min.time");
public static final Integer READ_HISTORICAL_DAYS = ConfigUtils.getIntProperty("read.historical.days");
public static final Integer HISTORICAL_GRAD = ConfigUtils.getIntProperty("historical.grad");
public static final String TIME_FORMAT = ConfigUtils.getStringProperty("time.format");
public static final String BASELINE_METRIC_TYPE = ConfigUtils.getStringProperty("baseline.metric.type");
public static final String DRUID_ATTACKTYPE_TCP_SYN_FLOOD = ConfigUtils.getStringProperty("druid.attacktype.tcpsynflood");
public static final String DRUID_ATTACKTYPE_UDP_FLOOD = ConfigUtils.getStringProperty("druid.attacktype.udpflood");
public static final String DRUID_ATTACKTYPE_ICMP_FLOOD = ConfigUtils.getStringProperty("druid.attacktype.icmpflood");
public static final String DRUID_ATTACKTYPE_DNS_AMPL = ConfigUtils.getStringProperty("druid.attacktype.dnsamplification");
public static final String DRUID_SERVERIP_COLUMN_NAME = ConfigUtils.getStringProperty("druid.serverip.columnname");
public static final String DRUID_ATTACKTYPE_COLUMN_NAME = ConfigUtils.getStringProperty("druid.attacktype.columnname");
public static final String DRUID_RECVTIME_COLUMN_NAME = ConfigUtils.getStringProperty("druid.recvtime.columnname");
public static final float BASELINE_PERIOD_CORR_THRE = ConfigUtils.getFloatProperty("baseline.period.correlative.threshold");
public static final float BASELINE_HISTORICAL_RATIO = ConfigUtils.getFloatProperty("baseline.historical.ratio.threshold");
public static final float BASELINE_SPARSE_FILL_PERCENTILE = ConfigUtils.getFloatProperty("baseline.historical.sparse.fill.percentile");
public static final String BASELINE_FUNCTION = ConfigUtils.getStringProperty("baseline.function");
public static final Integer BASELINE_RANGE_DAYS = ConfigUtils.getIntProperty("baseline.range.days");
public static final float BASELINE_RATIONAL_PERCENTILE = ConfigUtils.getFloatProperty("baseline.rational.percentile");
public static final String HBASE_TABLE = ConfigUtils.getStringProperty("hbase.table");
public static final String HBASE_ZOOKEEPER_QUORUM= ConfigUtils.getStringProperty("hbase.zookeeper.quorum");
public static final String HBASE_ZOOKEEPER_CLIENT_PORT= ConfigUtils.getStringProperty("hbase.zookeeper.client.port");
public static final Double BASELINE_KALMAN_Q = ConfigUtils.getDoubleProperty("baseline.kalman.q");
public static final Double BASELINE_KALMAN_R = ConfigUtils.getDoubleProperty("baseline.kalman.r");
public static final Integer LOG_WRITE_COUNT = ConfigUtils.getIntProperty("log.write.count");
public static final Integer GENERATE_BATCH_NUM= ConfigUtils.getIntProperty("generate.batch.number");
}

134
src/main/java/cn/mesalab/dao/DruidData.java Normal file

@@ -0,0 +1,134 @@
package cn.mesalab.dao;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.dao.Impl.ResultSetToListServiceImp;
import cn.mesalab.utils.DruidUtils;
import io.vavr.Tuple;
import io.vavr.Tuple2;
import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.calcite.avatica.AvaticaStatement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 4:56 PM
*/
public class DruidData {
private static final Logger LOG = LoggerFactory.getLogger(DruidData.class);
private static DruidData druidData;
private static DruidUtils druidUtils;
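// SQL predicate bounding __time to [minTime, maxTime); note getTimeLimit() returns (maxTime, minTime)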
private String timeFilter = ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
+ " >= MILLIS_TO_TIMESTAMP(" + getTimeLimit()._2
+ ") AND " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
+ " < MILLIS_TO_TIMESTAMP(" + getTimeLimit()._1 + ")";
static {
druidUtils = DruidUtils.getInstance();
}
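// NOTE: this lazy initialization is not synchronized; racing threads can construct two instances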
public static DruidData getInstance() {
if (druidData == null){
druidData = new DruidData();
}
return druidData;
}
public ArrayList<String> getServerIpList(String attackType) {
ArrayList<String> serverIPs = new ArrayList<String>();
String sql = "SELECT distinct " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
+ " FROM " + ApplicationConfig.DRUID_TABLE
+ " WHERE " + ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME + " = '" + attackType + "'"
+ " AND " + timeFilter
+ " LIMIT 20"; // FOR TEST
try{
ResultSet resultSet = druidUtils.executeQuery(sql);
while(resultSet.next()){
String ip = resultSet.getString(ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME);
serverIPs.add(ip);
}
} catch (Exception e){
e.printStackTrace();
}
return serverIPs;
}
public ArrayList<String> getServerIpList(String attackType, String test) {
ArrayList<String> serverIPs = new ArrayList<String>();
serverIPs.add("153.99.250.54");
return serverIPs;
}
public List<Map<String, Object>> getTimeSeriesData(String ip, String attackType){
List<Map<String, Object>> rsList = null;
String sql = "SELECT "+ ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
+ ", "+ ApplicationConfig.BASELINE_METRIC_TYPE
+ ", " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
+ " FROM " + ApplicationConfig.DRUID_TABLE
+ " WHERE " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
+ " = '" + ip + "'"
+ " AND " + ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME
+ " = '" + attackType + "'"
+ " AND " + timeFilter;
System.out.println("getTimeSeriesData:" + sql);
try{
ResultSet resultSet = druidUtils.executeQuery(sql);
ResultSetToListService service = new ResultSetToListServiceImp();
rsList = service.selectAll(resultSet);
} catch (Exception e){
e.printStackTrace();
}
return rsList;
}
public Tuple2<Long, Long> getTimeLimit(){
long maxTime = 0L;
long minTime = 0L;
switch(ApplicationConfig.DRUID_TIME_LIMIT_TYPE){
case 0:
maxTime = getCurrentDay();
minTime = getCurrentDay(-ApplicationConfig.READ_HISTORICAL_DAYS);
break;
case 1:
maxTime = ApplicationConfig.READ_DRUID_MAX_TIME;
minTime = ApplicationConfig.READ_DRUID_MIN_TIME;
break;
default:
LOG.warn("没有设置Druid数据读取方式");
}
return Tuple.of(maxTime, minTime);
}
private long getCurrentDay(int bias) {
Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.DAY_OF_YEAR, calendar.get(Calendar.DAY_OF_YEAR) + bias);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return calendar.getTimeInMillis();
}
private long getCurrentDay(){
return getCurrentDay(0);
}
public void closeConn(){
druidUtils.closeConnection();
}
}

44
src/main/java/cn/mesalab/dao/Impl/ResultSetToListServiceImp.java Normal file

@@ -0,0 +1,44 @@
package cn.mesalab.dao.Impl;
import cn.mesalab.dao.ResultSetToListService;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author yjy
* @version 1.0
* @date 2021/7/24 4:29 PM
*/
public class ResultSetToListServiceImp implements ResultSetToListService {
/**
* Converts a SELECT result set into a List; each element is one record.
* Each record is a Map&lt;String, Object&gt;: the String key is the column name, the Object is the column value.
*
* @param rs
* @return List<Map<String, Object>>
*/
@Override
public List<Map<String, Object>> selectAll(ResultSet rs) {
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
try {
ResultSetMetaData rmd = rs.getMetaData();
int columnCount = rmd.getColumnCount();
while (rs.next()) {
Map<String, Object> rowData = new HashMap<String, Object>();
for (int i = 1; i <= columnCount; ++i) {
rowData.put(rmd.getColumnName(i), rs.getObject(i));
}
list.add(rowData);
}
} catch (Exception ex) {
ex.printStackTrace();
}
return list;
}
}

24
src/main/java/cn/mesalab/dao/ResultSetToListService.java Normal file

@@ -0,0 +1,24 @@
package cn.mesalab.dao;
import java.sql.ResultSet;
import java.util.List;
import java.util.Map;
/**
* @author yjy
* @version 1.0
* @date 2021/7/24 4:27 PM
*/
public interface ResultSetToListService {
/**
* e.g. SELECT * FROM websites
* Returns every record of the result set as a List;
* each list element is one record,
* stored as a Map&lt;String, Object&gt; (String key = column name, Object = column value).
*
* @param rs result set to convert
* @return List&lt;Map&lt;String, Object&gt;&gt;
*/
public List<Map<String, Object>> selectAll(ResultSet rs);
}

14
src/main/java/cn/mesalab/main/BaselineApplication.java Normal file

@@ -0,0 +1,14 @@
package cn.mesalab.main;
import cn.mesalab.service.BaselineGeneration;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 5:34 PM
*/
public class BaselineApplication {
public static void main(String[] args) {
BaselineGeneration.perform();
}
}

174
src/main/java/cn/mesalab/service/BaselineGeneration.java Normal file

@@ -0,0 +1,174 @@
package cn.mesalab.service;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.dao.DruidData;
import cn.mesalab.service.BaselineService.KalmanFilter;
import cn.mesalab.utils.HbaseUtils;
import cn.mesalab.utils.SeriesUtils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.math3.stat.StatUtils;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 5:38 PM
*/
public class BaselineGeneration {
private static final Logger LOG = LoggerFactory.getLogger(BaselineGeneration.class);
private static DruidData druidData;
private static HbaseUtils hbaseUtils;
private static Table hbaseTable;
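// points per baseline curve: days * 24 hours * (60 / granularity in minutes)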
private static final Integer BASELINE_POINT_NUM = ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
public static void perform() {
long start = System.currentTimeMillis();
druidData = DruidData.getInstance();
hbaseUtils = HbaseUtils.getInstance();
hbaseTable = hbaseUtils.getHbaseTable();
LOG.info("Druid 成功建立连接");
try{
generateBaselinesThread(ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD);
//generateBaselines(ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD);
//generateBaselines(ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD);
//generateBaselines(ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL);
long last = System.currentTimeMillis();
LOG.warn("运行时间:" + (last - start));
druidData.closeConn();
hbaseTable.close();
LOG.info("Druid 关闭连接");
} catch (Exception e){
e.printStackTrace();
}
System.exit(0);
}
private static void generateBaselinesThread(String attackType) throws InterruptedException {
int threadNum = Runtime.getRuntime().availableProcessors();
ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
.setNameFormat(attackType+"-baseline-demo-%d").build();
// create the worker thread pool
ThreadPoolExecutor executor = new ThreadPoolExecutor(
threadNum,
threadNum,
0L,
TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<>(1024),
namedThreadFactory,
new ThreadPoolExecutor.AbortPolicy());
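// AbortPolicy: execute() throws RejectedExecutionException once the 1024-slot queue is full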
// generate baselines and write them to HBase
ArrayList<String> destinationIps = druidData.getServerIpList(attackType);
LOG.info("查询到服务端ip共 " +destinationIps.size() + "");
// round up so the tail batch (size % GENERATE_BATCH_NUM) is not silently dropped
int batchCount = (destinationIps.size() + ApplicationConfig.GENERATE_BATCH_NUM - 1) / ApplicationConfig.GENERATE_BATCH_NUM;
for (int batchCurrent = 0; batchCurrent < batchCount; batchCurrent++){
List<String> batchIps = destinationIps.subList(batchCurrent*ApplicationConfig.GENERATE_BATCH_NUM,
Math.min((batchCurrent+1)*ApplicationConfig.GENERATE_BATCH_NUM, destinationIps.size()));
executor.execute(() -> generateBaselines(batchIps, attackType));
}
executor.shutdown();
// block until every batch has finished: returning after a fixed 10-second wait
// let still-running tasks race the Druid/HBase connection close in perform()
while (!executor.awaitTermination(10L, TimeUnit.SECONDS)) {
LOG.info("waiting for baseline generation tasks to finish...");
}
LOG.info("BaselineGeneration 完成:" + attackType);
LOG.info("BaselineGeneration 共写入数据条数:" + destinationIps.size());
}
static void generateBaselines(String attackType){
ArrayList<String> destinationIps = druidData.getServerIpList(attackType);
generateBaselines(destinationIps, attackType);
LOG.info("BaselineGeneration 完成:" + attackType);
LOG.info("BaselineGeneration 共写入数据条数:" + destinationIps.size());
}
public static void generateBaselines(List<String> ipList, String attackType){
List<Put> putList = new ArrayList<>();
for(String ip: ipList){
int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
}
try {
hbaseTable.put(putList);
LOG.info("HBase 写入数据条数 " + ApplicationConfig.GENERATE_BATCH_NUM);
} catch (IOException e) {
e.printStackTrace();
}
}
private static int[] generateSingleIpBaseline(String ip, String attackType){
// query the raw time series from Druid
List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(ip, attackType);
// fill gaps in the time series with zero-valued points
System.out.println("current thread id: " + Thread.currentThread().getId());
System.out.println("origin series size: " + originSeries.size());
List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
int[] baselineArr = new int[completSeries.size()];
List<Integer>series = completSeries.stream().map(
i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList());
// check how often this IP actually appears within the window
if(originSeries.size()/(float)completSeries.size() < ApplicationConfig.BASELINE_HISTORICAL_RATIO){
// sparse series: fall back to a flat percentile of the observed values
// (StatUtils.percentile expects p in (0, 100]; the config stores a fraction)
double percentile = StatUtils.percentile(series.stream().mapToDouble(Double::valueOf).toArray(),
ApplicationConfig.BASELINE_SPARSE_FILL_PERCENTILE * 100);
Arrays.fill(baselineArr, (int)percentile);
} else {
// dense enough: check for a daily periodic pattern
if (SeriesUtils.isPeriod(series)){
baselineArr = baselineFunction(series);
} else {
int ipPercentile = SeriesUtils.percentile(
originSeries.stream().map(i ->
Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList()),
ApplicationConfig.BASELINE_RATIONAL_PERCENTILE);
Arrays.fill(baselineArr, ipPercentile);
}
}
System.out.println(ip);
System.out.println(Arrays.toString(baselineArr));
return baselineArr;
}
private static int[] baselineFunction(List<Integer> timeSeries){
int[] result;
switch (ApplicationConfig.BASELINE_FUNCTION){
case "KalmanFilter":
KalmanFilter kalmanFilter = new KalmanFilter();
kalmanFilter.forecast(timeSeries, BASELINE_POINT_NUM);
result = kalmanFilter.getForecastSeries().stream().mapToInt(Integer::valueOf).toArray();
break;
default:
result = timeSeries.subList(0, BASELINE_POINT_NUM).stream().mapToInt(Integer::valueOf).toArray();
}
return result;
}
public static void main(String[] args) {
perform();
}
}

92
src/main/java/cn/mesalab/service/BaselineService/KalmanFilter.java Normal file

@@ -0,0 +1,92 @@
package cn.mesalab.service.BaselineService;
import cn.mesalab.config.ApplicationConfig;
import java.util.ArrayList;
import java.util.List;
/**
* @author yjy
* @version 1.0
* @date 2021/7/25 1:42 PM
*/
public class KalmanFilter {
/** Simplified scalar Kalman filter used to smooth a series and forecast the baseline. */
private Integer predict;
private Integer current;
private Integer estimate;
private double pdelt;
private double mdelt;
private double Gauss;
private double kalmanGain;
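// Q: process-noise term, R: measurement-noise term of this simplified filter; both come from application.properties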
private final static double Q = ApplicationConfig.BASELINE_KALMAN_Q;
private final static double R = ApplicationConfig.BASELINE_KALMAN_R;
public KalmanFilter() {
initial();
}
public void initial(){
pdelt = 1;
mdelt = 1;
}
private ArrayList<Integer> smoothSeries;
private ArrayList<Integer> forecastSeries;
public Integer calSingleKalPoint(Integer oldValue, Integer value){
// the previous output is the prediction for this step
predict = oldValue;
current = value;
// combined process + measurement noise deviation
Gauss = Math.sqrt(pdelt * pdelt + mdelt * mdelt) + Q;
// Kalman gain
kalmanGain = Math.sqrt((Gauss * Gauss)/(Gauss * Gauss + pdelt * pdelt)) + R;
// corrected estimate
estimate = (int) (kalmanGain * (current - predict) + predict);
// updated estimate deviation
mdelt = Math.sqrt((1-kalmanGain) * Gauss * Gauss);
return estimate;
}
public void forecast(List<Integer> historicalSeries, Integer length){
// initial value: mean of the historical series
int oldvalue = (historicalSeries.stream().mapToInt(Integer::intValue).sum())/historicalSeries.size();
// smoothing pass over the history
smoothSeries = new ArrayList<Integer>();
for(int i = 0; i < historicalSeries.size(); i++){
int value = historicalSeries.get(i);
oldvalue = calSingleKalPoint(oldvalue, value);
smoothSeries.add(oldvalue);
}
// forecast: average the smoothed value at the same offset within each full period
forecastSeries = new ArrayList<>();
int partitionNum = historicalSeries.size()/length;
for(int i = 0; i < length; i++){
long sum = 0;
for (int period = 0; period < partitionNum; period++){
sum += smoothSeries.get(length*period+i);
}
forecastSeries.add((int)(sum/partitionNum));
}
}
public ArrayList<Integer> getSmoothSeries() {
return smoothSeries;
}
public ArrayList<Integer> getAllRangeSeries() {
ArrayList<Integer> results = new ArrayList<>();
results.addAll(smoothSeries);
results.addAll(forecastSeries);
return results;
}
public ArrayList<Integer> getForecastSeries() {
return forecastSeries;
}
}

154
src/main/java/cn/mesalab/utils/ConcurrentUtils.java Normal file

@@ -0,0 +1,154 @@
package cn.mesalab.utils;
import com.google.common.collect.Lists;
import org.springframework.core.task.AsyncTaskExecutor;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
/**
* Concurrency helpers: run a batch of key -> value jobs on a thread pool
* and wait for the resulting futures to finish.
*
* @create 2020-05-28
* @since 1.0.0
*/
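// Example usage (hypothetical names, for illustration only):
//   ConcurrentUtils.concurrentProcess(4, executor, ips,
//       ip -> loadSeries(ip),                   // kvConvert: derive a value per key
//       (ip, series) -> writeBaseline(series)); // valueProcessor: consume each value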
public class ConcurrentUtils {
/**
* @Description
* Concurrently processes a batch of keys on the given executor.
*
* @param batchProcessNum number of concurrent worker tasks
* @param executor thread pool to run on
* @param keys keys to process in bulk
* @param kvConvert derives a value from a key
* @param valueProcessor post-processing applied to each value
* @param exceptionHook custom handling when deriving a value throws
* @return void
**/
public static <K, V> void concurrentProcess(int batchProcessNum, AsyncTaskExecutor executor, K[] keys,
Function<K, V> kvConvert,
ValueProcessor<K, V> valueProcessor,
ExceptionHook<K> exceptionHook ){
AtomicInteger index = new AtomicInteger(1);
List<Future> futureList = Lists.newArrayListWithExpectedSize(batchProcessNum);
final int length = keys.length;
System.out.println("total jobs size:{}" + length);
for (int i = 0; i < batchProcessNum; i++) {
int finalI = i;
futureList.add(executor.submit(() -> {
System.out.println("batch process start thread num:{}"+ finalI);
int currentIndex;
while (length >= (currentIndex = index.getAndIncrement())) {
System.out.println("current job index:"+currentIndex+" of "+ length);
K key = keys[currentIndex-1];
try {
valueProcessor.process(key, kvConvert.apply(key));
} catch (Exception e) {
exceptionHook.process(key, e);
}
}
System.out.println("batch process end thread num:{}"+finalI);
}));
}
waitFutureFinished(futureList);
}
public static <K, V> void concurrentProcess(int batchProcessNum, AsyncTaskExecutor executor, K[] keys,
Function<K, V> kvConvert,
ValueProcessor<K, V> valueProcessor) {
concurrentProcess(batchProcessNum, executor, keys, kvConvert, valueProcessor,
(key,e) -> System.out.println("通过key:"+ key +" 获取value异常, e:"+e));
}
/**
* @Description
* Concurrently derives values for a batch of keys and collects the results.
*
* @param batchProcessNum number of concurrent worker tasks
* @param executor thread pool to run on
* @param keys keys to process in bulk
* @param kvConvert derives a value from a key
* @return Collection&lt;V&gt; the collected values
**/
public static <K, V> Collection<V> concurrentGet(int batchProcessNum, AsyncTaskExecutor executor, K[] keys,
Function<K, V> kvConvert) {
// collect into a synchronized list: multiple workers append concurrently
List<V> rt = Collections.synchronizedList(Lists.newArrayListWithCapacity(keys.length));
concurrentProcess(batchProcessNum, executor, keys, kvConvert, (k, v) -> {
if (v == null) {
System.out.println("key:{} apply value is null");
return;
}
rt.add(v);
});
return rt;
}
public static void waitFutureFinished(List<Future> unfinishedFuture, boolean ignoreException) {
boolean interrupt = false;
while (!unfinishedFuture.isEmpty()) {
Iterator<Future> iterator = unfinishedFuture.iterator();
while (iterator.hasNext()) {
Future next = iterator.next();
if (next.isDone()) {
try {
next.get();
} catch (InterruptedException | ExecutionException e) {
System.out.println("执行异常, e:"+e);
if (!ignoreExcetion) {
interrupt = true;
break;
}
}
iterator.remove();
}
}
if (interrupt) {
break;
}
sleep();
}
if (interrupt) {
Iterator<Future> iterator = unfinishedFuture.iterator();
while (iterator.hasNext()) {
Future next = iterator.next();
if (!next.isDone()) {
// cancel only the tasks that are still running; finished ones need no cancel
next.cancel(true);
}
}
throw new RuntimeException("任务异常终止");
}
}
public static void waitFutureFinished(List<Future> unfinishedFuture) {
waitFutureFinished(unfinishedFuture, false);
}
public static void sleep() {
sleep(5000);
}
public static void sleep(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
System.out.println("sleep error, e:" + e);
}
}
@FunctionalInterface
public interface ValueProcessor<K, V> {
void process(K key, V value);
}
@FunctionalInterface
public interface ExceptionHook<K>{
void process(K key, Exception e);
}
}

45
src/main/java/cn/mesalab/utils/ConfigUtils.java Normal file

@@ -0,0 +1,45 @@
package cn.mesalab.utils;
import org.apache.log4j.Logger;
import java.util.Properties;
public class ConfigUtils {
private static final Logger LOG = Logger.getLogger(ConfigUtils.class);
private static Properties propCommon = new Properties();
public static String getStringProperty(String key) {
return propCommon.getProperty(key);
}
public static Float getFloatProperty(String key) {
return Float.parseFloat(propCommon.getProperty(key));
}
public static Integer getIntProperty(String key) {
return Integer.parseInt(propCommon.getProperty(key));
}
public static Long getLongProperty(String key) {
return Long.parseLong(propCommon.getProperty(key));
}
public static Double getDoubleProperty(String key) {
return Double.parseDouble(propCommon.getProperty(key));
}
public static Boolean getBooleanProperty(String key) {
return "true".equals(propCommon.getProperty(key).toLowerCase().trim());
}
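// Loads application.properties from the classpath once, on first use of the class;
// if loading fails, propCommon becomes null and every getter above will throw NPE.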
static {
try {
propCommon.load(ConfigUtils.class.getClassLoader().getResourceAsStream("application.properties"));
} catch (Exception e) {
propCommon = null;
LOG.error("配置加载失败");
}
}
}

77
src/main/java/cn/mesalab/utils/DruidUtils.java Normal file

@@ -0,0 +1,77 @@
package cn.mesalab.utils;
import cn.mesalab.config.ApplicationConfig;
import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.calcite.avatica.AvaticaStatement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 4:50 PM
*/
public class DruidUtils {
private static final Logger LOG = LoggerFactory.getLogger(DruidUtils.class);
private static DruidUtils druidUtils;
private static final String DRUID_URL = ApplicationConfig.DRUID_URL;
private static AvaticaConnection connection;
private static AvaticaStatement statement;
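// NOTE: a single AvaticaStatement is shared by every caller. AvaticaStatement is not
// thread-safe, so concurrent executeQuery() calls from the baseline worker pool are a
// plausible cause of the errors seen in the parallel run (see commit message).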
public DruidUtils() throws SQLException {
Properties properties = new Properties();
connection = (AvaticaConnection) DriverManager.getConnection(DRUID_URL, properties);
statement = connection.createStatement();
}
public static DruidUtils getInstance() {
if (druidUtils == null){
try{
druidUtils = new DruidUtils();
} catch (SQLException e){
LOG.error("Druid 建立连接失败!");
e.printStackTrace();
}
}
return druidUtils;
}
private static AvaticaConnection getConn() throws SQLException {
Properties properties = new Properties();
AvaticaConnection connection = (AvaticaConnection) DriverManager.getConnection(DRUID_URL, properties);
return connection;
}
public void closeConnection() {
if(connection != null){
try{
connection.close();
} catch (SQLException e){
LOG.error("Druid 关闭连接失败!");
e.printStackTrace();
}
}
}
public ResultSet executeQuery (String sql) throws SQLException{
System.out.println("executeQuery:"+sql);
ResultSet resultSet = statement.executeQuery(sql);
return resultSet;
}
public AvaticaConnection getConnection() {
return connection;
}
public AvaticaStatement getStatement() {
return statement;
}
}


@@ -0,0 +1 @@
package cn.mesalab.utils;

214
src/main/java/cn/mesalab/utils/SeriesUtils.java Normal file

@@ -0,0 +1,214 @@
package cn.mesalab.utils;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.dao.DruidData;
import com.google.common.collect.Lists;
import org.jfree.util.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileReader;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
/**
* @author joy
*/
public class SeriesUtils {
private static final Logger LOG = LoggerFactory.getLogger(SeriesUtils.class);
private static DruidData druidData = new DruidData();
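// constructed directly instead of via DruidData.getInstance(); both paths share the
// same static DruidUtils connection, so this merely bypasses the singleton cache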
public static List<Map<String, Object>> readCsvToList(String filePath) {
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
String line;
try (BufferedReader br = new BufferedReader(new FileReader(filePath))) {
br.readLine();
while ((line = br.readLine()) != null) {
List<String> column = Arrays.asList(line.split(","));
// store each <column name, column value> pair of the record
Map<String, Object> rowData = new HashMap<String, Object>();
rowData.put("__time", column.get(0));
rowData.put(ApplicationConfig.BASELINE_METRIC_TYPE, Integer.valueOf(column.get(1)));
list.add(rowData);
}
} catch (Exception e) {
e.printStackTrace();
}
return list;
}
/**
* Fills gaps in a time series: every expected timestamp in the read window gets a row; missing points get metric value 0.
*/
public static List<Map<String, Object>> complementSeries(List<Map<String, Object>> originSeries){
LocalDateTime startTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(druidData.getTimeLimit()._2), TimeZone
.getDefault().toZoneId());
LocalDateTime endTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(druidData.getTimeLimit()._1), TimeZone
.getDefault().toZoneId());
List<String> dateList = completionDate(startTime, endTime);
// the gap-filled result
List<Map<String, Object>> result = new ArrayList<>();
boolean dbDateExist = false;
for (String date : dateList) {
// originSeries holds the rows returned from the database as List<Map<String, Object>>
for (Map<String, Object> row : originSeries) {
if (row.get(ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME).toString().substring(0,19).equals(date)) {
// the query result already contains this timestamp
dbDateExist = true;
result.add(row);
break;
}
}
// timestamp missing from the result: append a zero-valued filler row
if (!dbDateExist) {
Map<String, Object> temp = new HashMap<>(2);
temp.put(ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME, date);
temp.put(ApplicationConfig.BASELINE_METRIC_TYPE, 0);
result.add(temp);
}
dbDateExist = false;
}
return result;
}
private static List<String> completionDate(LocalDateTime startTime, LocalDateTime endTime) {
// date formatter
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(ApplicationConfig.TIME_FORMAT);
List<String> timeList = new ArrayList<>();
// walk the window from startTime to endTime in HISTORICAL_GRAD-minute steps
for (int i = 0; !Duration.between(startTime.plusMinutes(i+1), endTime).isNegative(); i+= ApplicationConfig.HISTORICAL_GRAD) {
// record this timestamp
timeList.add(startTime.plusMinutes(i).format(formatter));
}
return timeList;
}
/**
* Checks for a day-scale periodic pattern: correlates the last day of the
* series against the average of the preceding days.
* @param historicalSeries aggregated series covering the read window
* @return true when the Pearson correlation reaches the configured threshold
*/
public static Boolean isPeriod(List<Integer> historicalSeries){
Boolean result = true;
List<List<Integer>> partitions = Lists.partition(historicalSeries, 24*60/ApplicationConfig.HISTORICAL_GRAD);
List<Integer> aggregatedPart = Arrays.asList();
try{
aggregatedPart = columnAverage(partitions.subList(0, ApplicationConfig.READ_HISTORICAL_DAYS-1));
} catch (IndexOutOfBoundsException e){
Log.error("历史");
}
// Pearson corrcoef
double pearsonCorrelationScore = getPearsonCorrelationScore(aggregatedPart.stream().mapToInt(Integer::valueOf).toArray(),
partitions.get(partitions.size() - 1).stream().mapToInt(Integer::valueOf).toArray());
if (pearsonCorrelationScore < ApplicationConfig.BASELINE_PERIOD_CORR_THRE){
result=false;
}
return result;
}
public static double getPearsonCorrelationScore(int[] xData, int[] yData) {
if (xData.length != yData.length) {
throw new IllegalArgumentException("Pearson correlation: input arrays differ in length");
}
int xMeans;
int yMeans;
double numerator = 0;
double denominator = 0;
double result = 0;
// means of both series
xMeans = (int) getMeans(xData);
yMeans = (int) getMeans(yData);
// numerator of the Pearson coefficient
numerator = generateNumerator(xData, xMeans, yData, yMeans);
// denominator of the Pearson coefficient
denominator = generateDenominator(xData, xMeans, yData, yMeans);
// the coefficient itself
if(denominator>0) {
result = numerator / denominator;
}
//FOR TEST
System.out.println(result);
return result;
}
private static int generateNumerator(int[] xData, int xMeans, int[] yData, int yMeans) {
int numerator = 0;
for (int i = 0; i < xData.length; i++) {
numerator += (xData[i] - xMeans) * (yData[i] - yMeans);
}
return numerator;
}
private static double generateDenominator(int[] xData, int xMeans, int[] yData, int yMeans) {
double xSum = 0.0;
for (int i = 0; i < xData.length; i++) {
xSum += (xData[i] - xMeans) * (xData[i] - xMeans);
}
double ySum = 0.0;
for (int i = 0; i < yData.length; i++) {
ySum += (yData[i] - yMeans) * (yData[i] - yMeans);
}
return Math.sqrt(xSum) * Math.sqrt(ySum);
}
private static double getMeans(int[] datas) {
double sum = 0.0;
for (int i = 0; i < datas.length; i++) {
sum += datas[i];
}
return sum / datas.length;
}
public static List<Integer> columnAverage(List<List<Integer>> list){
ArrayList<Integer> averages = new ArrayList<>();
for(int i=0; i<list.get(0).size(); i++){
int columnSum = 0;
for(int j = 0; j< list.size(); j++){
columnSum += list.get(j).get(i);
}
averages.add(columnSum / list.size());
}
return averages;
}
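/** Nearest-rank percentile; {@code percentile} is a fraction in (0, 1], not 0-100. Sorts the input list in place. */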
public static int percentile(List<Integer> latencies, double percentile) {
Collections.sort(latencies);
int index = (int) Math.ceil(percentile * latencies.size());
return latencies.get(index-1);
}
public static void main(String[] args) {
List<Integer> test = Arrays.asList(
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5);
System.out.println(columnAverage(Lists.partition(test, 5)));
}
}

60
src/main/resources/application.properties Normal file

@@ -0,0 +1,60 @@
# Druid configuration
druid.url=jdbc:avatica:remote:url=http://192.168.44.12:8082/druid/v2/sql/avatica/
druid.driver=org.apache.calcite.avatica.remote.Driver
druid.table=top_server_ip_test_log
# column mappings
druid.attacktype.tcpsynflood=sessions
#druid.attacktype.udpflood=IPv6_TCP
#druid.attacktype.icmpflood=IPv6_UDP
#druid.attacktype.dnsamplification=IPv4_UDP
druid.serverip.columnname=destination
druid.attacktype.columnname=order_by
druid.recvtime.columnname=__time
# metric used for baseline generation
baseline.metric.type=session_num
# HBase configuration
hbase.table=ddos_traffic_baselines
hbase.zookeeper.quorum=192.168.44.12
hbase.zookeeper.client.port=2181
# Druid read-window mode: 0 = the last read.historical.days days, 1 = the explicit min/max range below
read.druid.time.limit.type=1
# 2021-07-01
read.druid.min.time=1625068800000
# 2021-06-01
#read.druid.min.time=1622476800000
read.druid.max.time=1625673600000
# read the past N days of data (at least 3, required by the periodicity check)
read.historical.days=7
# aggregation granularity of the historical data, in minutes
historical.grad=10
# baseline generation method
baseline.function=KalmanFilter
# baseline span, in days
baseline.range.days=1
# database time format
time.format=yyyy-MM-dd HH:mm:ss
# algorithm parameters
baseline.period.correlative.threshold=0.5
baseline.historical.ratio.threshold=0.1
baseline.historical.sparse.fill.percentile=0.95
baseline.rational.percentile=0.95
# Kalman Filter
baseline.kalman.q=0.000001
baseline.kalman.r=0.002
# write a log line every log.write.count record updates
log.write.count=10000
# FOR TEST
generate.batch.number=10

19
src/main/resources/log4j.properties Normal file

@@ -0,0 +1,19 @@
######################### logger ##############################
log4j.logger.org.apache.http=OFF
log4j.logger.org.apache.http.wire=OFF
#Log4j
log4j.rootLogger=info,console,file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
log4j.appender.file.file=./logs/ddos_baselines.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n

61
src/main/java/ThreadList.java Normal file

@@ -0,0 +1,61 @@
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
/**
* @author yjy
* @version 1.0
* @date 2021/7/30 11:09 AM
*/
public class ThreadList {
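// Standalone experiment: split a 5300-element list into 500-element chunks,
// run one Callable per chunk via invokeAll, and print the collected results.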
public static void main(String[] args) throws InterruptedException, ExecutionException {
List<String> list = new ArrayList<>(); // build some test data
for (int i = 0; i < 5300; i++) {
list.add("" + i);
}
// work out the number of threads
int threadSize = 500; // one thread per 500 items
int remainder = list.size() % threadSize; // remainder
int threadNum = 0; // number of threads
if (remainder == 0) { // evenly divisible by 500
threadNum = list.size() / threadSize;
} else { // not divisible: one extra thread for the tail
threadNum = list.size() / threadSize + 1;
}
ExecutorService eService = Executors.newFixedThreadPool(threadNum); // create the thread pool
List<Callable<String>> cList = new ArrayList<>(); // tasks to submit
Callable<String> task = null; // a single task
List<String> sList = null;
for (int i = 0; i < threadNum; i++) { // give each task its slice of the data
if (i == threadNum - 1) {
sList = list.subList(i * threadSize, list.size());
} else {
sList = list.subList(i * threadSize, (i + 1) * threadSize);
}
final List<String> nowList = sList;
// build the task for this slice
task = new Callable<String>() {
@Override
public String call() throws Exception {
StringBuffer sb = new StringBuffer();
for (int j = 0; j < nowList.size(); j++) {
sb.append("" + nowList.get(j));
}
return sb.toString();
}
};
cList.add(task); // collect the task
}
List<Future<String>> results = eService.invokeAll(cList); // run all tasks and wait for every return value
for (Future<String> str : results) { // print the results
System.out.println(str.get());
}
eService.shutdown();
}
}

60
target/classes/application.properties Normal file

@@ -0,0 +1,60 @@
# Druid configuration
druid.url=jdbc:avatica:remote:url=http://192.168.44.12:8082/druid/v2/sql/avatica/
druid.driver=org.apache.calcite.avatica.remote.Driver
druid.table=top_server_ip_test_log
# column mappings
druid.attacktype.tcpsynflood=sessions
#druid.attacktype.udpflood=IPv6_TCP
#druid.attacktype.icmpflood=IPv6_UDP
#druid.attacktype.dnsamplification=IPv4_UDP
druid.serverip.columnname=destination
druid.attacktype.columnname=order_by
druid.recvtime.columnname=__time
# metric used for baseline generation
baseline.metric.type=session_num
# HBase configuration
hbase.table=ddos_traffic_baselines
hbase.zookeeper.quorum=192.168.44.12
hbase.zookeeper.client.port=2181
# Druid read-window mode: 0 = the last read.historical.days days, 1 = the explicit min/max range below
read.druid.time.limit.type=1
# 2021-07-01
read.druid.min.time=1625068800000
# 2021-06-01
#read.druid.min.time=1622476800000
read.druid.max.time=1625673600000
# read the past N days of data (at least 3, required by the periodicity check)
read.historical.days=7
# aggregation granularity of the historical data, in minutes
historical.grad=10
# baseline generation method
baseline.function=KalmanFilter
# baseline span, in days
baseline.range.days=1
# database time format
time.format=yyyy-MM-dd HH:mm:ss
# algorithm parameters
baseline.period.correlative.threshold=0.5
baseline.historical.ratio.threshold=0.1
baseline.historical.sparse.fill.percentile=0.95
baseline.rational.percentile=0.95
# Kalman Filter
baseline.kalman.q=0.000001
baseline.kalman.r=0.002
# write a log line every log.write.count record updates
log.write.count=10000
# FOR TEST
generate.batch.number=10

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

19
target/classes/log4j.properties Normal file

@@ -0,0 +1,19 @@
######################### logger ##############################
log4j.logger.org.apache.http=OFF
log4j.logger.org.apache.http.wire=OFF
#Log4j
log4j.rootLogger=info,console,file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
log4j.appender.file.file=./logs/ddos_baselines.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n

Binary file not shown.

Binary file not shown.