logs/ddos_baselines.log (17059 lines changed)
    File diff suppressed because one or more lines are too long
logs/ddos_baselines.log.2021-08-02 (new file, 11510 lines)
    File diff suppressed because one or more lines are too long
Binary file not shown.
logs/ddos_baselines.log
@@ -86,3 +86,30 @@
 [2021-08-02 18:23:01] [INFO ] [Thread:baseline-demo-3] cn.mesalab.service.BaselineGeneration.generateBaselines(BaselineGeneration.java:128) - Baseline 线程 18 成功写入Baseline条数共计 1
 [2021-08-02 18:23:01] [INFO ] [Thread:baseline-demo-0] cn.mesalab.service.BaselineGeneration.generateBaselines(BaselineGeneration.java:128) - Baseline 线程 15 成功写入Baseline条数共计 6
 [2021-08-02 18:23:02] [INFO ] [Thread:baseline-demo-1] cn.mesalab.service.BaselineGeneration.generateBaselines(BaselineGeneration.java:128) - Baseline 线程 16 成功写入Baseline条数共计 5
+[2021-08-02 20:32:57] [WARN ] [Thread:main] org.apache.hadoop.util.NativeCodeLoader.<clinit>(NativeCodeLoader.java:62) - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:zookeeper.version=3.4.10-39d3a4f269333c922ed3db283be479f9deacaa0f, built on 03/23/2017 10:13 GMT
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:host.name=localhost
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.version=1.8.0_271
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.vendor=Oracle Corporation
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.home=/Library/Java/JavaVirtualMachines/jdk1.8.0_271.jdk/Contents/Home/jre
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.class.path=generate-baselines.jar
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.library.path=/Users/joy/Library/Java/Extensions:/Library/Java/Extensions:/Network/Library/Java/Extensions:/System/Library/Java/Extensions:/usr/lib/java:.
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.io.tmpdir=/var/folders/y_/8fnngp3d0v96m1nvj7m_v9gm0000gn/T/
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:java.compiler=<NA>
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:os.name=Mac OS X
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:os.arch=x86_64
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:os.version=10.16
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:user.name=joy
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:user.home=/Users/joy
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.Environment.logEnv(Environment.java:100) - Client environment:user.dir=/Users/joy/work/iie/project/cyber_narrator/DDos/code/generate-baselines/out/artifacts/generate_baselines_jar
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62] org.apache.zookeeper.ZooKeeper.<init>(ZooKeeper.java:438) - Initiating client connection, connectString=192.168.44.12:2181 sessionTimeout=90000 watcher=org.apache.hadoop.hbase.zookeeper.ReadOnlyZKClient$$Lambda$13/106865101@475140b4
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62-SendThread(192.168.44.12:2181)] org.apache.zookeeper.ClientCnxn$SendThread.logStartConnect(ClientCnxn.java:1032) - Opening socket connection to server 192.168.44.12/192.168.44.12:2181. Will not attempt to authenticate using SASL (unknown error)
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62-SendThread(192.168.44.12:2181)] org.apache.zookeeper.ClientCnxn$SendThread.primeConnection(ClientCnxn.java:876) - Socket connection established to 192.168.44.12/192.168.44.12:2181, initiating session
+[2021-08-02 20:32:57] [INFO ] [Thread:ReadOnlyZKClient-192.168.44.12:2181@0x102cec62-SendThread(192.168.44.12:2181)] org.apache.zookeeper.ClientCnxn$SendThread.onConnected(ClientCnxn.java:1299) - Session establishment complete on server 192.168.44.12/192.168.44.12:2181, sessionid = 0x17aaebc65074806, negotiated timeout = 90000
+[2021-08-02 20:32:58] [INFO ] [Thread:main] cn.mesalab.service.BaselineGeneration.perform(BaselineGeneration.java:52) - Druid 成功建立连接
+[2021-08-02 20:32:58] [INFO ] [Thread:main] cn.mesalab.dao.DruidData.getServerIpList(DruidData.java:76) - 性能测试:ip list查询耗时——160
+[2021-08-02 20:32:58] [INFO ] [Thread:main] cn.mesalab.service.BaselineGeneration.generateBaselinesThread(BaselineGeneration.java:94) - 共查询到服务端ip 1000 个
+[2021-08-02 20:32:58] [INFO ] [Thread:main] cn.mesalab.service.BaselineGeneration.generateBaselinesThread(BaselineGeneration.java:95) - Baseline batch 大小: 1000
+[2021-08-02 20:33:10] [INFO ] [Thread:baseline-demo-0] cn.mesalab.service.BaselineGeneration.generateBaselines(BaselineGeneration.java:132) - Baseline 线程 15 成功写入Baseline条数共计 2602
+[2021-08-02 20:33:10] [WARN ] [Thread:main] cn.mesalab.service.BaselineGeneration.perform(BaselineGeneration.java:58) - 运行时间:14432
+[2021-08-02 20:33:10] [INFO ] [Thread:main] cn.mesalab.service.BaselineGeneration.perform(BaselineGeneration.java:62) - Druid 关闭连接
src/main/java/cn/mesalab/dao/DruidData.java
@@ -6,6 +6,7 @@ import cn.mesalab.utils.DruidUtils;
 import io.vavr.Tuple;
 import io.vavr.Tuple2;
 import org.apache.calcite.avatica.AvaticaConnection;
+import org.apache.calcite.avatica.AvaticaStatement;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -22,69 +23,79 @@ import java.util.stream.Collectors;
 /**
  * @author yjy
  * @version 1.0
+ * Druid 数据库操作
  * @date 2021/7/23 4:56 下午
  */
 public class DruidData {
 
     private static final Logger LOG = LoggerFactory.getLogger(DruidData.class);
     private static DruidData druidData;
 
     private AvaticaConnection connection;
+    private AvaticaStatement statement;
-    {
-        try {
-            connection = DruidUtils.getConn();
-        } catch (SQLException exception) {
-            exception.printStackTrace();
-        }
-    }
 
 
     private String timeFilter = ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
             + " >= MILLIS_TO_TIMESTAMP(" + getTimeLimit()._2
             + ") AND " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
             + " < MILLIS_TO_TIMESTAMP(" + getTimeLimit()._1 + ")";
 
 
+    {
+        connectionInit();
+    }
 
+    /**
+     * 连接初始化
+     */
+    private void connectionInit(){
+        try {
+            connection = DruidUtils.getConn();
+            statement = connection.createStatement();
+            statement.setQueryTimeout(0);
+
+        } catch (SQLException exception) {
+            exception.printStackTrace();
+        }
+    }
 
+    /**
+     * 获取实例
+     * @return DruidData实例
+     */
     public static DruidData getInstance() {
         druidData = new DruidData();
         return druidData;
     }
 
+    /**
+     * 获取distinct server ip
+     * @return ArrayList<String> ip列表
+     */
     public ArrayList<String> getServerIpList() {
-        Long startQueryIPLIstTime = System.currentTimeMillis();
+        Long startQueryIpLIstTime = System.currentTimeMillis();
-        ArrayList<String> serverIPs = new ArrayList<String>();
+        ArrayList<String> serverIps = new ArrayList<String>();
         String sql = "SELECT distinct " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
                 + " FROM " + ApplicationConfig.DRUID_TABLE
-                + " WHERE " + timeFilter;// FOR TEST
+                + " WHERE " + timeFilter
+                + " LIMIT 10";// FOR TEST
         try{
-            ResultSet resultSet = DruidUtils.executeQuery(connection,sql);
+            ResultSet resultSet = DruidUtils.executeQuery(statement,sql);
             while(resultSet.next()){
                 String ip = resultSet.getString(ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME);
-                serverIPs.add(ip);
+                serverIps.add(ip);
             }
         } catch (Exception e){
            e.printStackTrace();
        }
-        Long endQueryIPListTime = System.currentTimeMillis();
+        Long endQueryIpListTime = System.currentTimeMillis();
-        LOG.info("性能测试:ip list查询耗时——"+(endQueryIPListTime-startQueryIPLIstTime));
+        LOG.info("性能测试:ip list查询耗时——"+(endQueryIpListTime-startQueryIpLIstTime));
 
-        return serverIPs;
+        return serverIps;
-    }
-
-    public List<Map<String, Object>> getTimeSeriesData(List<Map<String, Object>> allData, String ip, String attackType){
-        List<Map<String, Object>> rsList = new ArrayList<>();
-        try{
-            rsList = allData.stream().
-                    filter(i->((i.get(ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME).equals(ip))
-                    )&&(i.get(ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME).equals(attackType)))
-                    .collect(Collectors.toList());
-        } catch (NullPointerException e){
-        }
-        return rsList;
     }
 
+    /**
+     * 从Druid读取目标IP相关数据
+     * @param ipList ip列表
+     * @return 数据库读取结果
+     */
     public List<Map<String, Object>> readFromDruid(List<String> ipList){
         List<Map<String, Object>> rsList = null;
         ipList = ipList.stream().map( ip -> "\'"+ip+"\'").collect(Collectors.toList());
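
Note: the old instance initializer only opened a connection; the new connectionInit() also prepares a single AvaticaStatement with no query timeout, and every query in this class now reuses it. Despite its name, getInstance() constructs a fresh DruidData on each call, so each worker thread gets its own connection and statement. A usage sketch built only from methods visible in this diff:

    // Each caller gets an independent connection plus one reusable statement.
    DruidData dd = DruidData.getInstance();           // runs connectionInit()
    ArrayList<String> ips = dd.getServerIpList();     // queries via the shared statement
    List<Map<String, Object>> rows = dd.readFromDruid(ips);
    dd.closeConn();                                   // releases the connection
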
@@ -98,7 +109,7 @@ public class DruidData {
                 + " IN " + ipString
                 + " AND " + timeFilter;
         try{
-            ResultSet resultSet = DruidUtils.executeQuery(connection,sql);
+            ResultSet resultSet = DruidUtils.executeQuery(statement, sql);
             ResultSetToListService service = new ResultSetToListServiceImp();
             rsList = service.selectAll(resultSet);
         } catch (Exception e){
@@ -107,6 +118,29 @@ public class DruidData {
         return rsList;
     }
 
+    /**
+     * 从数据库读取结果中筛选指定ip的指定攻击类型的数据
+     * @param allData 数据库读取结果
+     * @param ip 指定ip
+     * @param attackType 指定攻击类型
+     * @return 筛选结果
+     */
+    public List<Map<String, Object>> getTimeSeriesData(List<Map<String, Object>> allData, String ip, String attackType){
+        List<Map<String, Object>> rsList = new ArrayList<>();
+        try{
+            rsList = allData.stream().
+                    filter(i->((i.get(ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME).equals(ip))
+                    )&&(i.get(ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME).equals(attackType)))
+                    .collect(Collectors.toList());
+        } catch (NullPointerException e){
+        }
+        return rsList;
+    }
+
+    /**
+     * 计算查询时间范围,可指定时间范围(测试)或使用默认配置
+     * @return 时间范围起始点和终止点
+     */
     public Tuple2<Long, Long> getTimeLimit(){
         long maxTime = 0L;
         long minTime = 0L;
@@ -140,6 +174,9 @@ public class DruidData {
         return getCurrentDay(0);
     }
 
+    /**
+     * 关闭当前DruidData
+     */
     public void closeConn(){
         try {
             DruidUtils.closeConnection();
@@ -1,6 +1,7 @@
 package cn.mesalab.main;
 
 import cn.mesalab.service.BaselineGeneration;
+import sun.rmi.runtime.Log;
 
 /**
  * @author yjy
src/main/java/cn/mesalab/service/BaselineGeneration.java
@@ -2,7 +2,7 @@ package cn.mesalab.service;
 
 import cn.mesalab.config.ApplicationConfig;
 import cn.mesalab.dao.DruidData;
-import cn.mesalab.service.BaselineService.KalmanFilter;
+import cn.mesalab.service.algorithm.KalmanFilter;
 import cn.mesalab.utils.HbaseUtils;
 import cn.mesalab.utils.SeriesUtils;
 import com.google.common.collect.Lists;
@@ -21,6 +21,7 @@ import java.util.stream.Collectors;
 /**
  * @author yjy
  * @version 1.0
+ * baseline生成及写入
  * @date 2021/7/23 5:38 下午
  */
 public class BaselineGeneration {
@@ -37,18 +38,22 @@ public class BaselineGeneration {
             ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD,
             ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL
     );
-    private static final Integer BASELINE_POINT_NUM = ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
+    private static final Integer BASELINE_POINT_NUM =
+            ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
 
+    /**
+     * 程序执行
+     */
     public static void perform() {
         long start = System.currentTimeMillis();
 
         druidData = DruidData.getInstance();
         hbaseUtils = HbaseUtils.getInstance();
         hbaseTable = hbaseUtils.getHbaseTable();
 
         LOG.info("Druid 成功建立连接");
 
         try{
+            // baseline生成并写入
             generateBaselinesThread();
 
             long last = System.currentTimeMillis();
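
Note: BASELINE_POINT_NUM is the length of one baseline series: days × 24 hours × points per hour, where HISTORICAL_GRAD is the bucket width in minutes (integer division). A worked example with assumed values — the actual config values are not part of this diff — BASELINE_RANGE_DAYS = 1 and HISTORICAL_GRAD = 15:

    int points = 1 * 24 * (60 / 15);  // = 96, one point per 15-minute bucket of the day
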
@@ -64,10 +69,12 @@ public class BaselineGeneration {
             System.exit(0);
     }
 
+    /**
+     * 多线程baseline生成入口
+     * @throws InterruptedException
+     */
     private static void generateBaselinesThread() throws InterruptedException {
         int threadNum = Runtime.getRuntime().availableProcessors();
-        // int threadNum = 10;
 
 
         ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
                 .setNameFormat("baseline-demo-%d").build();
@@ -82,15 +89,13 @@ public class BaselineGeneration {
                 namedThreadFactory,
                 new ThreadPoolExecutor.AbortPolicy());
 
-        // baseline 生成及写入
-        // 耗时测试
-        Long startQueryIPList = System.currentTimeMillis();
+        // IP列表获取
         ArrayList<String> destinationIps = druidData.getServerIpList();
-        Long endQueryIPList = System.currentTimeMillis();
 
         LOG.info("共查询到服务端ip " +destinationIps.size() + " 个");
         LOG.info("Baseline batch 大小: " + ApplicationConfig.GENERATE_BATCH_SIZE);
 
+        // 分批进行IP baseline生成和处理
         List<List<String>> batchIpLists = Lists.partition(destinationIps, ApplicationConfig.GENERATE_BATCH_SIZE);
         for (List<String> batchIps: batchIpLists){
             if(batchIps.size()>0){
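
Note: the IP list is cut into GENERATE_BATCH_SIZE chunks with Guava's Lists.partition, and each chunk becomes one task for the fixed pool built above. A minimal self-contained sketch of that pattern (the println stands in for the real batch work; it is a placeholder, not the project's API):

    import com.google.common.collect.Lists;
    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class BatchSketch {
        public static void main(String[] args) throws InterruptedException {
            List<String> ips = Arrays.asList("1.0.0.1", "1.0.0.2", "1.0.0.3");
            ExecutorService pool = Executors.newFixedThreadPool(2);
            // Batches of 2; each batch is processed as one task
            for (List<String> batch : Lists.partition(ips, 2)) {
                pool.execute(() -> System.out.println("processing " + batch));
            }
            pool.shutdown();                            // stop accepting tasks
            pool.awaitTermination(10L, TimeUnit.HOURS); // mirrors the code above
        }
    }
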
@@ -102,27 +107,24 @@ public class BaselineGeneration {
         executor.awaitTermination(10L, TimeUnit.HOURS);
     }
 
+    /**
+     * 批量生成IP baseline
+     * @param ipList ip列表
+     */
     public static void generateBaselines(List<String> ipList){
-        Long startGenerationBaselines= System.currentTimeMillis();
-        Long startReadDruidData = System.currentTimeMillis();
+        druidData = DruidData.getInstance();
 
         batchDruidData = druidData.readFromDruid(ipList);
-        Long endReadDruidData = System.currentTimeMillis();
-        //LOG.info("读取Druid数据耗时:"+(endReadDruidData-startReadDruidData));
 
         List<Put> putList = new ArrayList<>();
         for(String attackType: attackTypeList){
             for(String ip: ipList){
                 int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
-                if (!(ipBaseline ==null)){
+                if (ipBaseline!= null){
                     putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
                 }
             }
         }
 
-        Long endGenerationBaselines= System.currentTimeMillis();
-        //LOG.info("BaselineGeneration耗时:"+(endGenerationBaselines-endReadDruidData));
 
         try {
             hbaseTable.put(putList);
             LOG.info("Baseline 线程 " + Thread.currentThread().getId() + " 成功写入Baseline条数共计 " + putList.size());
@@ -130,25 +132,27 @@ public class BaselineGeneration {
             e.printStackTrace();
         }
 
-        Long endWriteTime = System.currentTimeMillis();
-        //LOG.info("BaselineWriteIn耗时:"+(endWriteTime-endGenerationBaselines));
+        druidData.closeConn();
     }
 
+    /**
+     * 单ip baseline生成逻辑
+     * @param ip ip
+     * @param attackType 攻击类型
+     * @return baseline序列,长度为 60/HISTORICAL_GRAD*24
+     */
     private static int[] generateSingleIpBaseline(String ip, String attackType){
         // 查询
-        Long startQuerySingleIPTime = System.currentTimeMillis();
         List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(batchDruidData, ip, attackType);
 
         if (originSeries.size()==0){
             return null;
         }
 
-        Long endQuerySingleIPTime = System.currentTimeMillis();
 
         // 时间序列缺失值补0
         List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
 
-        int[] baselineArr = new int[completSeries.size()];
+        int[] baselineArr = new int[BASELINE_POINT_NUM];
         List<Integer>series = completSeries.stream().map(
                 i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList());
 
@@ -173,14 +177,14 @@ public class BaselineGeneration {
             }
         }
 
-        Long endGenerateSingleIPTime = System.currentTimeMillis();
-        //LOG.info("性能测试:单个baseline生成耗时——"+(endGenerateSingleIPTime-endQuerySingleIPTime));
-        //System.out.println(ip);
-        //System.out.println(attackType + Arrays.toString(baselineArr));
 
         return baselineArr;
     }
 
+    /**
+     * baseline 生成算法
+     * @param timeSeries 输入序列
+     * @return 输出序列
+     */
     private static int[] baselineFunction(List<Integer> timeSeries){
         int[] result;
         switch (ApplicationConfig.BASELINE_FUNCTION){
src/main/java/cn/mesalab/service/algorithm/KalmanFilter.java (moved from cn/mesalab/service/BaselineService/)
@@ -1,4 +1,4 @@
-package cn.mesalab.service.BaselineService;
+package cn.mesalab.service.algorithm;
 
 import cn.mesalab.config.ApplicationConfig;
 
@@ -8,12 +8,11 @@ import java.util.List;
 /**
  * @author yjy
  * @version 1.0
+ * kalman滤波器
  * @date 2021/7/25 1:42 下午
  */
 
 public class KalmanFilter {
 
-    /**Kalman Filter*/
     private Integer predict;
     private Integer current;
     private Integer estimate;
@@ -29,6 +28,7 @@ public class KalmanFilter {
     }
 
     public void initial(){
+        // TODO 调整
         pdelt = 1;
         mdelt = 1;
     }
@@ -54,9 +54,7 @@ public class KalmanFilter {
 
 
    public void forcast(List<Integer> historicalSeries, Integer length){
-        // 初始值计算
        int oldvalue = (historicalSeries.stream().mapToInt(Integer::intValue).sum())/historicalSeries.size();
-        // 滤波
        smoothSeries = new ArrayList<Integer>();
        for(int i = 0; i < historicalSeries.size(); i++){
            int value = historicalSeries.get(i);
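
Note: KalmanFilter smooths the historical series before the baseline is derived; the pdelt/mdelt fields above play the role of process and measurement noise. The class's full update equations are outside this hunk, so here is a generic one-dimensional Kalman smoothing sketch of the technique it names (q is assumed; r corresponds to baseline.kalman.r in the config file below), not the project's exact implementation:

    // Scalar Kalman filter with identity dynamics: predict, then correct.
    public static int[] smooth(int[] series, double q, double r) {
        double estimate = series[0];  // initial state estimate
        double p = 1.0;               // initial estimate covariance
        int[] out = new int[series.length];
        for (int i = 0; i < series.length; i++) {
            p += q;                                   // predict: uncertainty grows
            double k = p / (p + r);                   // Kalman gain
            estimate += k * (series[i] - estimate);   // correct with the measurement
            p *= (1 - k);                             // uncertainty shrinks
            out[i] = (int) Math.round(estimate);
        }
        return out;
    }
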
src/main/java/cn/mesalab/utils/DruidUtils.java
@@ -19,6 +19,7 @@ public class DruidUtils {
     private static ThreadLocal<AvaticaConnection> threadLocal = new ThreadLocal<AvaticaConnection>();
 
     private static final String DRUID_URL = ApplicationConfig.DRUID_URL;
+    private static AvaticaStatement statement = null;
 
     /**
      * 打开连接
@@ -46,9 +47,8 @@ public class DruidUtils {
     /**
      * 根据sql查询结果
      */
-    public static ResultSet executeQuery (AvaticaConnection connection, String sql) throws SQLException{
-        AvaticaStatement statement = connection.createStatement();
-        ResultSet resultSet = statement.executeQuery(sql);
+    public static ResultSet executeQuery (AvaticaStatement statement, String sql) throws SQLException{
+        ResultSet resultSet = statement.executeQuery(sql);
         return resultSet;
     }
 
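
Note: executeQuery no longer creates a throwaway AvaticaStatement per call; callers pass the long-lived statement that DruidData.connectionInit() prepares once. Call-pattern sketch (sqlA/sqlB stand for any query strings):

    AvaticaStatement stmt = connection.createStatement(); // once, at init
    stmt.setQueryTimeout(0);
    ResultSet a = DruidUtils.executeQuery(stmt, sqlA);
    // Consume a fully before the next call: per JDBC semantics,
    // re-executing the same Statement closes its previous ResultSet.
    ResultSet b = DruidUtils.executeQuery(stmt, sqlB);
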
@@ -54,5 +54,5 @@ baseline.kalman.r=0.002
 # 每更新1000个记录打印log
 log.write.count=10000
 # FOR TEST
-generate.batch.size=100
+generate.batch.size=1
 
src/test/java/cn/mesalab/service/HBaseTest.java (new file, 92 lines)
|
|||||||
|
package cn.mesalab.service;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @author yjy
|
||||||
|
* @version 1.0
|
||||||
|
* @date 2021/8/3 11:21 上午
|
||||||
|
*/
|
||||||
|
|
||||||
|
import cn.mesalab.config.ApplicationConfig;
|
||||||
|
import cn.mesalab.dao.DruidData;
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.hbase.*;
|
||||||
|
import org.apache.hadoop.hbase.client.*;
|
||||||
|
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
|
||||||
|
import org.apache.hadoop.hbase.util.Bytes;
|
||||||
|
import org.apache.hadoop.io.ArrayWritable;
|
||||||
|
import org.apache.hadoop.io.IntWritable;
|
||||||
|
import org.apache.hadoop.io.Writable;
|
||||||
|
import org.apache.hadoop.io.WritableUtils;
|
||||||
|
|
||||||
|
import java.io.ByteArrayInputStream;
|
||||||
|
import java.io.DataInputStream;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class HBaseTest {
|
||||||
|
public static void main(String[] args) throws IOException {
|
||||||
|
org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
|
||||||
|
|
||||||
|
config.set(HConstants.ZOOKEEPER_QUORUM, ApplicationConfig.HBASE_ZOOKEEPER_QUORUM);
|
||||||
|
config.set(HConstants.ZOOKEEPER_CLIENT_PORT, ApplicationConfig.HBASE_ZOOKEEPER_CLIENT_PORT);
|
||||||
|
|
||||||
|
TableName tableName = TableName.valueOf(ApplicationConfig.HBASE_TABLE);
|
||||||
|
Connection conn = ConnectionFactory.createConnection(config);
|
||||||
|
Table table = conn.getTable(tableName);
|
||||||
|
|
||||||
|
|
||||||
|
DruidData druidData = DruidData.getInstance();
|
||||||
|
ArrayList<String> destinationIps = druidData.getServerIpList();
|
||||||
|
|
||||||
|
for (String ip : destinationIps){
|
||||||
|
Get abcGet = new Get(Bytes.toBytes(ip));
|
||||||
|
Result r = table.get(abcGet);
|
||||||
|
ArrayWritable w = new ArrayWritable(IntWritable.class);
|
||||||
|
List<String> attackTypeList = Arrays.asList(
|
||||||
|
"TCP SYN Flood",
|
||||||
|
"ICMP Flood",
|
||||||
|
"UDP Flood",
|
||||||
|
"DNS Amplification"
|
||||||
|
);
|
||||||
|
for (String attackType : attackTypeList){
|
||||||
|
byte[] session_nums = r.getValue(Bytes.toBytes(attackType), Bytes.toBytes("session_num"));
|
||||||
|
if (session_nums==null){
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
w.readFields(new DataInputStream(new ByteArrayInputStream(session_nums)));
|
||||||
|
ArrayList<Integer> arr2 = fromWritable(w);
|
||||||
|
System.out.println(ip + "-" + attackType + ": " + arr2.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get abcGet = new Get(Bytes.toBytes("1.0.0.1"));
|
||||||
|
// Result r = table.get(abcGet);
|
||||||
|
// ArrayWritable w = new ArrayWritable(IntWritable.class);
|
||||||
|
// w.readFields(new DataInputStream(new ByteArrayInputStream(r.getValue(Bytes.toBytes("TCP SYN Flood"), Bytes.toBytes("session_num")))));
|
||||||
|
// ArrayList<Integer> arr2 = fromWritable(w);
|
||||||
|
// System.out.println(arr2.toString());
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public static Writable toWritable(int[] arr) {
|
||||||
|
Writable[] content = new Writable[arr.length];
|
||||||
|
for (int i = 0; i < content.length; i++) {
|
||||||
|
content[i] = new IntWritable(arr[i]);
|
||||||
|
}
|
||||||
|
return new ArrayWritable(IntWritable.class, content);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static ArrayList<Integer> fromWritable(ArrayWritable writable) {
|
||||||
|
Writable[] writables = ((ArrayWritable) writable).get();
|
||||||
|
ArrayList<Integer> list = new ArrayList<Integer>(writables.length);
|
||||||
|
for (Writable wrt : writables) {
|
||||||
|
list.add(((IntWritable)wrt).get());
|
||||||
|
}
|
||||||
|
return list;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
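
Note: each HBase cell holds an int[] serialized through Hadoop's Writable machinery, and the test replays the deserialization. A round-trip sketch using the helpers above (DataOutputBuffer is Hadoop's in-memory DataOutput; the encode side lives in HbaseUtils, which this diff does not show, so treating the cell bytes as ArrayWritable.write output is an assumption):

    // int[] -> bytes (assumed writer side)
    org.apache.hadoop.io.DataOutputBuffer out = new org.apache.hadoop.io.DataOutputBuffer();
    HBaseTest.toWritable(new int[]{4, 0, 7}).write(out);
    byte[] cell = java.util.Arrays.copyOf(out.getData(), out.getLength());

    // bytes -> int[] (exactly what the test does with r.getValue(...))
    ArrayWritable w = new ArrayWritable(IntWritable.class);
    w.readFields(new java.io.DataInputStream(new java.io.ByteArrayInputStream(cell)));
    System.out.println(HBaseTest.fromWritable(w)); // prints [4, 0, 7]
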
5 binary files not shown.
BIN  target/test-classes/cn/mesalab/service/HBaseTest.class (new file; binary content not shown)