Refactor the multi-threading code organization
@@ -32,10 +32,6 @@ public class DruidData {
     private static DruidData druidData;
     private AvaticaConnection connection;
     private AvaticaStatement statement;
-    private String timeFilter = ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
-            + " >= MILLIS_TO_TIMESTAMP(" + getTimeLimit()._2
-            + ") AND " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
-            + " < MILLIS_TO_TIMESTAMP(" + getTimeLimit()._1 + ")";
 
 
     {
@@ -69,13 +65,13 @@ public class DruidData {
      * 获取distinct server ip
      * @return ArrayList<String> ip列表
      */
-    public ArrayList<String> getServerIpList() {
+    public static ArrayList<String> getServerIpList(AvaticaStatement statement, String timeFilter) {
         Long startQueryIpLIstTime = System.currentTimeMillis();
         ArrayList<String> serverIps = new ArrayList<String>();
         String sql = "SELECT distinct " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
                 + " FROM " + ApplicationConfig.DRUID_TABLE
                 + " WHERE " + timeFilter
-                + " LIMIT 1000";// FOR TEST
+                + " LIMIT 200";// FOR TEST
         try{
             ResultSet resultSet = DruidUtils.executeQuery(statement,sql);
             while(resultSet.next()){
@@ -96,7 +92,7 @@ public class DruidData {
      * @param ipList ip列表
      * @return 数据库读取结果
      */
-    public List<Map<String, Object>> readFromDruid(List<String> ipList){
+    public static List<Map<String, Object>> readFromDruid(AvaticaConnection connection, AvaticaStatement statement, List<String> ipList, String timeFilter){
         List<Map<String, Object>> rsList = null;
         ipList = ipList.stream().map( ip -> "\'"+ip+"\'").collect(Collectors.toList());
         String ipString = "(" + StringUtils.join(ipList, ",").toString() + ")";
@@ -125,7 +121,7 @@ public class DruidData {
      * @param attackType 指定攻击类型
      * @return 筛选结果
      */
-    public List<Map<String, Object>> getTimeSeriesData(List<Map<String, Object>> allData, String ip, String attackType){
+    public static List<Map<String, Object>> getTimeSeriesData(List<Map<String, Object>> allData, String ip, String attackType){
         List<Map<String, Object>> rsList = new ArrayList<>();
         try{
             rsList = allData.stream().
@@ -141,7 +137,7 @@ public class DruidData {
      * 计算查询时间范围,可指定时间范围(测试)或使用默认配置
      * @return 时间范围起始点和终止点
      */
-    public Tuple2<Long, Long> getTimeLimit(){
+    public static Tuple2<Long, Long> getTimeLimit(){
         long maxTime = 0L;
         long minTime = 0L;
         switch(ApplicationConfig.DRUID_TIME_LIMIT_TYPE){
@@ -159,7 +155,7 @@ public class DruidData {
         return Tuple.of(maxTime, minTime);
     }
 
-    private long getCurrentDay(int bias) {
+    private static long getCurrentDay(int bias) {
         Calendar calendar = Calendar.getInstance();
         calendar.set(Calendar.DAY_OF_YEAR, calendar.get(Calendar.DAY_OF_YEAR) + bias);
         calendar.set(Calendar.HOUR_OF_DAY, 0);
@@ -170,7 +166,7 @@ public class DruidData {
         return calendar.getTimeInMillis();
     }
 
-    private long getCurrentDay(){
+    private static long getCurrentDay(){
         return getCurrentDay(0);
     }
 
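After this change DruidData no longer owns a connection: every query helper is static and receives the statement and time filter from the caller. A minimal caller sketch (hypothetical glue code; it uses only signatures that appear elsewhere in this diff, and assumes a context that declares throws SQLException):

    // Connection and statement are created once by the caller.
    AvaticaConnection conn = DruidUtils.getConn();
    AvaticaStatement stmt = DruidUtils.getStatement(conn);

    // The caller builds the time filter itself, as BaselineGeneration does below.
    String timeFilter = ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
            + " >= MILLIS_TO_TIMESTAMP(" + DruidData.getTimeLimit()._2
            + ") AND " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
            + " < MILLIS_TO_TIMESTAMP(" + DruidData.getTimeLimit()._1 + ")";

    ArrayList<String> serverIps = DruidData.getServerIpList(stmt, timeFilter);
    List<Map<String, Object>> rows = DruidData.readFromDruid(conn, stmt, serverIps, timeFilter);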
@@ -1,7 +1,6 @@
 package cn.mesalab.main;
 
 import cn.mesalab.service.BaselineGeneration;
-import sun.rmi.runtime.Log;
 
 /**
  * @author yjy
@@ -10,6 +9,6 @@ import sun.rmi.runtime.Log;
  */
 public class BaselineApplication {
     public static void main(String[] args) {
-        BaselineGeneration.perform();
+        new BaselineGeneration().perform();
     }
 }
@@ -2,21 +2,19 @@ package cn.mesalab.service;
 
 import cn.mesalab.config.ApplicationConfig;
 import cn.mesalab.dao.DruidData;
-import cn.mesalab.service.algorithm.KalmanFilter;
+import cn.mesalab.utils.DruidUtils;
 import cn.mesalab.utils.HbaseUtils;
-import cn.mesalab.utils.SeriesUtils;
 import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import org.apache.commons.math3.stat.StatUtils;
-import org.apache.hadoop.hbase.client.Put;
+import org.apache.calcite.avatica.AvaticaConnection;
+import org.apache.calcite.avatica.AvaticaStatement;
 import org.apache.hadoop.hbase.client.Table;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
+import java.sql.SQLException;
 import java.util.*;
 import java.util.concurrent.*;
-import java.util.stream.Collectors;
 
 /**
  * @author yjy
@@ -27,10 +25,18 @@ import java.util.stream.Collectors;
 public class BaselineGeneration {
     private static final Logger LOG = LoggerFactory.getLogger(BaselineGeneration.class);
 
-    private static DruidData druidData;
-    private static HbaseUtils hbaseUtils;
-    private static Table hbaseTable;
-    private static List<Map<String, Object>> batchDruidData = new ArrayList<>();
+    private static AvaticaConnection druidConn = DruidUtils.getConn();
+    private static AvaticaStatement druidStatement;
+
+    static {
+        try {
+            druidStatement = DruidUtils.getStatement(druidConn);
+        } catch (SQLException exception) {
+            exception.printStackTrace();
+        }
+    }
+
+    private static Table hbaseTable = HbaseUtils.getInstance().getHbaseTable();
 
     private static List<String> attackTypeList = Arrays.asList(
             ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD,
@@ -41,17 +47,17 @@ public class BaselineGeneration {
     private static final Integer BASELINE_POINT_NUM =
             ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
 
+    private static String timeFilter = ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
+            + " >= MILLIS_TO_TIMESTAMP(" + DruidData.getTimeLimit()._2
+            + ") AND " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
+            + " < MILLIS_TO_TIMESTAMP(" + DruidData.getTimeLimit()._1 + ")";
+
     /**
      * 程序执行
      */
-    public static void perform() {
+    public void perform() {
         long start = System.currentTimeMillis();
 
-        druidData = DruidData.getInstance();
-        hbaseUtils = HbaseUtils.getInstance();
-        hbaseTable = hbaseUtils.getHbaseTable();
-        LOG.info("Druid 成功建立连接");
-
         try{
             // baseline生成并写入
             generateBaselinesThread();
@@ -59,7 +65,7 @@ public class BaselineGeneration {
             long last = System.currentTimeMillis();
             LOG.warn("运行时间:" + (last - start));
 
-            druidData.closeConn();
+            druidConn.close();
             hbaseTable.close();
             LOG.info("Druid 关闭连接");
 
@@ -73,7 +79,7 @@ public class BaselineGeneration {
      * 多线程baseline生成入口
      * @throws InterruptedException
      */
-    private static void generateBaselinesThread() throws InterruptedException {
+    private void generateBaselinesThread() throws InterruptedException {
         int threadNum = Runtime.getRuntime().availableProcessors();
 
         ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
@@ -90,16 +96,26 @@ public class BaselineGeneration {
                 new ThreadPoolExecutor.AbortPolicy());
 
         // IP列表获取
-        ArrayList<String> destinationIps = druidData.getServerIpList();
+        ArrayList<String> destinationIps = DruidData.getServerIpList(druidStatement, timeFilter);
 
         LOG.info("共查询到服务端ip " +destinationIps.size() + " 个");
         LOG.info("Baseline batch 大小: " + ApplicationConfig.GENERATE_BATCH_SIZE);
 
         // 分批进行IP baseline生成和处理
         List<List<String>> batchIpLists = Lists.partition(destinationIps, ApplicationConfig.GENERATE_BATCH_SIZE);
 
         for (List<String> batchIps: batchIpLists){
             if(batchIps.size()>0){
-                executor.execute(() -> generateBaselines(batchIps));
+                BaselineSingleThread testForInsider = new BaselineSingleThread(
+                        batchIps,
+                        druidConn,
+                        druidStatement,
+                        hbaseTable,
+                        attackTypeList,
+                        BASELINE_POINT_NUM,
+                        timeFilter
+                );
+                executor.execute(testForInsider);
             }
         }
 
@@ -107,100 +123,4 @@ public class BaselineGeneration {
         executor.awaitTermination(10L, TimeUnit.HOURS);
     }
 
-    /**
-     * 批量生成IP baseline
-     * @param ipList ip列表
-     */
-    public static void generateBaselines(List<String> ipList){
-        druidData = DruidData.getInstance();
-        batchDruidData = druidData.readFromDruid(ipList);
-
-        List<Put> putList = new ArrayList<>();
-        for(String attackType: attackTypeList){
-            for(String ip: ipList){
-                int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
-                if (ipBaseline!= null){
-                    putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
-                }
-            }
-        }
-
-        try {
-            hbaseTable.put(putList);
-            LOG.info("Baseline 线程 " + Thread.currentThread().getId() + " 成功写入Baseline条数共计 " + putList.size());
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-
-        druidData.closeConn();
-    }
-
-    /**
-     * 单ip baseline生成逻辑
-     * @param ip ip
-     * @param attackType 攻击类型
-     * @return baseline序列,长度为 60/HISTORICAL_GRAD*24
-     */
-    private static int[] generateSingleIpBaseline(String ip, String attackType){
-        // 查询
-        List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(batchDruidData, ip, attackType);
-
-        if (originSeries.size()==0){
-            return null;
-        }
-
-        // 时间序列缺失值补0
-        List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
-
-        int[] baselineArr = new int[BASELINE_POINT_NUM];
-        List<Integer>series = completSeries.stream().map(
-                i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList());
-
-        // 判断ip出现频率
-        if(originSeries.size()/(float)completSeries.size()>ApplicationConfig.BASELINE_HISTORICAL_RATIO){
-            // 高频率
-            double percentile = StatUtils.percentile(series.stream().mapToDouble(Double::valueOf).toArray(),
-                    ApplicationConfig.BASELINE_SPARSE_FILL_PERCENTILE);
-            Arrays.fill(baselineArr, (int)percentile);
-            baselineArr = baselineFunction(series);
-
-        } else {
-            // 判断周期性
-            if (SeriesUtils.isPeriod(series)){
-                baselineArr = baselineFunction(series);
-            } else {
-                int ipPercentile = SeriesUtils.percentile(
-                        originSeries.stream().map(i ->
-                                Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList()),
-                        ApplicationConfig.BASELINE_RATIONAL_PERCENTILE);
-                Arrays.fill(baselineArr, ipPercentile);
-            }
-        }
-
-        return baselineArr;
-    }
-
-    /**
-     * baseline 生成算法
-     * @param timeSeries 输入序列
-     * @return 输出序列
-     */
-    private static int[] baselineFunction(List<Integer> timeSeries){
-        int[] result;
-        switch (ApplicationConfig.BASELINE_FUNCTION){
-            case "KalmanFilter":
-                KalmanFilter kalmanFilter = new KalmanFilter();
-                kalmanFilter.forcast(timeSeries, BASELINE_POINT_NUM);
-                result = kalmanFilter.getForecastSeries().stream().mapToInt(Integer::valueOf).toArray();
-                break;
-            default:
-                result = timeSeries.subList(0, BASELINE_POINT_NUM).stream().mapToInt(Integer::valueOf).toArray();
-        }
-        return result;
-    }
-
-    public static void main(String[] args) {
-        perform();
-    }
-
 }
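The per-batch lambda is replaced by a BaselineSingleThread object handed to the pool. Because it is submitted through executor.execute(...), only its run() method is invoked on a pool thread; start() is never called, so extending Thread buys nothing here, and note that all workers share the one druidStatement built in the static initializer. A sketch of the equivalent, more conventional Runnable formulation (hypothetical class and field names, not the committed code):

    // Hypothetical alternative: a Runnable carrying the same per-batch state
    // behaves identically under executor.execute(...).
    public class BaselineBatchWorker implements Runnable {
        private final List<String> ipList;   // one batch of server IPs
        private final String timeFilter;     // shared query time window

        public BaselineBatchWorker(List<String> ipList, String timeFilter) {
            this.ipList = ipList;
            this.timeFilter = timeFilter;
        }

        @Override
        public void run() {
            // query Druid for the batch, build baselines, write them to HBase
        }
    }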
src/main/java/cn/mesalab/service/BaselineSingleThread.java (new file, 142 lines)
@@ -0,0 +1,142 @@
+package cn.mesalab.service;
+
+import cn.mesalab.config.ApplicationConfig;
+import cn.mesalab.dao.DruidData;
+import cn.mesalab.service.algorithm.KalmanFilter;
+import cn.mesalab.utils.HbaseUtils;
+import cn.mesalab.utils.SeriesUtils;
+import org.apache.calcite.avatica.AvaticaConnection;
+import org.apache.calcite.avatica.AvaticaStatement;
+import org.apache.commons.math3.stat.StatUtils;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * @author yjy
+ * @version 1.0
+ * @date 2021/8/3 6:18 下午
+ */
+public class BaselineSingleThread extends Thread {
+    private static final Logger LOG = LoggerFactory.getLogger(BaselineSingleThread.class);
+
+    private List<String> ipList;
+    private AvaticaConnection druidConn;
+    private AvaticaStatement druidStatement;
+    private Table hbaseTable;
+    private List<String> attackTypeList;
+    private Integer BASELINE_POINT_NUM;
+    private String timeFilter;
+    private List<Map<String, Object>> batchDruidData;
+
+    public BaselineSingleThread(
+            List<String> batchIpList,
+            AvaticaConnection druidConn,
+            AvaticaStatement druidStatement,
+            Table hbaseTable,
+            List<String> attackTypeList,
+            Integer BASELINE_POINT_NUM,
+            String timeFilter
+    ){
+        this.ipList = batchIpList;
+        this.druidConn = druidConn;
+        this.druidStatement = druidStatement;
+        this.hbaseTable = hbaseTable;
+        this.attackTypeList = attackTypeList;
+        this.BASELINE_POINT_NUM = BASELINE_POINT_NUM;
+        this.timeFilter = timeFilter;
+    }
+
+    @Override
+    public void run(){
+        batchDruidData = DruidData.readFromDruid(druidConn, druidStatement, ipList, timeFilter);
+
+        List<Put> putList = new ArrayList<>();
+        for(String attackType: attackTypeList){
+            for(String ip: ipList){
+                int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
+                if (ipBaseline!= null){
+                    putList = HbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
+                }
+            }
+        }
+        try {
+            hbaseTable.put(putList);
+            LOG.info("Baseline 线程 " + Thread.currentThread().getId() + " 成功写入Baseline条数共计 " + putList.size());
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     * 单ip baseline生成逻辑
+     * @param ip ip
+     * @param attackType 攻击类型
+     * @return baseline序列,长度为 60/HISTORICAL_GRAD*24
+     */
+    private int[] generateSingleIpBaseline(String ip, String attackType){
+        // 查询
+        List<Map<String, Object>> originSeries = DruidData.getTimeSeriesData(batchDruidData, ip, attackType);
+
+        if (originSeries.size()==0){
+            return null;
+        }
+
+        // 时间序列缺失值补0
+        List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
+
+        int[] baselineArr = new int[BASELINE_POINT_NUM];
+        List<Integer>series = completSeries.stream().map(
+                i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList());
+
+        // 判断ip出现频率
+        if(originSeries.size()/(float)completSeries.size()>ApplicationConfig.BASELINE_HISTORICAL_RATIO){
+            // 高频率
+            double percentile = StatUtils.percentile(series.stream().mapToDouble(Double::valueOf).toArray(),
+                    ApplicationConfig.BASELINE_SPARSE_FILL_PERCENTILE);
+            Arrays.fill(baselineArr, (int)percentile);
+            baselineArr = baselineFunction(series);
+
+        } else {
+            // 判断周期性
+            if (SeriesUtils.isPeriod(series)){
+                baselineArr = baselineFunction(series);
+            } else {
+                int ipPercentile = SeriesUtils.percentile(
+                        originSeries.stream().map(i ->
+                                Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList()),
+                        ApplicationConfig.BASELINE_RATIONAL_PERCENTILE);
+                Arrays.fill(baselineArr, ipPercentile);
+            }
+        }
+
+        return baselineArr;
+    }
+
+    /**
+     * baseline 生成算法
+     * @param timeSeries 输入序列
+     * @return 输出序列
+     */
+    private int[] baselineFunction(List<Integer> timeSeries){
+        int[] result;
+        switch (ApplicationConfig.BASELINE_FUNCTION){
+            case "KalmanFilter":
+                KalmanFilter kalmanFilter = new KalmanFilter();
+                kalmanFilter.forcast(timeSeries, BASELINE_POINT_NUM);
+                result = kalmanFilter.getForecastSeries().stream().mapToInt(Integer::valueOf).toArray();
+                break;
+            default:
+                result = timeSeries.subList(0, BASELINE_POINT_NUM).stream().mapToInt(Integer::valueOf).toArray();
+        }
+        return result;
+    }
+}
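The baseline array length is BASELINE_RANGE_DAYS * 24 * (60 / HISTORICAL_GRAD). A worked example (the config values here are assumptions, though they match the int[144] used by the commented-out test code further down):

    // Assumed for illustration: BASELINE_RANGE_DAYS = 1, HISTORICAL_GRAD = 10 (minutes).
    int baselinePointNum = 1 * 24 * (60 / 10);   // = 144 points, one per 10-minute bucket

Since 60 / HISTORICAL_GRAD is integer division, the granularity should divide 60 evenly or buckets are silently dropped.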
@@ -19,16 +19,19 @@ public class DruidUtils {
     private static ThreadLocal<AvaticaConnection> threadLocal = new ThreadLocal<AvaticaConnection>();
 
     private static final String DRUID_URL = ApplicationConfig.DRUID_URL;
-    private static AvaticaStatement statement = null;
 
     /**
      * 打开连接
     * @throws SQLException
     */
-    public static AvaticaConnection getConn() throws SQLException {
+    public static AvaticaConnection getConn() {
         Properties properties = new Properties();
-        properties.setProperty("connectTimeout", String.valueOf(10*60*60));
-        AvaticaConnection connection = (AvaticaConnection) DriverManager.getConnection(DRUID_URL, properties);
+        AvaticaConnection connection = null;
+        try {
+            connection = (AvaticaConnection) DriverManager.getConnection(DRUID_URL, properties);
+        } catch (SQLException exception) {
+            exception.printStackTrace();
+        }
         threadLocal.set(connection);
         return connection;
     }
@@ -48,8 +51,12 @@ public class DruidUtils {
      * 根据sql查询结果
      */
     public static ResultSet executeQuery (AvaticaStatement statement, String sql) throws SQLException{
         ResultSet resultSet = statement.executeQuery(sql);
         return resultSet;
     }
 
+    public static AvaticaStatement getStatement(AvaticaConnection conn) throws SQLException {
+        return conn.createStatement();
+    }
+
 }
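Note that getConn() no longer declares throws SQLException: a connection failure is printed inside the method and null is returned (and stored in the ThreadLocal). A defensive caller sketch under that assumption, inside a method that declares throws SQLException:

    AvaticaConnection conn = DruidUtils.getConn();
    if (conn == null) {
        // connection failed; the SQLException was already printed inside getConn()
        throw new IllegalStateException("Druid connection could not be established: " + ApplicationConfig.DRUID_URL);
    }
    AvaticaStatement stmt = DruidUtils.getStatement(conn);   // this one still throws SQLException
    ResultSet rs = DruidUtils.executeQuery(stmt, "SELECT 1");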
@@ -1 +1 @@
 package cn.mesalab.utils;
@@ -2,7 +2,6 @@ package cn.mesalab.utils;
 
 import cn.mesalab.config.ApplicationConfig;
 import cn.mesalab.dao.DruidData;
-import cn.mesalab.service.BaselineGeneration;
 import com.google.common.collect.Lists;
 import org.jfree.util.Log;
 import org.slf4j.Logger;
@@ -10,13 +9,11 @@ import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
 import java.io.FileReader;
-import java.lang.reflect.Array;
 import java.time.Duration;
 import java.time.Instant;
 import java.time.LocalDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.*;
-import java.util.stream.Stream;
 
 
 /**
@@ -54,7 +54,7 @@ baseline.kalman.r=0.002
 # 每更新1000个记录打印log
 log.write.count=10000
 # FOR TEST
-generate.batch.size=10
+generate.batch.size=100
 
 
 # http client配置
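Raising generate.batch.size from 10 to 100 makes Lists.partition in generateBaselinesThread() produce far fewer, larger work units (one Druid query and one HBase write per batch). A small illustration with made-up numbers:

    // Assuming 1,050 distinct server IPs and GENERATE_BATCH_SIZE = 100:
    List<List<String>> batches = Lists.partition(destinationIps, 100);
    // -> 11 batches: ten of 100 IPs plus a final one of 50,
    //    each becoming one BaselineSingleThread task.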
src/test/java/cn/mesalab/service/BaselineGenerationBak.java (new file, 206 lines)
@@ -0,0 +1,206 @@
+//package cn.mesalab.service;
+//
+//import cn.mesalab.config.ApplicationConfig;
+//import cn.mesalab.dao.DruidData;
+//import cn.mesalab.service.algorithm.KalmanFilter;
+//import cn.mesalab.utils.HbaseUtils;
+//import cn.mesalab.utils.SeriesUtils;
+//import com.google.common.collect.Lists;
+//import com.google.common.util.concurrent.ThreadFactoryBuilder;
+//import org.apache.commons.math3.stat.StatUtils;
+//import org.apache.hadoop.hbase.client.Put;
+//import org.apache.hadoop.hbase.client.Table;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//
+//import java.io.IOException;
+//import java.util.*;
+//import java.util.concurrent.*;
+//import java.util.stream.Collectors;
+//
+///**
+// * @author yjy
+// * @version 1.0
+// * baseline生成及写入
+// * @date 2021/7/23 5:38 下午
+// */
+//public class BaselineGeneration {
+//    private static final Logger LOG = LoggerFactory.getLogger(BaselineGeneration.class);
+//
+//    private static DruidData druidData;
+//    private static HbaseUtils hbaseUtils;
+//    private static Table hbaseTable;
+//    private static List<Map<String, Object>> batchDruidData = new ArrayList<>();
+//
+//    private static List<String> attackTypeList = Arrays.asList(
+//            ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD,
+//            ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD,
+//            ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD,
+//            ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL
+//    );
+//    private static final Integer BASELINE_POINT_NUM =
+//            ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
+//
+//    /**
+//     * 程序执行
+//     */
+//    public static void perform() {
+//        long start = System.currentTimeMillis();
+//
+//        druidData = DruidData.getInstance();
+//        hbaseUtils = HbaseUtils.getInstance();
+//        hbaseTable = hbaseUtils.getHbaseTable();
+//        LOG.info("Druid 成功建立连接");
+//
+//        try{
+//            // baseline生成并写入
+//            generateBaselinesThread();
+//
+//            long last = System.currentTimeMillis();
+//            LOG.warn("运行时间:" + (last - start));
+//
+//            druidData.closeConn();
+//            hbaseTable.close();
+//            LOG.info("Druid 关闭连接");
+//
+//        } catch (Exception e){
+//            e.printStackTrace();
+//        }
+//        System.exit(0);
+//    }
+//
+//    /**
+//     * 多线程baseline生成入口
+//     * @throws InterruptedException
+//     */
+//    private static void generateBaselinesThread() throws InterruptedException {
+//        int threadNum = Runtime.getRuntime().availableProcessors();
+//
+//        ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
+//                .setNameFormat("baseline-demo-%d").build();
+//
+//        // 创建线程池
+//        ThreadPoolExecutor executor = new ThreadPoolExecutor(
+//                threadNum,
+//                threadNum,
+//                0L,
+//                TimeUnit.MILLISECONDS,
+//                new LinkedBlockingQueue<>(1024),
+//                namedThreadFactory,
+//                new ThreadPoolExecutor.AbortPolicy());
+//
+//        // IP列表获取
+//        ArrayList<String> destinationIps = druidData.getServerIpList();
+//
+//        LOG.info("共查询到服务端ip " +destinationIps.size() + " 个");
+//        LOG.info("Baseline batch 大小: " + ApplicationConfig.GENERATE_BATCH_SIZE);
+//
+//        // 分批进行IP baseline生成和处理
+//        List<List<String>> batchIpLists = Lists.partition(destinationIps, ApplicationConfig.GENERATE_BATCH_SIZE);
+//        for (List<String> batchIps: batchIpLists){
+//            if(batchIps.size()>0){
+//                executor.execute(() -> generateBaselines(batchIps));
+//            }
+//        }
+//
+//        executor.shutdown();
+//        executor.awaitTermination(10L, TimeUnit.HOURS);
+//    }
+//
+//    /**
+//     * 批量生成IP baseline
+//     * @param ipList ip列表
+//     */
+//    public static void generateBaselines(List<String> ipList){
+//        druidData = DruidData.getInstance();
+//        batchDruidData = druidData.readFromDruid(ipList);
+//
+//        List<Put> putList = new ArrayList<>();
+//        for(String attackType: attackTypeList){
+//            for(String ip: ipList){
+//                int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
+//                if (ipBaseline!= null){
+//                    putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
+//                }
+//            }
+//        }
+//
+//        try {
+//            hbaseTable.put(putList);
+//            LOG.info("Baseline 线程 " + Thread.currentThread().getId() + " 成功写入Baseline条数共计 " + putList.size());
+//        } catch (IOException e) {
+//            e.printStackTrace();
+//        }
+//
+//        druidData.closeConn();
+//    }
+//
+//    /**
+//     * 单ip baseline生成逻辑
+//     * @param ip ip
+//     * @param attackType 攻击类型
+//     * @return baseline序列,长度为 60/HISTORICAL_GRAD*24
+//     */
+//    private static int[] generateSingleIpBaseline(String ip, String attackType){
+//        // 查询
+//        List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(batchDruidData, ip, attackType);
+//
+//        if (originSeries.size()==0){
+//            return null;
+//        }
+//
+//        // 时间序列缺失值补0
+//        List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
+//
+//        int[] baselineArr = new int[BASELINE_POINT_NUM];
+//        List<Integer>series = completSeries.stream().map(
+//                i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList());
+//
+//        // 判断ip出现频率
+//        if(originSeries.size()/(float)completSeries.size()>ApplicationConfig.BASELINE_HISTORICAL_RATIO){
+//            // 高频率
+//            double percentile = StatUtils.percentile(series.stream().mapToDouble(Double::valueOf).toArray(),
+//                    ApplicationConfig.BASELINE_SPARSE_FILL_PERCENTILE);
+//            Arrays.fill(baselineArr, (int)percentile);
+//            baselineArr = baselineFunction(series);
+//
+//        } else {
+//            // 判断周期性
+//            if (SeriesUtils.isPeriod(series)){
+//                baselineArr = baselineFunction(series);
+//            } else {
+//                int ipPercentile = SeriesUtils.percentile(
+//                        originSeries.stream().map(i ->
+//                                Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList()),
+//                        ApplicationConfig.BASELINE_RATIONAL_PERCENTILE);
+//                Arrays.fill(baselineArr, ipPercentile);
+//            }
+//        }
+//
+//        return baselineArr;
+//    }
+//
+//    /**
+//     * baseline 生成算法
+//     * @param timeSeries 输入序列
+//     * @return 输出序列
+//     */
+//    private static int[] baselineFunction(List<Integer> timeSeries){
+//        int[] result;
+//        switch (ApplicationConfig.BASELINE_FUNCTION){
+//            case "KalmanFilter":
+//                KalmanFilter kalmanFilter = new KalmanFilter();
+//                kalmanFilter.forcast(timeSeries, BASELINE_POINT_NUM);
+//                result = kalmanFilter.getForecastSeries().stream().mapToInt(Integer::valueOf).toArray();
+//                break;
+//            default:
+//                result = timeSeries.subList(0, BASELINE_POINT_NUM).stream().mapToInt(Integer::valueOf).toArray();
+//        }
+//        return result;
+//    }
+//
+//    public static void main(String[] args) {
+//        perform();
+//    }
+//
+//}
@@ -37,18 +37,36 @@ public class HBaseTest {
         Table table = conn.getTable(tableName);
 
 
-        DruidData druidData = DruidData.getInstance();
-        ArrayList<String> destinationIps = druidData.getServerIpList();
+//        DruidData druidData = DruidData.getInstance();
+//        ArrayList<String> destinationIps = druidData.getServerIpList();
+        List<String> ips = Arrays.asList(
+                "192.168.1.1",
+                "192.168.1.2",
+                "192.168.1.3",
+                "192.168.1.4",
+                "192.168.1.5",
+                "192.168.1.6",
+                "192.168.1.7",
+                "192.168.1.8",
+                "192.168.10.1",
+                "192.168.10.2",
+                "192.168.10.3",
+                "192.168.10.4",
+                "192.168.10.5",
+                "192.168.10.6",
+                "192.168.10.7",
+                "192.168.10.8"
+        );
 
-        for (String ip : destinationIps){
+        for (String ip : ips){
             Get abcGet = new Get(Bytes.toBytes(ip));
             Result r = table.get(abcGet);
             ArrayWritable w = new ArrayWritable(IntWritable.class);
             List<String> attackTypeList = Arrays.asList(
                     "TCP SYN Flood",
-                    "ICMP Flood",
-                    "UDP Flood",
-                    "DNS Amplification"
+                    "ICMP Flood"
+//                    "UDP Flood",
+//                    "DNS Amplification"
             );
             for (String attackType : attackTypeList){
                 byte[] session_nums = r.getValue(Bytes.toBytes(attackType), Bytes.toBytes("session_num"));
@@ -62,12 +80,35 @@ public class HBaseTest {
 
             }
 
-//        Get abcGet = new Get(Bytes.toBytes("1.0.0.1"));
-//        Result r = table.get(abcGet);
-//        ArrayWritable w = new ArrayWritable(IntWritable.class);
-//        w.readFields(new DataInputStream(new ByteArrayInputStream(r.getValue(Bytes.toBytes("TCP SYN Flood"), Bytes.toBytes("session_num")))));
-//        ArrayList<Integer> arr2 = fromWritable(w);
-//        System.out.println(arr2.toString());
+//        int[] arr = new int[144];
+//        Arrays.fill(arr, 100);
+//        List<String> ips = Arrays.asList(
+//                "192.168.1.1",
+//                "192.168.1.2",
+//                "192.168.1.3",
+//                "192.168.1.4",
+//                "192.168.1.5",
+//                "192.168.1.6",
+//                "192.168.1.7",
+//                "192.168.1.8",
+//                "192.168.10.1",
+//                "192.168.10.2",
+//                "192.168.10.3",
+//                "192.168.10.4",
+//                "192.168.10.5",
+//                "192.168.10.6",
+//                "192.168.10.7",
+//                "192.168.10.8"
+//        );
+//
+//        for (String ip : ips){
+//            Put put = new Put(Bytes.toBytes(ip));
+//            put.addColumn(Bytes.toBytes("ICMP Flood"),Bytes.toBytes("session_num"), WritableUtils.toByteArray(toWritable(arr)));
+//            table.put(put);
+//        }
 
 
 
     }
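The test stores each baseline as the serialized bytes of a Hadoop ArrayWritable and reads it back from the same cell. The round-trip, condensed from the test's own (partly commented-out) code; toWritable/fromWritable are helpers of this test class that the diff does not show, and ip, arr, r, table come from the surrounding test:

    // Write: int[] -> ArrayWritable -> bytes -> HBase cell
    Put put = new Put(Bytes.toBytes(ip));
    put.addColumn(Bytes.toBytes("ICMP Flood"), Bytes.toBytes("session_num"),
            WritableUtils.toByteArray(toWritable(arr)));
    table.put(put);

    // Read: HBase cell -> bytes -> ArrayWritable
    ArrayWritable w = new ArrayWritable(IntWritable.class);
    w.readFields(new DataInputStream(new ByteArrayInputStream(
            r.getValue(Bytes.toBytes("ICMP Flood"), Bytes.toBytes("session_num")))));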
@@ -1,6 +1,13 @@ package cn.mesalab.utils;
 package cn.mesalab.utils;
 
+import cn.mesalab.config.ApplicationConfig;
+import cn.mesalab.dao.DruidData;
+import com.google.common.collect.Maps;
 import com.zdjizhi.utils.JsonMapper;
+import sun.net.util.URLUtil;
+
+import java.net.URL;
+import java.util.Map;
 
 /**
  * @author yjy
@@ -8,7 +15,57 @@ import com.zdjizhi.utils.JsonMapper;
  * @date 2021/8/3 4:43 下午
  */
 public class HttpClientUtilsTest {
+    private static HttpClientUtils httpClientUtils = new HttpClientUtils();
+
+    public static void main(String[] args) {
+        executeHttpPost("select * from top_server_ip_test_log limit 10");
+    }
+
+    private static Map<String, String> executeHttpPost(String sql){
+        String queryUrl = "http://192.168.44.12:8082/druid/v2/sql";
+        DruidQueryParam druidQueryParam = getDruidQueryParam(sql);
+        int socketTimeout = ApplicationConfig.HTTP_RESPONSE_TIMEOUT;
+        Map<String, String> stringStringMap = httpClientUtils.httpPost(queryUrl, JsonMapper.toJsonString(druidQueryParam), socketTimeout);
+        System.out.println(stringStringMap.toString());
+        return stringStringMap;
+    }
+
+    public static DruidQueryParam getDruidQueryParam(String sql) {
+        DruidQueryParam druidQueryParam = new DruidQueryParam();
+        druidQueryParam.setQuery(sql);
+        druidQueryParam.getContext().put("skipEmptyBuckets", "true");
+        druidQueryParam.setResultFormat("object");
+        return druidQueryParam;
+    }
+}
+
+
+class DruidQueryParam {
+    private String query;
+    private Map<String, String> context = Maps.newHashMap();
+    private String resultFormat;
+
+    public String getQuery() {
+        return query;
+    }
+
+    public void setQuery(String query) {
+        this.query = query;
+    }
+
+    public Map<String, String> getContext() {
+        return context;
+    }
+
+    public void setContext(Map<String, String> context) {
+        this.context = context;
+    }
+
+    public String getResultFormat() {
+        return resultFormat;
+    }
+
+    public void setResultFormat(String resultFormat) {
+        this.resultFormat = resultFormat;
+    }
 }
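DruidQueryParam mirrors the JSON body of Druid's SQL-over-HTTP API (POST to /druid/v2/sql). For the query in main, JsonMapper.toJsonString(druidQueryParam) should serialize to roughly the following (field order may differ):

    {
      "query": "select * from top_server_ip_test_log limit 10",
      "context": { "skipEmptyBuckets": "true" },
      "resultFormat": "object"
    }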