Initial commit 单线程成功,并行报错

This commit is contained in:
yinjiangyi
2021-08-01 17:28:31 +08:00
commit bff209ac5a
46 changed files with 3994 additions and 0 deletions

View File

@@ -0,0 +1,53 @@
package cn.mesalab.config;
import cn.mesalab.utils.ConfigUtils;
/**
* @author yjy
* @version 1.0
* @date 2021/7/24 10:23 上午
*/
/**
 * Central application configuration: every constant is read once, at class load time,
 * from application.properties via ConfigUtils. A missing/garbled properties file
 * therefore fails fast when this class is first touched.
 */
public class ApplicationConfig {
// --- Druid connection / schema ---
public static final String DRUID_URL= ConfigUtils.getStringProperty("druid.url");
public static final String DRUID_DRIVER = ConfigUtils.getStringProperty("druid.driver");
public static final String DRUID_TABLE = ConfigUtils.getStringProperty("druid.table");
// Time-window mode for Druid reads: 0 = last READ_HISTORICAL_DAYS days ending today,
// 1 = fixed [READ_DRUID_MIN_TIME, READ_DRUID_MAX_TIME) epoch-millis range (see DruidData.getTimeLimit).
public static final Integer DRUID_TIME_LIMIT_TYPE = ConfigUtils.getIntProperty("read.druid.time.limit.type");
public static final Long READ_DRUID_MAX_TIME = ConfigUtils.getLongProperty("read.druid.max.time");
public static final Long READ_DRUID_MIN_TIME = ConfigUtils.getLongProperty("read.druid.min.time");
// Days of history to read; series granularity in minutes (SeriesUtils steps by HISTORICAL_GRAD minutes).
public static final Integer READ_HISTORICAL_DAYS = ConfigUtils.getIntProperty("read.historical.days");
public static final Integer HISTORICAL_GRAD = ConfigUtils.getIntProperty("historical.grad");
// Pattern for formatting series timestamps (used with DateTimeFormatter).
public static final String TIME_FORMAT = ConfigUtils.getStringProperty("time.format");
// Name of the metric column the baselines are computed over.
public static final String BASELINE_METRIC_TYPE = ConfigUtils.getStringProperty("baseline.metric.type");
// --- Attack-type values as stored in the Druid table ---
public static final String DRUID_ATTACKTYPE_TCP_SYN_FLOOD = ConfigUtils.getStringProperty("druid.attacktype.tcpsynflood");
public static final String DRUID_ATTACKTYPE_UDP_FLOOD = ConfigUtils.getStringProperty("druid.attacktype.udpflood");
public static final String DRUID_ATTACKTYPE_ICMP_FLOOD = ConfigUtils.getStringProperty("druid.attacktype.icmpflood");
public static final String DRUID_ATTACKTYPE_DNS_AMPL = ConfigUtils.getStringProperty("druid.attacktype.dnsamplification");
// --- Druid column names ---
public static final String DRUID_SERVERIP_COLUMN_NAME = ConfigUtils.getStringProperty("druid.serverip.columnname");
public static final String DRUID_ATTACKTYPE_COLUMN_NAME = ConfigUtils.getStringProperty("druid.attacktype.columnname");
public static final String DRUID_RECVTIME_COLUMN_NAME = ConfigUtils.getStringProperty("druid.recvtime.columnname");
// --- Baseline algorithm thresholds ---
// Minimum Pearson correlation for a series to count as day-periodic (SeriesUtils.isPeriod).
public static final float BASELINE_PERIOD_CORR_THRE = ConfigUtils.getFloatProperty("baseline.period.correlative.threshold");
// Observed/complete point-count ratio separating dense from sparse ips.
public static final float BASELINE_HISTORICAL_RATIO = ConfigUtils.getFloatProperty("baseline.historical.ratio.threshold");
// Percentile used to fill sparse series (fed to StatUtils.percentile).
public static final float BASELINE_SPARSE_FILL_PERCENTILE = ConfigUtils.getFloatProperty("baseline.historical.sparse.fill.percentile");
// Baseline generator selector; "KalmanFilter" or anything else for the plain-slice default.
public static final String BASELINE_FUNCTION = ConfigUtils.getStringProperty("baseline.function");
// Days one generated baseline covers.
public static final Integer BASELINE_RANGE_DAYS = ConfigUtils.getIntProperty("baseline.range.days");
// Percentile used for flat baselines of non-periodic ips.
public static final float BASELINE_RATIONAL_PERCENTILE = ConfigUtils.getFloatProperty("baseline.rational.percentile");
// --- HBase output ---
public static final String HBASE_TABLE = ConfigUtils.getStringProperty("hbase.table");
public static final String HBASE_ZOOKEEPER_QUORUM= ConfigUtils.getStringProperty("hbase.zookeeper.quorum");
public static final String HBASE_ZOOKEEPER_CLIENT_PORT= ConfigUtils.getStringProperty("hbase.zookeeper.client.port");
// Kalman filter noise offsets — presumably process/measurement noise; see KalmanFilter (non-textbook usage, verify).
public static final Double BASELINE_KALMAN_Q = ConfigUtils.getDoubleProperty("baseline.kalman.q");
public static final Double BASELINE_KALMAN_R = ConfigUtils.getDoubleProperty("baseline.kalman.r");
// NOTE(review): LOG_WRITE_COUNT is not referenced anywhere in the visible sources.
public static final Integer LOG_WRITE_COUNT = ConfigUtils.getIntProperty("log.write.count");
// Number of ips each worker batch processes (BaselineGeneration).
public static final Integer GENERATE_BATCH_NUM= ConfigUtils.getIntProperty("generate.batch.number");
}

View File

@@ -0,0 +1,134 @@
package cn.mesalab.dao;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.dao.Impl.ResultSetToListServiceImp;
import cn.mesalab.utils.DruidUtils;
import io.vavr.Tuple;
import io.vavr.Tuple2;
import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.calcite.avatica.AvaticaStatement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 4:56 下午
*/
/**
 * Read-side DAO for the Druid table: server-ip enumeration and per-ip time series,
 * both restricted to the configured time window.
 */
public class DruidData {
    private static final Logger LOG = LoggerFactory.getLogger(DruidData.class);

    // BUG FIX: volatile + double-checked locking — the original unsynchronized
    // lazy init races when perform() runs with the parallel executor.
    private static volatile DruidData druidData;
    private static final DruidUtils druidUtils = DruidUtils.getInstance();

    /**
     * SQL predicate: recvtime in [minTime, maxTime). Built once in the constructor
     * instead of calling getTimeLimit() twice as the original field initializer did.
     */
    private final String timeFilter;

    public DruidData() {
        Tuple2<Long, Long> timeLimit = getTimeLimit();
        this.timeFilter = ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
                + " >= MILLIS_TO_TIMESTAMP(" + timeLimit._2
                + ") AND " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
                + " < MILLIS_TO_TIMESTAMP(" + timeLimit._1 + ")";
    }

    /** Thread-safe singleton accessor. */
    public static DruidData getInstance() {
        if (druidData == null) {
            synchronized (DruidData.class) {
                if (druidData == null) {
                    druidData = new DruidData();
                }
            }
        }
        return druidData;
    }

    /**
     * Distinct server ips seen for the given attack type inside the time window.
     * NOTE(review): "LIMIT 20" is a leftover test cap — remove before production use.
     *
     * @return possibly-empty list; never null (errors are logged and swallowed)
     */
    public ArrayList<String> getServerIpList(String attackType) {
        ArrayList<String> serverIPs = new ArrayList<String>();
        String sql = "SELECT distinct " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
                + " FROM " + ApplicationConfig.DRUID_TABLE
                + " WHERE " + ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME + " = '" + attackType + "'"
                + " AND " + timeFilter
                + " LIMIT 20"; // FOR TEST
        // BUG FIX: close the ResultSet (was leaked) and log instead of printStackTrace.
        try (ResultSet resultSet = druidUtils.executeQuery(sql)) {
            while (resultSet.next()) {
                serverIPs.add(resultSet.getString(ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME));
            }
        } catch (Exception e) {
            LOG.error("查询服务端IP失败, attackType=" + attackType, e);
        }
        return serverIPs;
    }

    /** Test stub: fixed single-ip list, ignores the extra parameter. */
    public ArrayList<String> getServerIpList(String attackType, String test) {
        ArrayList<String> serverIPs = new ArrayList<String>();
        serverIPs.add("153.99.250.54");
        return serverIPs;
    }

    /**
     * Full (ip, metric, recvtime) series for one server ip and attack type
     * inside the configured time window.
     *
     * @return list of row-maps, or null when the query failed
     */
    public List<Map<String, Object>> getTimeSeriesData(String ip, String attackType) {
        List<Map<String, Object>> rsList = null;
        String sql = "SELECT " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
                + ", " + ApplicationConfig.BASELINE_METRIC_TYPE
                + ", " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
                + " FROM " + ApplicationConfig.DRUID_TABLE
                + " WHERE " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
                + " = '" + ip + "'"
                + " AND " + ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME
                + " = '" + attackType + "'"
                + " AND " + timeFilter;
        LOG.debug("getTimeSeriesData: {}", sql);
        try (ResultSet resultSet = druidUtils.executeQuery(sql)) {
            ResultSetToListService service = new ResultSetToListServiceImp();
            rsList = service.selectAll(resultSet);
        } catch (Exception e) {
            LOG.error("查询时间序列失败, ip=" + ip, e);
        }
        return rsList;
    }

    /**
     * Time window for all Druid reads, chosen by DRUID_TIME_LIMIT_TYPE
     * (0 = rolling READ_HISTORICAL_DAYS window ending at today's midnight,
     * 1 = fixed configured range).
     *
     * @return tuple where _1 = max (exclusive upper bound) and _2 = min, epoch millis
     */
    public Tuple2<Long, Long> getTimeLimit() {
        long maxTime = 0L;
        long minTime = 0L;
        switch (ApplicationConfig.DRUID_TIME_LIMIT_TYPE) {
            case 0:
                maxTime = getCurrentDay();
                minTime = getCurrentDay(-ApplicationConfig.READ_HISTORICAL_DAYS);
                break;
            case 1:
                maxTime = ApplicationConfig.READ_DRUID_MAX_TIME;
                minTime = ApplicationConfig.READ_DRUID_MIN_TIME;
                break;
            default:
                LOG.warn("没有设置Druid数据读取方式");
        }
        return Tuple.of(maxTime, minTime);
    }

    /** Midnight (00:00:00.000) of today shifted by {@code bias} days, as epoch millis. */
    private long getCurrentDay(int bias) {
        Calendar calendar = Calendar.getInstance();
        // BUG FIX: use add() instead of set(get()+bias), and drop the duplicated
        // HOUR_OF_DAY assignment that was in the original.
        calendar.add(Calendar.DAY_OF_YEAR, bias);
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        return calendar.getTimeInMillis();
    }

    /** Midnight of today, epoch millis. */
    private long getCurrentDay() {
        return getCurrentDay(0);
    }

    /** Closes the shared Druid connection. */
    public void closeConn() {
        druidUtils.closeConnection();
    }
}

View File

@@ -0,0 +1,44 @@
package cn.mesalab.dao.Impl;
import cn.mesalab.dao.ResultSetToListService;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author yjy
* @version 1.0
* @date 2021/7/24 4:29 下午
*/
public class ResultSetToListServiceImp implements ResultSetToListService {
    /**
     * Consumes every row of the ResultSet into a List: one element per record,
     * each record as a Map from column label to value.
     *
     * @param rs cursor positioned before the first row; fully consumed but NOT closed here
     * @return list of rows; empty (never null) when the set has no rows or reading failed
     */
    @Override
    public List<Map<String, Object>> selectAll(ResultSet rs) {
        List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
        try {
            ResultSetMetaData rmd = rs.getMetaData();
            int columnCount = rmd.getColumnCount();
            while (rs.next()) {
                // Presized to avoid rehashing; one map per record.
                Map<String, Object> rowData = new HashMap<String, Object>(columnCount * 2);
                for (int i = 1; i <= columnCount; ++i) {
                    // BUG FIX: getColumnLabel honours SQL "AS" aliases (JDBC-recommended);
                    // getColumnName may return the underlying column name instead.
                    rowData.put(rmd.getColumnLabel(i), rs.getObject(i));
                }
                list.add(rowData);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        return list;
    }
}

View File

@@ -0,0 +1,24 @@
package cn.mesalab.dao;
import java.sql.ResultSet;
import java.util.List;
import java.util.Map;
/**
* @author yjy
* @version 1.0
* @date 2021/7/24 4:27 下午
*/
public interface ResultSetToListService {
/**
 * Reads all records from the given ResultSet into a List: one list element per
 * record, each record stored as a Map from column name (String) to value (Object).
 *
 * @param rs result cursor to consume
 * @return List<Map < String, Object>>
 */
public List<Map<String, Object>> selectAll(ResultSet rs);
}

View File

@@ -0,0 +1,14 @@
package cn.mesalab.main;
import cn.mesalab.service.BaselineGeneration;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 5:34 下午
*/
public class BaselineApplication {
/**
 * CLI entry point: runs the whole baseline generation pipeline
 * via BaselineGeneration.perform() (which exits the JVM when done).
 */
public static void main(String[] args) {
BaselineGeneration.perform();
}
}

View File

@@ -0,0 +1,174 @@
package cn.mesalab.service;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.dao.DruidData;
import cn.mesalab.service.BaselineService.KalmanFilter;
import cn.mesalab.utils.HbaseUtils;
import cn.mesalab.utils.SeriesUtils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.math3.stat.StatUtils;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 5:38 下午
*/
/**
 * Orchestrates baseline generation: reads per-ip time series from Druid,
 * computes a baseline per ip, and writes results to HBase in batches,
 * fanned out over a fixed thread pool.
 */
public class BaselineGeneration {
    private static final Logger LOG = LoggerFactory.getLogger(BaselineGeneration.class);
    private static DruidData druidData;
    private static HbaseUtils hbaseUtils;
    private static Table hbaseTable;

    /** Points one baseline covers: days * 24h * points-per-hour. */
    private static final Integer BASELINE_POINT_NUM =
            ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60 / ApplicationConfig.HISTORICAL_GRAD);

    /**
     * Entry point: connects to Druid/HBase, generates baselines for the configured
     * attack type(s), then tears the connections down and exits the JVM.
     */
    public static void perform() {
        long start = System.currentTimeMillis();
        druidData = DruidData.getInstance();
        hbaseUtils = HbaseUtils.getInstance();
        hbaseTable = hbaseUtils.getHbaseTable();
        LOG.info("Druid 成功建立连接");
        try {
            generateBaselinesThread(ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD);
            //generateBaselines(ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD);
            //generateBaselines(ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD);
            //generateBaselines(ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL);
            long last = System.currentTimeMillis();
            LOG.warn("运行时间:" + (last - start));
            druidData.closeConn();
            hbaseTable.close();
            LOG.info("Druid 关闭连接");
        } catch (Exception e) {
            LOG.error("baseline 生成失败", e);
        }
        System.exit(0);
    }

    /**
     * Parallel generation: splits the ip list into GENERATE_BATCH_NUM-sized batches
     * and processes each batch on a pool sized to the CPU count. Blocks until all
     * batches finish.
     */
    private static void generateBaselinesThread(String attackType) throws InterruptedException {
        int threadNum = Runtime.getRuntime().availableProcessors();
        ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
                .setNameFormat(attackType + "-baseline-demo-%d").build();
        // Fixed-size pool; AbortPolicy will throw if more than 1024 batches queue up.
        ThreadPoolExecutor executor = new ThreadPoolExecutor(
                threadNum,
                threadNum,
                0L,
                TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>(1024),
                namedThreadFactory,
                new ThreadPoolExecutor.AbortPolicy());
        ArrayList<String> destinationIps = druidData.getServerIpList(attackType);
        LOG.info("查询到服务端ip共 " + destinationIps.size() + "");
        int batchSize = ApplicationConfig.GENERATE_BATCH_NUM;
        // BUG FIX: the original computed size()/batchSize with integer division and
        // silently dropped the trailing partial batch; iterate by offset instead so
        // every ip is processed.
        for (int from = 0; from < destinationIps.size(); from += batchSize) {
            List<String> batchIps = destinationIps.subList(from, Math.min(from + batchSize, destinationIps.size()));
            executor.execute(() -> generateBaselines(batchIps, attackType));
        }
        executor.shutdown();
        // BUG FIX: a single 10s awaitTermination abandoned unfinished batches;
        // poll until the pool has actually drained.
        while (!executor.awaitTermination(10L, TimeUnit.SECONDS)) {
            LOG.info("等待 baseline 线程池结束 ...");
        }
        LOG.info("BaselineGeneration 完成:" + attackType);
        LOG.info("BaselineGeneration 共写入数据条数:" + destinationIps.size());
    }

    /** Single-threaded variant: processes every ip of the attack type in one batch. */
    static void generateBaselines(String attackType) {
        ArrayList<String> destinationIps = druidData.getServerIpList(attackType);
        generateBaselines(destinationIps, attackType);
        LOG.info("BaselineGeneration 完成:" + attackType);
        LOG.info("BaselineGeneration 共写入数据条数:" + destinationIps.size());
    }

    /** Generates one baseline per ip and writes the whole batch to HBase as one put. */
    public static void generateBaselines(List<String> ipList, String attackType) {
        List<Put> putList = new ArrayList<>();
        for (String ip : ipList) {
            int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
            putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
        }
        try {
            hbaseTable.put(putList);
            // BUG FIX: log the actual batch size; the last batch may be smaller than
            // the configured GENERATE_BATCH_NUM the original always printed.
            LOG.info("HBase 写入数据条数 " + ipList.size());
        } catch (IOException e) {
            LOG.error("HBase 写入失败", e);
        }
    }

    /**
     * Computes the baseline array for one ip: dense series are modelled with
     * baselineFunction; sparse periodic ones likewise; sparse non-periodic ones
     * get a flat percentile baseline.
     */
    private static int[] generateSingleIpBaseline(String ip, String attackType) {
        List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(ip, attackType);
        LOG.debug("thread={} ip={} origin size={}", Thread.currentThread().getId(), ip, originSeries.size());
        // Fill missing time points with 0 so the series covers the whole window.
        List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
        int[] baselineArr = new int[completSeries.size()];
        List<Integer> series = completSeries.stream()
                .map(i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString()))
                .collect(Collectors.toList());
        // Ratio of actually observed points to the complete grid.
        if (originSeries.size() / (float) completSeries.size() > ApplicationConfig.BASELINE_HISTORICAL_RATIO) {
            // NOTE(review): the original also computed a sparse-fill percentile here and
            // then unconditionally overwrote it with baselineFunction(); that dead code is
            // removed — confirm the percentile fallback was not the intended branch.
            baselineArr = baselineFunction(series);
        } else {
            if (SeriesUtils.isPeriod(series)) {
                baselineArr = baselineFunction(series);
            } else {
                // Non-periodic sparse ip: flat baseline at a percentile of observed values.
                int ipPercentile = SeriesUtils.percentile(
                        originSeries.stream()
                                .map(i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString()))
                                .collect(Collectors.toList()),
                        ApplicationConfig.BASELINE_RATIONAL_PERCENTILE);
                Arrays.fill(baselineArr, ipPercentile);
            }
        }
        return baselineArr;
    }

    /** Dispatches to the configured baseline generator ("KalmanFilter" or plain slice). */
    private static int[] baselineFunction(List<Integer> timeSeries) {
        int[] result;
        switch (ApplicationConfig.BASELINE_FUNCTION) {
            case "KalmanFilter":
                KalmanFilter kalmanFilter = new KalmanFilter();
                kalmanFilter.forcast(timeSeries, BASELINE_POINT_NUM);
                result = kalmanFilter.getForecastSeries().stream().mapToInt(Integer::valueOf).toArray();
                break;
            default:
                // BUG FIX: clamp to the series length — the original subList threw
                // IndexOutOfBounds when the series was shorter than BASELINE_POINT_NUM.
                int limit = Math.min(BASELINE_POINT_NUM, timeSeries.size());
                result = timeSeries.subList(0, limit).stream().mapToInt(Integer::valueOf).toArray();
        }
        return result;
    }

    public static void main(String[] args) {
        perform();
    }
}

View File

@@ -0,0 +1,92 @@
package cn.mesalab.service.BaselineService;
import cn.mesalab.config.ApplicationConfig;
import java.util.ArrayList;
import java.util.List;
/**
* @author yjy
* @version 1.0
* @date 2021/7/25 1:42 下午
*/
/**
 * Smooths a historical series with a (simplified) Kalman-style filter, then builds a
 * forecast of {@code length} points by averaging the smoothed series period-by-period.
 * Not thread-safe: each use should create its own instance.
 */
public class KalmanFilter {
    private Integer predict;
    private Integer current;
    private Integer estimate;
    private double pdelt;    // prediction deviation (NOTE(review): never updated after initial())
    private double mdelt;    // measurement deviation, updated each step
    private double Gauss;
    private double kalmanGain;
    // Noise offsets from configuration.
    private final static double Q = ApplicationConfig.BASELINE_KALMAN_Q;
    private final static double R = ApplicationConfig.BASELINE_KALMAN_R;

    public KalmanFilter() {
        initial();
    }

    /** Resets both deviations to 1. */
    public void initial() {
        pdelt = 1;
        mdelt = 1;
    }

    private ArrayList<Integer> smoothSeries;
    private ArrayList<Integer> forecastSeries;

    /**
     * One filter step: blends the previous estimate with the new measurement.
     * NOTE(review): Q and R are added as offsets to the deviation/gain rather than as the
     * textbook additive variances — confirm this is intended before tuning them.
     */
    public Integer calSingleKalPoint(Integer oldValue, Integer value) {
        predict = oldValue;
        current = value;
        Gauss = Math.sqrt(pdelt * pdelt + mdelt * mdelt) + Q;
        kalmanGain = Math.sqrt((Gauss * Gauss) / (Gauss * Gauss + pdelt * pdelt)) + R;
        estimate = (int) (kalmanGain * (current - predict) + predict);
        mdelt = Math.sqrt((1 - kalmanGain) * Gauss * Gauss);
        return estimate;
    }

    /**
     * Smooths {@code historicalSeries} and fills {@code forecastSeries} with {@code length}
     * points: position i is the mean of the smoothed values at i, i+length, i+2*length, ...
     *
     * @throws IllegalArgumentException if the series is empty, length is non-positive,
     *         or the series is shorter than length (original crashed with divide-by-zero)
     */
    public void forcast(List<Integer> historicalSeries, Integer length) {
        if (historicalSeries == null || historicalSeries.isEmpty() || length == null || length <= 0) {
            throw new IllegalArgumentException("historicalSeries must be non-empty and length positive");
        }
        // Seed with the series mean.
        int oldValue = historicalSeries.stream().mapToInt(Integer::intValue).sum() / historicalSeries.size();
        smoothSeries = new ArrayList<Integer>();
        for (int i = 0; i < historicalSeries.size(); i++) {
            oldValue = calSingleKalPoint(oldValue, historicalSeries.get(i));
            smoothSeries.add(oldValue);
        }
        forecastSeries = new ArrayList<>();
        int partitionNum = historicalSeries.size() / length;
        // BUG FIX: guard against division by zero when the history is shorter than length.
        if (partitionNum == 0) {
            throw new IllegalArgumentException(
                    "historicalSeries (" + historicalSeries.size() + ") shorter than forecast length " + length);
        }
        for (int i = 0; i < length; i++) {
            long sum = 0;
            for (int period = 0; period < partitionNum; period++) {
                sum += smoothSeries.get(length * period + i);
            }
            // BUG FIX: original "(int)sum/partitonNum" cast BEFORE dividing (overflow risk
            // for large sums); divide in long first, then narrow.
            forecastSeries.add((int) (sum / partitionNum));
        }
    }

    /** Smoothed input series; null until forcast() has run. */
    public ArrayList<Integer> getSmoothSeries() {
        return smoothSeries;
    }

    /** Smoothed series followed by the forecast; null parts until forcast() has run. */
    public ArrayList<Integer> getAllRangeSeries() {
        ArrayList<Integer> results = new ArrayList<>();
        results.addAll(smoothSeries);
        results.addAll(forecastSeries);
        return results;
    }

    /** Forecast of the last requested length; null until forcast() has run. */
    public ArrayList<Integer> getForecastSeries() {
        return forecastSeries;
    }
}

View File

@@ -0,0 +1,154 @@
package cn.mesalab.utils;
import com.google.common.collect.Lists;
import org.springframework.core.task.AsyncTaskExecutor;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
/**
* 〈一句话功能简述〉:
* 〈并发工具〉
*
* @create 2020-05-28
* @since 1.0.0
*/
/**
 * Concurrency helper: fans a key array out over {@code batchProcessNum} workers that
 * atomically claim the next unprocessed index, then waits for all futures to finish.
 */
public class ConcurrentUtils {
    /**
     * Concurrently processes all {@code keys}.
     *
     * @param batchProcessNum number of concurrent workers
     * @param executor        pool running the workers
     * @param keys            keys to process
     * @param kvConvert       derives a value from a key
     * @param valueProcessor  post-processing of each (key, value) pair
     * @param exceptionHook   called when deriving/processing a key throws
     */
    public static <K, V> void concurrentProcess(int batchProcessNum, AsyncTaskExecutor executor, K[] keys,
                                                Function<K, V> kvConvert,
                                                ValueProcessor<K, V> valueProcessor,
                                                ExceptionHook<K> exceptionHook) {
        AtomicInteger index = new AtomicInteger(1);
        List<Future> futureList = Lists.newArrayListWithExpectedSize(batchProcessNum);
        final int length = keys.length;
        // BUG FIX: the original printed literal "{}" logger placeholders in these
        // println messages; the values were concatenated after the placeholder.
        System.out.println("total jobs size: " + length);
        for (int i = 0; i < batchProcessNum; i++) {
            int finalI = i;
            futureList.add(executor.submit(() -> {
                System.out.println("batch process start thread num: " + finalI);
                int currentIndex;
                // Atomically claim indices until the array is exhausted.
                while (length >= (currentIndex = index.getAndIncrement())) {
                    System.out.println("current job index:" + currentIndex + " of " + length);
                    K key = keys[currentIndex - 1];
                    try {
                        valueProcessor.process(key, kvConvert.apply(key));
                    } catch (Exception e) {
                        exceptionHook.process(key, e);
                    }
                }
                System.out.println("batch process end thread num: " + finalI);
            }));
        }
        waitFutureFinished(futureList);
    }

    /** Overload with a default exception hook that just prints the failing key. */
    public static <K, V> void concurrentProcess(int batchProcessNum, AsyncTaskExecutor executor, K[] keys,
                                                Function<K, V> kvConvert,
                                                ValueProcessor<K, V> valueProcessor) {
        concurrentProcess(batchProcessNum, executor, keys, kvConvert, valueProcessor,
                (key, e) -> System.out.println("通过key:" + key + " 获取value异常, e:" + e));
    }

    /**
     * Concurrently derives a value per key and collects the non-null results.
     *
     * @return collection of derived values (order unspecified)
     */
    public static <K, V> Collection<V> concurrentGet(int batchProcessNum, AsyncTaskExecutor executor, K[] keys,
                                                     Function<K, V> kvConvert) {
        List<V> rt = Lists.newArrayListWithCapacity(keys.length);
        concurrentProcess(batchProcessNum, executor, keys, kvConvert, (k, v) -> {
            if (v == null) {
                System.out.println("key:" + k + " apply value is null");
                return;
            }
            // BUG FIX: this callback runs on multiple worker threads; unsynchronized
            // ArrayList.add corrupts the list under contention.
            synchronized (rt) {
                rt.add(v);
            }
        });
        return rt;
    }

    /**
     * Polls until every future completes. If a future failed and {@code ignoreException}
     * is false, cancels the remaining unfinished futures and throws.
     */
    public static void waitFutureFinished(List<Future> unfinishedFuture, boolean ignoreException) {
        boolean interrupt = false;
        while (!unfinishedFuture.isEmpty()) {
            Iterator<Future> iterator = unfinishedFuture.iterator();
            while (iterator.hasNext()) {
                Future next = iterator.next();
                if (next.isDone()) {
                    try {
                        next.get();
                    } catch (InterruptedException | ExecutionException e) {
                        System.out.println("执行异常, e:" + e);
                        if (!ignoreException) {
                            interrupt = true;
                            break;
                        }
                    }
                    iterator.remove();
                }
            }
            if (interrupt) {
                break;
            }
            sleep();
        }
        if (interrupt) {
            Iterator<Future> iterator = unfinishedFuture.iterator();
            while (iterator.hasNext()) {
                Future next = iterator.next();
                // BUG FIX: the original cancelled only futures that were already DONE
                // (a no-op); the point is to cancel the still-running ones.
                if (!next.isDone()) {
                    next.cancel(true);
                }
            }
            throw new RuntimeException("任务异常终止");
        }
    }

    /** Variant that aborts on the first failed future. */
    public static void waitFutureFinished(List<Future> unfinishedFuture) {
        waitFutureFinished(unfinishedFuture, false);
    }

    /** Default 5s poll interval. */
    public static void sleep() {
        sleep(5000);
    }

    public static void sleep(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            System.out.println("sleep error, e:" + e);
        }
    }

    @FunctionalInterface
    public interface ValueProcessor<K, V> {
        void process(K key, V value);
    }

    @FunctionalInterface
    public interface ExceptionHook<K> {
        void process(K key, Exception e);
    }
}

View File

@@ -0,0 +1,45 @@
package cn.mesalab.utils;
import org.apache.log4j.Logger;
import java.util.Properties;
/**
 * Typed accessors over application.properties, loaded once at class-init time.
 * When loading fails the backing Properties stays EMPTY so string lookups return
 * null instead of throwing (the numeric getters still fail on missing keys, as before).
 */
public class ConfigUtils {
    private static final Logger LOG = Logger.getLogger(ConfigUtils.class);
    private static final Properties propCommon = new Properties();

    static {
        try {
            propCommon.load(ConfigUtils.class.getClassLoader().getResourceAsStream("application.properties"));
        } catch (Exception e) {
            // BUG FIX: the original set propCommon = null here, which turned every
            // subsequent getter call into a NullPointerException.
            LOG.error("配置加载失败", e);
        }
    }

    /** @return the raw property value, or null when absent */
    public static String getStringProperty(String key) {
        return propCommon.getProperty(key);
    }

    /** @throws NullPointerException / NumberFormatException when the key is missing or malformed */
    public static Float getFloatProperty(String key) {
        return Float.parseFloat(propCommon.getProperty(key));
    }

    public static Integer getIntProperty(String key) {
        return Integer.parseInt(propCommon.getProperty(key));
    }

    public static Long getLongProperty(String key) {
        return Long.parseLong(propCommon.getProperty(key));
    }

    public static Double getDoubleProperty(String key) {
        return Double.parseDouble(propCommon.getProperty(key));
    }

    /** @return true iff the trimmed, lower-cased value equals "true"; false when absent */
    public static Boolean getBooleanProperty(String key) {
        // BUG FIX: null-safe — the original threw NPE when the key was missing.
        String value = propCommon.getProperty(key);
        return value != null && "true".equals(value.toLowerCase().trim());
    }
}

View File

@@ -0,0 +1,77 @@
package cn.mesalab.utils;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.service.BaselineGeneration;
import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.calcite.avatica.AvaticaStatement;
import org.jfree.util.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
/**
* @author yjy
* @version 1.0
* @date 2021/7/23 4:50 下午
*/
/**
 * Shared Druid (Avatica JDBC) connection holder.
 *
 * Thread-safety: the original shared ONE AvaticaStatement across all threads, so a
 * second concurrent executeQuery invalidated the first thread's ResultSet — the
 * likely cause of the "single thread ok, parallel fails" symptom. executeQuery now
 * creates a statement per query.
 */
public class DruidUtils {
    private static final Logger LOG = LoggerFactory.getLogger(DruidUtils.class);
    private static volatile DruidUtils druidUtils;
    private static final String DRUID_URL = ApplicationConfig.DRUID_URL;
    private static AvaticaConnection connection;
    // Kept only for the getStatement() accessor; no longer used by executeQuery.
    private static AvaticaStatement statement;

    public DruidUtils() throws SQLException {
        Properties properties = new Properties();
        connection = (AvaticaConnection) DriverManager.getConnection(DRUID_URL, properties);
        statement = connection.createStatement();
    }

    /** Thread-safe lazy singleton (the original's unsynchronized check raced). */
    public static synchronized DruidUtils getInstance() {
        if (connection == null) {
            try {
                druidUtils = new DruidUtils();
            } catch (SQLException e) {
                LOG.error("Druid 建立连接失败!", e);
            }
        }
        return druidUtils;
    }

    /** Closes the shared connection; safe to call when never opened. */
    public void closeConnection() {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                LOG.error("Druid 关闭连接失败!", e);
            }
        }
    }

    /**
     * Runs a query on a FRESH statement so concurrent callers cannot invalidate each
     * other's ResultSet. Closing the returned ResultSet closes it for the caller;
     * the per-query statement is released when the connection closes.
     */
    public ResultSet executeQuery(String sql) throws SQLException {
        LOG.debug("executeQuery: {}", sql);
        return connection.createStatement().executeQuery(sql);
    }

    public AvaticaConnection getConnection() {
        return connection;
    }

    /** @deprecated shared statement is not safe for concurrent use; prefer executeQuery */
    public AvaticaStatement getStatement() {
        return statement;
    }
}

View File

@@ -0,0 +1 @@
package cn.mesalab.utils;

View File

@@ -0,0 +1,214 @@
package cn.mesalab.utils;
import cn.mesalab.config.ApplicationConfig;
import cn.mesalab.dao.DruidData;
import cn.mesalab.service.BaselineGeneration;
import com.google.common.collect.Lists;
import org.jfree.util.Log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileReader;
import java.lang.reflect.Array;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Stream;
/**
* @author joy
*/
public class SeriesUtils {
private static final Logger LOG = LoggerFactory.getLogger(SeriesUtils.class);
private static DruidData druidData = new DruidData();
public static List<Map<String, Object>> readCsvToList(String filePath) {
List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
String line;
try (BufferedReader br = new BufferedReader(new FileReader(filePath))) {
br.readLine();
while ((line = br.readLine()) != null) {
List<String> column = Arrays.asList(line.split(","));
// 保存记录中的每个<字段名-字段值>
Map<String, Object> rowData = new HashMap<String, Object>();
rowData.put("__time", column.get(0));
rowData.put(ApplicationConfig.BASELINE_METRIC_TYPE, Integer.valueOf(column.get(1)));
list.add(rowData);
}
} catch (Exception e) {
e.printStackTrace();
}
return list;
}
/**
* 时序数据补齐
*/
public static List<Map<String, Object>> complementSeries(List<Map<String, Object>> originSeries){
LocalDateTime startTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(druidData.getTimeLimit()._2), TimeZone
.getDefault().toZoneId());
LocalDateTime endTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(druidData.getTimeLimit()._1), TimeZone
.getDefault().toZoneId());
List<String> dateList = completionDate(startTime, endTime);
// 补全后的结果
List<Map<String, Object>> result = new ArrayList<>();
boolean dbDateExist = false;
for (String date : dateList) {
//table为数据库查询出来的对象列表结构为List<Map<String, Object>>
for (Map<String, Object> row : originSeries) {
if (row.get(ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME).toString().substring(0,19).equals(date)) {
//集合已包含该日期
dbDateExist = true;
result.add(row);
break;
}
}
//添加补全的数据到最后结果列表
if (!dbDateExist) {
Map<String, Object> temp = new HashMap<>(2);
temp.put(ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME, date);
temp.put(ApplicationConfig.BASELINE_METRIC_TYPE, 0);
result.add(temp);
}
dbDateExist = false;
}
return result;
}
private static List<String> completionDate(LocalDateTime startTime, LocalDateTime endTime) {
//日期格式化
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(ApplicationConfig.TIME_FORMAT);
List<String> timeList = new ArrayList<>();
//遍历给定的日期期间的每一天
for (int i = 0; !Duration.between(startTime.plusMinutes(i+1), endTime).isNegative(); i+= ApplicationConfig.HISTORICAL_GRAD) {
//添加日期
timeList.add(startTime.plusMinutes(i).format(formatter));
}
return timeList;
}
/**
* 判断是否存在以天为单位的周期特征
* @param historicalSeries
* @return
*/
public static Boolean isPeriod(List<Integer> historicalSeries){
Boolean result = true;
List<List<Integer>> partitions = Lists.partition(historicalSeries, 24*60/ApplicationConfig.HISTORICAL_GRAD);
List<Integer> aggregatedPart = Arrays.asList();
try{
aggregatedPart = columnAverage(partitions.subList(0, ApplicationConfig.READ_HISTORICAL_DAYS-1));
} catch (IndexOutOfBoundsException e){
Log.error("历史");
}
// Pearson corrcoef
double pearsonCorrelationScore = getPearsonCorrelationScore(aggregatedPart.stream().mapToInt(Integer::valueOf).toArray(),
partitions.get(partitions.size() - 1).stream().mapToInt(Integer::valueOf).toArray());
if (pearsonCorrelationScore < ApplicationConfig.BASELINE_PERIOD_CORR_THRE){
result=false;
}
return result;
}
public static double getPearsonCorrelationScore(int[] xData, int[] yData) {
if (xData.length != yData.length) {
Log.error("Pearson CorrelationScore 数组长度不相等!");
}
int xMeans;
int yMeans;
double numerator = 0;
double denominator = 0;
double result = 0;
// 拿到两个数据的平均值
xMeans = (int) getMeans(xData);
yMeans = (int) getMeans(yData);
// 计算皮尔逊系数的分子
numerator = generateNumerator(xData, xMeans, yData, yMeans);
// 计算皮尔逊系数的分母
denominator = generateDenomiator(xData, xMeans, yData, yMeans);
// 计算皮尔逊系数
if(denominator>0) {
result = numerator / denominator;
}
//FOR TEST
System.out.println(result);
return result;
}
private static int generateNumerator(int[] xData, int xMeans, int[] yData, int yMeans) {
int numerator = 0;
for (int i = 0; i < xData.length; i++) {
numerator += (xData[i] - xMeans) * (yData[i] - yMeans);
}
return numerator;
}
private static double generateDenomiator(int[] xData, int xMeans, int[] yData, int yMeans) {
double xSum = 0.0;
for (int i = 0; i < xData.length; i++) {
xSum += (xData[i] - xMeans) * (xData[i] - xMeans);
}
double ySum = 0.0;
for (int i = 0; i < yData.length; i++) {
ySum += (yData[i] - yMeans) * (yData[i] - yMeans);
}
return Math.sqrt(xSum) * Math.sqrt(ySum);
}
private static double getMeans(int[] datas) {
double sum = 0.0;
for (int i = 0; i < datas.length; i++) {
sum += datas[i];
}
return sum / datas.length;
}
public static List<Integer> columnAverage(List<List<Integer>> list){
ArrayList<Integer> averages = new ArrayList<>();
for(int i=0; i<list.get(0).size(); i++){
int columnSum = 0;
for(int j = 0; j< list.size(); j++){
columnSum += list.get(j).get(i);
}
averages.add(columnSum / list.size());
}
return averages;
}
public static int percentile(List<Integer> latencies, double percentile) {
Collections.sort(latencies);
int index = (int) Math.ceil(percentile * latencies.size());
return latencies.get(index-1);
}
public static void main(String[] args) {
List<Integer> test = Arrays.asList(
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5,
1,2,3,4,5);
System.out.println(columnAverage(Lists.partition(test, 5)));
}
}