ignore
.gitignore (vendored): 3 changes
@@ -42,4 +42,5 @@ nbdist/
-log/
+logs/
 
 */META-INF/*
+*/META-INF/*
 */test/*
BaselineGeneration.java (deleted)
@@ -1,206 +0,0 @@
-//package cn.mesalab.service;
-//
-//import cn.mesalab.config.ApplicationConfig;
-//import cn.mesalab.dao.DruidData;
-//import cn.mesalab.service.algorithm.KalmanFilter;
-//import cn.mesalab.utils.HbaseUtils;
-//import cn.mesalab.utils.SeriesUtils;
-//import com.google.common.collect.Lists;
-//import com.google.common.util.concurrent.ThreadFactoryBuilder;
-//import org.apache.commons.math3.stat.StatUtils;
-//import org.apache.hadoop.hbase.client.Put;
-//import org.apache.hadoop.hbase.client.Table;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//
-//import java.io.IOException;
-//import java.util.*;
-//import java.util.concurrent.*;
-//import java.util.stream.Collectors;
-//
-///**
-// * @author yjy
-// * @version 1.0
-// * Baseline generation and writing
-// * @date 2021/7/23 5:38 PM
-// */
-//public class BaselineGeneration {
-//    private static final Logger LOG = LoggerFactory.getLogger(BaselineGeneration.class);
-//
-//    private static DruidData druidData;
-//    private static HbaseUtils hbaseUtils;
-//    private static Table hbaseTable;
-//    private static List<Map<String, Object>> batchDruidData = new ArrayList<>();
-//
-//    private static List<String> attackTypeList = Arrays.asList(
-//            ApplicationConfig.DRUID_ATTACKTYPE_TCP_SYN_FLOOD,
-//            ApplicationConfig.DRUID_ATTACKTYPE_ICMP_FLOOD,
-//            ApplicationConfig.DRUID_ATTACKTYPE_UDP_FLOOD,
-//            ApplicationConfig.DRUID_ATTACKTYPE_DNS_AMPL
-//    );
-//    private static final Integer BASELINE_POINT_NUM =
-//            ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60 / ApplicationConfig.HISTORICAL_GRAD);
-//
-//    /**
-//     * Program execution
-//     */
-//    public static void perform() {
-//        long start = System.currentTimeMillis();
-//
-//        druidData = DruidData.getInstance();
-//        hbaseUtils = HbaseUtils.getInstance();
-//        hbaseTable = hbaseUtils.getHbaseTable();
-//        LOG.info("Druid connection established");
-//
-//        try {
-//            // Generate and write baselines
-//            generateBaselinesThread();
-//
-//            long last = System.currentTimeMillis();
-//            LOG.warn("Running time: " + (last - start));
-//
-//            druidData.closeConn();
-//            hbaseTable.close();
-//            LOG.info("Druid connection closed");
-//
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//        System.exit(0);
-//    }
-//
-//    /**
-//     * Multi-threaded baseline generation entry point
-//     * @throws InterruptedException
-//     */
-//    private static void generateBaselinesThread() throws InterruptedException {
-//        int threadNum = Runtime.getRuntime().availableProcessors();
-//
-//        ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
-//                .setNameFormat("baseline-demo-%d").build();
-//
-//        // Create the thread pool
-//        ThreadPoolExecutor executor = new ThreadPoolExecutor(
-//                threadNum,
-//                threadNum,
-//                0L,
-//                TimeUnit.MILLISECONDS,
-//                new LinkedBlockingQueue<>(1024),
-//                namedThreadFactory,
-//                new ThreadPoolExecutor.AbortPolicy());
-//
-//        // Fetch the IP list
-//        ArrayList<String> destinationIps = druidData.getServerIpList();
-//
-//        LOG.info("Found " + destinationIps.size() + " server IPs in total");
-//        LOG.info("Baseline batch size: " + ApplicationConfig.GENERATE_BATCH_SIZE);
-//
-//        // Generate and process IP baselines in batches
-//        List<List<String>> batchIpLists = Lists.partition(destinationIps, ApplicationConfig.GENERATE_BATCH_SIZE);
-//        for (List<String> batchIps : batchIpLists) {
-//            if (batchIps.size() > 0) {
-//                executor.execute(() -> generateBaselines(batchIps));
-//            }
-//        }
-//
-//        executor.shutdown();
-//        executor.awaitTermination(10L, TimeUnit.HOURS);
-//    }
-//
-//    /**
-//     * Generate IP baselines in batches
-//     * @param ipList list of IPs
-//     */
-//    public static void generateBaselines(List<String> ipList) {
-//        druidData = DruidData.getInstance();
-//        batchDruidData = druidData.readFromDruid(ipList);
-//
-//        List<Put> putList = new ArrayList<>();
-//        for (String attackType : attackTypeList) {
-//            for (String ip : ipList) {
-//                int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
-//                if (ipBaseline != null) {
-//                    putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
-//                }
-//            }
-//        }
-//
-//        try {
-//            hbaseTable.put(putList);
-//            LOG.info("Baseline thread " + Thread.currentThread().getId() + " successfully wrote " + putList.size() + " baseline records");
-//        } catch (IOException e) {
-//            e.printStackTrace();
-//        }
-//
-//        druidData.closeConn();
-//    }
-//
-//    /**
-//     * Baseline generation logic for a single IP
-//     * @param ip IP
-//     * @param attackType attack type
-//     * @return baseline series of length 60/HISTORICAL_GRAD*24
-//     */
-//    private static int[] generateSingleIpBaseline(String ip, String attackType) {
-//        // Query
-//        List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(batchDruidData, ip, attackType);
-//
-//        if (originSeries.size() == 0) {
-//            return null;
-//        }
-//
-//        // Fill missing values in the time series with 0
-//        List<Map<String, Object>> completSeries = SeriesUtils.complementSeries(originSeries);
-//
-//        int[] baselineArr = new int[BASELINE_POINT_NUM];
-//        List<Integer> series = completSeries.stream().map(
-//                i -> Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList());
-//
-//        // Check how frequently the IP appears
-//        if (originSeries.size() / (float) completSeries.size() > ApplicationConfig.BASELINE_HISTORICAL_RATIO) {
-//            // High frequency
-//            double percentile = StatUtils.percentile(series.stream().mapToDouble(Double::valueOf).toArray(),
-//                    ApplicationConfig.BASELINE_SPARSE_FILL_PERCENTILE);
-//            Arrays.fill(baselineArr, (int) percentile);
-//            baselineArr = baselineFunction(series);
-//
-//        } else {
-//            // Check for periodicity
-//            if (SeriesUtils.isPeriod(series)) {
-//                baselineArr = baselineFunction(series);
-//            } else {
-//                int ipPercentile = SeriesUtils.percentile(
-//                        originSeries.stream().map(i ->
-//                                Integer.valueOf(i.get(ApplicationConfig.BASELINE_METRIC_TYPE).toString())).collect(Collectors.toList()),
-//                        ApplicationConfig.BASELINE_RATIONAL_PERCENTILE);
-//                Arrays.fill(baselineArr, ipPercentile);
-//            }
-//        }
-//
-//        return baselineArr;
-//    }
-//
-//    /**
-//     * Baseline generation algorithm
-//     * @param timeSeries input series
-//     * @return output series
-//     */
-//    private static int[] baselineFunction(List<Integer> timeSeries) {
-//        int[] result;
-//        switch (ApplicationConfig.BASELINE_FUNCTION) {
-//            case "KalmanFilter":
-//                KalmanFilter kalmanFilter = new KalmanFilter();
-//                kalmanFilter.forcast(timeSeries, BASELINE_POINT_NUM);
-//                result = kalmanFilter.getForecastSeries().stream().mapToInt(Integer::valueOf).toArray();
-//                break;
-//            default:
-//                result = timeSeries.subList(0, BASELINE_POINT_NUM).stream().mapToInt(Integer::valueOf).toArray();
-//        }
-//        return result;
-//    }
-//
-//    public static void main(String[] args) {
-//        perform();
-//    }
-//
-//}
HBaseTest.java (deleted)
@@ -1,133 +0,0 @@
-package cn.mesalab.service;
-
-/**
- * @author yjy
- * @version 1.0
- * @date 2021/8/3 11:21 AM
- */
-
-import cn.mesalab.config.ApplicationConfig;
-import cn.mesalab.dao.DruidData;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.ArrayWritable;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-public class HBaseTest {
-    public static void main(String[] args) throws IOException {
-        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();
-
-        config.set(HConstants.ZOOKEEPER_QUORUM, ApplicationConfig.HBASE_ZOOKEEPER_QUORUM);
-        config.set(HConstants.ZOOKEEPER_CLIENT_PORT, ApplicationConfig.HBASE_ZOOKEEPER_CLIENT_PORT);
-
-        TableName tableName = TableName.valueOf(ApplicationConfig.HBASE_TABLE);
-        Connection conn = ConnectionFactory.createConnection(config);
-        Table table = conn.getTable(tableName);
-
-
-//        DruidData druidData = DruidData.getInstance();
-//        ArrayList<String> destinationIps = druidData.getServerIpList();
-        List<String> ips = Arrays.asList(
-                "192.168.1.1",
-                "192.168.1.2",
-                "192.168.1.3",
-                "192.168.1.4",
-                "192.168.1.5",
-                "192.168.1.6",
-                "192.168.1.7",
-                "192.168.1.8",
-                "192.168.10.1",
-                "192.168.10.2",
-                "192.168.10.3",
-                "192.168.10.4",
-                "192.168.10.5",
-                "192.168.10.6",
-                "192.168.10.7",
-                "192.168.10.8"
-        );
-
-        for (String ip : ips) {
-            Get abcGet = new Get(Bytes.toBytes(ip));
-            Result r = table.get(abcGet);
-            ArrayWritable w = new ArrayWritable(IntWritable.class);
-            List<String> attackTypeList = Arrays.asList(
-                    "TCP SYN Flood",
-                    "ICMP Flood"
-//                    "UDP Flood",
-//                    "DNS Amplification"
-            );
-            for (String attackType : attackTypeList) {
-                byte[] session_nums = r.getValue(Bytes.toBytes(attackType), Bytes.toBytes("session_num"));
-                if (session_nums == null) {
-                    continue;
-                }
-                w.readFields(new DataInputStream(new ByteArrayInputStream(session_nums)));
-                ArrayList<Integer> arr2 = fromWritable(w);
-                System.out.println(ip + "-" + attackType + ": " + arr2.toString());
-            }
-
-        }
-
-
-
-//        int[] arr = new int[144];
-//        Arrays.fill(arr, 100);
-//        List<String> ips = Arrays.asList(
-//                "192.168.1.1",
-//                "192.168.1.2",
-//                "192.168.1.3",
-//                "192.168.1.4",
-//                "192.168.1.5",
-//                "192.168.1.6",
-//                "192.168.1.7",
-//                "192.168.1.8",
-//                "192.168.10.1",
-//                "192.168.10.2",
-//                "192.168.10.3",
-//                "192.168.10.4",
-//                "192.168.10.5",
-//                "192.168.10.6",
-//                "192.168.10.7",
-//                "192.168.10.8"
-//        );
-//
-//        for (String ip : ips){
-//            Put put = new Put(Bytes.toBytes(ip));
-//            put.addColumn(Bytes.toBytes("ICMP Flood"), Bytes.toBytes("session_num"), WritableUtils.toByteArray(toWritable(arr)));
-//            table.put(put);
-//        }
-
-
-
-    }
-
-    public static Writable toWritable(int[] arr) {
-        Writable[] content = new Writable[arr.length];
-        for (int i = 0; i < content.length; i++) {
-            content[i] = new IntWritable(arr[i]);
-        }
-        return new ArrayWritable(IntWritable.class, content);
-    }
-
-    public static ArrayList<Integer> fromWritable(ArrayWritable writable) {
-        Writable[] writables = ((ArrayWritable) writable).get();
-        ArrayList<Integer> list = new ArrayList<Integer>(writables.length);
-        for (Writable wrt : writables) {
-            list.add(((IntWritable) wrt).get());
-        }
-        return list;
-    }
-
-}