Batch read from Druid
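Replaces the per-IP Druid query with a batched read: readFromDruid(ipList) fetches rows for a whole batch of IPs with a single SQL IN query, and getTimeSeriesData(allData, ip, attackType) then filters that cached result in memory for each (ip, attackType) pair.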

yinjiangyi
2021-08-01 19:15:02 +08:00
parent f1e243ded0
commit ac8709248b
6 changed files with 240 additions and 5 deletions

File diff suppressed because one or more lines are too long

View File

@@ -6,6 +6,7 @@ import cn.mesalab.utils.DruidUtils;
import io.vavr.Tuple;
import io.vavr.Tuple2;
import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -15,6 +16,7 @@ import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -91,7 +93,39 @@ public class DruidData {
+ " = '" + attackType + "'"
+ " AND " + timeFilter;
// System.out.println("getTimeSeriesData:" + sql);
try{
ResultSet resultSet = DruidUtils.executeQuery(connection,sql);
ResultSetToListService service = new ResultSetToListServiceImp();
rsList = service.selectAll(resultSet);
} catch (Exception e){
e.printStackTrace();
}
return rsList;
}
public List<Map<String, Object>> getTimeSeriesData(List<Map<String, Object>> allData, String ip, String attackType){
// Filter the batched Druid result in memory instead of querying Druid once per IP.
if (allData == null) {
return null;
}
// Calling equals() on the non-null ip/attackType avoids an NPE when a row lacks the column.
return allData.stream()
.filter(i -> ip.equals(i.get(ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME))
&& attackType.equals(i.get(ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME)))
.collect(Collectors.toList());
}
public List<Map<String, Object>> readFromDruid(List<String> ipList){
List<Map<String, Object>> rsList = null;
// Quote each IP and build the SQL IN clause, e.g. ('1.2.3.4','5.6.7.8').
List<String> quotedIps = ipList.stream().map(ip -> "'" + ip + "'").collect(Collectors.toList());
String ipString = "(" + StringUtils.join(quotedIps, ",") + ")";
String sql = "SELECT "+ ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
+ ", "+ ApplicationConfig.BASELINE_METRIC_TYPE
+ ", " + ApplicationConfig.DRUID_RECVTIME_COLUMN_NAME
+ " FROM " + ApplicationConfig.DRUID_TABLE
+ " WHERE " + ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
+ " IN " + ipString
+ " AND " + timeFilter;
try{
ResultSet resultSet = DruidUtils.executeQuery(connection,sql);
ResultSetToListService service = new ResultSetToListServiceImp();
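The diff of this file is cut off above. As a reading aid, here is a minimal, self-contained sketch of the batch-read pattern the commit introduces; the SERVER_IP/ATTACK_TYPE constants and the data in main() are hypothetical stand-ins for the real ApplicationConfig values:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class BatchReadSketch {
    // Hypothetical stand-ins for ApplicationConfig.DRUID_SERVERIP_COLUMN_NAME
    // and ApplicationConfig.DRUID_ATTACKTYPE_COLUMN_NAME.
    static final String SERVER_IP = "serverIp";
    static final String ATTACK_TYPE = "attackType";

    // Quote each IP and join into a SQL IN clause, as readFromDruid does above.
    static String toInClause(List<String> ips) {
        return ips.stream()
                .map(ip -> "'" + ip + "'")
                .collect(Collectors.joining(",", "(", ")"));
    }

    // In-memory filter that replaces a per-IP Druid query.
    static List<Map<String, Object>> filterSeries(List<Map<String, Object>> allData,
                                                  String ip, String attackType) {
        if (allData == null) {
            return null;
        }
        return allData.stream()
                .filter(row -> ip.equals(row.get(SERVER_IP))
                        && attackType.equals(row.get(ATTACK_TYPE)))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(toInClause(List.of("10.0.0.1", "10.0.0.2"))); // ('10.0.0.1','10.0.0.2')
        Map<String, Object> row = new HashMap<>();
        row.put(SERVER_IP, "10.0.0.1");
        row.put(ATTACK_TYPE, "synflood");
        List<Map<String, Object>> all = new ArrayList<>();
        all.add(row);
        System.out.println(filterSeries(all, "10.0.0.1", "synflood").size()); // 1
    }
}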

View File

@@ -29,6 +29,8 @@ public class BaselineGeneration {
private static HbaseUtils hbaseUtils;
private static Table hbaseTable;
private static List<Map<String, Object>> batchDruidData = new ArrayList<>();
private static final Integer BASELINE_POINT_NUM = ApplicationConfig.BASELINE_RANGE_DAYS * 24 * (60/ApplicationConfig.HISTORICAL_GRAD);
public static void perform() {
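For scale, assuming hypothetical values BASELINE_RANGE_DAYS = 7 and HISTORICAL_GRAD = 5 (one sample every 5 minutes), BASELINE_POINT_NUM = 7 * 24 * (60 / 5) = 2016 points per baseline.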
@@ -106,9 +108,15 @@ public class BaselineGeneration {
Long startGenerateTime = System.currentTimeMillis();
druidData = DruidData.getInstance();
batchDruidData = druidData.readFromDruid(ipList);
List<Put> putList = new ArrayList<>();
for (String ip : ipList) {
int[] ipBaseline = generateSingleIpBaseline(ip, attackType);
if (ipBaseline == null) {
// A null baseline means the batch read returned no data, so stop this batch.
break;
}
putList = hbaseUtils.cachedInPut(putList, ip, ipBaseline, attackType, ApplicationConfig.BASELINE_METRIC_TYPE);
}
@@ -118,7 +126,7 @@ public class BaselineGeneration {
try {
hbaseTable.put(putList);
LOG.info("HBase 写入数据条数 " + ApplicationConfig.GENERATE_BATCH_NUM);
LOG.info("HBase 处理数据条数 " + ApplicationConfig.GENERATE_BATCH_NUM);
} catch (IOException e) {
e.printStackTrace();
}
@@ -130,7 +138,12 @@ public class BaselineGeneration {
private static int[] generateSingleIpBaseline(String ip, String attackType){
// Query
Long startQuerySingleIPTime = System.currentTimeMillis();
- List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(ip, attackType);
+ List<Map<String, Object>> originSeries = druidData.getTimeSeriesData(batchDruidData, ip, attackType);
+ if (originSeries == null) {
+ return null;
+ }
Long endQuerySingleIPTime = System.currentTimeMillis();
LOG.info("性能测试单个ip查询耗时——"+(endQuerySingleIPTime-startQuerySingleIPTime));

View File

@@ -56,5 +56,3 @@ log.write.count=10000
# FOR TEST
generate.batch.number=100
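Note: generate.batch.number presumably backs the ApplicationConfig.GENERATE_BATCH_NUM constant logged in BaselineGeneration above, so this test configuration flushes HBase puts in batches of 100 IPs.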