提交Live Traffic Chart重构后初版代码。(TSG-14799)

This commit is contained in:
qidaijie
2023-05-06 15:08:21 +08:00
parent dbb6481635
commit ef57dda773
27 changed files with 2158 additions and 88 deletions

View File

@@ -0,0 +1,36 @@
package com.zdjizhi.utils.functions.filter;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONPath;
import com.alibaba.fastjson2.JSONReader;
import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.functions.FilterFunction;
/**
 * Keeps only raw metric messages whose {@code name} field equals
 * {@code traffic_application_protocol_stat}; all other records are dropped.
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions.filter
 * @Description: filter for live-traffic application-protocol stat records
 * @date 2023/4/19 19:02
 */
public class DataTypeFilter implements FilterFunction<String> {
    private static final Log logger = LogFactory.get();
    // JSONPath predicate: matches documents whose "name" equals the stat type we keep.
    private static final String DATA_TYPE_EXPR = "[?(@.name = 'traffic_application_protocol_stat')]";

    /**
     * @param message raw JSON metric message; blank/null messages are rejected
     * @return {@code true} when the message is a traffic_application_protocol_stat record
     */
    @Override
    public boolean filter(String message) throws Exception {
        try {
            if (StringUtil.isNotBlank(message)) {
                // eval returns a non-null result only when the predicate matches.
                return JSONPath.eval(message, DATA_TYPE_EXPR) != null;
            }
        } catch (RuntimeException e) {
            // Pass the throwable to the logger so the full stack trace is preserved
            // (the old string-concat form logged only e.getMessage()).
            logger.error(e, "Parsing metric data is abnormal! The exception message is:{}", e.getMessage());
        }
        return false;
    }
}

View File

@@ -0,0 +1,26 @@
package com.zdjizhi.utils.functions.keyby;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.pojo.AppProtocol;
import com.zdjizhi.common.pojo.Fields;
import com.zdjizhi.common.pojo.Tags;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import java.util.Map;
/**
 * Key selector that groups stat tuples by the pre-built dimension key
 * carried in the first field of the tuple.
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions
 * @Description: groups records by the map-concatenated dimension key
 * @date 2021/7/21 12:13
 */
public class DimensionKeyBy implements KeySelector<Tuple2<String, AppProtocol>, String> {

    /**
     * @param value (dimensionKey, AppProtocol) tuple produced upstream
     * @return the grouping key — the map-concatenated dimension string in f0
     */
    @Override
    public String getKey(Tuple2<String, AppProtocol> value) throws Exception {
        // Group by the dimension key that was assembled during parsing.
        String dimensionKey = value.f0;
        return dimensionKey;
    }
}

View File

@@ -0,0 +1,49 @@
package com.zdjizhi.utils.functions.map;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.zdjizhi.common.pojo.AppProtocol;
import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
/**
 * Parses a raw metric JSON message into a (tagsJson, AppProtocol) tuple.
 * The "tags" object is merged into "fields" so that dimension values and
 * metric values are bound onto a single {@link AppProtocol} instance.
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions
 * @Description: raw log parsing and reorganization
 * @date 2021/5/27 15:01
 */
public class MetricsParseMap implements MapFunction<String, Tuple2<String, AppProtocol>> {
    private static final Log logger = LogFactory.get();

    /**
     * @param message raw JSON metric message
     * @return (tags JSON string, parsed AppProtocol); a {@code (null, null)} sentinel
     *         on parse failure — downstream operators must filter these out
     */
    @Override
    public Tuple2<String, AppProtocol> map(String message) {
        try {
            JSONObject originalLog = JSON.parseObject(message);
            // Read the nested objects directly instead of serializing to String and
            // re-parsing (getString + parseObject), which did the same work twice.
            JSONObject fieldsObject = originalLog.getJSONObject("fields");
            JSONObject tagsObject = originalLog.getJSONObject("tags");
            // Merge tags into fields so one object carries both dimensions and metrics.
            fieldsObject.putAll(tagsObject);
            AppProtocol appProtocol = JSON.to(AppProtocol.class, fieldsObject);
            String appFullPath = appProtocol.getApp_name();
            if (StringUtil.isNotBlank(appFullPath)) {
                // Keep only the leaf segment of the dotted app path as the display name,
                // and append the full path onto the protocol stack id.
                String appName = appFullPath.substring(appFullPath.lastIndexOf('.') + 1);
                String protocolLabel = appProtocol.getProtocol_stack_id();
                appProtocol.setApp_name(appName);
                appProtocol.setProtocol_stack_id(protocolLabel.concat(".").concat(appFullPath));
            }
            return new Tuple2<>(tagsObject.toJSONString(), appProtocol);
        } catch (RuntimeException e) {
            // Log with the throwable so the full stack trace is preserved.
            logger.error(e, "An error occurred in the original log parsing reorganization,error message is:{}", e.toString());
            // Sentinel tuple signalling a failed parse; must be filtered downstream.
            return new Tuple2<>(null, null);
        }
    }
}

View File

@@ -0,0 +1,55 @@
package com.zdjizhi.utils.functions.map;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson2.JSONWriter;
import com.zdjizhi.common.config.GlobalConfig;
import com.zdjizhi.common.pojo.AppProtocol;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.general.FormatConverterUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.util.Collector;
/**
 * Expands one aggregated {@link AppProtocol} record into multiple output records:
 * first the full-depth protocol-stack record, then one rolled-up record for each
 * proper prefix of the protocol stack id (every level except the leaf).
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions
 * @Description: result expansion to all protocol-stack levels
 * @date 2021/7/21 14:52
 */
public class ResultFlatMap implements FlatMapFunction<AppProtocol, String> {
    private static final Log logger = LogFactory.get();

    /**
     * @param appProtocol aggregated stat record for one dimension key
     * @param out         collector receiving one JSON string per stack level
     */
    @Override
    public void flatMap(AppProtocol appProtocol, Collector<String> out) throws Exception {
        try {
            JSONObject tags = FormatConverterUtil.getTags(appProtocol);
            JSONObject conversion = FormatConverterUtil.structureConversion(appProtocol);
            String protocolStackId = tags.getString("protocol_stack_id");
            // Emit the full-depth record first.
            out.collect(FormatConverterUtil.updateTagsData(conversion, tags));
            // Ancestor roll-ups carry no app_name dimension.
            tags.remove("app_name");
            StringBuilder prefix = new StringBuilder();
            String[] protocolIds = protocolStackId.split(GlobalConfig.PROTOCOL_SPLITTER);
            // Emit every proper prefix of the stack id (exclude the leaf level).
            for (int i = 0; i < protocolIds.length - 1; i++) {
                if (StringUtil.isBlank(prefix.toString())) {
                    // First level: no separator.
                    prefix.append(protocolIds[i]);
                    tags.put("protocol_stack_id", prefix.toString());
                    out.collect(FormatConverterUtil.updateTagsData(conversion, tags));
                } else {
                    prefix.append(".").append(protocolIds[i]);
                    tags.put("protocol_stack_id", prefix.toString());
                    // NOTE(review): the original code performs this extra put only on
                    // non-first levels; kept as-is — confirm whether updateTagsData
                    // already writes tags into conversion, making this redundant.
                    conversion.put("tags", tags);
                    out.collect(FormatConverterUtil.updateTagsData(conversion, tags));
                }
            }
        } catch (RuntimeException e) {
            // Log with the throwable so the full stack trace is preserved
            // (replaces the former printStackTrace() call).
            logger.error(e, "An exception occurred during parsing the result data,error message is:{}", e.toString());
        }
    }
}

View File

@@ -0,0 +1,33 @@
package com.zdjizhi.utils.functions.statistics;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.pojo.AppProtocol;
import com.zdjizhi.utils.general.MetricUtil;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.tuple.Tuple2;
/**
 * Incremental (pre-window) aggregation: folds each incoming record into the
 * cached accumulator for the same dimension key via
 * {@link MetricUtil#statisticsMetrics}.
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions.statistics
 * @Description: incremental metric aggregation within a window
 * @date 2023/4/23 14:02
 */
public class DispersionCountWindow implements ReduceFunction<Tuple2<String, AppProtocol>> {
    private static final Log logger = LogFactory.get();

    /**
     * @param value1 accumulated tuple (its AppProtocol is mutated in place)
     * @param value2 newly arrived tuple to merge into the accumulator
     * @return tuple carrying the original key and the updated accumulator
     */
    @Override
    public Tuple2<String, AppProtocol> reduce(Tuple2<String, AppProtocol> value1, Tuple2<String, AppProtocol> value2) throws Exception {
        try {
            AppProtocol cacheData = value1.f1;
            AppProtocol newData = value2.f1;
            // statisticsMetrics merges newData into cacheData in place.
            MetricUtil.statisticsMetrics(cacheData, newData);
            return new Tuple2<>(value1.f0, cacheData);
        } catch (RuntimeException e) {
            // Pass the throwable so the stack trace is logged, then fall back to
            // the untouched accumulator (best-effort: drop the failed increment).
            logger.error(e, "An exception occurred during incremental aggregation! The message is:{}", e.getMessage());
            return value1;
        }
    }
}

View File

@@ -0,0 +1,34 @@
package com.zdjizhi.utils.functions.statistics;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.pojo.AppProtocol;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
/**
 * Window finalization: stamps every aggregated record with the window end time
 * (in epoch seconds) and emits it downstream.
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.functions.statistics
 * @Description: emits window results stamped with the window end time
 * @date 2023/4/23 14:43
 */
public class MergeCountWindow extends ProcessWindowFunction<Tuple2<String, AppProtocol>, AppProtocol, String, TimeWindow> {
    private static final Log logger = LogFactory.get();

    /**
     * @param windowKey dimension key of this window pane
     * @param context   window context (provides the window bounds)
     * @param input     aggregated tuples for this key and window
     * @param output    collector for the time-stamped AppProtocol records
     */
    @Override
    public void process(String windowKey, Context context, Iterable<Tuple2<String, AppProtocol>> input, Collector<AppProtocol> output) throws Exception {
        try {
            // Window end is in milliseconds; convert to epoch seconds.
            // Primitive long avoids pointless boxing of the old Long local.
            long endTime = context.window().getEnd() / 1000;
            for (Tuple2<String, AppProtocol> tuple : input) {
                AppProtocol data = tuple.f1;
                data.setTimestamp(endTime);
                output.collect(data);
            }
        } catch (RuntimeException e) {
            // Pass the throwable so the full stack trace is logged.
            logger.error(e, "An exception occurred in the process of full data aggregation! The message is:{}", e.getMessage());
        }
    }
}