Optimize the processing logic to output results in the Metrics structure. (TSG-14799)
@@ -1,14 +1,9 @@
 package com.zdjizhi.utils.functions.keyby;
 
-import com.alibaba.fastjson2.JSONObject;
-import com.zdjizhi.common.pojo.AppProtocol;
+import com.zdjizhi.common.pojo.Fields;
+import com.zdjizhi.common.pojo.Tags;
 import org.apache.flink.api.java.functions.KeySelector;
 import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.tuple.Tuple3;
-
-import java.util.Map;
 
 /**
  * @author qidaijie
@@ -16,11 +11,11 @@ import java.util.Map;
  * @Description:
  * @date 2021/7/21 12:13
  */
-public class DimensionKeyBy implements KeySelector<Tuple2<String, AppProtocol>, String> {
+public class DimensionKeyBy implements KeySelector<Tuple2<Tags, Fields>, String> {
 
     @Override
-    public String getKey(Tuple2<String, AppProtocol> value) throws Exception {
+    public String getKey(Tuple2<Tags, Fields> value) throws Exception {
         // Group by the key concatenated from the map
-        return value.f0;
+        return value.f0.toString();
     }
 }
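Note on the key change above: the grouping key is now whatever Tags.toString() returns, so keying is stable only if that method is deterministic over the tag fields. A minimal hypothetical sketch of the assumed POJO (only app_name and protocol_stack_id are confirmed by this commit; the real com.zdjizhi.common.pojo.Tags may carry more dimensions):

// Hypothetical sketch; field set inferred from the getters/setters in this diff.
public class Tags {
    private String app_name;
    private String protocol_stack_id;

    public String getApp_name() { return app_name; }
    public void setApp_name(String app_name) { this.app_name = app_name; }
    public String getProtocol_stack_id() { return protocol_stack_id; }
    public void setProtocol_stack_id(String id) { this.protocol_stack_id = id; }

    @Override
    public String toString() {
        // Fixed field order so identical tag sets always produce the same key.
        return app_name + "|" + protocol_stack_id;
    }
}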
@@ -4,7 +4,8 @@ import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
 import com.alibaba.fastjson2.JSON;
 import com.alibaba.fastjson2.JSONObject;
-import com.zdjizhi.common.pojo.AppProtocol;
+import com.zdjizhi.common.pojo.Fields;
+import com.zdjizhi.common.pojo.Tags;
 import com.zdjizhi.utils.StringUtil;
 import org.apache.flink.api.common.functions.MapFunction;
 import org.apache.flink.api.java.tuple.Tuple2;
@@ -16,30 +17,26 @@ import org.apache.flink.api.java.tuple.Tuple2;
  * @Description:
  * @date 2021/5/27 15:01
  */
-public class MetricsParseMap implements MapFunction<String, Tuple2<String, AppProtocol>> {
+public class MetricsParseMap implements MapFunction<String, Tuple2<Tags, Fields>> {
     private static final Log logger = LogFactory.get();
 
     @Override
-    @SuppressWarnings("unchecked")
-    public Tuple2<String, AppProtocol> map(String message) {
+    public Tuple2<Tags, Fields> map(String message) {
         try {
             JSONObject originalLog = JSON.parseObject(message);
-            JSONObject fieldsObject = JSONObject.parseObject(originalLog.getString("fields"));
-            JSONObject tagsObject = JSONObject.parseObject(originalLog.getString("tags"));
-            fieldsObject.putAll(tagsObject);
+            Fields fields = JSONObject.parseObject(originalLog.getString("fields"), Fields.class);
+            Tags tags = JSONObject.parseObject(originalLog.getString("tags"), Tags.class);
 
-            AppProtocol appProtocol = JSON.to(AppProtocol.class, fieldsObject);
-
-            String appFullPath = appProtocol.getApp_name();
+            String appFullPath = tags.getApp_name();
             if (StringUtil.isNotBlank(appFullPath)) {
                 String appName = appFullPath.substring(appFullPath.lastIndexOf(".") + 1);
-                String protocolLabel = appProtocol.getProtocol_stack_id();
+                String protocolLabel = tags.getProtocol_stack_id();
 
-                appProtocol.setApp_name(appName);
-                appProtocol.setProtocol_stack_id(protocolLabel.concat(".").concat(appFullPath));
+                tags.setApp_name(appName);
+                tags.setProtocol_stack_id(protocolLabel.concat(".").concat(appFullPath));
             }
 
-            return new Tuple2<>(tagsObject.toJSONString(), appProtocol);
+            return new Tuple2<>(tags, fields);
         } catch (RuntimeException e) {
             logger.error("An error occurred in the original log parsing reorganization,error message is:" + e);
             return new Tuple2<>(null, null);
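A worked trace of the map() rewrite above (input values hypothetical; the full tags/fields inventories are not visible in this diff):

// in : {"tags":{"app_name":"tcp.http.wechat","protocol_stack_id":"eth.ipv4"},"fields":{...}}
// out: Tuple2(tags{app_name="wechat", protocol_stack_id="eth.ipv4.tcp.http.wechat"}, fields)
// app_name keeps only its last dot-separated segment, and the full application
// path is appended to the protocol stack id for ResultFlatMap to split later.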
@@ -5,9 +5,9 @@ import cn.hutool.log.LogFactory;
 import com.alibaba.fastjson2.JSONObject;
+import com.alibaba.fastjson2.JSONWriter;
 import com.zdjizhi.common.config.GlobalConfig;
-import com.zdjizhi.common.pojo.AppProtocol;
+import com.zdjizhi.common.pojo.Metrics;
+import com.zdjizhi.common.pojo.Tags;
 import com.zdjizhi.utils.StringUtil;
-import com.zdjizhi.utils.general.FormatConverterUtil;
 import org.apache.flink.api.common.functions.FlatMapFunction;
 import org.apache.flink.util.Collector;
@@ -17,20 +17,16 @@ import org.apache.flink.util.Collector;
  * @Description:
  * @date 2021/7/21 14:52
  */
-public class ResultFlatMap implements FlatMapFunction<AppProtocol, String> {
+public class ResultFlatMap implements FlatMapFunction<Metrics, String> {
     private static final Log logger = LogFactory.get();
 
     @Override
-    @SuppressWarnings("unchecked")
-    public void flatMap(AppProtocol appProtocol, Collector<String> out) throws Exception {
+    public void flatMap(Metrics metrics, Collector<String> out) throws Exception {
         try {
-            JSONObject tags = FormatConverterUtil.getTags(appProtocol);
-            JSONObject conversion = FormatConverterUtil.structureConversion(appProtocol);
-
-            String protocolStackId = tags.getString("protocol_stack_id");
-
-            out.collect(FormatConverterUtil.updateTagsData(conversion, tags));
-            tags.remove("app_name");
+            Tags tags = metrics.getTags();
+            String protocolStackId = tags.getProtocol_stack_id();
+            out.collect(getResultJson(metrics));
+            tags.setApp_name(null);
 
             StringBuilder stringBuilder = new StringBuilder();
             String[] protocolIds = protocolStackId.split(GlobalConfig.PROTOCOL_SPLITTER);
@@ -38,13 +34,14 @@ public class ResultFlatMap implements FlatMapFunction<AppProtocol, String> {
             for (int i = 0; i < protocolIdsNum - 1; i++) {
                 if (StringUtil.isBlank(stringBuilder.toString())) {
                     stringBuilder.append(protocolIds[i]);
-                    tags.put("protocol_stack_id", stringBuilder.toString());
-                    out.collect(FormatConverterUtil.updateTagsData(conversion, tags));
+                    tags.setProtocol_stack_id(stringBuilder.toString());
+                    metrics.setTags(tags);
+                    out.collect(getResultJson(metrics));
                 } else {
                     stringBuilder.append(".").append(protocolIds[i]);
-                    tags.put("protocol_stack_id", stringBuilder.toString());
-                    conversion.put("tags", tags);
-                    out.collect(FormatConverterUtil.updateTagsData(conversion, tags));
+                    tags.setProtocol_stack_id(stringBuilder.toString());
+                    metrics.setTags(tags);
+                    out.collect(getResultJson(metrics));
                 }
             }
         } catch (RuntimeException e) {
@@ -52,4 +49,10 @@ public class ResultFlatMap implements FlatMapFunction<AppProtocol, String> {
             e.printStackTrace();
         }
     }
+
+    private static String getResultJson(Metrics metrics) {
+        return JSONObject.toJSONString(metrics
+                , JSONWriter.Feature.WriteNullStringAsEmpty
+                , JSONWriter.Feature.WriteNullNumberAsZero);
+    }
 }
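Net effect of the rollup loop above: the operator emits the full record once, then re-emits it for every proper prefix of the protocol stack with app_name cleared. A trace with hypothetical values, assuming GlobalConfig.PROTOCOL_SPLITTER is a dot pattern such as "\\.":

// protocol_stack_id = "eth.ipv4.tcp.http.wechat" produces five output records:
//   1. the full Metrics JSON, app_name intact
//   2. protocol_stack_id = "eth"              (app_name now null)
//   3. protocol_stack_id = "eth.ipv4"
//   4. protocol_stack_id = "eth.ipv4.tcp"
//   5. protocol_stack_id = "eth.ipv4.tcp.http"

Because tags is mutated in place, each prefix record must be serialized before the next iteration overwrites protocol_stack_id, which calling getResultJson(metrics) inside the loop guarantees. With WriteNullStringAsEmpty enabled, the cleared app_name should serialize as "" rather than be dropped from the JSON.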
@@ -2,7 +2,8 @@ package com.zdjizhi.utils.functions.statistics;
 
 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.pojo.AppProtocol;
+import com.zdjizhi.common.pojo.Fields;
+import com.zdjizhi.common.pojo.Tags;
 import com.zdjizhi.utils.general.MetricUtil;
 import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.java.tuple.Tuple2;
@@ -13,18 +14,18 @@ import org.apache.flink.api.java.tuple.Tuple2;
  * @Description:
  * @date 2023/4/23 14:02
  */
-public class DispersionCountWindow implements ReduceFunction<Tuple2<String, AppProtocol>> {
+public class DispersionCountWindow implements ReduceFunction<Tuple2<Tags, Fields>> {
     private static final Log logger = LogFactory.get();
 
     @Override
-    public Tuple2<String, AppProtocol> reduce(Tuple2<String, AppProtocol> value1, Tuple2<String, AppProtocol> value2) throws Exception {
+    public Tuple2<Tags, Fields> reduce(Tuple2<Tags, Fields> value1, Tuple2<Tags, Fields> value2) throws Exception {
         try {
-            AppProtocol cacheData = value1.f1;
-            AppProtocol newData = value2.f1;
+            Fields cacheData = value1.f1;
+            Fields newData = value2.f1;
 
-            MetricUtil.statisticsMetrics(cacheData, newData);
+            Fields metricsResult = MetricUtil.statisticsMetrics(cacheData, newData);
 
-            return new Tuple2<>(value1.f0, cacheData);
+            return new Tuple2<>(value1.f0, metricsResult);
         } catch (RuntimeException e) {
             logger.error("An exception occurred during incremental aggregation! The message is:" + e.getMessage());
             return value1;
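The reduce now consumes the return value of MetricUtil.statisticsMetrics instead of relying on in-place mutation of value1. The utility itself is not part of this diff; a minimal hypothetical sketch of the (Fields, Fields) -> Fields merge it now needs, with invented counter names (sessions, bytes) purely for illustration:

// Hypothetical sketch only: the real MetricUtil lives in com.zdjizhi.utils.general
// and its actual metric fields are not visible in this commit.
public final class MetricUtil {

    private MetricUtil() {
    }

    public static Fields statisticsMetrics(Fields cacheData, Fields newData) {
        // Merge the incoming window element into the accumulated one; additive
        // counters are summed so the reduce stays associative.
        cacheData.setSessions(cacheData.getSessions() + newData.getSessions());
        cacheData.setBytes(cacheData.getBytes() + newData.getBytes());
        return cacheData;
    }
}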
@@ -2,7 +2,10 @@ package com.zdjizhi.utils.functions.statistics;
 
 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
-import com.zdjizhi.common.pojo.AppProtocol;
+import com.zdjizhi.common.config.GlobalConfig;
+import com.zdjizhi.common.pojo.Fields;
+import com.zdjizhi.common.pojo.Metrics;
+import com.zdjizhi.common.pojo.Tags;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
 import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
@@ -14,17 +17,18 @@ import org.apache.flink.util.Collector;
  * @Description:
  * @date 2023/4/23 14:43
  */
-public class MergeCountWindow extends ProcessWindowFunction<Tuple2<String, AppProtocol>, AppProtocol, String, TimeWindow> {
+public class MergeCountWindow extends ProcessWindowFunction<Tuple2<Tags, Fields>, Metrics, String, TimeWindow> {
    private static final Log logger = LogFactory.get();
 
     @Override
-    public void process(String windowKey, Context context, Iterable<Tuple2<String, AppProtocol>> input, Collector<AppProtocol> output) throws Exception {
+    public void process(String windowKey, Context context, Iterable<Tuple2<Tags, Fields>> input, Collector<Metrics> output) throws Exception {
         try {
             Long endTime = context.window().getEnd() / 1000;
-            for (Tuple2<String, AppProtocol> tuple : input) {
-                AppProtocol data = tuple.f1;
-                data.setTimestamp(endTime);
-                output.collect(data);
+            for (Tuple2<Tags, Fields> tuple : input) {
+                Tags tags = tuple.f0;
+                Fields fields = tuple.f1;
+                Metrics metrics = new Metrics(GlobalConfig.MEASUREMENT_NAME, tags, fields, endTime);
+                output.collect(metrics);
             }
         } catch (RuntimeException e) {
             logger.error("An exception occurred in the process of full data aggregation! The message is:" + e.getMessage());
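For context, a hypothetical end-to-end wiring of the operators touched by this commit. The actual job graph, source, sink, and window size are not shown in the diff and are assumed here; the Metrics constructor call above implies the shape (measurement, tags, fields, timestamp), which mirrors an InfluxDB-style layout.

// Hypothetical assembly sketch; MetricsParseMap and ResultFlatMap imports are
// omitted because their packages are not visible in this commit.
import com.zdjizhi.utils.functions.keyby.DimensionKeyBy;
import com.zdjizhi.utils.functions.statistics.DispersionCountWindow;
import com.zdjizhi.utils.functions.statistics.MergeCountWindow;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class MetricsJobSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.socketTextStream("localhost", 9999)           // stand-in for the real source
                .map(new MetricsParseMap())               // String -> Tuple2<Tags, Fields>
                .keyBy(new DimensionKeyBy())              // key = Tags.toString()
                .window(TumblingProcessingTimeWindows.of(Time.minutes(1)))  // window size assumed
                .reduce(new DispersionCountWindow(),      // incremental merge of Fields
                        new MergeCountWindow())           // wraps results into Metrics
                .flatMap(new ResultFlatMap())             // Metrics JSON + protocol rollups
                .print();                                 // stand-in for the real sink

        env.execute("TSG-14799 metrics output");
    }
}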