优化配置加载方式:通过读取外部文件加载(GAL-435)

This commit is contained in:
qidaijie
2023-11-09 14:13:45 +08:00
parent f765650d9c
commit 0a116352d6
18 changed files with 312 additions and 377 deletions

View File

@@ -3,9 +3,6 @@ package com.zdjizhi.utils.functions.map;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson2.JSONWriter;
import com.zdjizhi.common.config.GlobalConfig;
import com.zdjizhi.common.pojo.Fields;
import com.zdjizhi.common.pojo.Metrics;
import com.zdjizhi.common.pojo.Tags;
import com.zdjizhi.utils.StringUtil;
@@ -20,9 +17,14 @@ import org.apache.flink.util.Collector;
*/
public class ResultFlatMap implements FlatMapFunction<Metrics, String> {
private static final Log logger = LogFactory.get();
/**
* 协议分隔符,需要转义
*/
private static final String PROTOCOL_SPLITTER = "\\.";
@Override
public void flatMap(Metrics metrics, Collector<String> out) throws Exception {
public void flatMap(Metrics metrics, Collector<String> out) {
try {
Tags tags = metrics.getTags();
String protocolStackId = tags.getProtocol_stack_id();
@@ -30,7 +32,7 @@ public class ResultFlatMap implements FlatMapFunction<Metrics, String> {
tags.setApp_name(null);
StringBuilder stringBuilder = new StringBuilder();
String[] protocolIds = protocolStackId.split(GlobalConfig.PROTOCOL_SPLITTER);
String[] protocolIds = protocolStackId.split(PROTOCOL_SPLITTER);
int protocolIdsNum = protocolIds.length;
for (int i = 0; i < protocolIdsNum - 1; i++) {
if (StringUtil.isBlank(stringBuilder.toString())) {

View File

@@ -19,8 +19,9 @@ import org.apache.flink.api.java.tuple.Tuple3;
public class DispersionCountWindow implements ReduceFunction<Tuple3<Tags, Fields, Long>> {
private static final Log logger = LogFactory.get();
@Override
public Tuple3<Tags, Fields, Long> reduce(Tuple3<Tags, Fields, Long> value1, Tuple3<Tags, Fields, Long> value2) throws Exception {
public Tuple3<Tags, Fields, Long> reduce(Tuple3<Tags, Fields, Long> value1, Tuple3<Tags, Fields, Long> value2) {
try {
Fields cacheData = value1.f1;
Fields newData = value2.f1;

View File

@@ -2,12 +2,12 @@ package com.zdjizhi.utils.functions.statistics;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.config.GlobalConfig;
import com.zdjizhi.common.config.MergeConfigs;
import com.zdjizhi.common.pojo.Fields;
import com.zdjizhi.common.pojo.Metrics;
import com.zdjizhi.common.pojo.Tags;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
@@ -18,17 +18,30 @@ import org.apache.flink.util.Collector;
* @Description:
* @date 2023/4/23 14:43
*/
public class MergeCountWindow extends ProcessWindowFunction<Tuple3<Tags, Fields,Long>, Metrics, String, TimeWindow> {
public class MergeCountWindow extends ProcessWindowFunction<Tuple3<Tags, Fields, Long>, Metrics, String, TimeWindow> {
private static final Log logger = LogFactory.get();
private String NAME = null;
@Override
public void process(String windowKey, Context context, Iterable<Tuple3<Tags, Fields,Long>> input, Collector<Metrics> output) throws Exception {
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Fetch the job-wide parameters attached to the execution config; the cast
    // assumes they were registered as a Flink Configuration when the job was
    // built — NOTE(review): confirm at job-setup site.
    final Configuration configuration = (Configuration) getRuntimeContext()
        .getExecutionConfig().getGlobalJobParameters();
    // Cache the measurement name once per operator open() so process() does not
    // have to re-read configuration for every window element.
    NAME = configuration.get(MergeConfigs.MEASUREMENT_NAME);
}
@Override
public void process(String windowKey, Context context, Iterable<Tuple3<Tags, Fields, Long>> input, Collector<Metrics> output) {
try {
long timestamp_ms = context.window().getStart();
for (Tuple3<Tags, Fields,Long> tuple : input) {
for (Tuple3<Tags, Fields, Long> tuple : input) {
Tags tags = tuple.f0;
Fields fields = tuple.f1;
Metrics metrics = new Metrics(GlobalConfig.MEASUREMENT_NAME, tags, fields, timestamp_ms);
Metrics metrics = new Metrics(NAME, tags, fields, timestamp_ms);
output.collect(metrics);
}