Submit two-layer aggregation code

This commit is contained in:
qidaijie
2021-12-28 10:50:45 +03:00
parent 2a32156c9e
commit 4d10705cdb
6 changed files with 9 additions and 11 deletions

View File

@@ -43,9 +43,10 @@ source.parallelism=1
 # map function parallelism
 parse.parallelism=2
-# count function parallelism
+# first count function parallelism
 first.window.parallelism=2
+# second count function parallelism
 second.window.parallelism=2
 # producer parallelism
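
Note for readers: each of the two window layers now gets its own parallelism knob. Below is a minimal sketch of how these keys could surface as constants next to the SOURCE_PARALLELISM and PARSE_PARALLELISM already referenced by the topology; the resource file name and loader are assumptions, not part of this commit.

// Hypothetical sketch: exposing the new first/second window parallelism
// keys as constants. The Props loading below is assumed, not this repo's code.
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public final class StreamAggregateConfigSketch {
    private static final Properties PROPS = load();

    public static final int SOURCE_PARALLELISM = intOf("source.parallelism", 1);
    public static final int PARSE_PARALLELISM = intOf("parse.parallelism", 2);
    // New in this commit: one knob per aggregation layer.
    public static final int FIRST_WINDOW_PARALLELISM = intOf("first.window.parallelism", 2);
    public static final int SECOND_WINDOW_PARALLELISM = intOf("second.window.parallelism", 2);

    private static Properties load() {
        Properties p = new Properties();
        // The resource name is an assumption; the real project may differ.
        try (InputStream in = StreamAggregateConfigSketch.class
                .getClassLoader().getResourceAsStream("stream-aggregate.properties")) {
            if (in != null) {
                p.load(in);
            }
        } catch (IOException e) {
            // fall through to the defaults above
        }
        return p;
    }

    private static int intOf(String key, int def) {
        String v = PROPS.getProperty(key);
        return v == null ? def : Integer.parseInt(v.trim());
    }

    private StreamAggregateConfigSketch() {
    }
}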

View File

@@ -7,7 +7,6 @@ import com.zdjizhi.utils.functions.*;
 import com.zdjizhi.utils.kafka.Consumer;
 import com.zdjizhi.utils.kafka.Producer;
 import org.apache.flink.api.java.tuple.Tuple2;
-import org.apache.flink.api.java.tuple.Tuple3;
 import org.apache.flink.api.java.tuple.Tuple4;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
@@ -39,7 +38,7 @@ public class StreamAggregateTopology {
         DataStream<String> streamSource = environment.addSource(Consumer.getKafkaConsumer())
                 .setParallelism(StreamAggregateConfig.SOURCE_PARALLELISM);
-        SingleOutputStreamOperator<Tuple4<String, String, String, String>> parseDataMap = streamSource.map(new MapParseFunction())
+        SingleOutputStreamOperator<Tuple4<String, String, String, String>> parseDataMap = streamSource.map(new ParseMapFunction())
                 .name("ParseDataMap")
                 .setParallelism(StreamAggregateConfig.PARSE_PARALLELISM);
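
For context, here is a hedged sketch of the two-layer job this hunk is part of: parse, key by trace id, pre-aggregate in a first window, then re-key and merge in a second window. The window size, the key selectors, and the second layer's exact types are assumptions; only the class and constant names come from this diff.

// Sketch of the two-layer wiring; Consumer, Producer, the functions, and
// StreamAggregateConfig are this repository's classes, everything else is assumed.
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class TwoLayerTopologySketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStream<String> source = env.addSource(Consumer.getKafkaConsumer())
                .setParallelism(StreamAggregateConfig.SOURCE_PARALLELISM);

        // Layer 1: parse, key by trace id, pre-aggregate per window.
        SingleOutputStreamOperator<Tuple2<String, String>> firstLayer = source
                .map(new ParseMapFunction())   // Tuple4<traceId, logsKey, dimensions, payload>
                .name("ParseDataMap")
                .setParallelism(StreamAggregateConfig.PARSE_PARALLELISM)
                .keyBy(t -> t.f0)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(60)))
                .process(new FirstCountWindowFunction())  // emits Tuple2<countKey, json>
                .setParallelism(StreamAggregateConfig.FIRST_WINDOW_PARALLELISM);

        // Layer 2: re-key by countKey and merge the partial aggregates.
        firstLayer
                .keyBy(t -> t.f0)
                .window(TumblingProcessingTimeWindows.of(Time.seconds(60)))
                .process(new SecondCountWindowFunction())
                .setParallelism(StreamAggregateConfig.SECOND_WINDOW_PARALLELISM)
                .addSink(Producer.getKafkaProducer());

        env.execute("StreamAggregateTopology");
    }
}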

View File

@@ -15,6 +15,7 @@ import org.slf4j.LoggerFactory;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;

 /**
  * @author qidaijie
@@ -28,7 +29,6 @@ public class FirstCountWindowFunction extends ProcessWindowFunction<Tuple4<Strin
     private static HashMap<String, String[]> metricsMap = JsonParseUtil.getMetricsMap();
     private static HashMap<String, String[]> actionMap = JsonParseUtil.getActionMap();
     private HashMap<String, Map<String, Object>> cacheMap = new HashMap<>(320);
-    private static String resultTimeKey = JsonParseUtil.getTimeKey();

     @Override
     @SuppressWarnings("unchecked")
@@ -61,7 +61,6 @@ public class FirstCountWindowFunction extends ProcessWindowFunction<Tuple4<Strin
         for (String countKey : cacheMap.keySet()) {
             Map<String, Object> resultMap = cacheMap.get(countKey);
-            JsonParseUtil.setValue(resultMap, resultTimeKey, endTime);
             output.collect(new Tuple2<>(countKey, JsonMapper.toJsonString(resultMap)));
         }
     }
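
With resultTimeKey gone, the first layer emits its partial aggregates unstamped, which fits a two-layer design where only the final layer owns the result time. Below is a compile-ready skeleton of the accumulate-then-flush pattern visible in this hunk; accumulate() and toJson() are stand-ins for the project's merge and JsonMapper logic, and the generic parameters are inferred, not shown in the diff.

// Skeleton of the first-layer window function's flush pattern; details assumed.
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

public class FirstLayerWindowSkeleton
        extends ProcessWindowFunction<Tuple4<String, String, String, String>,
        Tuple2<String, String>, String, TimeWindow> {

    // Partial aggregates per countKey, flushed on each window firing.
    private final HashMap<String, Map<String, Object>> cacheMap = new HashMap<>(320);

    @Override
    public void process(String key, Context context,
                        Iterable<Tuple4<String, String, String, String>> elements,
                        Collector<Tuple2<String, String>> output) {
        for (Tuple4<String, String, String, String> element : elements) {
            accumulate(element); // stand-in for the real metric merging
        }
        // After this commit the partials go out as-is: the result-time field
        // is no longer stamped here (resultTimeKey was removed).
        for (Map.Entry<String, Map<String, Object>> entry : cacheMap.entrySet()) {
            output.collect(new Tuple2<>(entry.getKey(), toJson(entry.getValue())));
        }
        cacheMap.clear();
    }

    private void accumulate(Tuple4<String, String, String, String> element) {
        // Group by the second tuple field (the logs key); merge details assumed.
        cacheMap.computeIfAbsent(element.f1, k -> new HashMap<>());
    }

    private static String toJson(Map<String, Object> map) {
        return String.valueOf(map); // stand-in for JsonMapper.toJsonString
    }
}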

View File

@@ -25,7 +25,7 @@ import java.util.Map;
  * @Description:
  * @date 2021/5/2715:01
  */
-public class MapParseFunction implements MapFunction<String,Tuple4<String,String, String, String>> {
+public class ParseMapFunction implements MapFunction<String,Tuple4<String,String, String, String>> {
     private static final Logger logger = LoggerFactory.getLogger(FirstCountWindowFunction.class);
     private static ArrayList<String[]> jobList = JsonParseUtil.getTransformsList();
@@ -38,7 +38,7 @@ public class MapParseFunction implements MapFunction<String,Tuple4<String,String
         try {
             if (StringUtil.isNotBlank(message)) {
                 Map<String, Object> object = (Map<String, Object>) JsonMapper.fromJsonString(message, Map.class);
-//                String streamTraceId = JsonParseUtil.getString(object, "common_stream_trace_id");
+                String streamTraceId = JsonParseUtil.getString(object, "common_stream_trace_id");
                 Map<String, Object> dimensionsObj = ParseFunctions.transDimensions(dimensionsMap, object);
                 if (ParseFunctions.filterLogs(object)) {
                     for (String[] strings : jobList) {
@@ -76,8 +76,6 @@ public class MapParseFunction implements MapFunction<String,Tuple4<String,String
                         }
                         break;
                     case "hierarchy":
-                        String streamTraceId = JsonParseUtil.getString(object, "common_stream_trace_id");
-//                        RandomUtil.randomInt(0, StreamAggregateConfig.COUNT_PARALLELISM)
                         return new Tuple4<>(streamTraceId,JsonParseUtil.getString(object, logsKeyName), JsonMapper.toJsonString(dimensionsObj), JsonMapper.toJsonString(object));
                     default:
                         break;
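
Taken together with the earlier hunk, streamTraceId is now parsed once, right after JSON deserialization, instead of inside this single case, so every transform branch shares it; the deleted RandomUtil comment also suggests the downstream key used to be a random partition index. For readability, here are the field roles of the returned Tuple4 as inferred from the diff; the sample values are hypothetical.

// Illustration only: what each field of the "hierarchy" branch's Tuple4 carries.
import org.apache.flink.api.java.tuple.Tuple4;

public class HierarchyTupleDemo {
    public static void main(String[] args) {
        Tuple4<String, String, String, String> t = new Tuple4<>(
                "trace-42",             // f0: common_stream_trace_id, the downstream key
                "http_log",             // f1: value read via logsKeyName (hypothetical)
                "{\"host\":\"a\"}",     // f2: the dimensions object as JSON
                "{\"bytes\":123}");     // f3: the full parsed record as JSON
        System.out.println(t.f0 + " -> " + t.f1);
    }
}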

View File

@@ -14,6 +14,7 @@ import org.slf4j.LoggerFactory;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;

 /**
  * @author qidaijie
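
The same import lands in both window functions. The diff does not show its usage, but a ConcurrentHashMap is the usual choice when a static cache is shared by parallel subtasks in one JVM; a minimal, purely illustrative sketch of that pattern follows (field and key names hypothetical).

// Hypothetical pattern the new import suggests: a static, thread-safe,
// per-JVM cache that concurrent subtasks can merge into without locking.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SharedCacheDemo {
    private static final Map<String, Long> SHARED_COUNTS = new ConcurrentHashMap<>();

    public static void main(String[] args) {
        SHARED_COUNTS.merge("key-a", 1L, Long::sum); // atomic read-modify-write
        SHARED_COUNTS.merge("key-a", 2L, Long::sum);
        System.out.println(SHARED_COUNTS.get("key-a")); // prints 3
    }
}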

View File

@@ -51,8 +51,8 @@ public class ParseFunctions {
         for (String key : filtersMap.keySet()) {
             switch (key) {
                 case "notempty":
-                    Object value = JsonParseUtil.getValue(object, filtersMap.get(key));
-                    if (value != null && StringUtil.isNotBlank(value.toString())) {
+                    String value = JsonParseUtil.getString(object, filtersMap.get(key));
+                    if (StringUtil.isNotBlank(value)) {
                         available = true;
                     }
                     break;
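
The notempty filter collapses a null-plus-blank check into a single getString/isNotBlank pair; assuming JsonParseUtil.getString returns null or an empty string for missing keys, the behavior is unchanged. A self-contained illustration, with stand-ins for the project's helpers:

// Equivalence check for the "notempty" filter rewrite; getString and
// isNotBlank below are stand-ins for JsonParseUtil/StringUtil, not their code.
import java.util.HashMap;
import java.util.Map;

public class NotEmptyFilterDemo {
    public static void main(String[] args) {
        Map<String, Object> object = new HashMap<>();
        object.put("user", "alice");
        object.put("empty", "");

        System.out.println(notEmpty(object, "user"));   // true
        System.out.println(notEmpty(object, "empty"));  // false
        System.out.println(notEmpty(object, "absent")); // false
    }

    // New form: one lookup, one blank check.
    static boolean notEmpty(Map<String, Object> object, String field) {
        String value = getString(object, field);
        return isNotBlank(value);
    }

    static String getString(Map<String, Object> object, String field) {
        Object v = object.get(field); // stand-in for JsonParseUtil.getString
        return v == null ? null : v.toString();
    }

    static boolean isNotBlank(String s) { // stand-in for StringUtil.isNotBlank
        return s != null && !s.trim().isEmpty();
    }
}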