The protocol and application statistics jobs now use event-time processing, and the timestamps of the result data are at millisecond precision. (TSG-16737)

qidaijie
2023-08-21 17:22:37 +08:00
parent 345b7fd601
commit 7b2302234a
8 changed files with 55 additions and 34 deletions
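
The event-time wiring that the commit title describes is not visible in the file shown below, which only widens the reduce function's tuple from two fields to three. As a rough sketch of what such wiring usually looks like in Flink (the helper name, the class name, and the 30-second out-of-orderness bound are assumptions, not values taken from this commit), event-time timestamps can be assigned from a millisecond field carried in the record:

import java.time.Duration;

import com.zdjizhi.common.pojo.Fields;
import com.zdjizhi.common.pojo.Tags;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;

public class EventTimeWiringSketch {

    // Hypothetical helper, not part of this commit: assigns event-time timestamps and
    // watermarks to the (Tags, Fields, timestampMs) stream. The 30-second bound on
    // out-of-orderness is an assumed value.
    static DataStream<Tuple3<Tags, Fields, Long>> withEventTime(DataStream<Tuple3<Tags, Fields, Long>> stream) {
        WatermarkStrategy<Tuple3<Tags, Fields, Long>> strategy = WatermarkStrategy
                .<Tuple3<Tags, Fields, Long>>forBoundedOutOfOrderness(Duration.ofSeconds(30))
                // f2 is assumed to already hold the event time in milliseconds since the epoch.
                .withTimestampAssigner((record, previousTimestamp) -> record.f2);
        return stream.assignTimestampsAndWatermarks(strategy);
    }
}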

com/zdjizhi/utils/functions/statistics/DispersionCountWindow.java

@@ -3,10 +3,12 @@ package com.zdjizhi.utils.functions.statistics;
 import cn.hutool.log.Log;
 import cn.hutool.log.LogFactory;
 import com.zdjizhi.common.pojo.Fields;
+import com.zdjizhi.common.pojo.Metrics;
 import com.zdjizhi.common.pojo.Tags;
 import com.zdjizhi.utils.general.MetricUtil;
 import org.apache.flink.api.common.functions.ReduceFunction;
 import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
 
 /**
  * @author qidaijie
@@ -14,21 +16,23 @@ import org.apache.flink.api.java.tuple.Tuple2;
  * @Description:
  * @date 2023/4/2314:02
  */
-public class DispersionCountWindow implements ReduceFunction<Tuple2<Tags, Fields>> {
+public class DispersionCountWindow implements ReduceFunction<Tuple3<Tags, Fields, Long>> {
     private static final Log logger = LogFactory.get();
 
     @Override
-    public Tuple2<Tags, Fields> reduce(Tuple2<Tags, Fields> value1, Tuple2<Tags, Fields> value2) throws Exception {
+    public Tuple3<Tags, Fields, Long> reduce(Tuple3<Tags, Fields, Long> value1, Tuple3<Tags, Fields, Long> value2) throws Exception {
         try {
             Fields cacheData = value1.f1;
             Fields newData = value2.f1;
             Fields metricsResult = MetricUtil.statisticsMetrics(cacheData, newData);
-            return new Tuple2<>(value1.f0, metricsResult);
+            return new Tuple3<>(value1.f0, metricsResult, value1.f2);
         } catch (RuntimeException e) {
             logger.error("An exception occurred during incremental aggregation! The message is:" + e.getMessage());
             return value1;
         }
     }
 }
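
For context, the reduced Tuple3 keeps the Tags key in f0, the aggregated Fields in f1, and, per the commit title, a millisecond event timestamp in f2 that is passed through unchanged from value1. A minimal, hypothetical sketch of how such a reducer is typically plugged into a keyed event-time window follows; the class and method names and the one-minute tumbling window are assumptions and may not match the actual jobs in this repository:

import com.zdjizhi.common.pojo.Fields;
import com.zdjizhi.common.pojo.Tags;
import com.zdjizhi.utils.functions.statistics.DispersionCountWindow;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;

public class DispersionPipelineSketch {

    // Hypothetical pipeline fragment, not part of this commit. It assumes the stream
    // already carries event-time timestamps and watermarks (see the earlier sketch)
    // and that the one-minute tumbling window matches the job's real configuration.
    static DataStream<Tuple3<Tags, Fields, Long>> aggregate(DataStream<Tuple3<Tags, Fields, Long>> records) {
        return records
                .keyBy(record -> record.f0)                            // group by the Tags dimension set
                .window(TumblingEventTimeWindows.of(Time.minutes(1)))  // assumed window size
                .reduce(new DispersionCountWindow());                  // incremental aggregation from the diff above
    }
}

Carrying the timestamp through the reduce step this way lets the sink emit results with millisecond-precision event timestamps without re-deriving them from window metadata.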