Rename the statistics timestamp field from timestamp to timestamp_ms. (TSG-17084)
@@ -10,14 +10,14 @@ public class Metrics {
     private String name;
     private Tags tags;
     private Fields fields;
-    private long timestamp;
+    private long timestamp_ms;
 
 
-    public Metrics(String name, Tags tags, Fields fields, long timestamp) {
+    public Metrics(String name, Tags tags, Fields fields, long timestamp_ms) {
         this.name = name;
         this.tags = tags;
         this.fields = fields;
-        this.timestamp = timestamp;
+        this.timestamp_ms = timestamp_ms;
     }
 
     public String getName() {
@@ -44,11 +44,11 @@ public class Metrics {
         this.fields = fields;
     }
 
-    public long getTimestamp() {
-        return timestamp;
+    public long getTimestamp_ms() {
+        return timestamp_ms;
     }
 
-    public void setTimestamp(long timestamp) {
-        this.timestamp = timestamp;
+    public void setTimestamp_ms(long timestamp_ms) {
+        this.timestamp_ms = timestamp_ms;
     }
 }
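For illustration only (not part of this commit): a minimal sketch of how a caller adapts to the renamed constructor parameter and accessors, assuming Tags and Fields expose no-arg constructors and that the value passed is a millisecond epoch timestamp.

    // Hypothetical caller; only the Metrics signatures above come from the commit.
    Tags tags = new Tags();                        // assumed no-arg constructor
    Fields fields = new Fields();                  // assumed no-arg constructor
    Metrics metrics = new Metrics(GlobalConfig.MEASUREMENT_NAME, tags, fields, System.currentTimeMillis());
    long windowStartMs = metrics.getTimestamp_ms();   // was getTimestamp()
    metrics.setTimestamp_ms(windowStartMs);           // was setTimestamp(...)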
@@ -15,6 +15,8 @@ import com.zdjizhi.utils.kafka.KafkaConsumer;
 import com.zdjizhi.utils.kafka.KafkaProducer;
 import org.apache.flink.api.common.eventtime.*;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.api.java.utils.ParameterTool;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@@ -65,7 +67,7 @@ public class ApplicationProtocolTopology {
             resultFlatMap.addSink(KafkaProducer.getKafkaProducer())
                     .setParallelism(GlobalConfig.SINK_PARALLELISM).name(GlobalConfig.SINK_KAFKA_TOPIC);
 
-            environment.execute(args[0]);
+            environment.execute("APP-PROTOCOL-STAT-TRAFFIC-MERGE");
         } catch (Exception e) {
            logger.error("This Flink task start ERROR! Exception information is :");
            e.printStackTrace();
@@ -26,7 +26,7 @@ public class ParsingData extends ProcessFunction<String, Tuple3<Tags, Fields, Lo
             JSONObject originalLog = JSON.parseObject(value);
             Fields fields = JSONObject.parseObject(originalLog.getString("fields"), Fields.class);
             Tags tags = JSONObject.parseObject(originalLog.getString("tags"), Tags.class);
-            Long timestamp = originalLog.getLong("timestamp");
+            Long timestamp_ms = originalLog.getLong("timestamp_ms");
 
             String appFullPath = tags.getApp_name();
             if (StringUtil.isNotBlank(appFullPath)) {
@@ -37,7 +37,7 @@ public class ParsingData extends ProcessFunction<String, Tuple3<Tags, Fields, Lo
                     tags.setProtocol_stack_id(protocolLabel.concat(".").concat(appFullPath));
                 }
 
-                out.collect(new Tuple3<>(tags, fields, timestamp));
+                out.collect(new Tuple3<>(tags, fields, timestamp_ms));
             }
             }
         } catch (RuntimeException e) {
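Illustrative sketch (not part of the commit) of the record shape the parser above now expects: a JSON object whose top-level keys are "tags", "fields", and the renamed "timestamp_ms". The keys inside tags/fields are placeholders; only app_name is taken from the code above.

    // Hypothetical upstream record, parsed with the same fastjson calls as ParsingData.
    String value = "{\"tags\":{\"app_name\":\"http.web\"},\"fields\":{},\"timestamp_ms\":1700000000000}";
    JSONObject originalLog = JSON.parseObject(value);
    Long timestamp_ms = originalLog.getLong("timestamp_ms");   // millisecond epoch, renamed from "timestamp"
    Tags tags = JSONObject.parseObject(originalLog.getString("tags"), Tags.class);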
@@ -24,11 +24,11 @@ public class MergeCountWindow extends ProcessWindowFunction<Tuple3<Tags, Fields,
     @Override
     public void process(String windowKey, Context context, Iterable<Tuple3<Tags, Fields,Long>> input, Collector<Metrics> output) throws Exception {
         try {
-            long timestamp = context.window().getStart();
+            long timestamp_ms = context.window().getStart();
             for (Tuple3<Tags, Fields,Long> tuple : input) {
                 Tags tags = tuple.f0;
                 Fields fields = tuple.f1;
-                Metrics metrics = new Metrics(GlobalConfig.MEASUREMENT_NAME, tags, fields, timestamp);
+                Metrics metrics = new Metrics(GlobalConfig.MEASUREMENT_NAME, tags, fields, timestamp_ms);
                 output.collect(metrics);
             }
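Sketch only, under stated assumptions: how MergeCountWindow might be wired into the topology with a keyed tumbling event-time window. The key selector, the getter getProtocol_stack_id(), and the 60-second window size are assumptions, not taken from this commit.

    // Hypothetical wiring; process(String windowKey, ...) above implies a String-keyed stream.
    // Assumes imports of TumblingEventTimeWindows and Time from the Flink windowing packages.
    SingleOutputStreamOperator<Metrics> merged = parsedStream            // DataStream<Tuple3<Tags, Fields, Long>>
            .keyBy(t -> t.f0.getProtocol_stack_id())                     // assumed String-returning getter on Tags
            .window(TumblingEventTimeWindows.of(Time.seconds(60)))       // assumed window size
            .process(new MergeCountWindow());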