Optimize code

Author: houjinchuan
Date:   2024-03-04 15:43:18 +08:00
parent 644ca7f35c
commit 7795ebb318
10 changed files with 5 additions and 13 deletions

View File

@@ -127,5 +127,4 @@ public class Configs {
     public static final ConfigOption<String> FILTER_EXPRESSION = ConfigOptions.key("filter.expression")
             .stringType()
             .defaultValue("");
-
 }
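For reference, an option declared this way is read straight off a Flink Configuration. A minimal sketch, assuming only the Configs class above; the demo class and configuration wiring are illustrative, not part of the commit:

    import org.apache.flink.configuration.Configuration;

    // Minimal sketch: reading the filter.expression option declared in Configs.
    public class FilterExpressionDemo {
        public static void main(String[] args) {
            Configuration configuration = new Configuration();
            // Returns the declared default ("") when the key was never set.
            String expression = configuration.get(Configs.FILTER_EXPRESSION);
            System.out.println("filter.expression = '" + expression + "'");
        }
    }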

View File

@@ -75,5 +75,4 @@ public class ParseMessagePackMapFunction extends RichMapFunction<byte[], FileChu
         }
         return fileChunk;
     }
-
 }

View File

@@ -6,6 +6,8 @@ import org.apache.flink.configuration.Configuration;
 import org.apache.flink.metrics.Counter;
 import org.apache.flink.metrics.MetricGroup;
 
+import static com.zdjizhi.utils.PublicConstants.COMBINE_MODE_SEEK;
+
 public class SideOutputMapFunction extends RichMapFunction<FileChunk, FileChunk> {
 
     private transient Counter pcapDelayedChunkCounter;
@@ -22,7 +24,7 @@ public class SideOutputMapFunction extends RichMapFunction<FileChunk, FileChunk>
     @Override
     public FileChunk map(FileChunk fileChunk) {
         fileChunk.setChunkCount(1);
-        if ("seek".equals(fileChunk.getCombineMode())) {
+        if (COMBINE_MODE_SEEK.equals(fileChunk.getCombineMode())) {
             trafficDelayedChunkCounter.inc();
         } else {
             fileChunk.setChunkNumbers(fileChunk.getTimestamp() + "-" + fileChunk.getChunk().length + ";");
@@ -30,5 +32,4 @@ public class SideOutputMapFunction extends RichMapFunction<FileChunk, FileChunk>
         }
         return fileChunk;
     }
-
 }
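The static import replaces the bare "seek" literal. Judging from the old comparison, the referenced constant is presumably declared along these lines; PublicConstants itself is not part of this diff, and only the value "seek" is grounded in the replaced literal:

    package com.zdjizhi.utils;

    // Hypothetical sketch of the constants holder behind the static import above.
    public final class PublicConstants {

        // Combine mode compared against in SideOutputMapFunction; value taken
        // from the string literal the commit removed.
        public static final String COMBINE_MODE_SEEK = "seek";

        private PublicConstants() {
            // Utility class; no instances.
        }
    }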

View File

@@ -43,5 +43,4 @@ public abstract class KafkaConsumer extends ByteArrayDeserializationSchema {
         kafkaConsumer.setStartFromGroupOffsets();
         return kafkaConsumer;
     }
-
 }
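For context, setStartFromGroupOffsets() tells a FlinkKafkaConsumer to resume from the offsets committed for its consumer group. A minimal, self-contained sketch; the topic name and properties are placeholders, not from this commit:

    import java.util.Properties;
    import org.apache.flink.api.common.serialization.AbstractDeserializationSchema;
    import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

    public class GroupOffsetConsumerDemo {
        // Sketch: a raw byte[] consumer that starts from committed group offsets.
        public static FlinkKafkaConsumer<byte[]> build(Properties kafkaProperties) {
            FlinkKafkaConsumer<byte[]> kafkaConsumer = new FlinkKafkaConsumer<>(
                    "file-chunks",                  // placeholder topic, not from the commit
                    new AbstractDeserializationSchema<byte[]>() {
                        @Override
                        public byte[] deserialize(byte[] message) {
                            return message;         // pass bytes through untouched
                        }
                    },
                    kafkaProperties);
            kafkaConsumer.setStartFromGroupOffsets();
            return kafkaConsumer;
        }
    }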

View File

@@ -206,5 +206,4 @@ public class HBaseSink extends RichSinkFunction<FileChunk> {
         IoUtil.close(syncHBaseConnection);
         IoUtil.close(AsyncHBaseConnection);
     }
-
 }

View File

@@ -178,7 +178,7 @@ public class HosSink extends RichSinkFunction<FileChunk> {
                 httpPut.setHeader(HOS_META_PREFIX + StrUtil.toSymbolCase(meta, CharUtil.DASHED), metaMap.get(meta) + "");
             }
         }
-        httpPut.setEntity(new ByteArrayEntity(fileChunk.getChunk()));
+        httpPut.setEntity(new ByteArrayEntity(data));
         executeRequest(httpPut);
     }
 }
@@ -242,5 +242,4 @@ public class HosSink extends RichSinkFunction<FileChunk> {
             }
         }
     }
-
 }
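The header loop above leans on Hutool's StrUtil.toSymbolCase to turn camelCase meta keys into dashed header names. A standalone sketch; the HOS_META_PREFIX value is assumed for illustration and is not shown in this diff:

    import cn.hutool.core.util.CharUtil;
    import cn.hutool.core.util.StrUtil;

    public class MetaHeaderDemo {
        // Assumed prefix for illustration only.
        private static final String HOS_META_PREFIX = "x-hos-meta-";

        public static void main(String[] args) {
            // camelCase key -> dashed header name, e.g. "fileName" -> "x-hos-meta-file-name"
            System.out.println(HOS_META_PREFIX + StrUtil.toSymbolCase("fileName", CharUtil.DASHED));
        }
    }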

View File

@@ -3,7 +3,6 @@ package com.zdjizhi.utils;
 import org.apache.hadoop.hbase.util.Bytes;
 
 public interface HBaseColumnConstants {
-
     String FAMILY_DATA = "data";
     String FAMILY_META = "meta";
     String COLUMN_FILENAME = "filename";
@@ -46,5 +45,4 @@ public interface HBaseColumnConstants {
     byte[] BYTE_BUCKET_COLUMN_TTL = Bytes.toBytes(BUCKET_COLUMN_TTL);
     byte[] BYTE_BUCKET_COLUMN_WAL = Bytes.toBytes(BUCKET_COLUMN_WAL);
     byte[] BYTE_BUCKET_COLUMN_LOCATION = Bytes.toBytes(BUCKET_COLUMN_LOCATION);
-
 }
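Predeclaring the Bytes.toBytes(...) forms once in the interface lets hot paths skip re-encoding the same strings on every HBase operation. Usage would look roughly like this sketch; the row key, column choice, and value are placeholders:

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ConstantsUsageDemo {
        // Sketch: reuse the shared constants instead of inlining family/qualifier strings.
        static Put examplePut() {
            Put put = new Put(Bytes.toBytes("row-key"));               // placeholder row key
            put.addColumn(Bytes.toBytes(HBaseColumnConstants.FAMILY_META),
                    Bytes.toBytes(HBaseColumnConstants.COLUMN_FILENAME),
                    Bytes.toBytes("example.txt"));                     // placeholder value
            return put;
        }
    }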

View File

@@ -27,7 +27,6 @@ public class HBaseConnectionUtil {
         hbaseConfiguration.set(ConnectionConfiguration.MAX_KEYVALUE_SIZE_KEY, "1073741800");
         hbaseConfiguration.set(ConnectionConfiguration.WRITE_BUFFER_SIZE_KEY, configuration.get(Configs.SINK_HBASE_CLIENT_WRITE_BUFFER) + "");
         hbaseConfiguration.set(HConstants.HBASE_CLIENT_IPC_POOL_SIZE, configuration.get(Configs.SINK_HBASE_CLIENT_IPC_POOL_SIZE) + "");
     }
-
 
     public static synchronized HBaseConnectionUtil getInstance(Configuration configuration) {
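The synchronized getInstance signature implies a lazily initialized singleton. The usual shape is roughly the following sketch; the field name and construction details are assumptions, not shown in the diff:

    import org.apache.flink.configuration.Configuration;

    // Sketch of the lazy-singleton shape implied by the signature above.
    public class HBaseConnectionUtil {
        private static HBaseConnectionUtil instance;

        private HBaseConnectionUtil(Configuration configuration) {
            // assumed: build hbaseConfiguration and open the HBase connections here
        }

        public static synchronized HBaseConnectionUtil getInstance(Configuration configuration) {
            if (instance == null) {
                instance = new HBaseConnectionUtil(configuration);
            }
            return instance;
        }
    }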

View File

@@ -175,5 +175,4 @@ public class HttpClientUtil {
                 .setConnectionManager(getAsyncSslClientManager())
                 .build();
     }
-
 }

View File

@@ -25,7 +25,7 @@ combiner.window.time=10
 combiner.window.idle.time=5
 file.max.chunk.count=100000
 file.max.size=1073741824
-#eval expression used for field filtering
+#field filtering, as a Java expression
 #filter.expression=FileChunk.fileType == "txt" || FileChunk.fileType == "eml"
 #sink settings
 sink.parallelism=2
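For context, a property like sink.parallelism typically feeds the parallelism of the sink operator when the Flink job is wired up. A schematic sketch; Configs.SINK_PARALLELISM, the HBaseSink constructor, and the stream type are assumptions for illustration, since only the raw property name appears in this file:

    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.streaming.api.datastream.DataStream;

    public class SinkWiringDemo {
        // Sketch: apply the configured sink parallelism when attaching the sink.
        // Configs.SINK_PARALLELISM is an assumed ConfigOption<Integer>; the
        // HBaseSink(Configuration) constructor is likewise assumed.
        static void wireSink(DataStream<FileChunk> chunks, Configuration configuration) {
            chunks.addSink(new HBaseSink(configuration))
                  .setParallelism(configuration.get(Configs.SINK_PARALLELISM));
        }
    }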