diff --git a/README.md b/README.md
index 5be6ff0..5a8ee0d 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# file-stream-combiner
+# file-chunk-combiner
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..275475a
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,251 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>com.zdjizhi</groupId>
+    <artifactId>file-chunk-combiner</artifactId>
+    <version>24.01.18</version>
+
+    <repositories>
+        <repository>
+            <id>nexus</id>
+            <name>Team Nexus Repository</name>
+            <url>http://192.168.40.153:8099/content/groups/public</url>
+        </repository>
+        <repository>
+            <id>maven-ali</id>
+            <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+            <snapshots>
+                <enabled>true</enabled>
+                <checksumPolicy>fail</checksumPolicy>
+            </snapshots>
+        </repository>
+    </repositories>
+
+    <properties>
+        <flink.version>1.13.1</flink.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.zdjizhi</groupId>
+            <artifactId>galaxy</artifactId>
+            <version>1.1.3</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>1.7.25</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.7.25</version>
+        </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>1.2.17</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-core</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-streaming-java_2.12</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+            <classifier>tests</classifier>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-clients_2.12</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-kafka_2.12</artifactId>
+            <version>${flink.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-java</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-metrics-dropwizard</artifactId>
+            <version>${flink.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-runtime_2.12</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-test-utils_2.12</artifactId>
+            <version>${flink.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.msgpack</groupId>
+            <artifactId>msgpack-core</artifactId>
+            <version>0.9.5</version>
+        </dependency>
+        <dependency>
+            <groupId>org.msgpack</groupId>
+            <artifactId>jackson-dataformat-msgpack</artifactId>
+            <version>0.9.5</version>
+        </dependency>
+        <dependency>
+            <groupId>cn.hutool</groupId>
+            <artifactId>hutool-all</artifactId>
+            <version>5.8.22</version>
+        </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>2.0.32</version>
+        </dependency>
+        <dependency>
+            <groupId>org.jasypt</groupId>
+            <artifactId>jasypt</artifactId>
+            <version>1.9.3</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>2.21.0</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.5.13</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.6.1</version>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                    <fork>true</fork>
+                </configuration>
+            </plugin>
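+
+            <!-- Fat-jar packaging: org.apache.http is relocated under shade.* and logging
+                 backends are excluded from the shaded jar, presumably so they do not clash
+                 with the ones shipped by the Flink runtime. -->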
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>3.1.1</version>
+                <configuration>
+                    <createDependencyReducedPom>false</createDependencyReducedPom>
+                    <shadedArtifactAttached>true</shadedArtifactAttached>
+                    <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+                    <relocations>
+                        <relocation>
+                            <pattern>org.apache.http</pattern>
+                            <shadedPattern>shade.org.apache.http</shadedPattern>
+                        </relocation>
+                    </relocations>
+                    <artifactSet>
+                        <excludes>
+                            <exclude>com.google.code.findbugs:jsr305</exclude>
+                            <exclude>org.slf4j:slf4j-api</exclude>
+                            <exclude>org.slf4j:slf4j-jdk14</exclude>
+                            <exclude>org.slf4j:slf4j-jcl</exclude>
+                            <exclude>org.slf4j:slf4j-nop</exclude>
+                            <exclude>org.slf4j:slf4j-simple</exclude>
+                            <exclude>org.slf4j:slf4j-reload4j</exclude>
+                            <exclude>org.slf4j:slf4j-log4j12</exclude>
+                            <exclude>org.slf4j:log4j-over-slf4j</exclude>
+                            <exclude>org.slf4j:jcl-over-slf4j</exclude>
+                            <exclude>log4j:*</exclude>
+                            <exclude>commons-logging:*</exclude>
+                            <exclude>ch.qos.logback:*</exclude>
+                            <exclude>org.apache.logging.log4j:log4j-api</exclude>
+                            <exclude>org.apache.logging.log4j:log4j-core</exclude>
+                            <exclude>org.apache.logging.log4j:log4j-slf4j-impl</exclude>
+                            <exclude>org.apache.logging.log4j:log4j-1.2-api</exclude>
+                            <exclude>org.apache.logging.log4j:log4j-to-slf4j</exclude>
+                        </excludes>
+                    </artifactSet>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>file-chunk-combiner</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <finalName>file-chunk-combiner-${version}</finalName>
+                            <filters>
+                                <filter>
+                                    <artifact>*:*</artifact>
+                                    <excludes>
+                                        <exclude>META-INF</exclude>
+                                    </excludes>
+                                </filter>
+                            </filters>
+                            <transformers>
+                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass>com.zdjizhi.FileChunkCombiner</mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+
+            <plugin>
+                <groupId>io.github.zlika</groupId>
+                <artifactId>reproducible-build-maven-plugin</artifactId>
+                <version>0.2</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>strip-jar</goal>
+                        </goals>
+                        <phase>package</phase>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
\ No newline at end of file
diff --git a/src/main/java/com/zdjizhi/FileChunkCombiner.java b/src/main/java/com/zdjizhi/FileChunkCombiner.java
new file mode 100644
index 0000000..06f7402
--- /dev/null
+++ b/src/main/java/com/zdjizhi/FileChunkCombiner.java
@@ -0,0 +1,78 @@
+package com.zdjizhi;
+
+import com.zdjizhi.config.Configs;
+import com.zdjizhi.function.*;
+import com.zdjizhi.pojo.*;
+import com.zdjizhi.sink.HosSink;
+import com.zdjizhi.kafka.KafkaConsumer;
+import com.zdjizhi.trigger.LastChunkOrNoDataInTimeTrigger;
+import com.zdjizhi.trigger.MultipleTrigger;
+import org.apache.flink.api.common.eventtime.WatermarkStrategy;
+import org.apache.flink.api.common.functions.FilterFunction;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.java.utils.ParameterTool;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
+import org.apache.flink.streaming.api.windowing.time.Time;
+import org.apache.flink.streaming.api.windowing.triggers.EventTimeTrigger;
+import org.apache.flink.streaming.api.windowing.triggers.Trigger;
+import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
+import org.apache.flink.util.OutputTag;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+public class FileChunkCombiner extends KafkaConsumer {
+
+    public static void main(String[] args) throws Exception {
+        final ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);
+        final Configuration configuration = parameterTool.getConfiguration();
+        final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
+        environment.getConfig().setGlobalJobParameters(configuration);
+
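+        // Event-time watermarks with zero tolerated out-of-orderness; the chunk's timestamp is
+        // divided by 1000 (presumably microseconds to milliseconds) before being used as event time.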
+        WatermarkStrategy<FileChunk> watermarkStrategy = WatermarkStrategy
+                .<FileChunk>forBoundedOutOfOrderness(Duration.ofSeconds(0))
+                .withTimestampAssigner((fileChunk, timestamp) -> fileChunk.getTimestamp() / 1000);
+
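+        // Read raw bytes from Kafka, decode each record from MessagePack into a FileChunk,
+        // filter out nulls (presumably chunks that failed to parse), and attach the watermarks defined above.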
+        SingleOutputStreamOperator<FileChunk> parseMessagePackStream = environment
+                .addSource(KafkaConsumer.byteArrayConsumer(configuration))
+                .name("Kafka Source")
+                .map(new ParseMessagePackMapFunction())
+                .name("Map: Parse Message Pack")
+                .filter((FilterFunction<FileChunk>) Objects::nonNull)
+                .assignTimestampsAndWatermarks(watermarkStrategy);
+
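+        // Side-output tag, presumably for chunks that arrive after their window has already fired ("delayed-chunk").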
+        OutputTag<FileChunk> delayedChunkOutputTag = new OutputTag<FileChunk>("delayed-chunk") {
+        };
+
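+        // Window firing: the standard event-time trigger is combined with a trigger that fires when
+        // the last chunk of a file arrives or no data is seen for the configured idle time
+        // (configured in seconds, converted to milliseconds here).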
+        List<Trigger<Object, TimeWindow>> triggers = new ArrayList<>();
+        triggers.add(EventTimeTrigger.create());
+        triggers.add(LastChunkOrNoDataInTimeTrigger.of(configuration.get(Configs.COMBINER_WINDOW_IDLE_TIME) * 1000));
+ Trigger