1. Updated to log-completion version 11

2. Expanded the supported file-name suffix types
wangchengcheng
2021-12-16 09:59:21 +08:00
parent bca1d5f40c
commit 70272eb6ec
77 changed files with 429 additions and 3706 deletions

.idea/compiler.xml (generated, 3 changed lines)

@@ -6,10 +6,11 @@
         <sourceOutputDir name="target/generated-sources/annotations" />
         <sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
         <outputRelativeToContentRoot value="true" />
-        <module name="log-stream-doublewrite" />
+        <module name="log-completion-doublewrite" />
       </profile>
     </annotationProcessing>
     <bytecodeTargetLevel>
+      <module name="log-completion-doublewrite" target="1.8" />
       <module name="log-stream-doublewrite" target="1.8" />
     </bytecodeTargetLevel>
   </component>

.idea/modules.xml (generated, 2 changed lines)

@@ -2,7 +2,7 @@
 <project version="4">
   <component name="ProjectModuleManager">
     <modules>
-      <module fileurl="file://$PROJECT_DIR$/.idea/log-stream-doublewrite.iml" filepath="$PROJECT_DIR$/.idea/log-stream-doublewrite.iml" />
+      <module fileurl="file://$PROJECT_DIR$/log-completion-doublewrite.iml" filepath="$PROJECT_DIR$/log-completion-doublewrite.iml" />
     </modules>
   </component>
 </project>

.idea/workspace.xml (generated, 438 changed lines)

@@ -2,250 +2,83 @@
 <project version="4">
   <component name="ChangeListManager">
     <list default="true" id="dfd1cd53-a804-4106-9206-5126890781e9" name="Default Changelist" comment="">
-      <change afterPath="$PROJECT_DIR$/.gitignore" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/.gitignore" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/.name" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/compiler.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/encodings.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__asm_asm_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__cglib_cglib_nodep_3_2_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__cn_hutool_hutool_all_5_5_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_alibaba_fastjson_1_2_70.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_esotericsoftware_kryo_kryo_2_24_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_esotericsoftware_minlog_minlog_1_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_annotations_2_9_5.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_core_2_9_5.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_9_5.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_github_luben_zstd_jni_1_4_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_github_scopt_scopt_2_12_3_5_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_github_stephenc_findbugs_findbugs_annotations_1_3_9_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_github_stephenc_jcip_jcip_annotations_1_0_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_google_code_findbugs_jsr305_1_3_9.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_google_code_gson_gson_2_2_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_google_errorprone_error_prone_annotations_2_0_18.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_google_guava_guava_23_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_google_j2objc_j2objc_annotations_1_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_google_protobuf_protobuf_java_2_5_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_jamesmurty_utils_java_xmlbuilder_0_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_jayway_jsonpath_json_path_2_4_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_jcraft_jsch_0_1_42.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_maxmind_db_maxmind_db_1_2_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_maxmind_geoip2_geoip2_2_12_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_maxmind_geoip_geoip_api_1_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_nimbusds_nimbus_jose_jwt_4_41_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_sun_jersey_jersey_client_1_9.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_sun_jersey_jersey_core_1_9.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_sun_jersey_jersey_json_1_9.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_sun_jersey_jersey_server_1_9.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_sun_xml_bind_jaxb_impl_2_2_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_thoughtworks_paranamer_paranamer_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_twitter_chill_2_12_0_7_6.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_twitter_chill_java_0_7_6.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_typesafe_akka_akka_actor_2_12_2_5_21.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_typesafe_akka_akka_protobuf_2_12_2_5_21.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_typesafe_akka_akka_slf4j_2_12_2_5_21.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_typesafe_akka_akka_stream_2_12_2_5_21.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_typesafe_config_1_3_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_typesafe_ssl_config_core_2_12_0_3_7.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__com_zdjizhi_galaxy_1_0_6.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_beanutils_commons_beanutils_1_7_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_beanutils_commons_beanutils_core_1_8_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_cli_commons_cli_1_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_codec_commons_codec_1_10.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_collections_commons_collections_3_2_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_configuration_commons_configuration_1_6.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_digester_commons_digester_1_8.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_httpclient_commons_httpclient_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_io_commons_io_2_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_lang_commons_lang_2_6.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_logging_commons_logging_1_1_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__commons_net_commons_net_3_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__io_dropwizard_metrics_metrics_core_3_2_6.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__io_netty_netty_3_10_5_Final.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__io_netty_netty_all_4_0_23_Final.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__io_prometheus_simpleclient_0_9_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__io_prometheus_simpleclient_common_0_9_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__io_prometheus_simpleclient_pushgateway_0_9_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__javax_servlet_jsp_jsp_api_2_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__javax_servlet_servlet_api_2_5.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__javax_xml_bind_jaxb_api_2_3_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__jline_jline_0_9_94.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__joda_time_joda_time_2_10.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__junit_junit_4_12.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__log4j_log4j_1_2_14.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__net_java_dev_jets3t_jets3t_0_9_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__net_minidev_accessors_smart_1_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__net_minidev_json_smart_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_avro_avro_1_7_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_commons_commons_compress_1_20.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_commons_commons_crypto_1_0_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_commons_commons_lang3_3_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_commons_commons_math3_3_5.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_curator_curator_client_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_curator_curator_framework_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_curator_curator_recipes_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_directory_api_api_asn1_api_1_0_0_M20.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_directory_api_api_util_1_0_0_M20.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_directory_server_apacheds_i18n_2_0_0_M15.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_directory_server_apacheds_kerberos_codec_2_0_0_M15.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_annotations_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_clients_2_12_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_connector_base_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_connector_kafka_2_12_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_core_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_file_sink_common_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_hadoop_fs_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_java_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_metrics_core_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_optimizer_2_12_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_queryable_state_client_java_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_runtime_2_12_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_shaded_asm_7_7_1_13_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_shaded_guava_18_0_13_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_shaded_jackson_2_12_1_13_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_shaded_netty_4_1_49_Final_13_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_shaded_zookeeper_3_3_4_14_13_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_flink_streaming_java_2_12_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_flink_force_shading_1_13_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_annotations_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_auth_2_8_5.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_client_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_common_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_hdfs_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_mapreduce_client_app_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_mapreduce_client_common_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_mapreduce_client_core_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_mapreduce_client_jobclient_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_mapreduce_client_shuffle_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_yarn_api_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_yarn_client_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_yarn_common_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hadoop_hadoop_yarn_server_common_2_7_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_client_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_common_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_hadoop2_compat_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_hadoop_compat_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_metrics_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_metrics_api_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_protocol_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_hbase_protocol_shaded_2_2_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_thirdparty_hbase_shaded_miscellaneous_2_2_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_thirdparty_hbase_shaded_netty_2_2_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_hbase_thirdparty_hbase_shaded_protobuf_2_2_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_htrace_htrace_core4_4_2_0_incubating.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_htrace_htrace_core_3_1_0_incubating.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_httpcomponents_httpclient_4_5_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_4_4_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_kafka_kafka_clients_2_4_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_yetus_audience_annotations_0_5_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apache_zookeeper_zookeeper_3_4_10.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_apiguardian_apiguardian_api_1_0_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_clapper_grizzled_slf4j_2_12_1_3_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_codehaus_jackson_jackson_core_asl_1_9_13.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_codehaus_jackson_jackson_jaxrs_1_8_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_codehaus_jackson_jackson_mapper_asl_1_9_13.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_codehaus_jackson_jackson_xc_1_8_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_codehaus_jettison_jettison_1_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_fusesource_leveldbjni_leveldbjni_all_1_8.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_hamcrest_hamcrest_core_1_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_javassist_javassist_3_24_0_GA.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_jruby_jcodings_jcodings_1_0_18.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_jruby_joni_joni_2_1_11.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_junit_jupiter_junit_jupiter_api_5_3_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_junit_platform_junit_platform_commons_1_3_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_lz4_lz4_java_1_6_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_mortbay_jetty_jetty_6_1_26.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_mortbay_jetty_jetty_util_6_1_26.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_objenesis_objenesis_2_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_opentest4j_opentest4j_1_1_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_ow2_asm_asm_5_0_4.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_reactivestreams_reactive_streams_1_0_2.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_scala_lang_modules_scala_java8_compat_2_12_0_8_0.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_scala_lang_modules_scala_parser_combinators_2_12_1_1_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_scala_lang_scala_library_2_12_7.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_slf4j_slf4j_api_1_7_15.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__org_xerial_snappy_snappy_java_1_1_8_3.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__xerces_xercesImpl_2_9_1.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__xml_apis_xml_apis_1_3_04.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/libraries/Maven__xmlenc_xmlenc_0_52.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/log-stream-doublewrite.iml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/misc.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/modules.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/vcs.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/pom.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/properties/default_config.properties" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/properties/service_flow_config.properties" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/bean/FileMeta.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/bean/SourceList.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/common/FlowWriteConfig.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/app/AppUtils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/exception/FlowWriteException.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/functions/DealFileProcessFunction.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/functions/FilterNullFunction.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/functions/MapCompletedFunction.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/FileEdit.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFormMap.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFormObject.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFormTypeMap.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFunction.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/json/TypeUtils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/kafka/CertUtils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/kafka/Consumer.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/kafka/Producer.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/ordinary/MD5Utils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/log4j.properties" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/logback.xml" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/test/java/com/zdjizhi/KafkaTest.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/test/java/com/zdjizhi/LocationTest.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/test/java/com/zdjizhi/TestTime.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/bean/FileMeta.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/bean/SourceList.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/common/FlowWriteConfig.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/topology/LogFlowWriteTopology.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/app/AppUtils$1.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/app/AppUtils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/exception/FlowWriteException.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction$1.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/FilterNullFunction.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/MapCompletedFunction.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/FileEdit.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/SnowflakeId.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFormMap.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFormObject.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFormTypeMap.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFunction.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/hbase/HBaseUtils$1.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/hbase/HBaseUtils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/http/HttpClientUtil.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/json/JsonParseUtil.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/json/JsonTypeUtils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/json/TypeUtils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/kafka/CertUtils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/kafka/Consumer.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/kafka/Producer.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/ordinary/MD5Utils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/system/FlowWriteConfigurations.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock$LockException.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/zookeeper/ZookeeperUtils.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/default_config.properties" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/classes/service_flow_config.properties" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/KafkaTest$1.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/KafkaTest.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/LocationTest.class" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/TestTime.class" afterDir="false" />
+      <change afterPath="$PROJECT_DIR$/log-completion-doublewrite.iml" afterDir="false" />
+      <change afterPath="$PROJECT_DIR$/properties/file_type.properties" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/.idea/compiler.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/compiler.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/.idea/modules.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/modules.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/pom.xml" beforeDir="false" afterPath="$PROJECT_DIR$/pom.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/properties/default_config.properties" beforeDir="false" afterPath="$PROJECT_DIR$/properties/default_config.properties" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/properties/service_flow_config.properties" beforeDir="false" afterPath="$PROJECT_DIR$/properties/service_flow_config.properties" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/bean/FileMeta.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/bean/SourceList.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/common/FlowWriteConfig.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/app/AppUtils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/exception/FlowWriteException.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/functions/DealFileProcessFunction.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/functions/FilterNullFunction.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/functions/MapCompletedFunction.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/FileEdit.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFormMap.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFormObject.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFormTypeMap.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/general/TransFunction.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/json/TypeUtils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/kafka/CertUtils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/kafka/Consumer.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/kafka/Producer.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/ordinary/MD5Utils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/log4j.properties" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/main/logback.xml" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/test/java/com/zdjizhi/KafkaTest.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/test/java/com/zdjizhi/LocationTest.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/test/java/com/zdjizhi/TestTime.java" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/bean/FileMeta.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/bean/SourceList.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/common/FlowWriteConfig.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/topology/LogFlowWriteTopology.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/app/AppUtils$1.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/app/AppUtils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/exception/FlowWriteException.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction$1.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/DealFileProcessFunction.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/FilterNullFunction.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/functions/MapCompletedFunction.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/FileEdit.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/SnowflakeId.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFormMap.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFormObject.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFormTypeMap.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/general/TransFunction.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/hbase/HBaseUtils$1.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/hbase/HBaseUtils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/http/HttpClientUtil.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/json/JsonParseUtil.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/json/JsonTypeUtils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/json/TypeUtils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/kafka/CertUtils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/kafka/Consumer.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/kafka/Producer.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/ordinary/MD5Utils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/system/FlowWriteConfigurations.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock$LockException.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/zookeeper/DistributedLock.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/com/zdjizhi/utils/zookeeper/ZookeeperUtils.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/default_config.properties" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/classes/service_flow_config.properties" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/KafkaTest$1.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/KafkaTest.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/LocationTest.class" beforeDir="false" />
+      <change beforePath="$PROJECT_DIR$/target/test-classes/com/zdjizhi/TestTime.class" beforeDir="false" />
     </list>
     <option name="SHOW_DIALOG" value="false" />
     <option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -284,14 +117,21 @@
   </component>
   <component name="PropertiesComponent">
     <property name="RunOnceActivity.ShowReadmeOnStart" value="true" />
+    <property name="SHARE_PROJECT_CONFIGURATION_FILES" value="true" />
     <property name="last_opened_file_path" value="$PROJECT_DIR$" />
     <property name="settings.editor.selected.configurable" value="reference.settings.project.maven.importing" />
   </component>
-  <component name="RunManager">
+  <component name="RecentsManager">
+    <key name="CopyFile.RECENT_KEYS">
+      <recent name="D:\p19-etl\log-stream-doublewrite" />
+      <recent name="D:\p19-etl\log-stream-doublewrite\properties" />
+      <recent name="D:\p19-etl\log-stream-doublewrite\src\main\java\com\zdjizhi" />
+      <recent name="D:\p19-etl\log-stream-doublewrite\src\test" />
+    </key>
+  </component>
+  <component name="RunManager" selected="Application.LogFlowWriteTopology">
     <configuration name="LogFlowWriteTopology" type="Application" factoryName="Application" temporary="true" nameIsGenerated="true">
       <option name="MAIN_CLASS_NAME" value="com.zdjizhi.topology.LogFlowWriteTopology" />
-      <module name="log-stream-doublewrite" />
-      <option name="PROGRAM_PARAMETERS" value="test" />
       <extension name="coverage">
         <pattern>
           <option name="PATTERN" value="com.zdjizhi.topology.*" />
@@ -302,9 +142,27 @@
         <option name="Make" enabled="true" />
       </method>
     </configuration>
+    <configuration name="TestTime" type="Application" factoryName="Application" temporary="true" nameIsGenerated="true">
+      <option name="MAIN_CLASS_NAME" value="com.zdjizhi.TestTime" />
+      <module name="log-completion-doublewrite" />
+      <extension name="coverage">
+        <pattern>
+          <option name="PATTERN" value="com.zdjizhi.*" />
+          <option name="ENABLED" value="true" />
+        </pattern>
+      </extension>
+      <method v="2">
+        <option name="Make" enabled="true" />
+      </method>
+    </configuration>
+    <list>
+      <item itemvalue="Application.LogFlowWriteTopology" />
+      <item itemvalue="Application.TestTime" />
+    </list>
     <recent_temporary>
       <list>
         <item itemvalue="Application.LogFlowWriteTopology" />
+        <item itemvalue="Application.TestTime" />
       </list>
     </recent_temporary>
   </component>
@@ -331,44 +189,98 @@
       <option name="presentableId" value="Default" />
       <updated>1634901800840</updated>
     </task>
+    <task id="LOCAL-00001" summary="针对p19项目研发的双写程序">
+      <created>1634902912230</created>
+      <option name="number" value="00001" />
+      <option name="presentableId" value="LOCAL-00001" />
+      <option name="project" value="LOCAL" />
+      <updated>1634902912230</updated>
+    </task>
+    <option name="localTasksCounter" value="2" />
     <servers />
   </component>
+  <component name="VcsManagerConfiguration">
+    <MESSAGE value="针对p19项目研发的双写程序" />
+    <MESSAGE value="为完善文件名后缀的版本" />
+    <option name="LAST_COMMIT_MESSAGE" value="为完善文件名后缀的版本" />
+  </component>
   <component name="WindowStateProjectService">
-    <state x="223" y="67" key="#com.intellij.execution.impl.EditConfigurationsDialog" timestamp="1634902125257">
+    <state x="467" y="69" key="#LogFlowWriteTopology" timestamp="1639551343993">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state x="223" y="67" key="#com.intellij.execution.impl.EditConfigurationsDialog/0.0.1536.824@0.0.1536.824" timestamp="1634902125257" />
-    <state x="549" y="167" key="FileChooserDialogImpl" timestamp="1634901999469">
+    <state x="467" y="69" key="#LogFlowWriteTopology/0.0.1536.824@0.0.1536.824" timestamp="1639551343993" />
+    <state x="223" y="67" key="#com.intellij.execution.impl.EditConfigurationsDialog" timestamp="1639555238664">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state x="549" y="167" key="FileChooserDialogImpl/0.0.1536.824@0.0.1536.824" timestamp="1634901999469" />
-    <state width="1493" height="210" key="GridCell.Tab.0.bottom" timestamp="1634902131315">
+    <state x="223" y="67" key="#com.intellij.execution.impl.EditConfigurationsDialog/0.0.1536.824@0.0.1536.824" timestamp="1639555238664" />
+    <state x="322" y="6" width="891" height="814" key="CommitChangelistDialog2" timestamp="1638771902930">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state width="1493" height="210" key="GridCell.Tab.0.bottom/0.0.1536.824@0.0.1536.824" timestamp="1634902131315" />
-    <state width="1493" height="210" key="GridCell.Tab.0.center" timestamp="1634902131315">
+    <state x="322" y="6" width="891" height="814" key="CommitChangelistDialog2/0.0.1536.824@0.0.1536.824" timestamp="1638771902930" />
+    <state x="93" y="93" width="1350" height="638" key="DiffContextDialog" timestamp="1638771862046">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state width="1493" height="210" key="GridCell.Tab.0.center/0.0.1536.824@0.0.1536.824" timestamp="1634902131315" />
-    <state width="1493" height="210" key="GridCell.Tab.0.left" timestamp="1634902131315">
+    <state x="93" y="93" width="1350" height="638" key="DiffContextDialog/0.0.1536.824@0.0.1536.824" timestamp="1638771862046" />
+    <state x="549" y="167" key="FileChooserDialogImpl" timestamp="1639533356106">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state width="1493" height="210" key="GridCell.Tab.0.left/0.0.1536.824@0.0.1536.824" timestamp="1634902131315" />
-    <state width="1493" height="210" key="GridCell.Tab.0.right" timestamp="1634902131315">
+    <state x="549" y="167" key="FileChooserDialogImpl/0.0.1536.824@0.0.1536.824" timestamp="1639533356106" />
+    <state width="1493" height="225" key="GridCell.Tab.0.bottom" timestamp="1639561346732">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state width="1493" height="210" key="GridCell.Tab.0.right/0.0.1536.824@0.0.1536.824" timestamp="1634902131315" />
+    <state width="1493" height="261" key="GridCell.Tab.0.bottom/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="2517" height="429" key="GridCell.Tab.0.bottom/0.0.1536.824/1920.0.2560.1400@1920.0.2560.1400" timestamp="1635739027315" />
+    <state width="1493" height="225" key="GridCell.Tab.0.bottom/0.0.1536.824@0.0.1536.824" timestamp="1639561346732" />
+    <state width="1493" height="225" key="GridCell.Tab.0.center" timestamp="1639561346732">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.0.center/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="2517" height="429" key="GridCell.Tab.0.center/0.0.1536.824/1920.0.2560.1400@1920.0.2560.1400" timestamp="1635739027315" />
+    <state width="1493" height="225" key="GridCell.Tab.0.center/0.0.1536.824@0.0.1536.824" timestamp="1639561346732" />
+    <state width="1493" height="225" key="GridCell.Tab.0.left" timestamp="1639561346732">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.0.left/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="2517" height="429" key="GridCell.Tab.0.left/0.0.1536.824/1920.0.2560.1400@1920.0.2560.1400" timestamp="1635739027315" />
+    <state width="1493" height="225" key="GridCell.Tab.0.left/0.0.1536.824@0.0.1536.824" timestamp="1639561346732" />
+    <state width="1493" height="225" key="GridCell.Tab.0.right" timestamp="1639561346732">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.0.right/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="2517" height="429" key="GridCell.Tab.0.right/0.0.1536.824/1920.0.2560.1400@1920.0.2560.1400" timestamp="1635739027315" />
+    <state width="1493" height="225" key="GridCell.Tab.0.right/0.0.1536.824@0.0.1536.824" timestamp="1639561346732" />
+    <state width="1493" height="261" key="GridCell.Tab.1.bottom" timestamp="1635334266901">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.1.bottom/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="1493" height="261" key="GridCell.Tab.1.center" timestamp="1635334266901">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.1.center/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="1493" height="261" key="GridCell.Tab.1.left" timestamp="1635334266901">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.1.left/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
+    <state width="1493" height="261" key="GridCell.Tab.1.right" timestamp="1635334266901">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state width="1493" height="261" key="GridCell.Tab.1.right/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635334266901" />
     <state x="270" y="54" key="SettingsEditor" timestamp="1634902038183">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
     <state x="270" y="54" key="SettingsEditor/0.0.1536.824@0.0.1536.824" timestamp="1634902038183" />
-    <state x="361" y="145" key="Vcs.Push.Dialog.v2" timestamp="1634902506767">
+    <state x="361" y="145" key="Vcs.Push.Dialog.v2" timestamp="1634904713433">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state x="361" y="145" key="Vcs.Push.Dialog.v2/0.0.1536.824@0.0.1536.824" timestamp="1634902506767" />
-    <state x="449" y="236" key="com.intellij.ide.util.TipDialog" timestamp="1634901920489">
+    <state x="361" y="145" key="Vcs.Push.Dialog.v2/0.0.1536.824@0.0.1536.824" timestamp="1634904713433" />
+    <state x="93" y="93" width="1350" height="638" key="com.intellij.history.integration.ui.views.DirectoryHistoryDialog" timestamp="1638771862046">
       <screen x="0" y="0" width="1536" height="824" />
     </state>
-    <state x="449" y="236" key="com.intellij.ide.util.TipDialog/0.0.1536.824@0.0.1536.824" timestamp="1634901920489" />
+    <state x="93" y="93" width="1350" height="638" key="com.intellij.history.integration.ui.views.DirectoryHistoryDialog/0.0.1536.824@0.0.1536.824" timestamp="1638771862046" />
+    <state x="449" y="236" key="com.intellij.ide.util.TipDialog" timestamp="1639532154492">
+      <screen x="0" y="0" width="1536" height="824" />
+    </state>
+    <state x="449" y="236" key="com.intellij.ide.util.TipDialog/0.0.1536.824/1920.0.2560.1400@0.0.1536.824" timestamp="1635410128225" />
+    <state x="449" y="236" key="com.intellij.ide.util.TipDialog/0.0.1536.824@0.0.1536.824" timestamp="1639532154492" />
   </component>
 </project>

log-completion-doublewrite.iml (new file, 186 lines)

@@ -0,0 +1,186 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8">
<output url="file://$MODULE_DIR$/target/classes" />
<output-test url="file://$MODULE_DIR$/target/test-classes" />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/properties" type="java-resource" />
<sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Maven: com.zdjizhi:galaxy:1.0.6" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.4" level="project" />
<orderEntry type="library" name="Maven: commons-io:commons-io:2.4" level="project" />
<orderEntry type="library" name="Maven: log4j:log4j:1.2.14" level="project" />
<orderEntry type="library" name="Maven: joda-time:joda-time:2.10" level="project" />
<orderEntry type="library" name="Maven: com.maxmind.geoip:geoip-api:1.3.1" level="project" />
<orderEntry type="library" name="Maven: com.maxmind.geoip2:geoip2:2.12.0" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.5" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.9.5" level="project" />
<orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.9.5" level="project" />
<orderEntry type="library" name="Maven: com.maxmind.db:maxmind-db:1.2.2" level="project" />
<orderEntry type="library" name="Maven: com.google.guava:guava:23.0" level="project" />
<orderEntry type="library" name="Maven: com.google.errorprone:error_prone_annotations:2.0.18" level="project" />
<orderEntry type="library" name="Maven: com.google.j2objc:j2objc-annotations:1.1" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.mojo:animal-sniffer-annotations:1.14" level="project" />
<orderEntry type="library" name="Maven: com.alibaba:fastjson:1.2.70" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-core:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-annotations:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-metrics-core:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-shaded-asm-7:7.1-13.0" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.esotericsoftware.kryo:kryo:2.24.0" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.esotericsoftware.minlog:minlog:1.2" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.objenesis:objenesis:2.1" level="project" />
<orderEntry type="library" name="Maven: commons-collections:commons-collections:3.2.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.20" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-shaded-guava:18.0-13.0" level="project" />
<orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.15" level="project" />
<orderEntry type="library" name="Maven: com.google.code.findbugs:jsr305:1.3.9" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:force-shading:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-streaming-java_2.12:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-file-sink-common:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-runtime_2.12:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-queryable-state-client-java:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-hadoop-fs:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-shaded-netty:4.1.49.Final-13.0" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-shaded-jackson:2.12.1-13.0" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-shaded-zookeeper-3:3.4.14-13.0" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.javassist:javassist:3.24.0-GA" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.scala-lang:scala-library:2.12.7" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.typesafe.akka:akka-actor_2.12:2.5.21" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.typesafe:config:1.3.3" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.scala-lang.modules:scala-java8-compat_2.12:0.8.0" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.typesafe.akka:akka-stream_2.12:2.5.21" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.reactivestreams:reactive-streams:1.0.2" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.typesafe:ssl-config-core_2.12:0.3.7" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.scala-lang.modules:scala-parser-combinators_2.12:1.1.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.typesafe.akka:akka-protobuf_2.12:2.5.21" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.typesafe.akka:akka-slf4j_2.12:2.5.21" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.clapper:grizzled-slf4j_2.12:1.3.2" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.github.scopt:scopt_2.12:3.5.0" level="project" />
<orderEntry type="library" name="Maven: org.xerial.snappy:snappy-java:1.1.8.3" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.twitter:chill_2.12:0.7.6" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.twitter:chill-java:0.7.6" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.lz4:lz4-java:1.6.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-math3:3.5" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-clients_2.12:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-optimizer_2.12:1.13.1" level="project" />
<orderEntry type="library" name="Maven: commons-cli:commons-cli:1.3.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-connector-kafka_2.12:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.kafka:kafka-clients:2.4.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.github.luben:zstd-jni:1.4.3-1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-connector-base:1.13.1" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.apache.flink:flink-java:1.13.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.zookeeper:zookeeper:3.4.10" level="project" />
<orderEntry type="library" name="Maven: jline:jline:0.9.94" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty:3.10.5.Final" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-client:2.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase.thirdparty:hbase-shaded-protobuf:2.2.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-common:2.2.3" level="project" />
<orderEntry type="library" name="Maven: com.github.stephenc.findbugs:findbugs-annotations:1.3.9-1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-hadoop-compat:2.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-metrics-api:2.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-hadoop2-compat:2.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-metrics:2.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-protocol-shaded:2.2.3" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase:hbase-protocol:2.2.3" level="project" />
<orderEntry type="library" name="Maven: commons-codec:commons-codec:1.10" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase.thirdparty:hbase-shaded-miscellaneous:2.2.1" level="project" />
<orderEntry type="library" name="Maven: com.google.protobuf:protobuf-java:2.5.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.hbase.thirdparty:hbase-shaded-netty:2.2.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.htrace:htrace-core4:4.2.0-incubating" level="project" />
<orderEntry type="library" name="Maven: org.jruby.jcodings:jcodings:1.0.18" level="project" />
<orderEntry type="library" name="Maven: org.jruby.joni:joni:2.1.11" level="project" />
<orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.2.6" level="project" />
<orderEntry type="library" name="Maven: org.apache.commons:commons-crypto:1.0.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-auth:2.8.5" level="project" />
<orderEntry type="library" name="Maven: com.nimbusds:nimbus-jose-jwt:4.41.1" level="project" />
<orderEntry type="library" name="Maven: com.github.stephenc.jcip:jcip-annotations:1.0-1" level="project" />
<orderEntry type="library" name="Maven: org.apache.directory.server:apacheds-kerberos-codec:2.0.0-M15" level="project" />
<orderEntry type="library" name="Maven: org.apache.directory.server:apacheds-i18n:2.0.0-M15" level="project" />
<orderEntry type="library" name="Maven: org.apache.directory.api:api-asn1-api:1.0.0-M20" level="project" />
<orderEntry type="library" name="Maven: org.apache.directory.api:api-util:1.0.0-M20" level="project" />
<orderEntry type="library" name="Maven: org.apache.curator:curator-framework:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.yetus:audience-annotations:0.5.0" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-common:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-annotations:2.7.1" level="project" />
<orderEntry type="module-library">
<library name="Maven: jdk.tools:jdk.tools:1.8">
<CLASSES>
<root url="jar://C:/Program Files/Java/jdk1.8.0_191/lib/tools.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
<orderEntry type="library" name="Maven: xmlenc:xmlenc:0.52" level="project" />
<orderEntry type="library" name="Maven: commons-httpclient:commons-httpclient:3.1" level="project" />
<orderEntry type="library" name="Maven: commons-net:commons-net:3.1" level="project" />
<orderEntry type="library" name="Maven: javax.servlet:servlet-api:2.5" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty:6.1.26" level="project" />
<orderEntry type="library" name="Maven: org.mortbay.jetty:jetty-util:6.1.26" level="project" />
<orderEntry type="library" scope="RUNTIME" name="Maven: javax.servlet.jsp:jsp-api:2.1" level="project" />
<orderEntry type="library" name="Maven: com.sun.jersey:jersey-core:1.9" level="project" />
<orderEntry type="library" name="Maven: com.sun.jersey:jersey-json:1.9" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jettison:jettison:1.1" level="project" />
<orderEntry type="library" name="Maven: com.sun.xml.bind:jaxb-impl:2.2.3-1" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-jaxrs:1.8.3" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-xc:1.8.3" level="project" />
<orderEntry type="library" name="Maven: com.sun.jersey:jersey-server:1.9" level="project" />
<orderEntry type="library" name="Maven: asm:asm:3.1" level="project" />
<orderEntry type="library" name="Maven: commons-logging:commons-logging:1.1.3" level="project" />
<orderEntry type="library" name="Maven: net.java.dev.jets3t:jets3t:0.9.0" level="project" />
<orderEntry type="library" name="Maven: com.jamesmurty.utils:java-xmlbuilder:0.4" level="project" />
<orderEntry type="library" name="Maven: commons-lang:commons-lang:2.6" level="project" />
<orderEntry type="library" name="Maven: commons-configuration:commons-configuration:1.6" level="project" />
<orderEntry type="library" name="Maven: commons-digester:commons-digester:1.8" level="project" />
<orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils:1.7.0" level="project" />
<orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils-core:1.8.0" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13" level="project" />
<orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" />
<orderEntry type="library" name="Maven: org.apache.avro:avro:1.7.4" level="project" />
<orderEntry type="library" name="Maven: com.thoughtworks.paranamer:paranamer:2.3" level="project" />
<orderEntry type="library" name="Maven: com.google.code.gson:gson:2.2.4" level="project" />
<orderEntry type="library" name="Maven: com.jcraft:jsch:0.1.42" level="project" />
<orderEntry type="library" name="Maven: org.apache.curator:curator-client:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.curator:curator-recipes:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.htrace:htrace-core:3.1.0-incubating" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-client:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-hdfs:2.7.1" level="project" />
<orderEntry type="library" name="Maven: io.netty:netty-all:4.0.23.Final" level="project" />
<orderEntry type="library" name="Maven: xerces:xercesImpl:2.9.1" level="project" />
<orderEntry type="library" name="Maven: xml-apis:xml-apis:1.3.04" level="project" />
<orderEntry type="library" name="Maven: org.fusesource.leveldbjni:leveldbjni-all:1.8" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-app:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-common:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-client:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-server-common:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-shuffle:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-api:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-core:2.7.1" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-common:2.7.1" level="project" />
<orderEntry type="library" name="Maven: com.sun.jersey:jersey-client:1.9" level="project" />
<orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-jobclient:2.7.1" level="project" />
<orderEntry type="library" name="Maven: cglib:cglib-nodep:3.2.4" level="project" />
<orderEntry type="library" name="Maven: org.junit.jupiter:junit-jupiter-api:5.3.2" level="project" />
<orderEntry type="library" name="Maven: org.apiguardian:apiguardian-api:1.0.0" level="project" />
<orderEntry type="library" name="Maven: org.opentest4j:opentest4j:1.1.1" level="project" />
<orderEntry type="library" name="Maven: org.junit.platform:junit-platform-commons:1.3.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpclient:4.5.2" level="project" />
<orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.4" level="project" />
<orderEntry type="library" name="Maven: com.jayway.jsonpath:json-path:2.4.0" level="project" />
<orderEntry type="library" name="Maven: net.minidev:json-smart:2.3" level="project" />
<orderEntry type="library" name="Maven: net.minidev:accessors-smart:1.2" level="project" />
<orderEntry type="library" name="Maven: org.ow2.asm:asm:5.0.4" level="project" />
<orderEntry type="library" name="Maven: io.prometheus:simpleclient_pushgateway:0.9.0" level="project" />
<orderEntry type="library" name="Maven: io.prometheus:simpleclient:0.9.0" level="project" />
<orderEntry type="library" name="Maven: io.prometheus:simpleclient_common:0.9.0" level="project" />
<orderEntry type="library" name="Maven: javax.xml.bind:jaxb-api:2.3.0" level="project" />
<orderEntry type="library" name="Maven: cn.hutool:hutool-all:5.5.2" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: junit:junit:4.12" level="project" />
<orderEntry type="library" scope="TEST" name="Maven: org.hamcrest:hamcrest-core:1.3" level="project" />
</component>
</module>

View File

@@ -6,7 +6,7 @@
<groupId>com.zdjizhi</groupId> <groupId>com.zdjizhi</groupId>
<artifactId>log-completion-doublewrite</artifactId> <artifactId>log-completion-doublewrite</artifactId>
<version>1015</version> <version>1214</version>
<name>log-completion-doublewrite</name> <name>log-completion-doublewrite</name>
<url>http://www.example.com</url> <url>http://www.example.com</url>
@@ -37,8 +37,8 @@
<hadoop.version>2.7.1</hadoop.version> <hadoop.version>2.7.1</hadoop.version>
<kafka.version>1.0.0</kafka.version> <kafka.version>1.0.0</kafka.version>
<hbase.version>2.2.3</hbase.version> <hbase.version>2.2.3</hbase.version>
<!-- <scope.type>provided</scope.type>--> <scope.type>provided</scope.type>
<scope.type>compile</scope.type> <!-- <scope.type>compile</scope.type>-->
</properties> </properties>
<build> <build>

View File

@@ -1,33 +1,4 @@
#Number of retries for the producer #====================Kafka Consumer====================#
retries=0
#Maximum time a batch may wait after creation before it is sent, whether or not it is full
linger.ms=10
#If no response is received before the timeout, the client resends the request when necessary
request.timeout.ms=30000
#The producer sends records in batches; batch size in bytes, default: 16384
batch.size=262144
#Size of the buffer the producer uses to cache messages
#64M
#buffer.memory=67108864
#128M
buffer.memory=134217728
#Maximum size of a single request sent to the Kafka server, default: 1048576
#5M
#max.request.size=5242880
#10M
max.request.size=10485760
#Kafka SASL username
kafka.user=admin
#Kafka SASL and SSL password
kafka.pin=galaxy2019
#kafka source connection timeout #kafka source connection timeout
session.timeout.ms=60000 session.timeout.ms=60000
@@ -36,15 +7,48 @@ max.poll.records=3000
#kafka source poll bytes #kafka source poll bytes
max.partition.fetch.bytes=31457280 max.partition.fetch.bytes=31457280
#====================Kafka Producer====================#
#Number of retries for the producer
retries=0
#hbase table name #Maximum time a batch may wait after creation before it is sent, whether or not it is full
hbase.table.name=subscriber_info linger.ms=10
#Default mail charset #If no response is received before the timeout, the client resends the request when necessary
mail.default.charset=UTF-8 request.timeout.ms=30000
#The producer sends records in batches; batch size in bytes, default: 16384
batch.size=262144
#Size of the buffer the producer uses to cache messages
#128M
buffer.memory=134217728
#Maximum size of a single request sent to the Kafka server, default: 1048576
#10M
max.request.size=10485760
#====================kafka default====================#
#kafka source protocol; SSL or SASL #kafka source protocol; SSL or SASL
kafka.source.protocol=SASL kafka.source.protocol=SASL
#kafka sink protocol; SSL or SASL #kafka sink protocol; SSL or SASL
kafka.sink.protocol= kafka.sink.protocol=
#Kafka SASL username
kafka.user=admin
#Kafka SASL and SSL password
kafka.pin=galaxy2019
#====================Topology Default====================#
#hbase table name
hbase.table.name=tsg_galaxy:relation_framedip_account
#Default mail charset
mail.default.charset=UTF-8
#0: no validation, 1: strict type validation, 2: loose type validation
log.transform.type=2
#Maximum time between two outputs (in milliseconds)
buffer.timeout=5000
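
For orientation, here is a minimal sketch, assuming the standard kafka-clients API, of how the producer keys above map onto a Properties object; the bootstrap servers value is a placeholder, not taken from this commit:

    import java.util.Properties;
    import org.apache.kafka.clients.producer.ProducerConfig;

    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "host:9092"); // placeholder
    props.put(ProducerConfig.RETRIES_CONFIG, "0");
    props.put(ProducerConfig.LINGER_MS_CONFIG, "10");
    props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "30000");
    props.put(ProducerConfig.BATCH_SIZE_CONFIG, "262144");
    props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "134217728");
    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "10485760");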

View File

@@ -0,0 +1,5 @@
txt
html
eml
jpg
png

View File

@@ -4,7 +4,7 @@
input.kafka.servers=10.3.60.3:9094 input.kafka.servers=10.3.60.3:9094
#management output kafka address #management output kafka address
output.kafka.servers=10.3.45.126:6667,10.3.45.127:6667,10.3.45.128:6667 output.kafka.servers=10.3.60.3:9092
#zookeeper address, used to configure log_id #zookeeper address, used to configure log_id
zookeeper.servers=10.3.60.3:2181 zookeeper.servers=10.3.60.3:2181
@@ -12,33 +12,31 @@ zookeeper.servers=10.3.60.3:2181
#hbase zookeeper address, used to connect to HBase #hbase zookeeper address, used to connect to HBase
hbase.zookeeper.servers=10.3.60.3:2181 hbase.zookeeper.servers=10.3.60.3:2181
#oos address
oos.servers=10.3.45.124:8057
#--------------------------------HTTP/IP location library------------------------------# #--------------------------------HTTP/IP location library------------------------------#
#IP location library path #IP location library path
#tools.library=/opt/dat/
tools.library=D:/dingweiku/dat/ tools.library=D:/dingweiku/dat/
#schema location on the gateway #schema location on the gateway
schema.http=http://10.3.60.3:9999/metadata/schema/v1/fields/proxy_event schema.http=http://10.3.60.3:9999/metadata/schema/v1/fields/security_event
#gateway APP_ID fetch interface #gateway APP_ID fetch interface
app.id.http=http://10.3.60.3:9999/open-api/appDicList app.id.http=http://10.3.60.3:9999/open-api/appDicList
#--------------------------------Kafka consumer group info------------------------------# #--------------------------------Kafka consumer group info------------------------------#
#kafka input data topic #oos address
#input.kafka.topic=SESSION-RECORD oos.servers=10.3.45.124:8057
input.kafka.topic=PROXY-EVENT #input kafka
input.kafka.topic=test11
#file source metadata topic #file source metadata topic
output.kafka.topic=TRAFFIC-FILE-METADATA output.kafka.topic=test-file-data
percent.kafka.topic=test
percent.kafka.topic=PROXY-EVENT
#consumer group for the input topic; stores this spout id's consumed offsets and can be named after the topology; the stored offset position ensures the next read does not re-consume data #consumer group for the input topic; stores this spout id's consumed offsets and can be named after the topology; the stored offset position ensures the next read does not re-consume data
group.id=session-record-log-20211018-A group.id=flink-test-1
#producer compression mode, none or snappy #producer compression mode, none or snappy
producer.kafka.compression.type=none producer.kafka.compression.type=none
@@ -49,13 +47,16 @@ producer.ack=1
#--------------------------------topology configuration------------------------------# #--------------------------------topology configuration------------------------------#
#consumer parallelism #consumer parallelism
consumer.parallelism=1 source.parallelism=10
#transform function parallelism #transform function parallelism
transform.parallelism=1 transform.parallelism=10
#kafka producer parallelism
sink.parallelism=10
#data center id, value range (0-63) #data center id, value range (0-63)
data.center.id.num=0 data.center.id.num=7
#hbase refresh interval; 0 means the cache is not updated #hbase refresh interval; 0 means the cache is not updated
hbase.tick.tuple.freq.secs=180 hbase.tick.tuple.freq.secs=180
@@ -69,4 +70,4 @@ app.tick.tuple.freq.secs=0
mail.default.charset=UTF-8 mail.default.charset=UTF-8
#0: no completion, output logs unchanged; 1: completion required #0: no completion, output logs unchanged; 1: completion required
log.need.complete=1 log.need.complete=1

View File

@@ -1,51 +0,0 @@
package com.zdjizhi.bean;
import com.alibaba.fastjson.JSONArray;
public class FileMeta {
private long common_log_id;
protected int common_recv_time;
private String common_schema_type;
private JSONArray sourceList;
private int processing_time;
public long getCommon_log_id() {
return common_log_id;
}
public void setCommon_log_id(long common_log_id) {
this.common_log_id = common_log_id;
}
public int getCommon_recv_time() {
return common_recv_time;
}
public void setCommon_recv_time(int common_recv_time) {
this.common_recv_time = common_recv_time;
}
public String getCommon_schema_type() {
return common_schema_type;
}
public void setCommon_schema_type(String common_schema_type) {
this.common_schema_type = common_schema_type;
}
public JSONArray getSourceList() {
return sourceList;
}
public void setSourceList(JSONArray sourceList) {
this.sourceList = sourceList;
}
public int getProcessing_time() {
return processing_time;
}
public void setProcessing_time(int processing_time) {
this.processing_time = processing_time;
}
}
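
A usage sketch of this (now deleted) bean, mirroring how DealFileProcessFunction later in this diff serialized it with fastjson; all field values here are invented:

    import com.alibaba.fastjson.JSONArray;
    import com.alibaba.fastjson.JSONObject;

    FileMeta meta = new FileMeta();
    meta.setCommon_log_id(123456789L);        // invented
    meta.setCommon_recv_time(1639622400);     // invented
    meta.setCommon_schema_type("HTTP");
    meta.setSourceList(new JSONArray());
    meta.setProcessing_time((int) (System.currentTimeMillis() / 1000));
    String json = JSONObject.toJSONString(meta);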

View File

@@ -1,22 +0,0 @@
package com.zdjizhi.bean;
public class SourceList {
private String destination_oss_path;
private String source_oss_path;
public String getDestination_oss_path() {
return destination_oss_path;
}
public void setDestination_oss_path(String destination_oss_path) {
this.destination_oss_path = destination_oss_path;
}
public String getSource_oss_path() {
return source_oss_path;
}
public void setSource_oss_path(String source_oss_path) {
this.source_oss_path = source_oss_path;
}
}

View File

@@ -1,83 +0,0 @@
package com.zdjizhi.common;
import com.zdjizhi.utils.system.FlowWriteConfigurations;
/**
* @author Administrator
*/
public class FlowWriteConfig {
public static final int IF_PARAM_LENGTH = 3;
public static final String VISIBILITY = "disabled";
public static final String FORMAT_SPLITTER = ",";
public static final String IS_JSON_KEY_TAG = "$.";
public static final String IF_CONDITION_SPLITTER = "=";
public static final String MODEL = "remote";
public static final String PROTOCOL_SPLITTER = "\\.";
/**
* System config
*/
public static final Integer CONSUMER_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "consumer.parallelism");
public static final Integer TRANSFORM_PARALLELISM = FlowWriteConfigurations.getIntProperty(0, "transform.parallelism");
public static final Integer HBASE_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "hbase.tick.tuple.freq.secs");
public static final Integer APP_TICK_TUPLE_FREQ_SECS = FlowWriteConfigurations.getIntProperty(0, "app.tick.tuple.freq.secs");
public static final Integer DATA_CENTER_ID_NUM = FlowWriteConfigurations.getIntProperty(0, "data.center.id.num");
public static final Integer LOG_NEED_COMPLETE = FlowWriteConfigurations.getIntProperty(0, "log.need.complete");
public static final String MAIL_DEFAULT_CHARSET = FlowWriteConfigurations.getStringProperty(0, "mail.default.charset");
public static final String HBASE_TABLE_NAME = FlowWriteConfigurations.getStringProperty(1, "hbase.table.name");
/**
* kafka source config
*/
public static final String SESSION_TIMEOUT_MS = FlowWriteConfigurations.getStringProperty(1, "session.timeout.ms");
public static final String MAX_POLL_RECORDS = FlowWriteConfigurations.getStringProperty(1, "max.poll.records");
public static final String MAX_PARTITION_FETCH_BYTES = FlowWriteConfigurations.getStringProperty(1, "max.partition.fetch.bytes");
/**
* kafka sink config
*/
public static final String INPUT_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0, "input.kafka.servers");
public static final String OUTPUT_KAFKA_SERVERS = FlowWriteConfigurations.getStringProperty(0, "output.kafka.servers");
public static final String ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0, "zookeeper.servers");
public static final String HBASE_ZOOKEEPER_SERVERS = FlowWriteConfigurations.getStringProperty(0, "hbase.zookeeper.servers");
public static final String GROUP_ID = FlowWriteConfigurations.getStringProperty(0, "group.id");
public static final String OUTPUT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "output.kafka.topic");
public static final String INPUT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "input.kafka.topic");
public static final String PRODUCER_ACK = FlowWriteConfigurations.getStringProperty(0, "producer.ack");
public static final String TOOLS_LIBRARY = FlowWriteConfigurations.getStringProperty(0, "tools.library");
public static final String PRODUCER_KAFKA_COMPRESSION_TYPE = FlowWriteConfigurations.getStringProperty(0, "producer.kafka.compression.type");
public static final String KAFKA_SOURCE_PROTOCOL = FlowWriteConfigurations.getStringProperty(1, "kafka.source.protocol");
public static final String KAFKA_SINK_PROTOCOL = FlowWriteConfigurations.getStringProperty(1, "kafka.sink.protocol");
public static final String KAFKA_USER = FlowWriteConfigurations.getStringProperty(1, "kafka.user");
public static final String KAFKA_PIN = FlowWriteConfigurations.getStringProperty(1, "kafka.pin");
public static final String PERCENT_KAFKA_TOPIC = FlowWriteConfigurations.getStringProperty(0, "percent.kafka.topic");
/**
* connection kafka
*/
public static final String RETRIES = FlowWriteConfigurations.getStringProperty(1, "retries");
public static final String LINGER_MS = FlowWriteConfigurations.getStringProperty(1, "linger.ms");
public static final Integer REQUEST_TIMEOUT_MS = FlowWriteConfigurations.getIntProperty(1, "request.timeout.ms");
public static final Integer BATCH_SIZE = FlowWriteConfigurations.getIntProperty(1, "batch.size");
public static final Integer BUFFER_MEMORY = FlowWriteConfigurations.getIntProperty(1, "buffer.memory");
public static final Integer MAX_REQUEST_SIZE = FlowWriteConfigurations.getIntProperty(1, "max.request.size");
/**
* http
*/
public static final String SCHEMA_HTTP = FlowWriteConfigurations.getStringProperty(0, "schema.http");
public static final String APP_ID_HTTP = FlowWriteConfigurations.getStringProperty(0, "app.id.http");
/**
* oos
*/
public static final String OOS_SERVERS = FlowWriteConfigurations.getStringProperty(0, "oos.servers");
}
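
A hedged reading of the accessor convention, inferred only from which file each key lives in (the diff itself does not document the integer argument): 0 appears to select service_flow_config.properties and 1 default_config.properties:

    // inferred mapping, not confirmed by this commit
    Integer parallelism = FlowWriteConfigurations.getIntProperty(0, "transform.parallelism"); // service_flow_config.properties
    String timeout = FlowWriteConfigurations.getStringProperty(1, "session.timeout.ms");      // default_config.properties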

View File

@@ -1,75 +0,0 @@
package com.zdjizhi.topology;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.functions.DealFileProcessFunction;
import com.zdjizhi.utils.functions.FilterNullFunction;
import com.zdjizhi.utils.functions.MapCompletedFunction;
import com.zdjizhi.utils.kafka.Consumer;
import com.zdjizhi.utils.kafka.Producer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
/**
* @author qidaijie
* @Package com.zdjizhi.topology
* @Description:
* @date 2021/5/20 16:42
*/
public class LogFlowWriteTopology {
private static final Log logger = LogFactory.get();
public static void main(String[] args) {
final StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
// Enable checkpointing; the interval sets how often checkpoints are triggered (in milliseconds)
// environment.enableCheckpointing(5000);
//
DataStreamSource<String> streamSource = environment.addSource(Consumer.getKafkaConsumer())
.setParallelism(FlowWriteConfig.CONSUMER_PARALLELISM);
// DataStreamSource<String> streamSource = environment.readTextFile("D:\\flinkdata\\security.log", "utf-8");
if (FlowWriteConfig.LOG_NEED_COMPLETE == 1) {
//Process the raw logs: completion, transformation, etc.
DataStream<String> cleaningLog = streamSource.map(new MapCompletedFunction()).name("TransFormLogs")
.setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
//Filter out empty records so they are not sent to Kafka
DataStream<String> result = cleaningLog.filter(new FilterNullFunction()).name("FilterAbnormalData")
.setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
//Handle records that carry unstructured (file) fields
SingleOutputStreamOperator<String> process = result.process(new DealFileProcessFunction());
//Send the file metadata to TRAFFIC-FILE-METADATA
process.getSideOutput(DealFileProcessFunction.metaToKafa).addSink(Producer.getKafkaProducer()).name("toTrafficFileMeta")
.setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
// //Send the completed data to the Percent kafka
process.addSink(Producer.getPercentKafkaProducer()).name("toPercentKafka")
.setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
} else {
//Filter out empty records so they are not sent to Kafka
DataStream<String> result = streamSource.filter(new FilterNullFunction()).name("FilterOriginalData")
.setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
//Send the data to Kafka
result.addSink(Producer.getPercentKafkaProducer()).name("LogSinkKafka")
.setParallelism(FlowWriteConfig.TRANSFORM_PARALLELISM);
}
try {
environment.execute(args[0]);
} catch (Exception e) {
logger.error("This Flink task start ERROR! Exception information is :" + e);
e.printStackTrace();
}
}
}
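
Since main() passes args[0] to environment.execute(), the job name is supplied on the command line. Assuming the shaded jar keeps the Maven coordinates from the pom above, a submission would look like:

    flink run -c com.zdjizhi.topology.LogFlowWriteTopology log-completion-doublewrite-1214.jar my-job-name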

View File

@@ -1,123 +0,0 @@
package com.zdjizhi.utils.app;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.http.HttpClientUtil;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* AppId utility class
*
* @author qidaijie
*/
public class AppUtils {
private static final Log logger = LogFactory.get();
private static Map<Integer, String> appIdMap = new ConcurrentHashMap<>(128);
private static AppUtils appUtils;
private static void getAppInstance() {
appUtils = new AppUtils();
}
/**
* Constructor (new)
*/
private AppUtils() {
//schedule the periodic refresh
updateAppIdCache();
}
/**
* Refresh the cached state
*/
private static void change() {
if (appUtils == null) {
getAppInstance();
}
timestampsFilter();
}
/**
* Fetch the changed mappings
*/
private static void timestampsFilter() {
try {
Long begin = System.currentTimeMillis();
String schema = HttpClientUtil.requestByGetMethod(FlowWriteConfig.APP_ID_HTTP);
if (StringUtil.isNotBlank(schema)) {
String data = JSONObject.parseObject(schema).getString("data");
JSONArray objects = JSONArray.parseArray(data);
for (Object object : objects) {
JSONArray jsonArray = JSONArray.parseArray(object.toString());
int key = jsonArray.getInteger(0);
String value = jsonArray.getString(1);
if (appIdMap.containsKey(key)) {
if (!value.equals(appIdMap.get(key))) {
appIdMap.put(key, value);
}
} else {
appIdMap.put(key, value);
}
}
logger.warn("Updating the correspondence takes time:" + (begin - System.currentTimeMillis()));
logger.warn("Pull the length of the interface data:[" + objects.size() + "]");
}
} catch (RuntimeException e) {
logger.error("Update cache app-id failed, exception" + e);
}
}
/**
* Refresh timer: updates the app-id cache at a fixed interval
*/
private void updateAppIdCache() {
ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
executorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
if (FlowWriteConfig.APP_TICK_TUPLE_FREQ_SECS != 0) {
change();
}
} catch (RuntimeException e) {
logger.error("AppUtils update AppCache is error===>{" + e + "}<===");
}
}
}, 1, FlowWriteConfig.APP_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
}
/**
* Look up the appName for an app id
*
* @param appId app_id
* @return appName
*/
public static String getAppName(int appId) {
if (appUtils == null) {
getAppInstance();
}
if (appIdMap.containsKey(appId)) {
return appIdMap.get(appId);
} else {
logger.warn("AppMap get appName is null, ID is :" + appId);
return "";
}
}
}
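
Usage sketch: the first lookup lazily builds the singleton and starts the refresh timer; later calls read the in-memory map (the id below is invented):

    String appName = AppUtils.getAppName(1001); // returns "" when the id is unknown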

View File

@@ -1,18 +0,0 @@
package com.zdjizhi.utils.exception;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.exception
* @Description:
* @date 2021/3/25 9:42
*/
public class FlowWriteException extends RuntimeException {
public FlowWriteException() {
}
public FlowWriteException(String message) {
super(message);
}
}

View File

@@ -1,123 +0,0 @@
package com.zdjizhi.utils.functions;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.zdjizhi.bean.FileMeta;
import com.zdjizhi.bean.SourceList;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.general.FileEdit;
import com.zdjizhi.utils.json.JsonTypeUtils;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import java.util.Map;
/**
* @author wangchengcheng
* @Package com.zdjizhi.utils.functions
* @Description:
* @date 2021/10/14
*/
public class DealFileProcessFunction extends ProcessFunction<String,String> {
private static final Log logger = LogFactory.get();
private Map<String, Object> jsonMap = null;
private String rpUrlValue;
private String rqUrlValue;
private String emailUrlValue;
private long cfgId = 0; //= common_policy_id;
private String sIp = null; // = common_client_ip;
private int sPort = 0;// = common_client_port;
private String dIp = null;//= common_server_ip;
private int dPort = 0;// = common_server_port;
private long foundTime = 0;// = common_recv_time;
private String account = null;
private String domain = null;
private String schemaType = null;
//Initialize the side-output stream tag
public static OutputTag<String> metaToKafa = new OutputTag<String>("metaToKafka") {};
@SuppressWarnings("unchecked")
@Override
public void processElement(String message, Context context, Collector<String> collector) throws Exception {
try {
if (StringUtil.isNotBlank(message)) {
Map<String, Object> map = (Map<String, Object>) JsonMapper.fromJsonString(message, Map.class);
jsonMap = JsonTypeUtils.typeTransform(map);
rpUrlValue = (String) jsonMap.get("http_response_body");
rqUrlValue = (String) jsonMap.get("http_request_body");
emailUrlValue = (String) jsonMap.get("mail_eml_file");
if (StringUtil.isNotBlank(rpUrlValue) || StringUtil.isNotBlank(rqUrlValue) || StringUtil.isNotBlank(emailUrlValue)) {
cfgId = (long) jsonMap.get("common_policy_id");
sIp = (String) jsonMap.get("common_client_ip");
sPort = (int) jsonMap.get("common_client_port");
dIp = (String) jsonMap.get("common_server_ip");
dPort = (int) jsonMap.get("common_server_port");
foundTime = (long) jsonMap.get("common_recv_time");
schemaType = (String) jsonMap.get("common_schema_type");
if (StringUtil.isNotBlank((String) jsonMap.get("http_domain"))) {
domain = jsonMap.get("http_domain").toString();
} else {
domain = "NA";
}
if (StringUtil.isNotBlank((String) jsonMap.get("common_subscribe_id"))) {
account = jsonMap.get("common_subscribe_id").toString();
} else {
account = "NA";
}
FileMeta fileMeta = new FileMeta();
JSONArray jsonarray = new JSONArray();
if (StringUtil.isNotBlank(rqUrlValue)) {
jsonMap.put("http_request_body", FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rqUrlValue, schemaType, "_1"));
SourceList request = new SourceList();
request.setSource_oss_path(rqUrlValue);
request.setDestination_oss_path(FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rqUrlValue, schemaType, "_1"));
jsonarray.add(request);
}
if (StringUtil.isNotBlank(rpUrlValue)) {
jsonMap.put("http_response_body", FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rpUrlValue, schemaType, "_2"));
SourceList response = new SourceList();
response.setSource_oss_path(rpUrlValue);
response.setDestination_oss_path(FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, rpUrlValue, schemaType, "_2"));
jsonarray.add(response);
}
if (StringUtil.isNotBlank(emailUrlValue)) {
jsonMap.put("mail_eml_file", FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, emailUrlValue, schemaType, "_9"));
SourceList emailFile = new SourceList();
emailFile.setSource_oss_path(emailUrlValue);
emailFile.setDestination_oss_path(FileEdit.dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, emailUrlValue, schemaType, "_9"));
jsonarray.add(emailFile);
}
fileMeta.setSourceList(jsonarray);
fileMeta.setCommon_log_id((long) jsonMap.get("common_log_id"));
fileMeta.setCommon_recv_time(Integer.parseInt(jsonMap.get("common_recv_time").toString()));
fileMeta.setCommon_schema_type((String) jsonMap.get("common_schema_type"));
fileMeta.setProcessing_time((int) (System.currentTimeMillis() / 1000));
context.output(metaToKafa, JSONObject.toJSONString(fileMeta));
}
collector.collect(JsonMapper.toJsonString(jsonMap));
} else {
collector.collect(message);
}
}catch (RuntimeException e) {
logger.error("处理带有非结构结构化字段的日志出错:" + e + "\n" + message);
}
}
}

View File

@@ -1,17 +0,0 @@
package com.zdjizhi.utils.functions;
import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.functions.FilterFunction;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.functions
* @Description:
* @date 2021/5/27 15:01
*/
public class FilterNullFunction implements FilterFunction<String> {
@Override
public boolean filter(String message) {
return StringUtil.isNotBlank(message);
}
}

View File

@@ -1,28 +0,0 @@
package com.zdjizhi.utils.functions;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.general.TransFormTypeMap;
import org.apache.flink.api.common.functions.MapFunction;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.functions
* @Description:
* @date 2021/5/27 15:01
*/
public class MapCompletedFunction implements MapFunction<String, String> {
private static final Log logger = LogFactory.get();
@Override
@SuppressWarnings("unchecked")
public String map(String logs) {
try {
return TransFormTypeMap.dealCommonMessage(logs);
} catch (RuntimeException e) {
logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + logs);
return "";
}
}
}

View File

@@ -1,40 +0,0 @@
package com.zdjizhi.utils.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.ordinary.MD5Utils;
/**
* File-field manipulation utility
*/
public class FileEdit {
private static final Log logger = LogFactory.get();
public static String dealFileUrlToPercent(long cfgId,String sIp,int sPort,String dIp,int dPort,long foundTime,String account,String domain, String urlValue,String schemaType,String fileSuffix) throws Exception {
String fileType = null;
if (schemaType.equals("HTTP")){
fileType = "html";
}
if (schemaType.equals("MAIL")){
fileType = "eml";
}
return "http://"+ FlowWriteConfig.OOS_SERVERS+"/upload_v2"+"/"+cfgId+"/"+fileType+"/"+sIp+"/"+sPort+"/"+dIp+"/"+dPort+"/"+foundTime+"/"+account+"/"+domain+"/"+getFileName(urlValue,fileSuffix);
}
public static String getFileType(String url){
String[] split = url.split("\\.");
return split[split.length-1];
}
public static String getFileName(String url,String fileSuffix) throws Exception {
String[] arr = url.split("/");
String filename = arr[arr.length-1].substring(0,arr[arr.length-1].lastIndexOf("_"));
String prefix = MD5Utils.md5Encode(filename);
// String suffix = arr[arr.length-1].substring(arr[arr.length-1].lastIndexOf("_"),arr[arr.length-1].lastIndexOf("."));
return prefix+fileSuffix;
}
}
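
A worked example of the rewriting above, with every input value invented for illustration:

    // getFileName("http://oss/bucket/report_0.html", "_2")
    //   filename = "report" (text before the last '_'), prefix = MD5Utils.md5Encode("report")
    //   returns md5("report") + "_2"
    // dealFileUrlToPercent(cfgId, sIp, sPort, dIp, dPort, foundTime, account, domain, url, "HTTP", "_2")
    //   -> http://<oos.servers>/upload_v2/<cfgId>/html/<sIp>/<sPort>/<dIp>/<dPort>/<foundTime>/<account>/<domain>/md5("report")_2
    // Note: for schema types other than HTTP or MAIL, fileType stays null, so the literal
    // string "null" would appear in the URL - a latent quirk of this deleted code.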

View File

@@ -1,213 +0,0 @@
package com.zdjizhi.utils.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.zookeeper.DistributedLock;
import com.zdjizhi.utils.zookeeper.ZookeeperUtils;
/**
* Snowflake ID generator
*
* @author qidaijie
*/
public class SnowflakeId {
private static final Log logger = LogFactory.get();
/**
* 64 bits in total; the first bit is the sign bit, always 0.
* Timestamp: 39 bits (~17 years); dataCenterId (one per environment/job): 5 bits (0-31);
* workerId (one per process): 8 bits (0-255); sequence: 11 bits (2047/ms)
*
* sequence per ms = (-1L ^ (-1L << 11))
* max years = (1L << 39) / (1000L * 60 * 60 * 24 * 365)
*/
/**
* Epoch start (2020-11-14 00:00:00), max ~17 years
*/
private final long twepoch = 1605283200000L;
/**
* Number of bits for the worker id
*/
private final long workerIdBits = 8L;
/**
* Number of bits for the data-center id
*/
private final long dataCenterIdBits = 5L;
/**
* Maximum supported worker id, here 255 (this shift trick quickly yields the largest decimal value an n-bit binary number can represent)
* M << n = M * 2^n
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* Maximum supported data-center id, here 31
*/
private final long maxDataCenterId = -1L ^ (-1L << dataCenterIdBits);
/**
* Number of bits for the sequence within the id
*/
private final long sequenceBits = 11L;
/**
* Worker id is shifted left by 11 bits (the sequence width)
*/
private final long workerIdShift = sequenceBits;
/**
* Data-center id is shifted left by 19 bits (11+8)
*/
private final long dataCenterIdShift = sequenceBits + workerIdBits;
/**
* Timestamp is shifted left by 24 bits (11+8+5)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits;
/**
* Mask for the sequence, here 2047
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* Worker id (0~255)
*/
private long workerId;
/**
* Data-center id (0~31)
*/
private long dataCenterId;
/**
* Per-millisecond sequence (0~2047)
*/
private long sequence = 0L;
/**
* Timestamp of the last id generation
*/
private long lastTimestamp = -1L;
/**
* Maximum tolerated clock rollback, 10s
*/
*/
private static final long rollBackTime = 10000L;
private static SnowflakeId idWorker;
private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
static {
idWorker = new SnowflakeId(FlowWriteConfig.ZOOKEEPER_SERVERS, FlowWriteConfig.DATA_CENTER_ID_NUM);
}
//==============================Constructors=====================================
/**
* Constructor
*/
private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
DistributedLock lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
try {
lock.lock();
int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than ", maxDataCenterId));
}
this.workerId = tmpWorkerId;
this.dataCenterId = dataCenterIdNum;
} catch (RuntimeException e) {
logger.error("This is not usual error!!!===>>>" + e + "<<<===");
}finally {
lock.unlock();
}
}
// ==============================Methods==========================================
/**
* Get the next id (this method is thread-safe)
*
* @return the next snowflake id
*/
private synchronized long nextId() {
long timestamp = timeGen();
//Tolerate limited clock rollback: if the system clock moved back by less than rollBackTime, wait for it to catch up
if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
timestamp = tilNextMillis(lastTimestamp);
}
//If the current time is still before the last generation timestamp, the clock has moved backwards and an exception must be thrown
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//Same millisecond as the last id: advance the per-millisecond sequence
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//Sequence overflow within this millisecond
if (sequence == 0) {
//Block until the next millisecond for a fresh timestamp
timestamp = tilNextMillis(lastTimestamp);
}
}
//Timestamp changed: reset the per-millisecond sequence
else {
sequence = 0L;
}
//Record the timestamp of this generation
lastTimestamp = timestamp;
//Shift the pieces and OR them together into a 64-bit id
return ((timestamp - twepoch) << timestampLeftShift)
| (dataCenterId << dataCenterIdShift)
| (workerId << workerIdShift)
| sequence;
}
/**
* Block until the next millisecond, i.e. until a new timestamp is obtained
*
* @param lastTimestamp timestamp of the last id generation
* @return the current timestamp
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* Return the current time in milliseconds
*
* @return current time (ms)
*/
protected long timeGen() {
return System.currentTimeMillis();
}
/**
* Static accessor
*
* @return the next generated id
*/
public static Long generateId() {
return idWorker.nextId();
}
}
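
Given the bit layout above (11-bit sequence, 8-bit worker id, 5-bit data-center id, 39-bit timestamp), a generated id can be decomposed as follows; this is an illustrative sketch, not code from the project:

    long id = SnowflakeId.generateId();
    long sequence = id & 2047L;                      // low 11 bits
    long workerId = (id >>> 11) & 255L;              // next 8 bits
    long dataCenterId = (id >>> 19) & 31L;           // next 5 bits
    long timestampMs = (id >>> 24) + 1605283200000L; // ms since the 2020-11-14 epoch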

View File

@@ -1,146 +0,0 @@
package com.zdjizhi.utils.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.json.JsonParseUtil;
import com.zdjizhi.utils.json.JsonTypeUtils;
import java.util.ArrayList;
import java.util.Map;
/**
* Description: transformation/completion utility class
*
* @author qidaijie
*/
public class TransFormMap {
private static final Log logger = LogFactory.get();
/**
* Build the job list.
* Each element is a four-string array (source field carrying a format tag, field to complete, transform function to apply, extra parameter), e.g.:
* (mail_subject mail_subject decode_of_base64 mail_subject_charset)
*/
private static ArrayList<String[]> jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
/**
* Parse a log line and complete it
*
* @param message raw log from the kafka topic
* @return the completed log
*/
@SuppressWarnings("unchecked")
public static String dealCommonMessage(String message) {
try {
if (StringUtil.isNotBlank(message)) {
Map<String, Object> jsonMap = (Map<String, Object>) JsonMapper.fromJsonString(message, Map.class);
for (String[] strings : jobList) {
//value of the source field
Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
//key of the field to complete
String appendToKeyName = strings[1];
//current value of the field to complete
Object appendTo = JsonParseUtil.getValue(jsonMap, appendToKeyName);
//name of the transform function to apply
String function = strings[2];
//extra parameter value
String param = strings[3];
functionSet(function, jsonMap, appendToKeyName, appendTo, logValue, param);
}
return JsonMapper.toJsonString(jsonMap);
} else {
return "";
}
} catch (RuntimeException e) {
logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + message);
return "";
}
}
/**
* Set of functions that operate on fields according to the schema description
*
* @param function name of the transform function to apply
* @param jsonMap parsed log map
* @param appendToKeyName key of the field to complete
* @param appendTo current value of the field to complete
* @param logValue value of the source field
* @param param extra parameter value
*/
private static void functionSet(String function, Map<String, Object> jsonMap, String appendToKeyName, Object appendTo, Object logValue, String param) {
switch (function) {
case "current_timestamp":
if (!(appendTo instanceof Long)) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getCurrentTime());
}
break;
case "snowflake_id":
JsonParseUtil.setValue(jsonMap, appendToKeyName, SnowflakeId.generateId());
break;
case "geo_ip_detail":
if (logValue != null && appendTo == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpDetail(logValue.toString()));
}
break;
case "geo_asn":
if (logValue != null && appendTo == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoAsn(logValue.toString()));
}
break;
case "geo_ip_country":
if (logValue != null && appendTo == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpCountry(logValue.toString()));
}
break;
case "set_value":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, param);
}
break;
case "get_value":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, logValue);
}
break;
case "if":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.condition(jsonMap, param));
}
break;
case "sub_domain":
if (appendTo == null && logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getTopDomain(logValue.toString()));
}
break;
case "radius_match":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.radiusMatch(logValue.toString()));
}
break;
case "app_match":
if (logValue != null && appendTo == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.appMatch(logValue.toString()));
}
break;
case "decode_of_base64":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
}
break;
case "flattenSpec":
if (logValue != null && param != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.flattenSpec(logValue.toString(), param));
}
break;
default:
}
}
}
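
To make the four-string job convention concrete, take the example entry from the javadoc above; how functionSet handles it follows from the decode_of_base64 branch:

    String[] job = {"mail_subject", "mail_subject", "decode_of_base64", "mail_subject_charset"};
    // decode_of_base64 base64-decodes the mail_subject value; the fourth string is resolved
    // by isJsonValue: a parameter containing "$." is read from the log as a field, anything
    // else is passed through literally, and a null charset falls back to mail.default.charset.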

View File

@@ -1,153 +0,0 @@
package com.zdjizhi.utils.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.json.JsonParseUtil;
import java.util.ArrayList;
import java.util.HashMap;
/**
* Description: transformation/completion utility class
*
* @author qidaijie
*/
public class TransFormObject {
private static final Log logger = LogFactory.get();
/**
* Map used to build the reflective class in memory
*/
private static HashMap<String, Class> map = JsonParseUtil.getMapFromHttp(FlowWriteConfig.SCHEMA_HTTP);
/**
* Reflectively instantiate that class
*/
private static Object mapObject = JsonParseUtil.generateObject(map);
/**
* Build the job list.
* Each element is a four-string array (source field carrying a format tag, field to complete, transform function to apply, extra parameter), e.g.:
* (mail_subject mail_subject decode_of_base64 mail_subject_charset)
*/
private static ArrayList<String[]> jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
/**
* Parse a log line and complete it
*
* @param message raw log from the kafka topic
* @return the completed log
*/
public static String dealCommonMessage(String message) {
try {
if (StringUtil.isNotBlank(message)) {
Object object = JsonMapper.fromJsonString(message, mapObject.getClass());
for (String[] strings : jobList) {
//value of the source field
Object name = JsonParseUtil.getValue(object, strings[0]);
//key of the field to complete
String appendToKeyName = strings[1];
//current value of the field to complete
Object appendTo = JsonParseUtil.getValue(object, appendToKeyName);
//name of the transform function to apply
String function = strings[2];
//extra parameter value
String param = strings[3];
functionSet(function, object, appendToKeyName, appendTo, name, param);
}
return JsonMapper.toJsonString(object);
} else {
return "";
}
} catch (RuntimeException e) {
logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + message);
return "";
}
}
/**
* Set of functions that operate on fields according to the schema description
*
* @param function name of the transform function to apply
* @param object dynamic POJO Object
* @param appendToKeyName key of the field to complete
* @param appendTo current value of the field to complete
* @param name value of the source field
* @param param extra parameter value
*/
private static void functionSet(String function, Object object, String appendToKeyName, Object appendTo, Object name, String param) {
switch (function) {
case "current_timestamp":
if (!(appendTo instanceof Long)) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getCurrentTime());
}
break;
case "snowflake_id":
JsonParseUtil.setValue(object, appendToKeyName, SnowflakeId.generateId());
break;
case "geo_ip_detail":
if (name != null && appendTo == null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getGeoIpDetail(name.toString()));
}
break;
case "geo_asn":
if (name != null && appendTo == null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getGeoAsn(name.toString()));
}
break;
case "geo_ip_country":
if (name != null && appendTo == null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getGeoIpCountry(name.toString()));
}
break;
case "set_value":
if (name != null && param != null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.setValue(param));
}
break;
case "get_value":
if (name != null) {
JsonParseUtil.setValue(object, appendToKeyName, name);
}
break;
case "if":
if (param != null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.condition(object, param));
}
break;
case "sub_domain":
if (appendTo == null && name != null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.getTopDomain(name.toString()));
}
break;
case "radius_match":
if (name != null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.radiusMatch(name.toString()));
}
break;
case "app_match":
if (name != null && appendTo == null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.appMatch(name.toString()));
}
break;
case "decode_of_base64":
if (name != null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.decodeBase64(name.toString(), TransFunction.isJsonValue(object, param)));
}
break;
case "flattenSpec":
if (name != null && param != null) {
JsonParseUtil.setValue(object, appendToKeyName, TransFunction.flattenSpec(name.toString(), param));
}
break;
default:
}
}
}

View File

@@ -1,146 +0,0 @@
package com.zdjizhi.utils.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.json.JsonParseUtil;
import com.zdjizhi.utils.json.JsonTypeUtils;
import java.util.ArrayList;
import java.util.Map;
/**
* Description: transformation/completion utility class
*
* @author qidaijie
*/
public class TransFormTypeMap {
private static final Log logger = LogFactory.get();
/**
* Build the job list.
* Each element is a four-string array (source field carrying a format tag, field to complete, transform function to apply, extra parameter), e.g.:
* (mail_subject mail_subject decode_of_base64 mail_subject_charset)
*/
private static ArrayList<String[]> jobList = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
/**
* Parse a log line and complete it
*
* @param message raw log from the kafka topic
* @return the completed log
*/
@SuppressWarnings("unchecked")
public static String dealCommonMessage(String message) {
try {
if (StringUtil.isNotBlank(message)) {
Map<String, Object> map = (Map<String, Object>) JsonMapper.fromJsonString(message, Map.class);
Map<String, Object> jsonMap = JsonTypeUtils.typeTransform(map);
for (String[] strings : jobList) {
//value of the source field
Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
//key of the field to complete
String appendToKeyName = strings[1];
//current value of the field to complete
Object appendToKeyValue = JsonParseUtil.getValue(jsonMap, appendToKeyName);
//name of the transform function to apply
String function = strings[2];
//extra parameter value
String param = strings[3];
functionSet(function, jsonMap, appendToKeyName, appendToKeyValue, logValue, param);
}
return JsonMapper.toJsonString(jsonMap);
} else {
return "";
}
} catch (RuntimeException e) {
logger.error("解析补全日志信息过程异常,异常信息:" + e + "\n" + message);
return "";
}
}
/**
* Set of functions that operate on fields according to the schema description
*
* @param function name of the transform function to apply
* @param jsonMap parsed log map
* @param appendToKeyName key of the field to complete
* @param appendToKeyValue current value of the field to complete
* @param logValue value of the source field
* @param param extra parameter value
*/
private static void functionSet(String function, Map<String, Object> jsonMap, String appendToKeyName, Object appendToKeyValue, Object logValue, String param) {
switch (function) {
case "current_timestamp":
if (!(appendToKeyValue instanceof Long)) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getCurrentTime());
}
break;
case "snowflake_id":
JsonParseUtil.setValue(jsonMap, appendToKeyName, SnowflakeId.generateId());
break;
case "geo_ip_detail":
if (logValue != null && appendToKeyValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpDetail(logValue.toString()));
}
break;
case "geo_asn":
if (logValue != null && appendToKeyValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoAsn(logValue.toString()));
}
break;
case "geo_ip_country":
if (logValue != null && appendToKeyValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getGeoIpCountry(logValue.toString()));
}
break;
case "set_value":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, param);
}
break;
case "get_value":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, logValue);
}
break;
case "if":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.condition(jsonMap, param));
}
break;
case "sub_domain":
if (appendToKeyValue == null && logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.getTopDomain(logValue.toString()));
}
break;
case "radius_match":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.radiusMatch(logValue.toString()));
}
break;
case "app_match":
if (logValue != null && appendToKeyValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.appMatch(logValue.toString()));
}
break;
case "decode_of_base64":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
}
break;
case "flattenSpec":
if (logValue != null && param != null) {
JsonParseUtil.setValue(jsonMap, appendToKeyName, TransFunction.flattenSpec(logValue.toString(), param));
}
break;
default:
}
}
}

View File

@@ -1,316 +0,0 @@
package com.zdjizhi.utils.general;
import cn.hutool.core.codec.Base64;
import cn.hutool.core.text.StrSpliter;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException;
import com.jayway.jsonpath.JsonPath;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.FormatUtils;
import com.zdjizhi.utils.IpLookup;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.app.AppUtils;
import com.zdjizhi.utils.hbase.HBaseUtils;
import com.zdjizhi.utils.json.JsonParseUtil;
import com.zdjizhi.utils.json.TypeUtils;
import java.util.ArrayList;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author qidaijie
*/
class TransFunction {
private static final Log logger = LogFactory.get();
private static final Pattern PATTERN = Pattern.compile("[0-9]*");
/**
* IP location library helper
*/
private static IpLookup ipLookup = new IpLookup.Builder(false)
.loadDataFileV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_v4.mmdb")
.loadDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_v6.mmdb")
.loadDataFilePrivateV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_private_v4.mmdb")
.loadDataFilePrivateV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_private_v6.mmdb")
.loadAsnDataFile(FlowWriteConfig.TOOLS_LIBRARY + "asn_v4.mmdb")
.loadAsnDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "asn_v6.mmdb")
.build();
/**
* Produce the current timestamp (in seconds)
*/
static long getCurrentTime() {
return System.currentTimeMillis() / 1000;
}
/**
* Look up location details for a client IP
*
* @param ip client IP
* @return detailed location info for the IP
*/
static String getGeoIpDetail(String ip) {
return ipLookup.cityLookupDetail(ip);
}
/**
* Look up ASN info for an IP
*
* @param ip client/server IP
* @return ASN
*/
static String getGeoAsn(String ip) {
return ipLookup.asnLookup(ip);
}
/**
* Look up the country for an IP
*
* @param ip server IP
* @return country
*/
static String getGeoIpCountry(String ip) {
return ipLookup.countryLookup(ip);
}
/**
* Radius completion via HBase lookup
*
* @param ip client IP
* @return account
*/
static String radiusMatch(String ip) {
String account = HBaseUtils.getAccount(ip.trim());
// if (StringUtil.isBlank(account)) {
// logger.warn("HashMap get account is null, Ip is :" + ip);
// }
return account;
}
/**
* Complete appName from the cached appId mapping
*
* @param appIds list of app ids
* @return appName
*/
static String appMatch(String appIds) {
try {
String appId = StrSpliter.split(appIds, FlowWriteConfig.FORMAT_SPLITTER, true, true).get(0);
return AppUtils.getAppName(Integer.parseInt(appId));
} catch (NumberFormatException | ClassCastException exception) {
logger.error("APP ID列表分割转换异常异常APP ID列表:" + appIds);
return "";
}
}
/**
* Extract the top-level (registrable) domain
*
* @param domain original domain
* @return top-level domain
*/
static String getTopDomain(String domain) {
try {
return FormatUtils.getTopPrivateDomain(domain);
} catch (StringIndexOutOfBoundsException outException) {
logger.error("解析顶级域名异常,异常域名:" + domain);
return "";
}
}
/**
* Decode a Base64 string using the given charset
*
* @param message base64
* @param charset charset
* @return decoded string
*/
static String decodeBase64(String message, Object charset) {
String result = "";
try {
if (StringUtil.isNotBlank(message)) {
if (charset == null) {
result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
} else {
result = Base64.decodeStr(message, charset.toString());
}
}
} catch (RuntimeException rune) {
logger.error("解析 Base64 异常,异常信息:" + rune);
}
return result;
}
/**
* Parse json with a JsonPath expression
*
* @param message json
* @param expr parse expression
* @return parse result
*/
static String flattenSpec(String message, String expr) {
String flattenResult = "";
try {
if (StringUtil.isNotBlank(expr)) {
ArrayList<String> read = JsonPath.parse(message).read(expr);
flattenResult = read.get(0);
}
} catch (ClassCastException | InvalidPathException e) {
logger.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + e);
}
return flattenResult;
}
/**
* If the param names a log field, return its value; otherwise return the raw string
*
* @param object in-memory entity
* @param param field name / plain string
* @return JSON.Value or String
*/
static Object isJsonValue(Object object, String param) {
if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
return JsonParseUtil.getValue(object, param.substring(2));
} else {
return param;
}
}
/**
* If the param names a log field, return its value; otherwise return the raw string
*
* @param jsonMap in-memory log map
* @param param field name / plain string
* @return JSON.Value or String
*/
static Object isJsonValue(Map<String, Object> jsonMap, String param) {
if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
return JsonParseUtil.getValue(jsonMap, param.substring(2));
} else {
return param;
}
}
/**
* IF function: builds a ternary expression while parsing the log; includes a numeric check, and numeric results are returned as long.
*
* @param object in-memory entity
* @param ifParam field name / plain string
* @return resultA or resultB or null
*/
static Object condition(Object object, String ifParam) {
Object result = null;
try {
String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
Object direction = isJsonValue(object, norms[0]);
Object resultA = isJsonValue(object, split[1]);
Object resultB = isJsonValue(object, split[2]);
if (direction instanceof Number) {
// result = (Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB;
result = TypeUtils.castToIfFunction((Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB);
} else if (direction instanceof String) {
result = TypeUtils.castToIfFunction(direction.equals(norms[1]) ? resultA : resultB);
// result = direction.equals(norms[1]) ? resultA : resultB;
}
}
} catch (RuntimeException e) {
logger.error("IF 函数执行异常,异常信息:" + e);
}
return result;
}
/**
* IF function: builds a ternary expression while parsing the log; includes a numeric check, and numeric results are returned as long.
*
* @param jsonMap in-memory log map
* @param ifParam field name / plain string
* @return resultA or resultB or null
*/
static Object condition(Map<String, Object> jsonMap, String ifParam) {
Object result = null;
try {
String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
Object direction = isJsonValue(jsonMap, norms[0]);
Object resultA = isJsonValue(jsonMap, split[1]);
Object resultB = isJsonValue(jsonMap, split[2]);
if (direction instanceof Number) {
result = (Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB;
// result = TypeUtils.castToIfFunction((Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB);
} else if (direction instanceof String) {
// result = TypeUtils.castToIfFunction(direction.equals(norms[1]) ? resultA : resultB);
result = direction.equals(norms[1]) ? resultA : resultB;
}
}
} catch (RuntimeException e) {
logger.error("IF 函数执行异常,异常信息:" + e);
}
return result;
}
// /**
// * IF function: builds a ternary expression while parsing the log; includes a numeric check, and numeric results are returned as long.
// *
// * @param jsonMap raw log
// * @param ifParam field name / plain string
// * @return resultA or resultB or null
// */
// static Object condition(Map<String, Object> jsonMap, String ifParam) {
// try {
// String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
// String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
// String direction = isJsonValue(jsonMap, norms[0]);
// if (StringUtil.isNotBlank(direction)) {
// if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
// String resultA = isJsonValue(jsonMap, split[1]);
// String resultB = isJsonValue(jsonMap, split[2]);
// String result = (Integer.parseInt(direction) == Integer.parseInt(norms[1])) ? resultA : resultB;
// Matcher isNum = PATTERN.matcher(result);
// if (isNum.matches()) {
// return Long.parseLong(result);
// } else {
// return result;
// }
// }
// }
// } catch (RuntimeException e) {
// logger.error("IF 函数执行异常,异常信息:" + e);
// }
// return null;
// }
/**
* Set a fixed value; numeric values are returned as long
*
* @param param default value
* @return a number or a string
*/
static Object setValue(String param) {
try {
Matcher isNum = PATTERN.matcher(param);
if (isNum.matches()) {
return Long.parseLong(param);
} else {
return param;
}
} catch (RuntimeException e) {
logger.error("SetValue 函数异常,异常信息:" + e);
}
return null;
}
}
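A hedged sketch of the IF-expression format condition() consumes. It assumes FlowWriteConfig.FORMAT_SPLITTER is ",", IF_CONDITION_SPLITTER is "=", and IS_JSON_KEY_TAG is a two-character field prefix such as "$." (isJsonValue strips two leading characters); none of those constants appear in this diff, so treat the literals as assumptions. The sketch sits in the same package because condition() is package-private:

package com.zdjizhi.utils.general;

import java.util.HashMap;
import java.util.Map;

public class ConditionSketch {
    public static void main(String[] args) {
        Map<String, Object> log = new HashMap<>();
        log.put("direction", 0);
        log.put("src_ip", "10.0.0.1");
        log.put("dst_ip", "8.8.8.8");
        // reads as: if direction == 0 then src_ip else dst_ip
        Object picked = TransFunction.condition(log, "$.direction=0,$.src_ip,$.dst_ip");
        System.out.println(picked); // expected "10.0.0.1" under the assumptions above
    }
}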


@@ -1,201 +0,0 @@
package com.zdjizhi.utils.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* HBase utility class
*
* @author qidaijie
*/
public class HBaseUtils {
private static final Log logger = LogFactory.get();
private static Map<String, String> subIdMap = new ConcurrentHashMap<>(83334);
private static Connection connection;
private static Long time;
private static String zookeeperIp;
private static String hBaseTable;
private static HBaseUtils hBaseUtils;
private static void getInstance() {
hBaseUtils = new HBaseUtils();
}
/**
* Constructor
*/
private HBaseUtils() {
zookeeperIp = FlowWriteConfig.HBASE_ZOOKEEPER_SERVERS;
hBaseTable = FlowWriteConfig.HBASE_TABLE_NAME;
//obtain the connection
getConnection();
//load the full snapshot
getAll();
//schedule periodic refresh
updateCache();
}
private static void getConnection() {
try {
// HBase configuration
Configuration configuration = HBaseConfiguration.create();
// set the zookeeper quorum
configuration.set("hbase.zookeeper.quorum", zookeeperIp);
configuration.set("hbase.client.retries.number", "3");
configuration.set("hbase.bulkload.retries.number", "3");
configuration.set("zookeeper.recovery.retry", "3");
connection = ConnectionFactory.createConnection(configuration);
time = System.currentTimeMillis();
logger.warn("HBaseUtils get HBase connection,now to getAll().");
} catch (IOException ioe) {
logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
} catch (RuntimeException e) {
logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
}
}
/**
* Refresh the cache with recent changes
*/
private static void change() {
if (hBaseUtils == null) {
getInstance();
}
long nowTime = System.currentTimeMillis();
timestampsFilter(time - 1000, nowTime + 500);
}
/**
* Fetch changes within a time range
*
* @param startTime start time
* @param endTime end time
*/
private static void timestampsFilter(Long startTime, Long endTime) {
Long begin = System.currentTimeMillis();
Table table = null;
ResultScanner scanner = null;
Scan scan2 = new Scan();
try {
table = connection.getTable(TableName.valueOf("sub:" + hBaseTable));
scan2.setTimeRange(startTime, endTime);
scanner = table.getScanner(scan2);
for (Result result : scanner) {
Cell[] cells = result.rawCells();
for (Cell cell : cells) {
String key = Bytes.toString(CellUtil.cloneRow(cell)).trim();
String value = Bytes.toString(CellUtil.cloneValue(cell)).trim();
if (subIdMap.containsKey(key)) {
if (!value.equals(subIdMap.get(key))) {
subIdMap.put(key, value);
}
} else {
subIdMap.put(key, value);
}
}
}
Long end = System.currentTimeMillis();
logger.warn("HBaseUtils Now subIdMap.keySet().size() is: " + subIdMap.keySet().size());
logger.warn("HBaseUtils Update cache timeConsuming is: " + (end - begin) + ",BeginTime: " + startTime + ",EndTime: " + endTime);
time = endTime;
} catch (IOException ioe) {
logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
} catch (RuntimeException e) {
logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
} finally {
if (scanner != null) {
scanner.close();
}
if (table != null) {
try {
table.close();
} catch (IOException e) {
logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
}
/**
* Load all key/value pairs
*/
private static void getAll() {
long begin = System.currentTimeMillis();
try {
Table table = connection.getTable(TableName.valueOf("sub:" + hBaseTable));
Scan scan2 = new Scan();
ResultScanner scanner = table.getScanner(scan2);
for (Result result : scanner) {
Cell[] cells = result.rawCells();
for (Cell cell : cells) {
subIdMap.put(Bytes.toString(CellUtil.cloneRow(cell)), Bytes.toString(CellUtil.cloneValue(cell)));
}
}
logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size(): " + subIdMap.size());
logger.warn("HBaseUtils Get fullAmount List size->subIdMap.size() timeConsuming is: " + (System.currentTimeMillis() - begin));
scanner.close();
} catch (IOException ioe) {
logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
} catch (RuntimeException e) {
logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
}
}
/**
* Refresh timer: periodically pulls incremental changes into the cache
*/
private void updateCache() {
// ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1,
// new BasicThreadFactory.Builder().namingPattern("hbase-change-pool-%d").daemon(true).build());
ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
executorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
change();
}
} catch (RuntimeException e) {
logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
}
}
}, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
}
/**
* Get the account for a client IP
*
* @param clientIp client_ip
* @return account
*/
public static String getAccount(String clientIp) {
if (hBaseUtils == null) {
getInstance();
}
return subIdMap.get(clientIp);
}
}
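A minimal usage sketch: the first call lazily builds the HBase connection, loads the full sub:&lt;table&gt; snapshot into subIdMap, and starts the periodic refresh; later calls are plain map reads. It assumes a reachable HBase/ZooKeeper as configured in FlowWriteConfig:

import com.zdjizhi.utils.hbase.HBaseUtils;

public class AccountLookupSketch {
    public static void main(String[] args) {
        // the IP here is illustrative
        String account = HBaseUtils.getAccount("10.3.60.21");
        System.out.println(account == null ? "IP not in subscriber_info" : account);
    }
}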


@@ -1,77 +0,0 @@
package com.zdjizhi.utils.http;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
/**
* Utility for fetching the gateway schema
*
* @author qidaijie
*/
public class HttpClientUtil {
private static final Log logger = LogFactory.get();
/**
* Request the schema from the gateway
*
* @param http gateway url
* @return schema
*/
public static String requestByGetMethod(String http) {
CloseableHttpClient httpClient = HttpClients.createDefault();
StringBuilder entityStringBuilder;
HttpGet get = new HttpGet(http);
BufferedReader bufferedReader = null;
CloseableHttpResponse httpResponse = null;
try {
httpResponse = httpClient.execute(get);
HttpEntity entity = httpResponse.getEntity();
entityStringBuilder = new StringBuilder();
if (null != entity) {
bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
int intC;
while ((intC = bufferedReader.read()) != -1) {
char c = (char) intC;
if (c == '\n') {
break;
}
entityStringBuilder.append(c);
}
return entityStringBuilder.toString();
}
} catch (IOException e) {
logger.error("Get Schema from Query engine ERROR! Exception message is:" + e);
} finally {
if (httpClient != null) {
try {
httpClient.close();
} catch (IOException e) {
logger.error("Close HTTP Client ERROR! Exception messgae is:" + e);
}
}
if (httpResponse != null) {
try {
httpResponse.close();
} catch (IOException e) {
logger.error("Close httpResponse ERROR! Exception messgae is:" + e);
}
}
if (bufferedReader != null) {
IOUtils.closeQuietly(bufferedReader);
}
}
return "";
}
}
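A usage sketch; the URL is the sample value from service_flow_config.properties later in this diff, not a guaranteed live endpoint. Note the method returns only the first line of the response body, and "" on any failure:

import com.zdjizhi.utils.http.HttpClientUtil;

public class SchemaFetchSketch {
    public static void main(String[] args) {
        String schema = HttpClientUtil.requestByGetMethod(
                "http://10.3.60.3:9999/metadata/schema/v1/fields/proxy_event");
        System.out.println(schema.isEmpty() ? "gateway unreachable" : schema);
    }
}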


@@ -1,283 +0,0 @@
package com.zdjizhi.utils.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.jayway.jsonpath.JsonPath;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.http.HttpClientUtil;
import net.sf.cglib.beans.BeanGenerator;
import net.sf.cglib.beans.BeanMap;
import java.util.*;
/**
* Utility for parsing json with FastJson
*
* @author qidaijie
*/
public class JsonParseUtil {
private static final Log logger = LogFactory.get();
private static ArrayList<String> dropList = new ArrayList<>();
/**
* Pattern match: given a type string, return the corresponding class
*
* @param type type
* @return class type
*/
private static Class getClassName(String type) {
Class clazz;
switch (type) {
case "int":
clazz = Integer.class;
break;
case "string":
clazz = String.class;
break;
case "long":
clazz = long.class;
break;
case "array":
clazz = List.class;
break;
case "double":
clazz = double.class;
break;
case "float":
clazz = float.class;
break;
case "char":
clazz = char.class;
break;
case "byte":
clazz = byte.class;
break;
case "boolean":
clazz = boolean.class;
break;
case "short":
clazz = short.class;
break;
default:
clazz = String.class;
}
return clazz;
}
/**
* Get a property value
*
* @param obj object
* @param property key
* @return property value
*/
public static Object getValue(Object obj, String property) {
try {
BeanMap beanMap = BeanMap.create(obj);
return beanMap.get(property);
} catch (RuntimeException e) {
logger.error("获取json-value异常异常key" + property + "异常信息为:" + e);
return null;
}
}
/**
* Get a property value
*
* @param jsonMap raw log
* @param property key
* @return property value
*/
public static Object getValue(Map<String, Object> jsonMap, String property) {
try {
return jsonMap.getOrDefault(property, null);
} catch (RuntimeException e) {
logger.error("获取json-value异常异常key" + property + "异常信息为:" + e);
return null;
}
}
/**
* Update a property value
*
* @param jsonMap raw log json map
* @param property key to update
* @param value new value
*/
public static void setValue(Map<String, Object> jsonMap, String property, Object value) {
try {
jsonMap.put(property, value);
} catch (RuntimeException e) {
logger.error("赋予实体类错误类型数据", e);
}
}
/**
* Update a property value
*
* @param obj object
* @param property key to update
* @param value new value
*/
public static void setValue(Object obj, String property, Object value) {
try {
BeanMap beanMap = BeanMap.create(obj);
beanMap.put(property, value);
} catch (ClassCastException e) {
logger.error("赋予实体类错误类型数据", e);
}
}
/**
* Generate an object via reflection
*
* @param properties map used to build the reflected class
* @return the generated Object
*/
public static Object generateObject(Map properties) {
BeanGenerator generator = new BeanGenerator();
Set keySet = properties.keySet();
for (Object aKeySet : keySet) {
String key = (String) aKeySet;
generator.addProperty(key, (Class) properties.get(key));
}
return generator.create();
}
/**
* Fetch the gateway schema from the given URL and build the map used to generate an Object of the schema type
*
* @param http gateway schema url
* @return a map used to reflectively generate objects of the schema type
*/
public static HashMap<String, Class> getMapFromHttp(String http) {
HashMap<String, Class> map = new HashMap<>(16);
String schema = HttpClientUtil.requestByGetMethod(http);
Object data = JSON.parseObject(schema).get("data");
//extract fields as an array; each element holds a name, doc and type
JSONObject schemaJson = JSON.parseObject(data.toString());
JSONArray fields = (JSONArray) schemaJson.get("fields");
for (Object field : fields) {
String filedStr = field.toString();
if (checkKeepField(filedStr)) {
String name = JsonPath.read(filedStr, "$.name").toString();
String type = JsonPath.read(filedStr, "$.type").toString();
if (type.contains("{")) {
type = JsonPath.read(filedStr, "$.type.type").toString();
}
//assemble the map used to generate the entity class
map.put(name, getClassName(type));
} else {
dropList.add(filedStr);
}
}
return map;
}
/**
* Decide whether a field should be kept
*
* @param message a single field json
* @return true or false
*/
private static boolean checkKeepField(String message) {
boolean isKeepField = true;
boolean isHiveDoc = JSON.parseObject(message).containsKey("doc");
if (isHiveDoc) {
boolean isHiveVi = JsonPath.read(message, "$.doc").toString().contains("visibility");
if (isHiveVi) {
String visibility = JsonPath.read(message, "$.doc.visibility").toString();
if (FlowWriteConfig.VISIBILITY.equals(visibility)) {
isKeepField = false;
}
}
}
return isKeepField;
}
static void dropJsonField(Map<String, Object> jsonMap) {
for (String field : dropList) {
jsonMap.remove(field);
}
}
/**
* Fetch the schema from the http url and return a job list after parsing (useList toList funcList paramList)
*
* @param http gateway url
* @return job list
*/
public static ArrayList<String[]> getJobListFromHttp(String http) {
ArrayList<String[]> list = new ArrayList<>();
String schema = HttpClientUtil.requestByGetMethod(http);
//parse data
Object data = JSON.parseObject(schema).get("data");
//extract fields as an array; each element holds a name, doc and type
JSONObject schemaJson = JSON.parseObject(data.toString());
JSONArray fields = (JSONArray) schemaJson.get("fields");
for (Object field : fields) {
if (JSON.parseObject(field.toString()).containsKey("doc")) {
Object doc = JSON.parseObject(field.toString()).get("doc");
if (JSON.parseObject(doc.toString()).containsKey("format")) {
String name = JSON.parseObject(field.toString()).get("name").toString();
Object format = JSON.parseObject(doc.toString()).get("format");
JSONObject formatObject = JSON.parseObject(format.toString());
String functions = formatObject.get("functions").toString();
String appendTo = null;
String params = null;
if (formatObject.containsKey("appendTo")) {
appendTo = formatObject.get("appendTo").toString();
}
if (formatObject.containsKey("param")) {
params = formatObject.get("param").toString();
}
if (StringUtil.isNotBlank(appendTo) && StringUtil.isBlank(params)) {
String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
for (int i = 0; i < functionArray.length; i++) {
list.add(new String[]{name, appendToArray[i], functionArray[i], null});
}
} else if (StringUtil.isNotBlank(appendTo) && StringUtil.isNotBlank(params)) {
String[] functionArray = functions.split(FlowWriteConfig.FORMAT_SPLITTER);
String[] appendToArray = appendTo.split(FlowWriteConfig.FORMAT_SPLITTER);
String[] paramArray = params.split(FlowWriteConfig.FORMAT_SPLITTER);
for (int i = 0; i < functionArray.length; i++) {
list.add(new String[]{name, appendToArray[i], functionArray[i], paramArray[i]});
}
} else {
list.add(new String[]{name, name, functions, params});
}
}
}
}
return list;
}
}
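A sketch of the job-list shape getJobListFromHttp produces: each entry is {sourceField, appendToField, functionName, param}, and a field whose doc carries functions but no appendTo maps onto itself. It assumes the gateway schema endpoint is reachable:

import java.util.ArrayList;

import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.json.JsonParseUtil;

public class JobListSketch {
    public static void main(String[] args) {
        ArrayList<String[]> jobs = JsonParseUtil.getJobListFromHttp(FlowWriteConfig.SCHEMA_HTTP);
        for (String[] job : jobs) {
            System.out.printf("%s -> %s via %s(param=%s)%n", job[0], job[1], job[2], job[3]);
        }
    }
}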


@@ -1,187 +0,0 @@
package com.zdjizhi.utils.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.exception.FlowWriteException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author qidaijie
* @Package PACKAGE_NAME
* @Description:
* @date 2021/7/12 17:34
*/
public class JsonTypeUtils {
private static final Log logger = LogFactory.get();
/**
* Map used to build the reflected class, loaded in memory
*/
private static HashMap<String, Class> map = JsonParseUtil.getMapFromHttp(FlowWriteConfig.SCHEMA_HTTP);
/**
* Type conversion
*
* @param jsonMap raw log map
*/
public static Map<String, Object> typeTransform(Map<String, Object> jsonMap) throws RuntimeException {
JsonParseUtil.dropJsonField(jsonMap);
HashMap<String, Object> tmpMap = new HashMap<>(192);
for (String key : jsonMap.keySet()) {
if (map.containsKey(key)) {
String simpleName = map.get(key).getSimpleName();
switch (simpleName) {
case "String":
tmpMap.put(key, checkString(jsonMap.get(key)));
break;
case "Integer":
tmpMap.put(key, getIntValue(jsonMap.get(key)));
break;
case "long":
tmpMap.put(key, checkLongValue(jsonMap.get(key)));
break;
case "List":
tmpMap.put(key, checkArray(jsonMap.get(key)));
break;
case "Map":
tmpMap.put(key, checkObject(jsonMap.get(key)));
break;
case "double":
tmpMap.put(key, checkDouble(jsonMap.get(key)));
break;
default:
tmpMap.put(key, checkString(jsonMap.get(key)));
}
}
}
return tmpMap;
}
/**
* String type check/convert method
*
* @param value json value
* @return String value
*/
private static String checkString(Object value) {
if (value == null) {
return null;
}
if (value instanceof Map){
return JsonMapper.toJsonString(value);
}
if (value instanceof List){
return JsonMapper.toJsonString(value);
}
return value.toString();
}
/**
* Map type check/convert method
*
* @param value json value
* @return Map value
*/
private static Map checkObject(Object value) {
if (value == null) {
return null;
}
if (value instanceof Map) {
return (Map) value;
}
throw new FlowWriteException("can not cast to map, value : " + value);
}
/**
* Array type check/convert method
*
* @param value json value
* @return List value
*/
private static List checkArray(Object value) {
if (value == null) {
return null;
}
if (value instanceof List) {
return (List) value;
}
throw new FlowWriteException("can not cast to List, value : " + value);
}
private static Long checkLong(Object value) {
if (value == null) {
return null;
}
return TypeUtils.castToLong(value);
}
/**
* long type check/convert method; returns the primitive default when null
*
* @param value json value
* @return long value
*/
private static long checkLongValue(Object value) {
Long longVal = TypeUtils.castToLong(value);
if (longVal == null) {
return 0L;
}
// return longVal.longValue();
return longVal;
}
/**
* Double type check/convert method
*
* @param value json value
* @return Double value
*/
private static Double checkDouble(Object value) {
if (value == null) {
return null;
}
return TypeUtils.castToDouble(value);
}
private static Integer checkInt(Object value) {
if (value == null) {
return null;
}
return TypeUtils.castToInt(value);
}
/**
* int type check/convert method; returns the primitive default when null
*
* @param value json value
* @return int value
*/
private static int getIntValue(Object value) {
Integer intVal = TypeUtils.castToInt(value);
if (intVal == null) {
return 0;
}
// return intVal.intValue();
return intVal;
}
}
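A usage sketch (hypothetical field names): keys absent from the schema map are dropped, and each kept value is coerced to its schema type. Because the schema map is loaded in a static initializer, this assumes the gateway is reachable:

import java.util.HashMap;
import java.util.Map;

import com.zdjizhi.utils.json.JsonTypeUtils;

public class TypeTransformSketch {
    public static void main(String[] args) {
        Map<String, Object> raw = new HashMap<>();
        raw.put("log_id", "42");       // if the schema declares long, this becomes 42L
        raw.put("not_in_schema", "x"); // dropped: typeTransform keeps only schema keys
        Map<String, Object> typed = JsonTypeUtils.typeTransform(raw);
        System.out.println(typed);
    }
}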


@@ -1,171 +0,0 @@
package com.zdjizhi.utils.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.exception.FlowWriteException;
/**
* @author qidaijie
* @Package PACKAGE_NAME
* @Description:
* @date 2021/7/12 18:20
*/
public class TypeUtils {
private static final Log logger = LogFactory.get();
/**
* Type check for the IF function: passes String/Integer/Long through, maps Boolean to 1/0
*
* @param value json value
* @return typed value or null
*/
public static Object castToIfFunction(Object value) {
if (value == null) {
return null;
}
if (value instanceof String) {
return value.toString();
}
if (value instanceof Integer) {
return ((Number) value).intValue();
}
if (value instanceof Long) {
return ((Number) value).longValue();
}
// if (value instanceof Map) {
// return (Map) value;
// }
//
// if (value instanceof List) {
// return Collections.singletonList(value.toString());
// }
if (value instanceof Boolean) {
return (Boolean) value ? 1 : 0;
}
throw new FlowWriteException("can not cast to int, value : " + value);
}
/**
* Integer type check method
*
* @param value json value
* @return Integer value or null
*/
static Integer castToInt(Object value) {
if (value == null) {
return null;
}
if (value instanceof Integer) {
return (Integer) value;
}
if (value instanceof Number) {
return ((Number) value).intValue();
}
if (value instanceof String) {
String strVal = (String) value;
if (StringUtil.isBlank(strVal)) {
return null;
}
//for values like "10,20", keep only the leading "10"
if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
}
try {
return Integer.parseInt(strVal);
} catch (NumberFormatException ex) {
logger.error("String change Integer Error,The error Str is:" + strVal);
}
}
if (value instanceof Boolean) {
return (Boolean) value ? 1 : 0;
}
throw new FlowWriteException("can not cast to int, value : " + value);
}
/**
* Double type check method
*
* @param value json value
* @return double value or null
*/
static Double castToDouble(Object value) {
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
if (value instanceof String) {
String strVal = (String) value;
if (StringUtil.isBlank(strVal)) {
return null;
}
//for values like "10,20", keep only the leading "10"
if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
}
try {
return Double.parseDouble(strVal);
} catch (NumberFormatException ex) {
logger.error("String change Double Error,The error Str is:" + strVal);
}
}
throw new FlowWriteException("can not cast to double, value : " + value);
}
/**
* Long type check method
*
* @param value json value
* @return (Long)value or null
*/
static Long castToLong(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof String) {
String strVal = (String) value;
if (StringUtil.isBlank(strVal)) {
return null;
}
//for values like "10,20", keep only the leading "10"
if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
}
try {
return Long.parseLong(strVal);
} catch (NumberFormatException ex) {
logger.error("String change Long Error,The error Str is:" + strVal);
}
}
throw new FlowWriteException("can not cast to long, value : " + value);
}
}
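The "10,20" handling means multi-valued strings collapse to their first element before parsing. A sketch, assuming FORMAT_SPLITTER is "," (a config value not shown here); it sits in the same package because these methods are package-private:

package com.zdjizhi.utils.json;

public class CastSketch {
    public static void main(String[] args) {
        System.out.println(TypeUtils.castToInt("10,20"));  // 10
        System.out.println(TypeUtils.castToLong("10,20")); // 10
        System.out.println(TypeUtils.castToInt(true));     // 1 (Boolean maps to 1/0)
    }
}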


@@ -1,36 +0,0 @@
package com.zdjizhi.utils.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.kafka
* @Description:
* @date 2021/9/6 10:37
*/
class CertUtils {
static void chooseCert(String type, Properties properties) {
switch (type) {
case "SSL":
properties.put("security.protocol", "SSL");
properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
properties.put("ssl.keystore.location", FlowWriteConfig.TOOLS_LIBRARY + "keystore.jks");
properties.put("ssl.keystore.password", FlowWriteConfig.KAFKA_PIN);
properties.put("ssl.truststore.location", FlowWriteConfig.TOOLS_LIBRARY + "truststore.jks");
properties.put("ssl.truststore.password", FlowWriteConfig.KAFKA_PIN);
properties.put("ssl.key.password", FlowWriteConfig.KAFKA_PIN);
break;
case "SASL":
properties.put("security.protocol", "SASL_PLAINTEXT");
properties.put("sasl.mechanism", "PLAIN");
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username="
+ FlowWriteConfig.KAFKA_USER + " password=" + FlowWriteConfig.KAFKA_PIN + ";");
break;
default:
}
}
}
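Sketch of the effect: "SSL" fills in the keystore/truststore settings from FlowWriteConfig, while any other value (including the empty kafka.sink.protocol default) leaves the properties untouched. Same-package because CertUtils is package-private:

package com.zdjizhi.utils.kafka;

import java.util.Properties;

public class CertSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        CertUtils.chooseCert("SSL", props);
        System.out.println(props.getProperty("security.protocol")); // SSL
    }
}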


@@ -1,43 +0,0 @@
package com.zdjizhi.utils.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.kafka
* @Description:
* @date 2021/6/8 13:54
*/
public class Consumer {
private static Properties createConsumerConfig() {
Properties properties = new Properties();
properties.put("bootstrap.servers", FlowWriteConfig.INPUT_KAFKA_SERVERS);
properties.put("group.id", FlowWriteConfig.GROUP_ID);
properties.put("session.timeout.ms", "60000");
properties.put("max.poll.records", "3000");
properties.put("max.partition.fetch.bytes", "31457280");
properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
CertUtils.chooseCert(FlowWriteConfig.KAFKA_SOURCE_PROTOCOL,properties);
return properties;
}
public static FlinkKafkaConsumer<String> getKafkaConsumer() {
FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(FlowWriteConfig.INPUT_KAFKA_TOPIC,
new SimpleStringSchema(), createConsumerConfig());
kafkaConsumer.setCommitOffsetsOnCheckpoints(false);
kafkaConsumer.setStartFromGroupOffsets();
return kafkaConsumer;
}
}
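A wiring sketch for the source side (standard Flink job scaffolding, assumed rather than taken from this diff):

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.zdjizhi.utils.kafka.Consumer;

public class SourceSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> logs = env.addSource(Consumer.getKafkaConsumer());
        logs.print();
        env.execute("consumer-sketch");
    }
}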


@@ -1,79 +0,0 @@
package com.zdjizhi.utils.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Optional;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.kafka
* @Description:
* @date 2021/6/8 14:04
*/
public class Producer {
private static Properties createPercentProducerConfig() {
Properties properties = new Properties();
properties.put("bootstrap.servers", FlowWriteConfig.OUTPUT_KAFKA_SERVERS);
properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
properties.put("retries", FlowWriteConfig.RETRIES);
properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
CertUtils.chooseCert(FlowWriteConfig.KAFKA_SINK_PROTOCOL, properties);
return properties;
}
private static Properties createProducerConfig() {
Properties properties = new Properties();
properties.put("bootstrap.servers", FlowWriteConfig.INPUT_KAFKA_SERVERS);
properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
properties.put("retries", FlowWriteConfig.RETRIES);
properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
CertUtils.chooseCert(FlowWriteConfig.KAFKA_SOURCE_PROTOCOL,properties);
return properties;
}
public static FlinkKafkaProducer<String> getPercentKafkaProducer() {
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
FlowWriteConfig.PERCENT_KAFKA_TOPIC,
new SimpleStringSchema(),
createPercentProducerConfig(), Optional.empty());
kafkaProducer.setLogFailuresOnly(false);
// kafkaProducer.setWriteTimestampToKafka(true);
return kafkaProducer;
}
public static FlinkKafkaProducer<String> getKafkaProducer() {
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
FlowWriteConfig.OUTPUT_KAFKA_TOPIC,
new SimpleStringSchema(),
createProducerConfig(), Optional.empty());
kafkaProducer.setLogFailuresOnly(false);
// kafkaProducer.setWriteTimestampToKafka(true);
return kafkaProducer;
}
}
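The two producers implement the double write the project is named for. A pipeline sketch under the same assumptions as the consumer sketch above; the job name is illustrative:

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import com.zdjizhi.utils.kafka.Consumer;
import com.zdjizhi.utils.kafka.Producer;

public class DoubleWriteSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> logs = env.addSource(Consumer.getKafkaConsumer());
        logs.addSink(Producer.getKafkaProducer());        // completed logs -> OUTPUT_KAFKA_TOPIC
        logs.addSink(Producer.getPercentKafkaProducer()); // copy -> PERCENT_KAFKA_TOPIC
        env.execute("log-completion-doublewrite-sketch");
    }
}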


@@ -1,64 +0,0 @@
package com.zdjizhi.utils.ordinary;
import org.apache.log4j.Logger;
import java.security.MessageDigest;
/**
* Description: MD5 conversion utility
*
* @author Administrator
* @create 2018-08-13 15:11
*/
public class MD5Utils {
private static Logger logger = Logger.getLogger(MD5Utils.class);
public static String md5Encode(String msg) throws Exception {
try {
byte[] msgBytes = msg.getBytes("utf-8");
/*
* Use the MD5 algorithm to obtain a MessageDigest instance
*/
MessageDigest md5 = MessageDigest.getInstance("MD5");
/*
* Update the digest with the given bytes
*/
md5.update(msgBytes);
/*
* Finish the hash computation to obtain the digest
*/
byte[] digest = md5.digest();
/*
* The two lines above are equivalent to
* byte[] digest = md5.digest(msgBytes);
*/
return byteArr2hexString(digest);
} catch (Exception e) {
logger.error("Error in conversion MD5! " + msg);
// e.printStackTrace();
return "";
}
}
/**
* Convert a byte array to its hex string form
*
* @param bys byte array
* @return string
*/
public static String byteArr2hexString(byte[] bys) {
StringBuffer hexVal = new StringBuffer();
int val = 0;
for (byte by : bys) {
//convert the byte to int; a negative byte must be ANDed with 0xff
val = ((int) by) & 0xff;
if (val < 16) {
hexVal.append("0");
}
hexVal.append(Integer.toHexString(val));
}
return hexVal.toString();
}
}
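A quick sanity check against the well-known MD5 test vector:

import com.zdjizhi.utils.ordinary.MD5Utils;

public class Md5Sketch {
    public static void main(String[] args) throws Exception {
        // MD5("abc") = 900150983cd24fb0d6963f7d28e17f72
        System.out.println(MD5Utils.md5Encode("abc"));
    }
}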


@@ -1,70 +0,0 @@
package com.zdjizhi.utils.system;
import com.zdjizhi.utils.StringUtil;
import java.io.IOException;
import java.util.Locale;
import java.util.Properties;
/**
* @author Administrator
*/
public final class FlowWriteConfigurations {
private static Properties propKafka = new Properties();
private static Properties propService = new Properties();
public static String getStringProperty(Integer type, String key) {
if (type == 0) {
return propService.getProperty(key);
} else if (type == 1) {
return propKafka.getProperty(key);
} else {
return null;
}
}
public static Integer getIntProperty(Integer type, String key) {
if (type == 0) {
return Integer.parseInt(propService.getProperty(key));
} else if (type == 1) {
return Integer.parseInt(propKafka.getProperty(key));
} else {
return null;
}
}
public static Long getLongProperty(Integer type, String key) {
if (type == 0) {
return Long.parseLong(propService.getProperty(key));
} else if (type == 1) {
return Long.parseLong(propKafka.getProperty(key));
} else {
return null;
}
}
public static Boolean getBooleanProperty(Integer type, String key) {
if (type == 0) {
return StringUtil.equals(propService.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
} else if (type == 1) {
return StringUtil.equals(propKafka.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
} else {
return null;
}
}
static {
try {
propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("default_config.properties"));
} catch (IOException | RuntimeException e) {
propKafka = null;
propService = null;
}
}
}
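A usage sketch: type 0 reads service_flow_config.properties and type 1 reads default_config.properties; the key names come from the property files later in this diff:

import com.zdjizhi.utils.system.FlowWriteConfigurations;

public class ConfigSketch {
    public static void main(String[] args) {
        String groupId = FlowWriteConfigurations.getStringProperty(0, "group.id");
        Integer retries = FlowWriteConfigurations.getIntProperty(1, "retries");
        System.out.println(groupId + " / " + retries);
    }
}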


@@ -1,190 +0,0 @@
package com.zdjizhi.utils.zookeeper;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
/**
* @author qidaijie
*/
public class DistributedLock implements Lock, Watcher {
private static final Log logger = LogFactory.get();
private ZooKeeper zk = null;
/**
* Root node
*/
private final String ROOT_LOCK = "/locks";
/**
* Resource being contended for
*/
private String lockName;
/**
* The previous lock to wait on
*/
private String waitLock;
/**
* Current lock
*/
private String currentLock;
/**
* Countdown latch
*/
private CountDownLatch countDownLatch;
private int sessionTimeout = 2000;
private List<Exception> exceptionList = new ArrayList<Exception>();
/**
* Configure the distributed lock
*
* @param config connection url
* @param lockName contended resource
*/
public DistributedLock(String config, String lockName) {
this.lockName = lockName;
try {
// connect to zookeeper
zk = new ZooKeeper(config, sessionTimeout, this);
Stat stat = zk.exists(ROOT_LOCK, false);
if (stat == null) {
// create the root node if it does not exist
zk.create(ROOT_LOCK, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
} catch (IOException | InterruptedException | KeeperException e) {
logger.error("Node already exists!");
}
}
// node watcher
@Override
public void process(WatchedEvent event) {
if (this.countDownLatch != null) {
this.countDownLatch.countDown();
}
}
@Override
public void lock() {
if (exceptionList.size() > 0) {
throw new LockException(exceptionList.get(0));
}
try {
if (this.tryLock()) {
logger.info(Thread.currentThread().getName() + " " + lockName + " acquired the lock");
} else {
// wait for the lock
waitForLock(waitLock, sessionTimeout);
}
} catch (InterruptedException | KeeperException e) {
logger.error("获取锁异常" + e);
}
}
@Override
public boolean tryLock() {
try {
String splitStr = "_lock_";
if (lockName.contains(splitStr)) {
throw new LockException("Invalid lock name");
}
// create an ephemeral sequential node
currentLock = zk.create(ROOT_LOCK + "/" + lockName + splitStr, new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
// fetch all child nodes
List<String> subNodes = zk.getChildren(ROOT_LOCK, false);
// collect all locks for this lockName
List<String> lockObjects = new ArrayList<String>();
for (String node : subNodes) {
String tmpNode = node.split(splitStr)[0];
if (tmpNode.equals(lockName)) {
lockObjects.add(node);
}
}
Collections.sort(lockObjects);
// if the current node is the smallest, the lock is acquired
if (currentLock.equals(ROOT_LOCK + "/" + lockObjects.get(0))) {
return true;
}
// otherwise find the node immediately before ours
String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
} catch (InterruptedException | KeeperException e) {
logger.error("获取锁过程异常" + e);
}
return false;
}
@Override
public boolean tryLock(long timeout, TimeUnit unit) {
try {
if (this.tryLock()) {
return true;
}
return waitForLock(waitLock, timeout);
} catch (KeeperException | InterruptedException | RuntimeException e) {
logger.error("判断是否锁定异常" + e);
}
return false;
}
// wait for the lock
private boolean waitForLock(String prev, long waitTime) throws KeeperException, InterruptedException {
Stat stat = zk.exists(ROOT_LOCK + "/" + prev, true);
if (stat != null) {
this.countDownLatch = new CountDownLatch(1);
// wait on the latch; when the previous node disappears, process() counts down and we stop waiting and take the lock
this.countDownLatch.await(waitTime, TimeUnit.MILLISECONDS);
this.countDownLatch = null;
}
return true;
}
@Override
public void unlock() {
try {
zk.delete(currentLock, -1);
currentLock = null;
zk.close();
} catch (InterruptedException | KeeperException e) {
logger.error("关闭锁异常" + e);
}
}
@Override
public Condition newCondition() {
return null;
}
@Override
public void lockInterruptibly() throws InterruptedException {
this.lock();
}
public class LockException extends RuntimeException {
private static final long serialVersionUID = 1L;
public LockException(String e) {
super(e);
}
public LockException(Exception e) {
super(e);
}
}
}
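A usage sketch; note that unlock() deletes the node and closes the ZooKeeper session, so each DistributedLock instance is single-use. Address and lock name are illustrative:

import com.zdjizhi.utils.zookeeper.DistributedLock;

public class LockSketch {
    public static void main(String[] args) {
        DistributedLock lock = new DistributedLock("10.3.60.3:2181", "worker_id");
        lock.lock();
        try {
            // critical section, e.g. claiming a snowflake worker id
        } finally {
            lock.unlock();
        }
    }
}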


@@ -1,140 +0,0 @@
package com.zdjizhi.utils.zookeeper;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
/**
* @author qidaijie
* @Package cn.ac.iie.utils.zookeeper
* @Description:
* @date 2020/11/14 11:28
*/
public class ZookeeperUtils implements Watcher {
private static final Log logger = LogFactory.get();
private static final int ID_MAX = 255;
private ZooKeeper zookeeper;
private static final int SESSION_TIME_OUT = 20000;
private CountDownLatch countDownLatch = new CountDownLatch(1);
@Override
public void process(WatchedEvent event) {
if (event.getState() == Event.KeeperState.SyncConnected) {
countDownLatch.countDown();
}
}
/**
* Modify node data
*
* @param path node path
*/
public int modifyNode(String path, String zookeeperIp) {
createNode(path, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, zookeeperIp);
int workerId = 0;
try {
connectZookeeper(zookeeperIp);
Stat stat = zookeeper.exists(path, true);
workerId = Integer.parseInt(getNodeDate(path));
if (workerId > ID_MAX) {
workerId = 0;
zookeeper.setData(path, "1".getBytes(), stat.getVersion());
} else {
String result = String.valueOf(workerId + 1);
if (stat != null) {
zookeeper.setData(path, result.getBytes(), stat.getVersion());
} else {
logger.error("Node does not exist!,Can't modify");
}
}
} catch (KeeperException | InterruptedException e) {
logger.error("modify error Can't modify," + e);
} finally {
closeConn();
}
logger.warn("workerID is" + workerId);
return workerId;
}
/**
* Connect to zookeeper
*
* @param host address
*/
public void connectZookeeper(String host) {
try {
zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await();
} catch (IOException | InterruptedException e) {
logger.error("Connection to the Zookeeper Exception! message:" + e);
}
}
/**
* Close the connection
*/
public void closeConn() {
try {
if (zookeeper != null) {
zookeeper.close();
}
} catch (InterruptedException e) {
logger.error("Close the Zookeeper connection Exception! message:" + e);
}
}
/**
* Get node data
*
* @param path node path
* @return data, or null on failure
*/
public String getNodeDate(String path) {
String result = null;
Stat stat = new Stat();
try {
byte[] resByte = zookeeper.getData(path, true, stat);
result = StrUtil.str(resByte, "UTF-8");
} catch (KeeperException | InterruptedException e) {
logger.error("Get node information exception" + e);
}
return result;
}
/**
* @param path path of the node to create
* @param date byte[] data stored on the node
* @param acls access-control policy
*/
public void createNode(String path, byte[] date, List<ACL> acls, String zookeeperIp) {
try {
connectZookeeper(zookeeperIp);
Stat exists = zookeeper.exists(path, true);
if (exists == null) {
Stat existsSnowflakeId = zookeeper.exists("/Snowflake", true);
if (existsSnowflakeId == null) {
zookeeper.create("/Snowflake", null, acls, CreateMode.PERSISTENT);
}
zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
} else {
logger.warn("Node already exists ! Don't need to create");
}
} catch (KeeperException | InterruptedException e) {
logger.error(e);
} finally {
closeConn();
}
}
}
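A usage sketch: modifyNode creates the node (and /Snowflake) if needed, reads the counter, writes the incremented value (wrapping after 255), and returns the value it read, which callers can use as a worker id. Path and address are illustrative:

import com.zdjizhi.utils.zookeeper.ZookeeperUtils;

public class WorkerIdSketch {
    public static void main(String[] args) {
        ZookeeperUtils zk = new ZookeeperUtils();
        int workerId = zk.modifyNode("/Snowflake/log-completion", "10.3.60.3:2181");
        System.out.println("workerId = " + workerId);
    }
}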


@@ -1,25 +0,0 @@
#Log4j
log4j.rootLogger=info,console,file
# Console log settings
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.Threshold=info
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] [Thread\:%t] %l %x - <%m>%n
# File log settings
log4j.appender.file=org.apache.log4j.DailyRollingFileAppender
log4j.appender.file.Threshold=info
log4j.appender.file.encoding=UTF-8
log4j.appender.file.Append=true
#Use a relative path and verify that output lands under the application directory
log4j.appender.file.file=${nis.root}/log/galaxy-name.log
log4j.appender.file.DatePattern='.'yyyy-MM-dd
log4j.appender.file.layout=org.apache.log4j.PatternLayout
#log4j.appender.file.layout.ConversionPattern=%d{HH:mm:ss} %X{ip} [%t] %5p %c{1} %m%n
log4j.appender.file.layout.ConversionPattern=[%d{yyyy-MM-dd HH\:mm\:ss}] [%-5p] %X{ip} [Thread\:%t] %l %x - %m%n
#MyBatis config: com.nis.web.dao is the package holding the mybatis interfaces
log4j.logger.com.nis.web.dao=debug
#bonecp datasource config
log4j.category.com.jolbox=debug,console


@@ -1,42 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- 格式化输出:%date表示日期%thread表示线程名%-5level级别从左显示5个字符宽度 %msg日志消息%n是换行符-->
<property name="LOG_PATTERN" value="%date{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" />
<!-- 定义日志存储的路径,不要配置相对路径 -->
<property name="LOG_FILE_PATH" value="E:/logs/demo.%d{yyyy-MM-dd}.%i.log" />
<!-- 控制台输出日志 -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<!-- 按照上面配置的LOG_PATTERN来打印日志 -->
<pattern>${LOG_PATTERN}</pattern>
</encoder>
</appender>
<!--每天生成一个日志文件保存30天的日志文件。rollingFile是用来切分文件的 -->
<appender name="FILE"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_FILE_PATH}</fileNamePattern>
<!-- keep 15 days' worth of history -->
<maxHistory>30</maxHistory>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<!-- 日志文件的最大大小 -->
<maxFileSize>20MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<encoder>
<pattern>${LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- project default level项目输出的日志级别 -->
<logger name="com.example.demo" level="DEBUG" />
<!-- 日志输出级别 常用的日志级别按照从高到低依次为ERROR、WARN、INFO、DEBUG。 -->
<root level="INFO">
<appender-ref ref="CONSOLE" />
<appender-ref ref="FILE" /><!--对应appender name="FILE"。 -->
</root>
</configuration>


@@ -1,55 +0,0 @@
package com.zdjizhi;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi
* @Description:
* @date 2021/8/2 17:39
*/
public class KafkaTest {
private static final Log logger = LogFactory.get();
public static void main(String[] args) {
Properties properties = new Properties();
properties.put("bootstrap.servers", "192.168.44.12:9091");
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("acks", "1");
// properties.put("retries", DefaultProConfig.RETRIES);
// properties.put("linger.ms", DefaultProConfig.LINGER_MS);
// properties.put("request.timeout.ms", DefaultProConfig.REQUEST_TIMEOUT_MS);
// properties.put("batch.size", DefaultProConfig.BATCH_SIZE);
// properties.put("buffer.memory", DefaultProConfig.BUFFER_MEMORY);
// properties.put("max.request.size", DefaultProConfig.MAX_REQUEST_SIZE);
properties.put("security.protocol", "SSL");
// properties.put("ssl.keystore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\kafka\\client.keystore.jks");
properties.put("ssl.keystore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\keystore.jks");
properties.put("ssl.keystore.password", "galaxy2019");
// properties.put("ssl.truststore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\kafka\\client.truststore.jks");
properties.put("ssl.truststore.location", "D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\truststore.jks");
properties.put("ssl.truststore.password", "galaxy2019");
properties.put("ssl.key.password", "galaxy2019");
properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
Producer<String, String> producer = new KafkaProducer<String, String>(properties);
producer.send(new ProducerRecord<>("test", "hello!"), new Callback() {
@Override
public void onCompletion(RecordMetadata metadata, Exception exception) {
if (exception != null) {
logger.error("写入test出现异常", exception);
}
}
});
producer.close();
}
}


@@ -1,28 +0,0 @@
package com.zdjizhi;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.IpLookup;
import org.junit.Test;
/**
* @author qidaijie
* @Package com.zdjizhi
* @Description:
* @date 2021/8/18 11:34
*/
public class LocationTest {
private static IpLookup ipLookup = new IpLookup.Builder(false)
.loadDataFileV4("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_v4.mmdb")
.loadDataFileV6("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_v6.mmdb")
.loadDataFilePrivateV4("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_private_v4.mmdb")
.loadDataFilePrivateV6("D:\\K18-Phase2\\tsgSpace\\dat\\tsg\\ip_private_v6.mmdb")
.build();
@Test
public void IpLocationTest() {
System.out.println(ipLookup.cityLookupDetail("24.241.112.0"));
System.out.println(ipLookup.cityLookupDetail("1.1.1.1"));
System.out.println(ipLookup.cityLookupDetail("192.168.50.58"));
System.out.println(ipLookup.cityLookupDetail("2600:1700:9010::"));
}
}


@@ -1,19 +0,0 @@
package com.zdjizhi;
import com.zdjizhi.utils.StringUtil;
public class TestTime {
public static void main(String[] args) {
String s = null;
String rrr = rrr(s);
System.out.println(rrr);
}
public static String rrr(String url) {
if (StringUtil.isBlank(url)) {
return "it is blank";
} else {
return "it is not blank";
}
}
}


@@ -1,50 +0,0 @@
#Number of producer send retries
retries=0
#Linger time in ms: once a batch has waited this long it is sent even if not full
linger.ms=10
#If no response arrives before the timeout, the client resends the request when necessary
request.timeout.ms=30000
#Batch size in bytes for producer sends (default: 16384)
batch.size=262144
#Producer buffer memory for caching messages
#64M
#buffer.memory=67108864
#128M
buffer.memory=134217728
#Maximum size of a single request sent to Kafka (default: 1048576)
#5M
#max.request.size=5242880
#10M
max.request.size=10485760
#kafka SASL username
kafka.user=admin
#kafka SASL/SSL password
kafka.pin=galaxy2019
#kafka source connection timeout
session.timeout.ms=60000
#kafka source poll
max.poll.records=3000
#kafka source poll bytes
max.partition.fetch.bytes=31457280
#hbase table name
hbase.table.name=subscriber_info
#default mail charset
mail.default.charset=UTF-8
#kafka source protocol; SSL or SASL
kafka.source.protocol=SASL
#kafka sink protocol; SSL or SASL
kafka.sink.protocol=


@@ -1,72 +0,0 @@
#-------------------------------- Address configuration ------------------------------#
#management kafka servers
input.kafka.servers=10.3.60.3:9094
#management output kafka servers
output.kafka.servers=10.3.45.126:6667,10.3.45.127:6667,10.3.45.128:6667
#zookeeper servers, used to configure log_id
zookeeper.servers=10.3.60.3:2181
#hbase zookeeper servers, used to connect to HBase
hbase.zookeeper.servers=10.3.60.3:2181
#oos servers
oos.servers=10.3.45.124:8057
#-------------------------------- HTTP / geolocation library ------------------------------#
#geolocation library path
#tools.library=/opt/dat/
tools.library=D:/dingweiku/dat/
#gateway schema location
schema.http=http://10.3.60.3:9999/metadata/schema/v1/fields/proxy_event
#gateway APP_ID fetch endpoint
app.id.http=http://10.3.60.3:9999/open-api/appDicList
#-------------------------------- Kafka consumer group ------------------------------#
#kafka input topic
#input.kafka.topic=SESSION-RECORD
input.kafka.topic=PROXY-EVENT
#file metadata topic
output.kafka.topic=TRAFFIC-FILE-METADATA
percent.kafka.topic=PROXY-EVENT
#consumer group id; stores this consumer's offsets (name it after the topology) so the next read does not repeat data
group.id=session-record-log-20211018-A
#producer compression mode: none or snappy
producer.kafka.compression.type=none
#producer ack
producer.ack=1
#-------------------------------- Topology configuration ------------------------------#
#consumer parallelism
consumer.parallelism=1
#transform function parallelism
transform.parallelism=1
#data center id, range 0-63
data.center.id.num=0
#hbase cache refresh interval in seconds; 0 disables the refresh
hbase.tick.tuple.freq.secs=180
#app_id cache refresh interval in seconds; 0 disables the refresh
app.tick.tuple.freq.secs=0
#-------------------------------- Default values ------------------------------#
#default mail charset
mail.default.charset=UTF-8
#0: no completion, output the log as-is; 1: completion required
log.need.complete=1