commit a74a5b5a9825913140933968cdb355174a558890 Author: caohui Date: Fri Oct 9 16:21:21 2020 +0800 ZX Flume yb_flume_cus_sink_file Initial commit 202010091620 diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000..5aea0bb --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Datasource local storage ignored files +/../../../../../../../../../:\dev_code\testForFlume\flumeWork\YiBuDaGongCheng\multi-thread\for-lxk-git\yb_flume_cus_sink_file\.idea/dataSources/ +/dataSources.local.xml +# Editor-based HTTP Client requests +/httpRequests/ diff --git a/.idea/codeStyles/Project.xml b/.idea/codeStyles/Project.xml new file mode 100644 index 0000000..919ce1f --- /dev/null +++ b/.idea/codeStyles/Project.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/codeStyles/codeStyleConfig.xml b/.idea/codeStyles/codeStyleConfig.xml new file mode 100644 index 0000000..a55e7a1 --- /dev/null +++ b/.idea/codeStyles/codeStyleConfig.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/.idea/compiler.xml b/.idea/compiler.xml new file mode 100644 index 0000000..1e433aa --- /dev/null +++ b/.idea/compiler.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/encodings.xml b/.idea/encodings.xml new file mode 100644 index 0000000..c71a1bd --- /dev/null +++ b/.idea/encodings.xml @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/.idea/jarRepositories.xml b/.idea/jarRepositories.xml new file mode 100644 index 0000000..84881a3 --- /dev/null +++ b/.idea/jarRepositories.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_101tec_zkclient_0_10.xml b/.idea/libraries/Maven__com_101tec_zkclient_0_10.xml new file mode 100644 index 0000000..a2b08a2 --- /dev/null +++ b/.idea/libraries/Maven__com_101tec_zkclient_0_10.xml @@ -0,0 +1,13 @@ + + + + + + + + + + 
+ + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_alibaba_fastjson_1_2_47.xml b/.idea/libraries/Maven__com_alibaba_fastjson_1_2_47.xml new file mode 100644 index 0000000..dcd6ee5 --- /dev/null +++ b/.idea/libraries/Maven__com_alibaba_fastjson_1_2_47.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_annotations_2_9_5.xml b/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_annotations_2_9_5.xml new file mode 100644 index 0000000..89adb44 --- /dev/null +++ b/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_annotations_2_9_5.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_core_2_9_5.xml b/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_core_2_9_5.xml new file mode 100644 index 0000000..1205e0d --- /dev/null +++ b/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_core_2_9_5.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_9_1.xml b/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_9_1.xml new file mode 100644 index 0000000..c197771 --- /dev/null +++ b/.idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_9_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_google_code_gson_gson_2_2_2.xml b/.idea/libraries/Maven__com_google_code_gson_gson_2_2_2.xml new file mode 100644 index 0000000..b6113ec --- /dev/null +++ b/.idea/libraries/Maven__com_google_code_gson_gson_2_2_2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_google_guava_guava_18_0.xml b/.idea/libraries/Maven__com_google_guava_guava_18_0.xml new file mode 100644 index 0000000..bbd71d7 --- /dev/null 
+++ b/.idea/libraries/Maven__com_google_guava_guava_18_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_maxmind_db_maxmind_db_1_2_2.xml b/.idea/libraries/Maven__com_maxmind_db_maxmind_db_1_2_2.xml new file mode 100644 index 0000000..1342872 --- /dev/null +++ b/.idea/libraries/Maven__com_maxmind_db_maxmind_db_1_2_2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_maxmind_geoip2_geoip2_2_12_0.xml b/.idea/libraries/Maven__com_maxmind_geoip2_geoip2_2_12_0.xml new file mode 100644 index 0000000..1471229 --- /dev/null +++ b/.idea/libraries/Maven__com_maxmind_geoip2_geoip2_2_12_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_maxmind_geoip_geoip_api_1_3_1.xml b/.idea/libraries/Maven__com_maxmind_geoip_geoip_api_1_3_1.xml new file mode 100644 index 0000000..bd438ab --- /dev/null +++ b/.idea/libraries/Maven__com_maxmind_geoip_geoip_api_1_3_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_squareup_okhttp3_okhttp_3_9_0.xml b/.idea/libraries/Maven__com_squareup_okhttp3_okhttp_3_9_0.xml new file mode 100644 index 0000000..90e2b81 --- /dev/null +++ b/.idea/libraries/Maven__com_squareup_okhttp3_okhttp_3_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_squareup_okhttp_okhttp_2_4_0.xml b/.idea/libraries/Maven__com_squareup_okhttp_okhttp_2_4_0.xml new file mode 100644 index 0000000..be09ac3 --- /dev/null +++ b/.idea/libraries/Maven__com_squareup_okhttp_okhttp_2_4_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_squareup_okio_okio_1_13_0.xml b/.idea/libraries/Maven__com_squareup_okio_okio_1_13_0.xml new file mode 100644 index 0000000..cb99e2f --- /dev/null +++ 
b/.idea/libraries/Maven__com_squareup_okio_okio_1_13_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_squareup_retrofit_retrofit_1_9_0.xml b/.idea/libraries/Maven__com_squareup_retrofit_retrofit_1_9_0.xml new file mode 100644 index 0000000..86047ed --- /dev/null +++ b/.idea/libraries/Maven__com_squareup_retrofit_retrofit_1_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_thoughtworks_paranamer_paranamer_2_3.xml b/.idea/libraries/Maven__com_thoughtworks_paranamer_paranamer_2_3.xml new file mode 100644 index 0000000..3807eb2 --- /dev/null +++ b/.idea/libraries/Maven__com_thoughtworks_paranamer_paranamer_2_3.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_yammer_metrics_metrics_core_2_2_0.xml b/.idea/libraries/Maven__com_yammer_metrics_metrics_core_2_2_0.xml new file mode 100644 index 0000000..fc159c5 --- /dev/null +++ b/.idea/libraries/Maven__com_yammer_metrics_metrics_core_2_2_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__com_zdjizhi_galaxy_1_0_1.xml b/.idea/libraries/Maven__com_zdjizhi_galaxy_1_0_1.xml new file mode 100644 index 0000000..5ec91ab --- /dev/null +++ b/.idea/libraries/Maven__com_zdjizhi_galaxy_1_0_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__commons_cli_commons_cli_1_2.xml b/.idea/libraries/Maven__commons_cli_commons_cli_1_2.xml new file mode 100644 index 0000000..cec2493 --- /dev/null +++ b/.idea/libraries/Maven__commons_cli_commons_cli_1_2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__commons_codec_commons_codec_1_8.xml b/.idea/libraries/Maven__commons_codec_commons_codec_1_8.xml new file mode 100644 index 0000000..a63c271 --- /dev/null +++ 
b/.idea/libraries/Maven__commons_codec_commons_codec_1_8.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__commons_collections_commons_collections_3_2_1.xml b/.idea/libraries/Maven__commons_collections_commons_collections_3_2_1.xml new file mode 100644 index 0000000..3caee7e --- /dev/null +++ b/.idea/libraries/Maven__commons_collections_commons_collections_3_2_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__commons_io_commons_io_2_4.xml b/.idea/libraries/Maven__commons_io_commons_io_2_4.xml new file mode 100644 index 0000000..bc2aad0 --- /dev/null +++ b/.idea/libraries/Maven__commons_io_commons_io_2_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__commons_lang_commons_lang_2_6.xml b/.idea/libraries/Maven__commons_lang_commons_lang_2_6.xml new file mode 100644 index 0000000..2ec8376 --- /dev/null +++ b/.idea/libraries/Maven__commons_lang_commons_lang_2_6.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__commons_logging_commons_logging_1_2.xml b/.idea/libraries/Maven__commons_logging_commons_logging_1_2.xml new file mode 100644 index 0000000..eab40b3 --- /dev/null +++ b/.idea/libraries/Maven__commons_logging_commons_logging_1_2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__io_netty_netty_3_10_6_Final.xml b/.idea/libraries/Maven__io_netty_netty_3_10_6_Final.xml new file mode 100644 index 0000000..1bf9986 --- /dev/null +++ b/.idea/libraries/Maven__io_netty_netty_3_10_6_Final.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__javax_servlet_javax_servlet_api_3_1_0.xml b/.idea/libraries/Maven__javax_servlet_javax_servlet_api_3_1_0.xml new file mode 100644 index 0000000..c24f7e3 --- /dev/null +++ 
b/.idea/libraries/Maven__javax_servlet_javax_servlet_api_3_1_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__joda_time_joda_time_2_9_9.xml b/.idea/libraries/Maven__joda_time_joda_time_2_9_9.xml new file mode 100644 index 0000000..a468f58 --- /dev/null +++ b/.idea/libraries/Maven__joda_time_joda_time_2_9_9.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__junit_junit_3_8_1.xml b/.idea/libraries/Maven__junit_junit_3_8_1.xml new file mode 100644 index 0000000..71b2993 --- /dev/null +++ b/.idea/libraries/Maven__junit_junit_3_8_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__log4j_log4j_1_2_14.xml b/.idea/libraries/Maven__log4j_log4j_1_2_14.xml new file mode 100644 index 0000000..2825a67 --- /dev/null +++ b/.idea/libraries/Maven__log4j_log4j_1_2_14.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__net_sf_jopt_simple_jopt_simple_5_0_4.xml b/.idea/libraries/Maven__net_sf_jopt_simple_jopt_simple_5_0_4.xml new file mode 100644 index 0000000..1ef5173 --- /dev/null +++ b/.idea/libraries/Maven__net_sf_jopt_simple_jopt_simple_5_0_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_avro_avro_1_7_4.xml b/.idea/libraries/Maven__org_apache_avro_avro_1_7_4.xml new file mode 100644 index 0000000..cc03056 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_avro_avro_1_7_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_avro_avro_ipc_1_7_4.xml b/.idea/libraries/Maven__org_apache_avro_avro_ipc_1_7_4.xml new file mode 100644 index 0000000..05fdc11 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_avro_avro_ipc_1_7_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file 
diff --git a/.idea/libraries/Maven__org_apache_avro_avro_tools_1_7_4.xml b/.idea/libraries/Maven__org_apache_avro_avro_tools_1_7_4.xml new file mode 100644 index 0000000..3173eee --- /dev/null +++ b/.idea/libraries/Maven__org_apache_avro_avro_tools_1_7_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_commons_commons_compress_1_4_1.xml b/.idea/libraries/Maven__org_apache_commons_commons_compress_1_4_1.xml new file mode 100644 index 0000000..190209e --- /dev/null +++ b/.idea/libraries/Maven__org_apache_commons_commons_compress_1_4_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_commons_commons_lang3_3_4.xml b/.idea/libraries/Maven__org_apache_commons_commons_lang3_3_4.xml new file mode 100644 index 0000000..78cfcd3 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_commons_commons_lang3_3_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_flume_flume_ng_auth_1_9_0.xml b/.idea/libraries/Maven__org_apache_flume_flume_ng_auth_1_9_0.xml new file mode 100644 index 0000000..0a0a25b --- /dev/null +++ b/.idea/libraries/Maven__org_apache_flume_flume_ng_auth_1_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_flume_flume_ng_configfilters_flume_ng_config_filter_api_1_9_0.xml b/.idea/libraries/Maven__org_apache_flume_flume_ng_configfilters_flume_ng_config_filter_api_1_9_0.xml new file mode 100644 index 0000000..d10ad63 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_flume_flume_ng_configfilters_flume_ng_config_filter_api_1_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_flume_flume_ng_configuration_1_9_0.xml b/.idea/libraries/Maven__org_apache_flume_flume_ng_configuration_1_9_0.xml new file mode 
100644 index 0000000..0c84574 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_flume_flume_ng_configuration_1_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_flume_flume_ng_core_1_9_0.xml b/.idea/libraries/Maven__org_apache_flume_flume_ng_core_1_9_0.xml new file mode 100644 index 0000000..8482bfd --- /dev/null +++ b/.idea/libraries/Maven__org_apache_flume_flume_ng_core_1_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_flume_flume_ng_sdk_1_9_0.xml b/.idea/libraries/Maven__org_apache_flume_flume_ng_sdk_1_9_0.xml new file mode 100644 index 0000000..ae04833 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_flume_flume_ng_sdk_1_9_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_httpcomponents_httpasyncclient_4_1_3.xml b/.idea/libraries/Maven__org_apache_httpcomponents_httpasyncclient_4_1_3.xml new file mode 100644 index 0000000..d5da2b5 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_httpcomponents_httpasyncclient_4_1_3.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_httpcomponents_httpclient_4_5_2.xml b/.idea/libraries/Maven__org_apache_httpcomponents_httpclient_4_5_2.xml new file mode 100644 index 0000000..fdb7ead --- /dev/null +++ b/.idea/libraries/Maven__org_apache_httpcomponents_httpclient_4_5_2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_4_4_6.xml b/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_4_4_6.xml new file mode 100644 index 0000000..a9f6e19 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_4_4_6.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_nio_4_4_6.xml b/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_nio_4_4_6.xml new file mode 100644 index 0000000..3613e21 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_httpcomponents_httpcore_nio_4_4_6.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_httpcomponents_httpmime_4_3_1.xml b/.idea/libraries/Maven__org_apache_httpcomponents_httpmime_4_3_1.xml new file mode 100644 index 0000000..ff08d3d --- /dev/null +++ b/.idea/libraries/Maven__org_apache_httpcomponents_httpmime_4_3_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_kafka_kafka_2_11_1_0_0.xml b/.idea/libraries/Maven__org_apache_kafka_kafka_2_11_1_0_0.xml new file mode 100644 index 0000000..cad83b3 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_kafka_kafka_2_11_1_0_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_kafka_kafka_clients_1_0_0.xml b/.idea/libraries/Maven__org_apache_kafka_kafka_clients_1_0_0.xml new file mode 100644 index 0000000..25e3efc --- /dev/null +++ b/.idea/libraries/Maven__org_apache_kafka_kafka_clients_1_0_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_mina_mina_core_2_0_4.xml b/.idea/libraries/Maven__org_apache_mina_mina_core_2_0_4.xml new file mode 100644 index 0000000..3a77804 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_mina_mina_core_2_0_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_thrift_libthrift_0_9_3.xml b/.idea/libraries/Maven__org_apache_thrift_libthrift_0_9_3.xml new file mode 100644 index 0000000..0ff6c53 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_thrift_libthrift_0_9_3.xml @@ -0,0 +1,13 @@ + + + + + 
+ + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_velocity_velocity_1_7.xml b/.idea/libraries/Maven__org_apache_velocity_velocity_1_7.xml new file mode 100644 index 0000000..7a2914a --- /dev/null +++ b/.idea/libraries/Maven__org_apache_velocity_velocity_1_7.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_apache_zookeeper_zookeeper_3_4_10.xml b/.idea/libraries/Maven__org_apache_zookeeper_zookeeper_3_4_10.xml new file mode 100644 index 0000000..c595d79 --- /dev/null +++ b/.idea/libraries/Maven__org_apache_zookeeper_zookeeper_3_4_10.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_codehaus_jackson_jackson_core_asl_1_8_8.xml b/.idea/libraries/Maven__org_codehaus_jackson_jackson_core_asl_1_8_8.xml new file mode 100644 index 0000000..d649056 --- /dev/null +++ b/.idea/libraries/Maven__org_codehaus_jackson_jackson_core_asl_1_8_8.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_codehaus_jackson_jackson_mapper_asl_1_8_8.xml b/.idea/libraries/Maven__org_codehaus_jackson_jackson_mapper_asl_1_8_8.xml new file mode 100644 index 0000000..148cdfd --- /dev/null +++ b/.idea/libraries/Maven__org_codehaus_jackson_jackson_mapper_asl_1_8_8.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_http_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_http_9_4_6_v20170531.xml new file mode 100644 index 0000000..348cfac --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_http_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_io_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_io_9_4_6_v20170531.xml new file mode 100644 index 
0000000..537e5e4 --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_io_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_jmx_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_jmx_9_4_6_v20170531.xml new file mode 100644 index 0000000..b8f3918 --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_jmx_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_security_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_security_9_4_6_v20170531.xml new file mode 100644 index 0000000..43e5889 --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_security_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_server_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_server_9_4_6_v20170531.xml new file mode 100644 index 0000000..9a5d606 --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_server_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_servlet_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_servlet_9_4_6_v20170531.xml new file mode 100644 index 0000000..937f9fb --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_servlet_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_eclipse_jetty_jetty_util_9_4_6_v20170531.xml b/.idea/libraries/Maven__org_eclipse_jetty_jetty_util_9_4_6_v20170531.xml new file mode 100644 index 0000000..94631d2 --- /dev/null +++ b/.idea/libraries/Maven__org_eclipse_jetty_jetty_util_9_4_6_v20170531.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + 
+ + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_influxdb_influxdb_java_2_1.xml b/.idea/libraries/Maven__org_influxdb_influxdb_java_2_1.xml new file mode 100644 index 0000000..a6c5b54 --- /dev/null +++ b/.idea/libraries/Maven__org_influxdb_influxdb_java_2_1.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_lz4_lz4_java_1_4.xml b/.idea/libraries/Maven__org_lz4_lz4_java_1_4.xml new file mode 100644 index 0000000..8cf9aff --- /dev/null +++ b/.idea/libraries/Maven__org_lz4_lz4_java_1_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_mockito_mockito_all_1_10_19.xml b/.idea/libraries/Maven__org_mockito_mockito_all_1_10_19.xml new file mode 100644 index 0000000..c87b108 --- /dev/null +++ b/.idea/libraries/Maven__org_mockito_mockito_all_1_10_19.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_mortbay_jetty_jetty_6_1_26.xml b/.idea/libraries/Maven__org_mortbay_jetty_jetty_6_1_26.xml new file mode 100644 index 0000000..0f5d0b8 --- /dev/null +++ b/.idea/libraries/Maven__org_mortbay_jetty_jetty_6_1_26.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_mortbay_jetty_jetty_util_6_1_26.xml b/.idea/libraries/Maven__org_mortbay_jetty_jetty_util_6_1_26.xml new file mode 100644 index 0000000..f6ec2b9 --- /dev/null +++ b/.idea/libraries/Maven__org_mortbay_jetty_jetty_util_6_1_26.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_scala_lang_scala_library_2_11_11.xml b/.idea/libraries/Maven__org_scala_lang_scala_library_2_11_11.xml new file mode 100644 index 0000000..1553078 --- /dev/null +++ b/.idea/libraries/Maven__org_scala_lang_scala_library_2_11_11.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/.idea/libraries/Maven__org_slf4j_slf4j_api_1_7_25.xml b/.idea/libraries/Maven__org_slf4j_slf4j_api_1_7_25.xml new file mode 100644 index 0000000..20e8163 --- /dev/null +++ b/.idea/libraries/Maven__org_slf4j_slf4j_api_1_7_25.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_tukaani_xz_1_0.xml b/.idea/libraries/Maven__org_tukaani_xz_1_0.xml new file mode 100644 index 0000000..8b8042f --- /dev/null +++ b/.idea/libraries/Maven__org_tukaani_xz_1_0.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__org_xerial_snappy_snappy_java_1_1_4.xml b/.idea/libraries/Maven__org_xerial_snappy_snappy_java_1_1_4.xml new file mode 100644 index 0000000..33526ab --- /dev/null +++ b/.idea/libraries/Maven__org_xerial_snappy_snappy_java_1_1_4.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/libraries/Maven__tech_allegro_schema_json2avro_converter_0_2_5.xml b/.idea/libraries/Maven__tech_allegro_schema_json2avro_converter_0_2_5.xml new file mode 100644 index 0000000..5ad65a6 --- /dev/null +++ b/.idea/libraries/Maven__tech_allegro_schema_json2avro_converter_0_2_5.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..4361200 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,11 @@ + + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..f59ea60 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..3ec49a0 --- /dev/null +++ b/pom.xml @@ -0,0 +1,16 @@ + + + 4.0.0 + + cn.ac.iie + yb_flume_cus_sink_file + pom + 1.0-SNAPSHOT + + yb_http_avro_sink_file + + + + \ No newline at end of file diff --git a/yb_flume_cus_sink_file.iml b/yb_flume_cus_sink_file.iml 
new file mode 100644 index 0000000..4fd5057 --- /dev/null +++ b/yb_flume_cus_sink_file.iml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/yb_http_avro_sink_file/pom.xml b/yb_http_avro_sink_file/pom.xml new file mode 100644 index 0000000..a10fc25 --- /dev/null +++ b/yb_http_avro_sink_file/pom.xml @@ -0,0 +1,399 @@ + + + + yb_flume_cus_sink_file + cn.ac.iie + 1.0-SNAPSHOT + + 4.0.0 + + yb_http_avro_sink_file + + + + nexus + Team Nexus Repository + http://192.168.40.125:8099/content/groups/public + + + + ebi + www.ebi.ac.uk + http://www.ebi.ac.uk/intact/maven/nexus/content/groups/public/ + + + + + UTF-8 + 1.9.0 + 1.0.0 + + + + + + org.apache.maven.plugins + maven-shade-plugin + 2.4.1 + + true + + + + package + + shade + + + + + + cn.ac.iie.cusflume.sink.YbHttpAvroSinkFile + + + + + + + + org.codehaus.mojo + exec-maven-plugin + 1.2.1 + + + + exec + + + + + java + true + false + compile + cn.ac.iie.cusflume.sink.YbHttpAvroSinkFile + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.3.2 + + 1.8 + 1.8 + + + + + + + + + + + + + + + + + + + src/main/resources + + + realtime_service_config.properties + flume_config.properties + + false + + + + + + + org.apache.flume + flume-ng-core + ${flume.version} + provided + + + + + + + + + + + + + + com.zdjizhi + galaxy + 1.0.1 + + + slf4j-log4j12 + org.slf4j + + + log4j-over-slf4j + org.slf4j + + + + + + com.alibaba + fastjson + 1.2.47 + + + + + org.apache.flume + flume-ng-sdk + ${flume.version} + + + + + + + + + + org.apache.flume + flume-ng-configuration + ${flume.version} + + + + + + + + + + + + + + + + + + + + + + + commons-lang + commons-lang + 2.6 + + + + + + + + + + junit + junit + 3.8.1 + test + + + + + + + + + + + org.mockito + mockito-all + 1.10.19 + test + + + + + + + + + + + + + + + + + + org.apache.httpcomponents + httpclient + 4.5.2 + + + org.apache.httpcomponents + httpcore + 4.4.6 + + + org.apache.httpcomponents + httpmime + 4.3.1 + + + org.apache.httpcomponents + 
httpasyncclient + 4.1.3 + + + + + com.squareup.okhttp3 + okhttp + 3.9.0 + + + + commons-io + commons-io + 2.4 + + + + tech.allegro.schema.json2avro + converter + 0.2.5 + + + + org.apache.avro + avro-tools + 1.7.4 + + + + org.apache.kafka + kafka_2.11 + ${kafka.version} + + + slf4j-log4j12 + org.slf4j + + + log4j-over-slf4j + org.slf4j + + + + + + org.apache.kafka + kafka-clients + ${kafka.version} + + + slf4j-log4j12 + org.slf4j + + + log4j-over-slf4j + org.slf4j + + + + + + + + + + + + + + + + + + + + + org.influxdb + influxdb-java + 2.1 + + + + com.google.guava + guava + 18.0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/DecodeUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/DecodeUtils.java new file mode 100644 index 0000000..a5a4344 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/DecodeUtils.java @@ -0,0 +1,39 @@ +package cn.ac.iie.cusflume.sink.CommonUtils; + + +import com.zdjizhi.utils.StringUtil; +import org.apache.log4j.Logger; + +import java.util.Base64; + + +public class DecodeUtils { + private static Logger logger = Logger.getLogger(DecodeUtils.class); + + public static String base64Str(String encodedText, String subjectCharset) { + Base64.Decoder decoder = Base64.getDecoder(); + String sub; + try { + if (StringUtil.isBlank(subjectCharset)) { + sub = new String(decoder.decode(encodedText), "UTF-8"); + } else if ("ISO8859-16".equals(subjectCharset)) { + sub = new String(decoder.decode(encodedText), "ISO8859-1"); + } else { + sub = new String(decoder.decode(encodedText), subjectCharset); + } + return sub; + } catch (Exception e) { + logger.warn("This encodedText===>" + encodedText + "<===, " + + "This subjectCharset===>" + subjectCharset + "<===, " + + "Transform base64 String failed===>" + 
e.getMessage() + "<===."); + return ""; +// e.printStackTrace(); + } + } + +// public static void main(String[] args) { +// String s = base64Str("eWJqQlRURmdVclNRbmJJLnR4dA==", " "); +// System.out.println(s); +// } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/GetDataDictionaryCodeByTopicUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/GetDataDictionaryCodeByTopicUtils.java new file mode 100644 index 0000000..7211567 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/GetDataDictionaryCodeByTopicUtils.java @@ -0,0 +1,144 @@ +package cn.ac.iie.cusflume.sink.CommonUtils; + +import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import org.apache.log4j.Logger; + + +public class GetDataDictionaryCodeByTopicUtils { + private static Logger logger = Logger.getLogger(GetDataDictionaryCodeByTopicUtils.class); + + /** + * 获取数据对象自身的类型编码 + * + * @param topic + * @return + */ + public static int getDataObjectTypeCodeByTopicName(String topic) { + switch (topic) { + /** + * 非文件消息-log + */ + case "NTC-CONN-RECORD-LOG": + case "NTC-COLLECT-HTTP-LOG": + case "NTC-COLLECT-SSL-LOG": + case "NTC-COLLECT-DNS-LOG": + case "NTC-COLLECT-FILE-LOG"://此时没有数据对象,数据对象类型选择1-log日志 + return 1; + /** + * 文件消息-file + */ + case "NTC-COLLECT-FTP-DOC-LOG": + case "NTC-COLLECT-HTTP-DOC-LOG": + case "NTC-COLLECT-HTTP-AV-LOG"://20200810新增,等同于NTC-COLLECT-HTTP-DOC-LOG + case "NTC-COLLECT-MAIL-LOG": + case "NTC-COLLECT-TELNET-LOG"://20200325新增 + return 2; + /** + * 状态消息(监控消息) + * status + */ + case "monitor-msg": + case "INFLUX-SAPP-BPS-STAT-LOG"://20200113新增-广东流量状态上传 + return 4; + default: + logger.error("GetDataDictionaryCodeByTopicUtils--->getDataObjectTypeCodeByTopicName There is a unknown topic! 
topic name is :" + topic); + break; + } + return 0;//标识错误 + } + + /** + * 获取数据标签SchemaID + * NTC-COLLECT-FILE-LOG填文件标签76 + * 其他日志都是读取自身的d_tag应该填写日志标签75 + * 状态上报(一部需要)暂时作为状态标签处理,暂定78 + * @param topic + * @return + */ + public static int getDataTagSchemaIDByTopicName(String topic) { + switch (topic) { + /** + * 文件标签-file_tag + */ + case "NTC-COLLECT-FILE-LOG"://此时没有数据对象,数据对象类型选择1-log日志,但数据标签的SchemaID选择文件标签76 + return 76; + /** + * 日志标签-log_tag + */ + case "NTC-CONN-RECORD-LOG": + case "NTC-COLLECT-HTTP-LOG": + case "NTC-COLLECT-SSL-LOG": + case "NTC-COLLECT-DNS-LOG": + case "NTC-COLLECT-FTP-DOC-LOG": + case "NTC-COLLECT-HTTP-DOC-LOG": + case "NTC-COLLECT-HTTP-AV-LOG"://20200810新增,等同于NTC-COLLECT-HTTP-DOC-LOG + case "NTC-COLLECT-MAIL-LOG": + case "NTC-COLLECT-TELNET-LOG"://20200325新增 + return 75; + /** + * 状态标签-status_tag + */ + case "monitor-msg": + case "INFLUX-SAPP-BPS-STAT-LOG"://20200113新增-广东流量状态上传 + return 78;//不太清楚状态标签是使用日志标签还是使用状态标签,78为状态标签SchemaID + default: + logger.error("GetDataDictionaryCodeByTopicUtils--->getDataTagSchemaIDByTopicName There is a unknown topic! 
topic name is :" + topic); + break; + } + return 0;//标识错误 + } + + /** + * 获取数据对象SchemaID-需要到总线注册获取 + * + * @param topic + * @return + */ + public static int getDataObjectSchemaIDByTopicName(String topic) { + switch (topic) { + /** + * 非文件消息-log + */ + case "NTC-COLLECT-FILE-LOG"://文件标签单独发送,作为消息特殊处理,此处的SchemaID为0(规定),即没有数据对象这一块 + return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_FILE_LOG; + case "NTC-CONN-RECORD-LOG": + return RealtimeCountConfig.SCHEMA_ID_NTC_CONN_RECORD_LOG; + case "NTC-COLLECT-HTTP-LOG": + return 3; + case "NTC-COLLECT-SSL-LOG": + return 4; + case "NTC-COLLECT-DNS-LOG": + return 5; + /** + * 文件消息-file + */ + case "NTC-COLLECT-FTP-DOC-LOG": + return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_FTP_DOC_LOG; + case "NTC-COLLECT-TELNET-LOG"://20200325新增 + return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_TELNET_LOG; + case "NTC-COLLECT-HTTP-DOC-LOG": + return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_HTTP_DOC_LOG; + case "NTC-COLLECT-HTTP-AV-LOG"://20200810新增,等同于NTC-COLLECT-HTTP-DOC-LOG + return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_HTTP_AV_LOG; + case "NTC-COLLECT-MAIL-LOG": + return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_MAIL_LOG; + /** + * 一部状态消息(监控消息)-一部数据对象SchemaID-总线本身已经提前注册 + * status + */ + case "monitor-msg": + return RealtimeCountConfig.SCHEMA_ID_MONITOR_MSG;//此avro格式为总线规定的状态对象schema,数据字典数据子类编码-16387,schemaID-22 + /** + * 广东状态消息(监控消息)-广东数据对象SchemaID + * status + */ + case "INFLUX-SAPP-BPS-STAT-LOG": + return RealtimeCountConfig.SCHEMA_ID_INFLUX_SAPP_BPS_STAT_LOG; + default: + logger.error("GetDataDictionaryCodeByTopicUtils--->getDataObjectSchemaIDByTopicName There is a unknown topic! 
topic name is :" + topic); + break; + } + return 0;//0表示标识错误 + } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/GetFilePathByTopicUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/GetFilePathByTopicUtils.java new file mode 100644 index 0000000..57690d9 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/GetFilePathByTopicUtils.java @@ -0,0 +1,139 @@ +package cn.ac.iie.cusflume.sink.CommonUtils; + +import cn.ac.iie.cusflume.sink.bean.fileBean.*; +import com.alibaba.fastjson.JSONObject; +import com.zdjizhi.utils.StringUtil; +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; + +import java.util.Arrays; + + +public class GetFilePathByTopicUtils { + private static Logger logger = Logger.getLogger(GetFilePathByTopicUtils.class); + + public static String getFilePathByTopicName(String dataJson, String topic) { + switch (topic) { + /** + * 非文件消息 + */ + case "NTC-CONN-RECORD-LOG": + return "-"; + case "NTC-COLLECT-HTTP-LOG": + return "-"; + case "NTC-COLLECT-SSL-LOG": + return "-"; + case "NTC-COLLECT-DNS-LOG": + return "-"; + /** + * 文件消息 + */ + case "NTC-COLLECT-FTP-DOC-LOG": + return getCollFtpDocFilePath(dataJson);//20200211新增 + case "NTC-COLLECT-HTTP-AV-LOG"://和NTC-COLLECT-HTTP-DOC-LOG共用相同Schema-20200904 +// return getCollHttpAvFilePath(dataJson); + case "NTC-COLLECT-HTTP-DOC-LOG": + return getCollHttpDocFilePath(dataJson); + case "NTC-COLLECT-MAIL-LOG": + return getCollMailFilePath(dataJson); + case "NTC-COLLECT-FILE-LOG": +// return getCollFileFilePath(dataJson); + return "-";//由于这里Mail与Http都已经设置了传FilePath请求头,所以这里就没有设置再设置请求头,后期有需要可以加上 + case "NTC-COLLECT-TELNET-LOG": + return getCollTelnetFilePath(dataJson);//20200325新增 + /** + * 状态消息(监控消息) + */ + case "monitor-msg": + return "-"; + case "INFLUX-SAPP-BPS-STAT-LOG": + return "-"; + default: + logger.error("GetFilePathByTopicUtils---There is a unknown topic! 
topic name is :" + topic); + break; + } + return "-"; + } + + /** + * 获取NTC-COLLECT-MAIL-LOG的FilePath + * + * @param dataJson + * @return + */ + private static String getCollMailFilePath(String dataJson) { + NTC_COLLECT_MAIL_LOG ntcCollectMailLog = JSONObject.parseObject(dataJson, NTC_COLLECT_MAIL_LOG.class); + String filePathList = "[\"" + StringUtils.join(ntcCollectMailLog.getD_tag().getFile_path_list(), "\",\"") + "\"]";//新-20191222;//元素有双引号与转义\版本 + if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) { + return filePathList; + } else { + return "-"; + } + + } + + /** + * 获取NTC-COLLECT-HTTP-DOC-LOG的FilePath + * NTC-COLLECT-HTTP-AV-LOG复用NTC-COLLECT-HTTP-DOC-LOG的数据结构 + * @param dataJson + * @return + */ + private static String getCollHttpDocFilePath(String dataJson) { + NTC_COLLECT_HTTP_DOC_LOG ntcCollectHttpDocLog = JSONObject.parseObject(dataJson, NTC_COLLECT_HTTP_DOC_LOG.class); + String filePathList = "[\"" + StringUtils.join(ntcCollectHttpDocLog.getD_tag().getFile_path_list(), "\",\"") + "\"]";//新-20191222;//元素有双引号与转义\版本 + if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) { + return filePathList; + } else { + return "-"; + } + } + + /** + * 获取NTC-COLLECT-FTP-DOC-LOG的FilePath + * + * @param dataJson + * @return + */ + private static String getCollFtpDocFilePath(String dataJson) { + NTC_COLLECT_FTP_DOC_LOG ntcCollectFtpDocLog = JSONObject.parseObject(dataJson, NTC_COLLECT_FTP_DOC_LOG.class); + String filePathList = "[\"" + StringUtils.join(ntcCollectFtpDocLog.getD_tag().getFile_path_list(), "\",\"") + "\"]"; + if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) { + return filePathList; + } else { + return "-"; + } + } + + /** + * 获取NTC-COLLECT-TELNET-LOG的FilePath + * + * @param dataJson + * @return + */ + private static String getCollTelnetFilePath(String dataJson) { + NTC_COLLECT_TELNET_LOG ntcCollectTelnetLog = JSONObject.parseObject(dataJson, NTC_COLLECT_TELNET_LOG.class); + String 
filePathList = "[\"" + StringUtils.join(ntcCollectTelnetLog.getD_tag().getFile_path_list(), "\",\"") + "\"]"; + if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) { + return filePathList; + } else { + return "-"; + } + } + + /** + * 获取NTC-COLLECT-FILE-LOG的FilePath + * + * @param dataJson + * @return + */ + private static String getCollFileFilePath(String dataJson) { + FILE_TAG_BEAN ntcCollectHttpDocLog = JSONObject.parseObject(dataJson, FILE_TAG_BEAN.class); + String filePath = "[\"" + ntcCollectHttpDocLog.getFile_path() + "\"]"; + if (!("[\"\"]".equals(filePath)) && StringUtil.isNotBlank(filePath)) { + return filePath; + } else { + return "-"; + } + } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/PutIdOnMsgByTopicUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/PutIdOnMsgByTopicUtils.java new file mode 100644 index 0000000..2f7dff0 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/CommonUtils/PutIdOnMsgByTopicUtils.java @@ -0,0 +1,92 @@ +package cn.ac.iie.cusflume.sink.CommonUtils; + +import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_FTP_DOC_LOG; +import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_HTTP_DOC_LOG; +import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_MAIL_LOG; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; + + +public class PutIdOnMsgByTopicUtils { + private static Logger logger = Logger.getLogger(PutIdOnMsgByTopicUtils.class); + + /** + * 单条数据包含单个文件时使用 + * + * @param topic + * @param dataJson + * @param fileId + * @return + */ + public static String putIdOnMsgByTopicName(String topic, String dataJson, String fileId) { + switch (topic) { +// case "NTC-COLLECT-MAIL-LOG": +// return compCollectMail(); + case "NTC-COLLECT-HTTP-DOC-LOG": + return compCollectHttpDoc(topic, dataJson, fileId); + case "NTC-COLLECT-FTP-DOC-LOG": + 
return compCollectFtpDoc(topic, dataJson, fileId); + default: + logger.error("There is no corresponding topic! topic name is :" + topic); + break; + } + return null; + } + + /** + * 单条数据包含多种文件时使用 + * + * @param topic + * @param dataJson + * @param fileId + * @param fileKind + * @return + */ + public static String putIdOnMsgByTopicName(String topic, String dataJson, String fileId, String fileKind) { + switch (topic) { + case "NTC-COLLECT-MAIL-LOG": + return compCollectMail(topic, dataJson, fileId, fileKind); + default: + logger.error("There is no corresponding topic! topic name is :" + topic); + break; + } + return null; + } + + private static String compCollectMail(String topic, String dataJson, String fileId, String fileKind) { + NTC_COLLECT_MAIL_LOG ntc_collect_mail_log = JSONObject.parseObject(dataJson, NTC_COLLECT_MAIL_LOG.class); + if ("eml_file".equals(fileKind)) { + ntc_collect_mail_log.setEml_file_id(fileId); + } else if ("attachments".equals(fileKind)) { + ntc_collect_mail_log.setAttachments_id(fileId); + } else { + logger.error("PutIdOnMsgByTopicUtils compCollectMail fileKind is unknown===>>>" + fileKind); + } + return JSONObject.toJSONString(ntc_collect_mail_log); + } + + + private static String compCollectFtpDoc(String topic, String dataJson, String fileId) { + NTC_COLLECT_FTP_DOC_LOG ntc_collect_ftp_doc_log = JSONObject.parseObject(dataJson, NTC_COLLECT_FTP_DOC_LOG.class); +// ntc_collect_ftp_doc_log.setRes_body_file_id(fileId); + ntc_collect_ftp_doc_log.setContent_path(fileId); + return JSONObject.toJSONString(ntc_collect_ftp_doc_log); + } + + + private static String compCollectHttpDoc(String topic, String dataJson, String fileId) { + NTC_COLLECT_HTTP_DOC_LOG ntcCollectHttpDocLog = JSONObject.parseObject(dataJson, NTC_COLLECT_HTTP_DOC_LOG.class); + if (StringUtils.isNotBlank(ntcCollectHttpDocLog.getReq_body_file())) { + ntcCollectHttpDocLog.setReq_body_file_id(fileId); + } else if (StringUtils.isNotBlank(ntcCollectHttpDocLog.getRes_body_file())) { + 
ntcCollectHttpDocLog.setRes_body_file_id(fileId); + } else { + logger.error("PutIdOnMsgByTopicUtils there is no Req_body_file or Res_body_file in this message===>>>" + dataJson); + } + return JSONObject.toJSONString(ntcCollectHttpDocLog); + } + + /**---------------------------------删除的topic------------------------------------------------------------**/ + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/AsyncHttpClientGetFileCallback.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/AsyncHttpClientGetFileCallback.java new file mode 100644 index 0000000..944d5ed --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/AsyncHttpClientGetFileCallback.java @@ -0,0 +1,260 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad; +import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo; +import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.ParseException; +import org.apache.http.client.utils.HttpClientUtils; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Map; + +/** + * 异步httpclient回调对象---GET文件 + * 执行成功,开始将get到的文件post至zx + * 执行失败,重试3次 + */ +public class AsyncHttpClientGetFileCallback implements FutureCallback { + private static Logger LOG = Logger.getLogger(AsyncHttpClientGetFileCallback.class); + + private KafkaDB kafkaDBAsyncGet; + + private String postFileUrl; + private 
String postMsgUrl; + private String getFileUrl; + private AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback; + private ConfigInfo configInfo; + private String sendMsg;//用于get文件失败后发往kafka + private int count; + + //暂未使用 + public AsyncHttpClientGetFileCallback(String postFileUrl, String postMsgUrl, String getFileUrl, AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback, int count) { + this.postFileUrl = postFileUrl; + this.postMsgUrl = postMsgUrl; + this.getFileUrl = getFileUrl; + this.asyncHttpClientPostFileCallback = asyncHttpClientPostFileCallback; + this.count = count;//建议初始为0 + } + + public AsyncHttpClientGetFileCallback(ConfigInfo configInfo, String getFileUrl, String sendMsg, AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback, int count) { + this.configInfo = configInfo; + this.postFileUrl = configInfo.getPostFileUrl();//通过configInfo赋值 + this.postMsgUrl = configInfo.getPostMsgUrl();//通过configInfo赋值 + this.getFileUrl = getFileUrl; + this.sendMsg = sendMsg; + this.asyncHttpClientPostFileCallback = asyncHttpClientPostFileCallback; + this.count = count;//建议初始为0 + + //初始化入kafka程序 + kafkaDBAsyncGet = KafkaDB.getInstance(); + } + + public String getPostFileUrl() { + return postFileUrl; + } + + public void setPostFileUrl(String postFileUrl) { + this.postFileUrl = postFileUrl; + } + + public String getPostMsgUrl() { + return postMsgUrl; + } + + public void setPostMsgUrl(String postMsgUrl) { + this.postMsgUrl = postMsgUrl; + } + + public String getGetFileUrl() { + return getFileUrl; + } + + public void setGetFileUrl(String getFileUrl) { + this.getFileUrl = getFileUrl; + } + + public AsyncHttpClientPostFileCallback getAsyncHttpClientPostFileCallback() { + return asyncHttpClientPostFileCallback; + } + + public void setAsyncHttpClientPostFileCallback(AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback) { + this.asyncHttpClientPostFileCallback = asyncHttpClientPostFileCallback; + } + + public ConfigInfo getConfigInfo() { + 
return configInfo; + } + + public void setConfigInfo(ConfigInfo configInfo) { + this.configInfo = configInfo; + } + + public String getSendMsg() { + return sendMsg; + } + + public void setSendMsg(String sendMsg) { + this.sendMsg = sendMsg; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + @Override + public String toString() { + return "AsyncHttpClientGetFileCallback{" + + "postFileUrl='" + postFileUrl + '\'' + + ", postMsgUrl='" + postMsgUrl + '\'' + + ", getFileUrl='" + getFileUrl + '\'' + + ", asyncHttpClientPostFileCallback=" + asyncHttpClientPostFileCallback + + ", configInfo=" + configInfo + + ", sendMsg='" + sendMsg + '\'' + + ", count=" + count + + '}'; + } + + /** + * 请求完成后调用该函数 + */ + @Override + public void completed(HttpResponse response) { + try { + byte[] result = IOUtils.toByteArray(response.getEntity().getContent()); + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode == 200) { + asyncHttpClientPostFileCallback.setResultIs(result);//添加获取的文件流,用于post文件失败时重试使用 + LOG.info("AsyncHttpClientGetFileCallback completed,response status:{" + statusCode + "},get file success,post file to zx next."); + + AvroMonitorTimerTask.fileReadyPostSum++;//文件获取流成功才会开始推送文件,计个数 + AvroMonitorTimerTask.fileBytesReadyPostSum += result.length;//文件获取流成功才会开始推送文件,计字节数 + + //文件获取成功后以流的形式开始发往总线 + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), result, asyncHttpClientPostFileCallback);//失败时进入到的这个asyncHttpClientPostFileCallback内部是有文件流数据的 + } else if (statusCode == 404) { + //文件未存在,重试 + retryFor404(); + } else { + LOG.info("AsyncHttpClientGetFileCallback getFile failed,response status:{" + statusCode + "},this msg is===>" + sendMsg + "<==="); + } + + HttpClientUtils.closeQuietly(response); + } catch (Exception e) { + e.printStackTrace(); + } + } + + /** + * 由于404引起的重试,大概率是因为文件未完全生成 + */ + private void retryFor404() { + try { + count++; + if (count > 1) { + 
LOG.warn("AsyncHttpClientGetFileCallback getFile is failed,statusCode is 404,retry count=" + count); + } + if (count > 4) { + LOG.error("AsyncHttpClientGetFileCallback getFile is failed and already retry 3 times,statusCode is 404,This message is===>>>" + sendMsg + "<<<==="); + } else { + Map map = JSONObject.parseObject(sendMsg, Map.class); + int found_time = (int) map.get("found_time"); + long now_time = System.currentTimeMillis() / 1000; + long timeDiffer = now_time - found_time; + if (timeDiffer >= 20) { + AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count);//注意此处count不再为0 + HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback); + } else { + Thread.sleep((20 - timeDiffer) * 1000); + AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count);//注意此处count不再为0 + HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback); + } + } + } catch (Exception e2) { + LOG.error("AsyncHttpClientGetFileCallback retryFor404 getFile retry is error===>>>" + e2); + } + } + + /** + * 请求取消后调用该函数 + */ + @Override + public void cancelled() { + LOG.warn("request is cancelled..."); + } + + /** + * 请求失败后调用该函数 + */ + @Override + public void failed(Exception e) { + retryForFailed(e); + } + + /** + * 由于请求失败引起的重试 + * + * @param e + */ + private void retryForFailed(Exception e) { + try { + count++; + if (count > 1) { + LOG.warn("AsyncHttpClientGetFileCallback getFile is failed,retry count=" + count); + } + if (count > 4) { + LOG.error("AsyncHttpClientGetFileCallback getFile is failed and already retry 3 times.error===>>>" + e + "<<<===.This message is===>>>" + sendMsg + "<<<==="); + } else { + Map map = JSONObject.parseObject(sendMsg, Map.class); + int found_time = (int) map.get("found_time"); + long now_time = 
System.currentTimeMillis() / 1000; + long timeDiffer = now_time - found_time; + if (timeDiffer >= 20) { + AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count);//注意此处count不再为0 + HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback); + } else { + Thread.sleep((20 - timeDiffer) * 1000); + AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count);//注意此处count不再为0 + HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback); + } + } + } catch (Exception e2) { + LOG.error("AsyncHttpClientGetFileCallback retryForFailed getFile retry is error===>>>" + e2); + } + } + + + protected String getHttpContent(HttpResponse response) { + HttpEntity entity = response.getEntity(); + String body = null; + + if (entity == null) { + return null; + } + + try { + body = EntityUtils.toString(entity, "utf-8"); + } catch (ParseException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } catch (IOException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } + return body; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/AsyncHttpClientPostFileCallback.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/AsyncHttpClientPostFileCallback.java new file mode 100644 index 0000000..4a53c04 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/AsyncHttpClientPostFileCallback.java @@ -0,0 +1,293 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +//import com.google.api.client.util.Lists; +//import com.mesa.miniotohttp.tools.OssUtil; +//import com.mesa.miniotohttp.tools.TimeTask; + +import cn.ac.iie.cusflume.sink.CommonUtils.PutIdOnMsgByTopicUtils; +import 
cn.ac.iie.cusflume.sink.HttpAsyncUtils.ybBean.PutFileInfo; +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad; +import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo; +import cn.ac.iie.cusflume.sink.bean.postFileBean.PostFileResBody; +import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody; +import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.ParseException; +import org.apache.http.client.utils.HttpClientUtils; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +/** + * 异步httpclient回调对象---POST文件 + * 在get文件成功后开始执行post文件操作 + * post文件成功时,获取到文件id加入到message中,开始执行post消息操作, + * 为此需要message(kafka获取)以及post消息url(配置文件获取), + * 还需要User-Agent以及X-Tag用于验证 + *

+ * post失败时,重试三次 + */ +public class AsyncHttpClientPostFileCallback implements FutureCallback { + private static Logger LOG = Logger.getLogger(AsyncHttpClientPostFileCallback.class); + + private DataCenterLoad dclAsyncPost; + private KafkaDB kafkaDBAsyncPost; + + private String postFileUrl; + private String postMsgUrl; + private byte[] resultIs;//get到的文件流,用于post文件失败时重试-字节数组版本 + private ConfigInfo configInfo;//1.存储post文件,信息url(配置文件获取);2.存储入库前验证文件,信息url(配置文件获取);3.存储文件,信息SessionCookie(请求总线获取);4.存储msg作为avro入库所需的topicName,userAgent,xTag,batchSize + + private String sendMsg;//用于post成功后补全id发往zx或者post失败发往kafka + + private PostFileResBody postFileResBody;//存储post文件成功后返回的响应信息,内部包含文件id,key名为id + private int count; + + private PutFileInfo putFileInfo;//用于存放发送文件时的需要携带的相关信息 + + public AsyncHttpClientPostFileCallback(ConfigInfo configInfo, String sendMsg, int count) { + this.configInfo = configInfo; + this.postFileUrl = configInfo.getPostFileUrl(); + this.postMsgUrl = configInfo.getPostMsgUrl(); + this.sendMsg = sendMsg;//存放对应于url的数据 + this.count = count; + + //初始化入库程序 + dclAsyncPost = new DataCenterLoad(); + kafkaDBAsyncPost = KafkaDB.getInstance(); + } + + //暂未使用 + public AsyncHttpClientPostFileCallback(String postFileUrl, String postMsgUrl, int count) { + this.postFileUrl = postFileUrl; + this.postMsgUrl = postMsgUrl; + this.count = count; + + //初始化入库程序 + dclAsyncPost = new DataCenterLoad(); + kafkaDBAsyncPost = KafkaDB.getInstance(); + } + + //暂未使用 + public AsyncHttpClientPostFileCallback(String postFileUrl, String postMsgUrl, byte[] resultIs, int count) { + this.postFileUrl = postFileUrl; + this.postMsgUrl = postMsgUrl; + this.resultIs = resultIs; + this.count = count; + + //初始化入库程序 + dclAsyncPost = new DataCenterLoad(); + kafkaDBAsyncPost = KafkaDB.getInstance(); + } + + public String getPostFileUrl() { + return postFileUrl; + } + + public void setPostFileUrl(String postFileUrl) { + this.postFileUrl = postFileUrl; + } + + public String getPostMsgUrl() { + return postMsgUrl; 
+ } + + public void setPostMsgUrl(String postMsgUrl) { + this.postMsgUrl = postMsgUrl; + } + + public byte[] getResultIs() { + return resultIs; + } + + public void setResultIs(byte[] resultIs) { + this.resultIs = resultIs; + } + + public ConfigInfo getConfigInfo() { + return configInfo; + } + + public void setConfigInfo(ConfigInfo configInfo) { + this.configInfo = configInfo; + } + + public String getSendMsg() { + return sendMsg; + } + + public void setSendMsg(String sendMsg) { + this.sendMsg = sendMsg; + } + + public PostFileResBody getPostFileResBody() { + return postFileResBody; + } + + public void setPostFileResBody(PostFileResBody postFileResBody) { + this.postFileResBody = postFileResBody; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + @Override + public String toString() { + return "AsyncHttpClientPostFileCallback{" + + "postFileUrl='" + postFileUrl + '\'' + + ", postMsgUrl='" + postMsgUrl + '\'' + + ", configInfo=" + configInfo + + ", sendMsg='" + sendMsg + '\'' + + ", postFileResBody=" + postFileResBody + + ", count=" + count + + '}'; + } + + /** + * 请求完成后调用该函数 + */ + @Override + public void completed(HttpResponse response) { + try { + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode == 200) { + LOG.info("AsyncHttpClientPostFileCallback completed,post file statuscode is:{" + statusCode + "},now start to send message to zx."); + //post文件成功后可以获取文件id + String postResBody = getHttpContent(response); + System.out.println("Post File to zx resBody====>" + postResBody); + if (StringUtils.isNotBlank(postResBody)) { + this.postFileResBody = JSONObject.parseObject(postResBody, PostFileResBody.class); + String fileId = postFileResBody.getMsg();//获取文件id----新---20191115---返回体格式:{"code": "200", "msg": "2019111507094164188567821505", "data": "不需要额外数据"} + AvroMonitorTimerTask.fileSuccessSum++;//成功获取到ID的文件计数,即成功推送的文件数 + + AvroMonitorTimerTask.fileBytesSuccessSum += 
resultIs.length;//成功获取到ID的文件计字节数,即成功推送的文件字节数 + + this.sendMsg = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(configInfo.getTopicName(), sendMsg, fileId);//补全数据 + + AvroMonitorTimerTask.msgReadyPostSum++;//一个文件对应一条消息 + //往总线发送 + ProResBody proResBody = dclAsyncPost.avroDataLoad(configInfo.getPostMsgUrl(), configInfo.getTopicName(), sendMsg, configInfo.getBatchSize(), configInfo.getUserAgent(), configInfo.getMsgSessionCookie()); + LOG.info("Send message with fileId to zx over,this responseBody is===>" + proResBody.toString()); + } else { + LOG.warn("AsyncHttpClientPostFileCallback post file success but postResBody(response body) is null."); + } + } else if (statusCode == 403) {//空文件,不再重试发送 + LOG.error("AsyncHttpClientPostFileCallback post zxFile statusCode is 403 so get the fileIs but this minio file is empty.This message is===>>>" + sendMsg + "<<<==="); + } else { + RetryAsyncHttpClientPostFileFailed(statusCode); + } + } catch (Exception e) { + LOG.error("AsyncHttpClientPostFileCallback post zxFile or send finalMsg is error .This message is===>>>" + sendMsg + "<<<==="); + e.printStackTrace(); + } + + HttpClientUtils.closeQuietly(response); + } + + /** + * 请求取消后调用该函数 + */ + @Override + public void cancelled() { + LOG.info("AsyncHttpClientPostFileCallback is cancelled... 
..."); + } + + /** + * 请求失败后调用该函数 + */ + @Override + public void failed(Exception e) { + //重试 + RetryAsyncHttpClientPostFileFailed(e); + } + + /** + * failed被调用时的重试 + * + * @param e + */ + private void RetryAsyncHttpClientPostFileFailed(Exception e) { + try { + count++; + if (count > 1) { + LOG.warn("AsyncHttpClientPostFileCallback post zxFile is failed,retry count=" + count); + } + if (count > 4) { + LOG.error("AsyncHttpClientPostFileCallback post zxFile is failed and already retry 3 times.error===>>>" + e + "<<<===.This message is===>>>" + sendMsg + "<<<==="); + } else { + if (configInfo != null && StringUtils.isNotBlank(sendMsg) && resultIs != null) { + //重试发送文件 + AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback = new AsyncHttpClientPostFileCallback(configInfo, sendMsg, count);//注意此处count不再为0 + //重试时也是以之前存储的流的形式发往总线 + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), resultIs, asyncHttpClientPostFileCallback); + } + } + } catch (Exception e2) { + LOG.error("AsyncHttpClientGetFileCallback retry is error===>>>" + e2); + } + } + + /** + * 流不为空但文件异常情况下的重试 + * + * @param statusCode + */ + private void RetryAsyncHttpClientPostFileFailed(int statusCode) { + try { + count++; + if (count > 1) { + LOG.warn("AsyncHttpClientPostFileCallback post zxFile statusCode is abnormal,retry count=" + count); + } + if (count > 4) { + LOG.error("AsyncHttpClientPostFileCallback post zxFile statusCode is abnormal and already retry 3 times.statusCode is{" + statusCode + "}.This message is===>>>" + sendMsg + "<<<==="); + } else { + if (configInfo != null && StringUtils.isNotBlank(sendMsg) && resultIs != null) { + //重试发送文件 + AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback = new AsyncHttpClientPostFileCallback(configInfo, sendMsg, count);//注意此处count不再为0 + //重试时也是以之前存储的流的形式发往总线 + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), resultIs, asyncHttpClientPostFileCallback); + } + } + } catch (Exception e2) { + 
LOG.error("AsyncHttpClientGetFileCallback retry is error===>>>" + e2); + } + } + + protected String getHttpContent(HttpResponse response) { + + HttpEntity entity = response.getEntity(); + String body = null; + if (entity == null) { + return null; + } + try { + body = EntityUtils.toString(entity, "utf-8"); + } catch (ParseException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } catch (IOException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } + return body; + } +} + + + + + + diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpAsyncClient.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpAsyncClient.java new file mode 100644 index 0000000..8d4579d --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpAsyncClient.java @@ -0,0 +1,195 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import org.apache.http.Consts; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthSchemeProvider; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.MalformedChallengeException; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.config.AuthSchemes; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.config.ConnectionConfig; +import org.apache.http.config.Lookup; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.SSLContexts; +import org.apache.http.impl.auth.*; +import org.apache.http.impl.client.BasicCookieStore; +import org.apache.http.impl.client.BasicCredentialsProvider; +import 
org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.impl.nio.client.HttpAsyncClients; +import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; +import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.apache.http.impl.nio.reactor.IOReactorConfig; +import org.apache.http.nio.conn.NoopIOSessionStrategy; +import org.apache.http.nio.conn.SchemeIOSessionStrategy; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; +import org.apache.http.nio.reactor.ConnectingIOReactor; +import org.apache.http.nio.reactor.IOReactorException; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import java.nio.charset.CodingErrorAction; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; + +/** + * 异步的HTTP请求对象,可设置代理 + */ +public class HttpAsyncClient { + + /* + private static int socketTimeout = 60000;//设置等待数据超时时间60秒钟 根据业务调整 + private static int connectTimeout = 60000;//连接超时 + private static int poolSize = 5000;//连接池最大连接数 + private static int maxPerRoute = 2500;//每个主机的并发最多只有1500 + private static int connectionRequestTimeout = 90000; //从连接池中后去连接的timeout时间 + */ + private static int socketTimeout = RealtimeCountConfig.HTTP_ASYNC_SOCKETTIMEOUT;//设置等待数据超时时间60秒钟 根据业务调整 + private static int connectTimeout = RealtimeCountConfig.HTTP_ASYNC_CONNECTTIMEOUT;//连接超时 + private static int poolSize = RealtimeCountConfig.HTTP_ASYNC_POOLSIZE;//连接池最大连接数 + private static int maxPerRoute = RealtimeCountConfig.HTTP_ASYNC_MAXPERROUTE;//每个主机的并发最多只有1500 + private static int connectionRequestTimeout = RealtimeCountConfig.HTTP_ASYNC_CONNECTIONREQUESTTIMEOUT; //从连接池中后去连接的timeout时间 + + // http代理相关参数 + private String host = ""; + private int port = 0; + private 
String username = ""; + private String password = ""; + + // 异步httpclient + private CloseableHttpAsyncClient asyncHttpClient; + + // 异步加代理的httpclient + private CloseableHttpAsyncClient proxyAsyncHttpClient; + + public HttpAsyncClient() { + try { + this.asyncHttpClient = createAsyncClient(false); + this.proxyAsyncHttpClient = createAsyncClient(true); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + /** + * 新版createAsyncClient(boolean proxy)---20200425注释 + * @param proxy + * @return + * @throws KeyManagementException + * @throws UnrecoverableKeyException + * @throws NoSuchAlgorithmException + * @throws KeyStoreException + * @throws MalformedChallengeException + * @throws IOReactorException + */ + public CloseableHttpAsyncClient createAsyncClient(boolean proxy) + throws KeyManagementException, UnrecoverableKeyException, + NoSuchAlgorithmException, KeyStoreException, + MalformedChallengeException, IOReactorException { + + RequestConfig requestConfig = RequestConfig.custom() + .setConnectionRequestTimeout(connectionRequestTimeout) + .setConnectTimeout(connectTimeout) + .setSocketTimeout(socketTimeout).build(); + + SSLContext sslcontext = SSLContext.getInstance(SSLConnectionSocketFactory.TLS); + X509TrustManager tm = new X509TrustManager() { + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { + } + + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + return null; + } + }; + sslcontext.init(null, new TrustManager[]{tm}, null); + + + UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( + username, password); + + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, credentials); + + // 设置协议http和https对应的处理socket链接工厂的对象 + Registry sessionStrategyRegistry = RegistryBuilder + 
. create() + .register("http", NoopIOSessionStrategy.INSTANCE) + .register("https", new SSLIOSessionStrategy(sslcontext, NoopHostnameVerifier.INSTANCE)) + .build(); + + // 配置io线程 + IOReactorConfig ioReactorConfig = IOReactorConfig.custom().setSoKeepAlive(false).setTcpNoDelay(true) + .setIoThreadCount(Runtime.getRuntime().availableProcessors()) + .build(); + // 设置连接池大小 + ConnectingIOReactor ioReactor; + ioReactor = new DefaultConnectingIOReactor(ioReactorConfig); + PoolingNHttpClientConnectionManager conMgr = new PoolingNHttpClientConnectionManager( + ioReactor, null, sessionStrategyRegistry, null); + + if (poolSize > 0) { + conMgr.setMaxTotal(poolSize); + } + + if (maxPerRoute > 0) { + conMgr.setDefaultMaxPerRoute(maxPerRoute); + } else { + conMgr.setDefaultMaxPerRoute(10); + } + + ConnectionConfig connectionConfig = ConnectionConfig.custom() + .setMalformedInputAction(CodingErrorAction.IGNORE) + .setUnmappableInputAction(CodingErrorAction.IGNORE) + .setCharset(Consts.UTF_8).build(); + + Lookup authSchemeRegistry = RegistryBuilder + . 
create() + .register(AuthSchemes.BASIC, new BasicSchemeFactory()) + .register(AuthSchemes.DIGEST, new DigestSchemeFactory()) + .register(AuthSchemes.NTLM, new NTLMSchemeFactory()) + .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory()) + .register(AuthSchemes.KERBEROS, new KerberosSchemeFactory()) + .build(); + conMgr.setDefaultConnectionConfig(connectionConfig); + + if (proxy) { + return HttpAsyncClients.custom().setConnectionManager(conMgr) + .setDefaultCredentialsProvider(credentialsProvider) + .setDefaultAuthSchemeRegistry(authSchemeRegistry) + .setProxy(new HttpHost(host, port)) + .setDefaultCookieStore(new BasicCookieStore()) + .setDefaultRequestConfig(requestConfig).build(); + } else { + return HttpAsyncClients.custom().setConnectionManager(conMgr) + .setDefaultCredentialsProvider(credentialsProvider) + .setDefaultAuthSchemeRegistry(authSchemeRegistry) + .setDefaultCookieStore(new BasicCookieStore()).build(); + } + + } + + public CloseableHttpAsyncClient getAsyncHttpClient() { + return asyncHttpClient; + } + + public CloseableHttpAsyncClient getProxyAsyncHttpClient() { + return proxyAsyncHttpClient; + } +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpClientFactory.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpClientFactory.java new file mode 100644 index 0000000..9535fbd --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpClientFactory.java @@ -0,0 +1,38 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +/** + * + * httpclient 工厂类 + * */ +public class HttpClientFactory { + + private static HttpAsyncClient httpAsyncClient = new HttpAsyncClient(); + + private static HttpSyncClient httpSyncClient = new HttpSyncClient(); + + private static OkClient okClient = new OkClient(); + + private HttpClientFactory() { + } + + private static HttpClientFactory httpClientFactory = new HttpClientFactory(); 
+ + public static HttpClientFactory getInstance() { + + return httpClientFactory; + + } + + protected HttpAsyncClient getHttpAsyncClientPool() { + return httpAsyncClient; + } + + protected HttpSyncClient getHttpSyncClientPool() { + return httpSyncClient; + } + + protected OkClient getOkClientPool() { + return okClient; + } + +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpClientUtil.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpClientUtil.java new file mode 100644 index 0000000..120c7dc --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpClientUtil.java @@ -0,0 +1,1095 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +import cn.ac.iie.cusflume.sink.CommonUtils.GetDataDictionaryCodeByTopicUtils; +import cn.ac.iie.cusflume.sink.CommonUtils.GetFilePathByTopicUtils; +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.mail.AsyncPostMailFilesCallback; +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.msgCallBack.AsyncPostMsgCallBack; +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.avroUtils.MD5Utils; +import cn.ac.iie.cusflume.sink.avroUtils.avroRecord.GetAvroRecordByTopicUtils; +import cn.ac.iie.cusflume.sink.avroUtils.avroSchema.GetAvroSchemaByTopicUtils; +import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import com.alibaba.fastjson.JSONObject; +import com.zdjizhi.utils.StringUtil; +import okhttp3.*; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.client.ClientProtocolException; +import 
org.apache.http.client.entity.UrlEncodedFormEntity; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.BasicCookieStore; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.http.message.BasicNameValuePair; +import org.apache.http.protocol.HTTP; +import org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URI; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** + * http client 业务逻辑处理类 + * 执行请求发送 + */ +public class HttpClientUtil { + + private static Logger LOG = Logger.getLogger(HttpClientUtil.class); + + protected static ExecutorService pool = Executors.newFixedThreadPool(RealtimeCountConfig.HTTP_ASYNC_PARALLELISM);//线程池 + + private static String utf8Charset = "utf-8"; + + private static final MediaType JSON + = MediaType.parse("application/json; charset=utf-8"); + + /** + * 向指定的url发送一次post请求,参数是List + * + * @param baseUrl 请求地址 + * @param list 请求参数,格式是List + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static String httpSyncPost(String baseUrl, List list) { + + CloseableHttpClient httpClient = HttpClientFactory.getInstance().getHttpSyncClientPool().getHttpClient(); + HttpPost httpPost = new HttpPost(baseUrl); + + //Parameters + LOG.warn("==== Parameters ======" + list); + 
CloseableHttpResponse response = null; + try { + httpPost.setEntity(new UrlEncodedFormEntity(list)); +// httpPost.setHeader("Connection","close"); + response = httpClient.execute(httpPost); + LOG.warn("========HttpResponseProxy:========" + response.getStatusLine()); + HttpEntity entity = response.getEntity(); + String result = null; + if (entity != null) { + result = EntityUtils.toString(entity, "UTF-8"); + LOG.warn("========Response=======" + result); + } + EntityUtils.consume(entity); + return result; + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return null; + } + + /** + * 向指定的url发送一次post请求,参数是字符串 + * + * @param baseUrl 请求地址 + * @param postString 请求参数,格式是json.toString() + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestBody接收参数 + */ + public static String httpSyncPost(String baseUrl, String postString) { + + CloseableHttpClient httpClient = HttpClientFactory.getInstance().getHttpSyncClientPool().getHttpClient(); + HttpPost httpPost = new HttpPost(baseUrl); + //parameters + LOG.warn("==== Parameters ======" + postString); + CloseableHttpResponse response = null; + try { + if (postString == null || "".equals(postString)) { + throw new Exception("missing post String"); + } + + StringEntity stringEntity = new StringEntity(postString.toString(), utf8Charset); + stringEntity.setContentEncoding("UTF-8"); + stringEntity.setContentType("application/json"); + httpPost.setEntity(stringEntity); + + response = httpClient.execute(httpPost); + LOG.warn("========HttpResponseProxy:========" + response.getStatusLine()); + HttpEntity entity = response.getEntity(); + String result = null; + if (entity != null) { + result = EntityUtils.toString(entity, "UTF-8"); + LOG.warn("========Response=======" + result); + } + EntityUtils.consume(entity); + return result; + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (response 
!= null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return null; + } + + + /** + * 向指定的url发送一次get请求,参数是List + * + * @param baseUrl 请求地址 + * @param list 请求参数,格式是List + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static String httpSyncGet(String baseUrl, List list) { + + CloseableHttpClient httpClient = HttpClientFactory.getInstance().getHttpSyncClientPool().getHttpClient(); + HttpGet httpGet = new HttpGet(baseUrl); + //Parameters + LOG.warn("==== Parameters ======" + list); + CloseableHttpResponse response = null; + try { + + if (list != null) { + String getUrl = EntityUtils + .toString(new UrlEncodedFormEntity(list)); + httpGet.setURI(new URI(httpGet.getURI().toString() + + "?" + getUrl)); + } else { + httpGet.setURI(new URI(httpGet.getURI().toString())); + } + + response = httpClient.execute(httpGet); + LOG.warn("========HttpResponseProxy:========" + response.getStatusLine()); + HttpEntity entity = response.getEntity(); + String result = null; + if (entity != null) { + result = EntityUtils.toString(entity, "UTF-8"); + LOG.warn("========Response=======" + result); + } + EntityUtils.consume(entity); + return result; + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return null; + } + + /** + * 向指定的url发送一次get请求,参数是字符串 + * + * @param baseUrl 请求地址 + * @param urlParams 请求参数,格式是String + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static String httpSyncGet(String baseUrl, String urlParams) { + + CloseableHttpClient httpClient = HttpClientFactory.getInstance().getHttpSyncClientPool().getHttpClient(); + HttpGet httpGet = new HttpGet(baseUrl); + //Parameters + LOG.warn("==== Parameters ======" + urlParams); + CloseableHttpResponse response = null; + try { + + if (null != urlParams || "".equals(urlParams)) { + + 
httpGet.setURI(new URI(httpGet.getURI().toString() + + "?" + urlParams)); + } else { + httpGet.setURI(new URI(httpGet.getURI().toString())); + } + + response = httpClient.execute(httpGet); + LOG.warn("========HttpResponseProxy:========" + response.getStatusLine()); + HttpEntity entity = response.getEntity(); + String result = null; + if (entity != null) { + result = EntityUtils.toString(entity, "UTF-8"); + LOG.warn("========Response=======" + result); + } + EntityUtils.consume(entity); + return result; + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return null; + } + + + /** + * 向指定的url发送一次get请求,参数是字符串 + * + * @param baseUrl 请求地址 + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static String httpSyncGet(String baseUrl) { + + CloseableHttpClient httpClient = HttpClientFactory.getInstance().getHttpSyncClientPool().getHttpClient(); + HttpGet httpGet = new HttpGet(baseUrl); + + CloseableHttpResponse response = null; + try { + httpGet.setURI(new URI(httpGet.getURI().toString())); + response = httpClient.execute(httpGet); + LOG.warn("========HttpResponseProxy:========" + response.getStatusLine()); + HttpEntity entity = response.getEntity(); + String result = null; + if (entity != null) { + result = EntityUtils.toString(entity, "UTF-8"); + LOG.warn("========Response=======" + result); + } + EntityUtils.consume(entity); + return result; + } catch (Exception e) { + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + return null; + } + + + /** + * 向指定的url发送一次异步post请求,参数是字符串 + * + * @param baseUrl 请求地址 + * @param postString 请求参数,格式是json.toString() + * @param urlParams 请求参数,格式是String + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public 
static void httpAsyncPost(String baseUrl, String postString, + String urlParams, FutureCallback callback) throws Exception { + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + CloseableHttpAsyncClient hc = HttpClientFactory.getInstance().getHttpAsyncClientPool() + .getAsyncHttpClient(); + try { + hc.start(); + HttpPost httpPost = new HttpPost(baseUrl); + +// httpPost.setHeader("Connection","close"); + + if (null != postString) { + LOG.debug("exeAsyncReq post postBody={" + postString + "}"); + StringEntity entity = new StringEntity(postString.toString(), utf8Charset); + entity.setContentEncoding("UTF-8"); + entity.setContentType("application/json"); + httpPost.setEntity(entity); + } + + if (null != urlParams) { + + httpPost.setURI(new URI(httpPost.getURI().toString() + + "?" + urlParams)); + } + + LOG.warn("exeAsyncReq getparams:" + httpPost.getURI()); + + hc.execute(httpPost, callback); + + } catch (Exception e) { + e.printStackTrace(); + } + } + + + /** + * 向指定的url发送一次异步post请求,参数是字符串 + * + * @param baseUrl 请求地址 + * @param urlParams 请求参数,格式是List + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static void httpAsyncPost(String baseUrl, List postBody, + List urlParams, FutureCallback callback) throws Exception { + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + + try { + CloseableHttpAsyncClient hc = HttpClientFactory.getInstance().getHttpAsyncClientPool() + .getAsyncHttpClient(); + + hc.start(); + + HttpPost httpPost = new HttpPost(baseUrl); + +// httpPost.setHeader("Connection","close"); + + if (null != postBody) { + LOG.debug("exeAsyncReq post postBody={" + postBody + "}"); + UrlEncodedFormEntity entity = new UrlEncodedFormEntity( + postBody, "UTF-8"); + httpPost.setEntity(entity); + } + + if 
(null != urlParams) { + + String getUrl = EntityUtils + .toString(new UrlEncodedFormEntity(urlParams)); + + httpPost.setURI(new URI(httpPost.getURI().toString() + + "?" + getUrl)); + } + + LOG.warn("exeAsyncReq getparams:" + httpPost.getURI()); + + + hc.execute(httpPost, callback); + + } catch (Exception e) { + e.printStackTrace(); + } + } + + /** + * 往总线post文件数据---------20191106-字节版本--回调包含标签-AsyncHttpClientPostFileCallback版本 + * 向指定的url发送一次异步put请求,参数是字符串--注意由于一部的需求特殊,这里用的put方法 + * + * @param baseUrl 请求地址 + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static void httpAsyncPostFileToZx(String baseUrl, byte[] fileIs, AsyncHttpClientPostFileCallback callback) throws Exception { + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + + HttpPut httpPut; + + CloseableHttpAsyncClient hc = null; + + try { + hc = HttpClientFactory.getInstance().getHttpAsyncClientPool().getAsyncHttpClient(); + hc.start(); + + HttpClientContext localContext = HttpClientContext.create(); + BasicCookieStore cookieStore = new BasicCookieStore(); + + httpPut = new HttpPut(baseUrl); + +// httpPost.setHeader("Transfer-Encoding","close"); +// httpPost.setHeader("Content-Type", "binary/octet-stream"); + httpPut.setHeader("Content-Type", "binary/octet-stream"); + + if (null != fileIs) { + httpPut.setEntity(new ByteArrayEntity(fileIs)); + } + LOG.info("File httpPut.getURI()===>>>" + httpPut.getURI()); + + localContext.setAttribute(HttpClientContext.COOKIE_STORE, cookieStore); + + hc.execute(httpPut, localContext, callback); + } catch (Exception e) { + e.printStackTrace(); + } + } + + /** + * 往总线post文件数据---------20191106-字节版本--回调包含标签-AsyncPostMailFilesCallback版本 + * 向指定的url发送一次异步put请求,参数是字符串--注意由于一部的需求特殊,这里用的put方法 + * + * @param baseUrl 请求地址 + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 
@RequestParam接收参数 + */ + public static void httpAsyncPostFileToZx(String baseUrl, byte[] fileIs, AsyncPostMailFilesCallback callback) throws Exception { + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + + HttpPut httpPut; + + CloseableHttpAsyncClient hc = null; + + try { + hc = HttpClientFactory.getInstance().getHttpAsyncClientPool().getAsyncHttpClient(); + hc.start(); + + HttpClientContext localContext = HttpClientContext.create(); + BasicCookieStore cookieStore = new BasicCookieStore(); + + httpPut = new HttpPut(baseUrl); + + +// httpPost.setHeader("Transfer-Encoding","close"); +// httpPost.setHeader("Content-Type", "binary/octet-stream"); + httpPut.setHeader("Content-Type", "binary/octet-stream"); + if (null != fileIs) { + httpPut.setEntity(new ByteArrayEntity(fileIs)); + } + LOG.info("File httpPut.getURI()===>>>" + httpPut.getURI()); + + localContext.setAttribute(HttpClientContext.COOKIE_STORE, cookieStore); + + hc.execute(httpPut, localContext, callback); + } catch (Exception e) { + e.printStackTrace(); + } +// return callback; + } + + /** + * 向指定的url发送一次异步get请求,参数是String + * 从minio异步获取文件,请求方式为GET + * 获取文件成功后将其POST至ZX获取文件ID + * 最后将ID补全入信息 + * + * @param baseUrl 请求地址 + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static void httpAsyncGetFile(String baseUrl, FutureCallback callback) throws Exception { + + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + CloseableHttpAsyncClient hc = HttpClientFactory.getInstance().getHttpAsyncClientPool().getAsyncHttpClient(); + try { + hc.start(); + HttpGet httpGet = new HttpGet(baseUrl); + +// httpGet.setHeader("Connection", "close");//关闭长连接-20191204新增 + httpGet.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_CLOSE);//关闭长连接-20191204新增-使用字符代替 + +// if (null != 
urlParams || "".equals(urlParams)) { +// +// httpGet.setURI(new URI(httpGet.getURI().toString() +// + "?" + urlParams)); +// } else { +// httpGet.setURI(new URI(httpGet.getURI().toString())); +// } + +// LOG.info("exeAsyncReq getparams:" + httpGet.getURI()); + LOG.info("File httpGet.getURI()===>>>" + httpGet.getURI()); + hc.execute(httpGet, callback); + } catch (Exception e) { + e.printStackTrace(); + } +// Thread.sleep(50); +// return callback.getAsyncHttpClientPostFileCallback(); +// return null; + } + + /** + * 向指定的url发送一次异步get请求,参数是String + * + * @param baseUrl 请求地址 + * @param urlParams 请求参数,格式是String + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static void httpAsyncGet(String baseUrl, String urlParams, FutureCallback callback) throws Exception { + + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + CloseableHttpAsyncClient hc = HttpClientFactory.getInstance().getHttpAsyncClientPool() + .getAsyncHttpClient(); + try { + + + hc.start(); + + HttpGet httpGet = new HttpGet(baseUrl); + +// httpGet.setHeader("Connection","close"); + + if (null != urlParams || "".equals(urlParams)) { + + httpGet.setURI(new URI(httpGet.getURI().toString() + + "?" 
+ urlParams)); + } else { + httpGet.setURI(new URI(httpGet.getURI().toString())); + } + + LOG.warn("exeAsyncReq getparams:" + httpGet.getURI()); + + + hc.execute(httpGet, callback); + + } catch (Exception e) { + e.printStackTrace(); + } + + } + + + /** + * 向指定的url发送一次异步get请求,参数是List + * + * @param baseUrl 请求地址 + * @param urlParams 请求参数,格式是List + * @param callback 回调方法,格式是FutureCallback + * @return 返回结果, 请求失败时返回null + * @apiNote http接口处用 @RequestParam接收参数 + */ + public static void httpAsyncGet(String baseUrl, List urlParams, FutureCallback callback) throws Exception { + if (baseUrl == null || "".equals(baseUrl)) { + LOG.warn("we don't have base url, check config"); + throw new Exception("missing base url"); + } + + try { + CloseableHttpAsyncClient hc = HttpClientFactory.getInstance().getHttpAsyncClientPool() + .getAsyncHttpClient(); + + hc.start(); + + HttpPost httpGet = new HttpPost(baseUrl); + +// httpGet.setHeader("Connection","close"); + + if (null != urlParams) { + + String getUrl = EntityUtils + .toString(new UrlEncodedFormEntity(urlParams)); + + httpGet.setURI(new URI(httpGet.getURI().toString() + + "?" 
+ getUrl)); + } + + LOG.warn("exeAsyncReq getparams:" + httpGet.getURI()); + + + hc.execute(httpGet, callback); + + } catch (Exception e) { + e.printStackTrace(); + } + } + + + public static String OkSyncPost(String url, String json) throws IOException { + + OkHttpClient okClient = HttpClientFactory.getInstance().getOkClientPool().getHttpClient(); + + RequestBody body = RequestBody.create(JSON, json); + Request request = new Request.Builder() + .url(url) + .post(body) + .build(); + try (Response response = okClient.newCall(request).execute()) { + + return response.body().string(); + } + } + + public static void OkAsyncPost(String url, String json) throws IOException { + OkHttpClient okClient = HttpClientFactory.getInstance().getOkClientPool().getHttpClient(); + + RequestBody body = RequestBody.create(JSON, json); + Request request = new Request.Builder() + .url(url) + .post(body) + .build(); + Call call = okClient.newCall(request); + call.enqueue(new Callback() { + @Override + public void onFailure(Call call, IOException e) { + e.printStackTrace(); + } + + @Override + public void onResponse(Call call, Response response) throws IOException { + + LOG.warn("OkAsyncPost回调:" + response.body().string()); + } + }); + + } + + + public static void OkAsyncPost(String url, Map map) throws IOException { + OkHttpClient okClient = HttpClientFactory.getInstance().getOkClientPool().getHttpClient(); + + FormBody.Builder formBodyBuilder = new FormBody.Builder(); + for (Map.Entry entry : map.entrySet()) { + formBodyBuilder.add(entry.getKey(), entry.getValue()); + } + Request request = new Request.Builder() + .url(url) + .post(formBodyBuilder.build()) + .build(); + Call call = okClient.newCall(request); + call.enqueue(new Callback() { + @Override + public void onFailure(Call call, IOException e) { + e.printStackTrace(); + } + + @Override + public void onResponse(Call call, Response response) throws IOException { + + LOG.warn("OkAsyncPost回调:" + response.body().string()); + } + }); + + 
} + + public static void OkAsyncPost(String url, Map map, Callback callback) throws IOException { + OkHttpClient okClient = HttpClientFactory.getInstance().getOkClientPool().getHttpClient(); + + FormBody.Builder formBodyBuilder = new FormBody.Builder(); + for (Map.Entry entry : map.entrySet()) { + formBodyBuilder.add(entry.getKey(), entry.getValue()); + } + + Request request = new Request.Builder() + .url(url) + .post(formBodyBuilder.build()) + .build(); + Call call = okClient.newCall(request); + call.enqueue(callback); + + } + + public static String OkSyncGet(String url) throws IOException { + + OkHttpClient okClient = HttpClientFactory.getInstance().getOkClientPool().getHttpClient(); + + Request request = new Request.Builder() + .url(url) + .build(); + try (Response response = okClient.newCall(request).execute()) { + + return response.body().string(); + } + } + + public static void OkAsyncGet(String url) throws IOException { + + OkHttpClient okClient = HttpClientFactory.getInstance().getOkClientPool().getHttpClient(); + + Request request = new Request.Builder() + .url(url) + .build(); + Call call = okClient.newCall(request); + call.enqueue(new Callback() { + @Override + public void onFailure(Call call, IOException e) { + e.printStackTrace(); + } + + @Override + public void onResponse(Call call, Response response) throws IOException { + + LOG.warn("OkAsyncGet回调:" + response.body().string()); + } + }); + } + + + /** + * 获取单个文件并传递单个文件时使用 + * + * @param getFileUrl + * @param callback + */ + public static void getFileAndPostFile(String getFileUrl, FutureCallback callback) { + try { + pool.execute(new Runnable() { + @Override + public void run() { + try { + httpAsyncGetFile(getFileUrl, callback); +// LOG.info(Thread.currentThread().getName() + "===>run====>>>" + "success,now time is" + System.currentTimeMillis()); + } catch (Exception e) { + LOG.error("getFileAndPostFile multithreading is error===>" + e); + } + } + }); + } catch (Exception e) { + 
LOG.error("getFileAndPostFile method is error===>" + e); + } + } + + /** + * 生产AVRO数据入ZX(单条)--数据不包含schema + * 静态,适用于异步与多线程的版本 + * + * @param urlProducer + * @param topic + * @param dataJson + * @param userAgent + * @param msgSessionCookie + * @return + */ + public static void asyncProducerAvroToZX(String urlProducer, String topic, String dataJson, String userAgent, String msgSessionCookie, int count) { + HttpPost httpPost = null; + urlProducer = urlProducer.trim(); + byte[] resultArray = null; + CloseableHttpAsyncClient httpClient = null; + try { + httpClient = HttpClientFactory.getInstance().getHttpAsyncClientPool().getAsyncHttpClient(); + httpClient.start(); + // get schema + Schema schemaDataAvro = getSchemaFromHashMap(topic);//状态数据对象monitor-msg(一部状态数据对象)和INFLUX-SAPP-BPS-STAT-LOG(广东状态数据对象)会走此路 + ByteArrayOutputStream outAvro = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null); + DatumWriter fileWriter = new SpecificDatumWriter(schemaDataAvro); + outAvro.reset(); + + GenericRecord recordAvro = new GenericData.Record(schemaDataAvro); + recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaDataAvro);//单条,主数据 + logger.info("封装数据对象或文件标签AvroRecord后数据为===>" + recordAvro.toString() + "<===封装数据对象或文件标签AvroRecord后数据为"); + fileWriter.write(recordAvro, encoder); + + if (encoder != null) { + encoder.flush(); + } + + if (outAvro != null) { + resultArray = outAvro.toByteArray(); + + outAvro.flush(); + outAvro.close(); + outAvro = null; + } + logger.info("数据对象或文件标签resultArray长度为:" + resultArray.length); + logger.debug("原始数据==>" + dataJson + "<==," + + "原始数据checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," + + "数据对象或文件标签resultArray长度为:" + resultArray.length + "," + + "数据对象或文件标签字节数组:" + Arrays.toString(resultArray)); + + if (!("NTC-COLLECT-FILE-LOG".equals(topic))) { + //传入的resultArray为数据对象的数组,返回的是拼接的整体的数组 + resultArray = getTagRecordAndMergeAllArray(topic, dataJson, 
resultArray); + } else { + //说明为NTC-COLLECT-FILE-LOG,此时不需要进行d_tag的获取,因为数据里不包含d_tag,数据本身就是d_tag,前面处理的resultArray就作为NTC-COLLECT-FILE-LOG的d_tag使用 + resultArray = alreadyGetFileTagRecordSoOnlyGetMergeAllArray(topic, resultArray); + } + + httpPost = new HttpPost(urlProducer); + // set header + httpPost.addHeader("User-Agent", userAgent); + + httpPost.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_CLOSE);//关闭长连接-20191225新增-使用字符代替 + //一部状态回传需要此Cookie打开 +// if ("monitor-msg".equals(topic)) { +// httpPost.addHeader("Cookie", msgSessionCookie);//广东文件消息测试时加上Cookie会验证不通过,即那边显示为两个Cookie,不加Cookie则验证通过 +// } + + httpPost.addHeader("Cookie", msgSessionCookie);//不设置Cookie时,广东测试出现报错,打开广东的Cookie设置测试一下,经测试,可用 + + try { + String md5Avro = MD5Utils.md5Encode(resultArray); + httpPost.addHeader("Checksum", md5Avro); + logger.info("请求端Checksum MD5 avro 加密为:" + md5Avro); + logger.debug("原始数据==>" + dataJson + "<==," + + "原始数据checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," + + "对应请求端Checksum MD5 avro 加密为:" + md5Avro);//20200521新增,自证清白 + } catch (Exception e) { + logger.error("MD5Utils.md5Encode Method is error===>>> " + e); + } +// httpPost.addHeader("Content-Type", "application/avro+json;charset=UTF-8"); + httpPost.addHeader("Content-Type", "binary/octet-stream"); + + //设置调度标签要求的参数,文件消息需要设置FilePath--------------------↓--------------------20191210新增 + httpPost.addHeader("X-Tag", getXTAG(dataJson, topic));//从json中获取-单条 + + String filePathByTopicName = GetFilePathByTopicUtils.getFilePathByTopicName(dataJson, topic); + if (!("-".equals(filePathByTopicName)) && !("[\"null\"]".equals(filePathByTopicName))) { + httpPost.addHeader("FilePath", filePathByTopicName);//从json中获取 + } + //设置调度标签要求的参数,文件消息需要设置FilePath-------------------↑---------------------20191210新增 + + ByteArrayEntity payload = new ByteArrayEntity(resultArray); + payload.setContentEncoding("utf-8"); + //payload.setContentType("text/xml; charset=UTF-8"); + // anti avro + httpPost.setEntity(payload); + + logger.info("最终加载内容字节数组长度: " + 
resultArray.length); +// logger.debug("封装数据==>" + dataJson + "<==最终加载内容字节数组长度: " + resultArray.length);//20200428进一步细化日志 + logger.debug("原始数据==>" + dataJson + "<==," + + "原始数据checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," + + "最终加载内容字节数组长度: " + resultArray.length + "," + + "最终加载内容字节数组:" + Arrays.toString(resultArray));//20200521进一步细化日志 + + //执行请求 + AsyncPostMsgCallBack asyncPostMsgCallBack = new AsyncPostMsgCallBack(urlProducer, topic, dataJson, userAgent, msgSessionCookie, count); + httpClient.execute(httpPost, asyncPostMsgCallBack); + + } catch (MalformedURLException e) { + //执行URL url = new URL()的异常 + e.printStackTrace(); + } catch (ClientProtocolException e) { + // 执行httpClient.execute(httpGet)的异常 + e.printStackTrace(); + } catch (IOException e) { + // 执行httpClient.execute(httpGet)的异常 + logger.error("asyncProducerAvroToZX is IOException===>>>" + e.getMessage() + "<<<===IOException Message is==>" + dataJson + "<==");//进一步细化日志 + e.printStackTrace(); + } catch (Exception e) { + //handle response here... 
try other servers + logger.error("asyncProducerAvroToZX is Exception===>>>" + e.getMessage() + "<<<===Exception Message is==>" + dataJson + "<==");//进一步细化日志 + e.printStackTrace(); + } + } + + /** + * 使用hm缓存Schema + * + * @param topic + * @return + */ + private static Schema getSchemaFromHashMap(String topic) { + if (schemaHashMap.containsKey(topic)) { + return schemaHashMap.get(topic); + } else { + String schemaStr = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topic); + Schema parseSchema = new Schema.Parser().parse(schemaStr); + schemaHashMap.put(topic, parseSchema); + return parseSchema; + } + } + + private static HashMap schemaHashMap = new HashMap();//用于存放Schema + private static Logger logger = Logger.getLogger(HttpClientUtil.class); + + /** + * 获取数据中的日志标签并将所有相关数据字节数组化后拼接返回 + * + * @param topic + * @param dataJson + * @param dataResultArray + * @return + */ + private static byte[] getTagRecordAndMergeAllArray(String topic, String dataJson, byte[] dataResultArray) { + byte[] dTagByteArray = null;//用于存放数据标签的avro的二进制流 + String tagTopicName = "log-tag";//除NTC-COLLECT-FILE-LOG外都是log-tag,因为都是在文件消息内部的d_tag获取的 + if ("monitor-msg".equals(topic) || "INFLUX-SAPP-BPS-STAT-LOG".equals(topic)) {//20200113暂定,一部和广东共用状态数据标签d_tag,但数据对象不同 + tagTopicName = "status-tag"; + } + try { + // get schema + Schema schemaDTagAvro = getSchemaFromHashMap(tagTopicName);//使用缓存优化性能 + ByteArrayOutputStream outAvro = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null); + DatumWriter fileWriter = new SpecificDatumWriter(schemaDTagAvro); + outAvro.reset(); + + GenericRecord recordAvro = new GenericData.Record(schemaDTagAvro); + + //以下为临时,为了增加一个状态的临时d_tag,因为d_tag前端目前(20191222)传不了-----------↓-------------- + if (!("monitor-msg".equals(topic)) && !("INFLUX-SAPP-BPS-STAT-LOG".equals(topic))) {//20200113暂定,一部和广东共用状态数据标签d_tag,但数据对象不同 + //说明当前消息不是状态回传 + Map map = JSONObject.parseObject(dataJson, Map.class); + Object d_tag = map.get("d_tag"); + if 
(d_tag != null) { + dataJson = d_tag.toString();//获取消息上的d_tag用于序列化成数据标签 + } + } else { + //说明为一部状态,目前前端没有,所以先自己创建一个状态的d_tag---用于一部;//20200113暂定,一部和广东共用状态数据标签d_tag,但数据对象不同 + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + String timeFormat = sdf.format(new Date(System.currentTimeMillis()));//格式2019-11-18 19:56:37 + dataJson = "{\"tag_version\":\"1.0\",\"data_subtype\":16387,\"data_type\":4,\"producer_id\":24832,\"timestamp\":\"yyyy-MM-dd HH:mm:ss\"}"; + Map map = JSONObject.parseObject(dataJson, Map.class); + map.put("timestamp", timeFormat); + dataJson = JSONObject.toJSONString(map); + } + //以下为临时,为了增加一个状态的临时d_tag,因为d_tag前端目前(20191222)传不了-----------↑-------------- + + recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(tagTopicName, recordAvro, dataJson, schemaDTagAvro);//单条-"log-tag"或者"status-tag",直接传入Schema + logger.info("封装日志标签或状态标签AvroRecord后数据为===>" + recordAvro.toString() + "<===封装日志标签或状态标签AvroRecord后数据为"); + fileWriter.write(recordAvro, encoder); + + if (encoder != null) { + encoder.flush(); + } + + if (outAvro != null) { + dTagByteArray = outAvro.toByteArray(); + + outAvro.flush(); + outAvro.close(); + outAvro = null; + } + logger.info("日志标签或状态标签dTagByteArray长度为:" + dTagByteArray.length); + logger.debug("标签数据==>" + dataJson + "<==," + + "标签数据checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," + + "日志标签或状态标签dTagByteArray长度为:" + dTagByteArray.length + "," + + "日志标签或状态标签字节数组:" + Arrays.toString(dTagByteArray));//20200521进一步细化日志 + + //获取数据对象类型编码,写成字节数组 + byte[] dataObjectTypeByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic), 2);//长度为2 + + //将数据标签长度,写成字节数组 + byte[] dataTagLengthByteArray = sumHex(dTagByteArray.length, 2);//长度为2 + + //数据总长度(16+数据标签长度+数据对象长度),写成字节数组 + byte[] dataSumLengthByteArray = sumHex(16 + dTagByteArray.length + dataResultArray.length, 4);//长度为4 + + //数据标签SchemaID,写成字节数组 + byte[] dataTagSchemaIDByteArray = 
sumHex(GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic), 4);//长度为4 + + //数据对象SchemaID.写成字节数组 + byte[] dataObjectSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic), 4);//长度为4 + + //拼接这些字节数组并返回,顺序:数据对象类型+数据标签长度+数据总长度+数据标签SchemaID+数据对象SchemaID+数据标签数组+数据对象数组 + return byteMerger(dataObjectTypeByteArray, dataTagLengthByteArray, dataSumLengthByteArray, dataTagSchemaIDByteArray, dataObjectSchemaIDByteArray, dTagByteArray, dataResultArray); + } catch (Exception e) { + logger.error("HttpClientUtil getTagRecordAndMergeAllArray is error===>" + e.getMessage() + "<===message is==>" + dataJson + "<==");//细化日志 +// e.printStackTrace(); + return dataResultArray;//报错时返回数据对象数组,即本身不变 + } + } + + /** + * 由于已经获取了文件标签(是刚进入方法就作为数据对象处理的数据标签d_tag,因为NTC-COLLECT-FILE-LOG数据本身就是数据标签d_tag),所以只需要将所有相关数据字节数组化后拼接返回 + */ + private static byte[] alreadyGetFileTagRecordSoOnlyGetMergeAllArray(String topic, byte[] dataResultArray) { + try { + //获取数据对象类型编码,写成字节数组 + byte[] dataObjectTypeByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic), 2);//长度为2 + + //将数据标签长度,写成字节数组 + byte[] dataTagLengthByteArray = sumHex(dataResultArray.length, 2);//长度为2,由于是NTC-COLLECT-FILE-LOG.传来的数据对象dataResultArray其实就是最终上报数据的数据标签d_tag + + //数据总长度(16+数据标签长度+数据对象长度),写成字节数组 + byte[] dataSumLengthByteArray = sumHex(16 + dataResultArray.length, 4);//长度为4 + + //数据标签SchemaID,写成字节数组 + byte[] dataTagSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic), 4);//长度为4 + + //数据对象SchemaID.写成字节数组 + byte[] dataObjectSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic), 4);//长度为4 + + //拼接这些字节数组,顺序:数据对象类型+数据标签长度+数据总长度+数据标签SchemaID+数据对象SchemaID+数据标签数组,注意没有数据对象数组 + return byteMerger(dataObjectTypeByteArray, dataTagLengthByteArray, dataSumLengthByteArray, dataTagSchemaIDByteArray, dataObjectSchemaIDByteArray, dataResultArray); + } catch (Exception e) { + 
logger.error("HttpClientUtil alreadyGetFileTagRecordSoOnlyGetMergeAllArray is error===>" + e.getMessage() + "<===");//细化日志 +// e.printStackTrace(); + return dataResultArray;//报错时返回数据对象数组,即本身不变 + } + } + + /** + * 描述:把一个整数转为指定位数byte数组 + * + * @param tu5 数字 + * @param length 位数 + * @return byte[] + */ + public static byte[] sumHex(int tu5, int length) { + byte[] bytes5 = new byte[length]; + while (length > 0) { + length--; +// bytes5[length] = (byte)(tu5 >> 8*(bytes5.length-length-1) & 0xFF);//大端 + bytes5[bytes5.length - length - 1] = (byte) (tu5 >> 8 * (bytes5.length - length - 1) & 0xFF);//小端 + } + return bytes5; + } + + /** + * 合并字节数组 + * + * @param byteList + * @return + */ + private static byte[] byteMerger(byte[]... byteList) { + int lengthByte = 0; + for (int i = 0; i < byteList.length; i++) { + lengthByte += byteList[i].length; + } + byte[] allByte = new byte[lengthByte]; + int countLength = 0; + for (int i = 0; i < byteList.length; i++) { + byte[] b = byteList[i]; + System.arraycopy(b, 0, allByte, countLength, b.length); + countLength += b.length; + } + return allByte; + } + + /** + * 获取X-Tag用于设置请求头 + * + * @param dataJson + * @return + */ + private static String getXTAG(String dataJson, String topic) { + if ("monitor-msg".equals(topic) || "INFLUX-SAPP-BPS-STAT-LOG".equals(topic)) { + return RealtimeCountConfig.MONITOR_NOFILE_MSG_X_TAG; + } + + Map map = JSONObject.parseObject(dataJson, Map.class); +// String xTag = (String) map.get("x_tag"); + Object x_tag = map.get("x_tag"); + if (x_tag != null) { + String xTag = x_tag.toString(); + if (StringUtil.isNotBlank(xTag)) { + return xTag; + } else { + return "-"; + } + } else { + return "-"; + } + } + +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpSyncClient.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpSyncClient.java new file mode 100644 index 0000000..2dbfa1d --- /dev/null +++ 
b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/HttpSyncClient.java @@ -0,0 +1,82 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +import org.apache.http.client.config.RequestConfig; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.config.SocketConfig; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.socket.PlainConnectionSocketFactory; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustSelfSignedStrategy; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContexts; +import org.apache.log4j.Logger; + +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; + +/** + * 同步的HTTP请求对象,支持post与get方法以及可设置代理 + */ +public class HttpSyncClient { + + private static Logger logger = Logger.getLogger(HttpSyncClient.class); + + + private PoolingHttpClientConnectionManager poolConnManager; + private final int maxTotalPool = 2000;// 连接池最大连接数 + private final int maxConPerRoute = 200;// 每个主机的并发最多只有20 + private final int socketTimeout = 20000;// 设置等待数据超时时间5秒钟 根据业务调整 + private final int connectionRequestTimeout = 30000; + private final int connectTimeout = 20000;// 连接超时 + + // 同步httpclient + private CloseableHttpClient httpClient; + + public HttpSyncClient() { + try { + this.httpClient = init(); + } catch (Exception e) { + e.printStackTrace(); + } + } + + public CloseableHttpClient init() throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException { + + SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(null, + new TrustSelfSignedStrategy()) + .build(); + HostnameVerifier 
hostnameVerifier = SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; + SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory( + sslcontext, hostnameVerifier); + Registry socketFactoryRegistry = RegistryBuilder.create() + .register("http", PlainConnectionSocketFactory.getSocketFactory()) + .register("https", sslsf) + .build(); + poolConnManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry); + // Increase max total connection to 200 + poolConnManager.setMaxTotal(maxTotalPool); + // Increase default max connection per route to 20 + poolConnManager.setDefaultMaxPerRoute(maxConPerRoute); + SocketConfig socketConfig = SocketConfig.custom().setSoTimeout(socketTimeout).build(); + poolConnManager.setDefaultSocketConfig(socketConfig); + + RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(connectionRequestTimeout) + .setConnectTimeout(connectTimeout).setSocketTimeout(socketTimeout).build(); + CloseableHttpClient httpClient = HttpClients.custom() + .setConnectionManager(poolConnManager).setDefaultRequestConfig(requestConfig).build(); + if (poolConnManager != null && poolConnManager.getTotalStats() != null) { + logger.info("now client pool " + poolConnManager.getTotalStats().toString()); + } + return httpClient; + } + + public CloseableHttpClient getHttpClient() { + return httpClient; + } +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/OkClient.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/OkClient.java new file mode 100644 index 0000000..d0c83de --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/OkClient.java @@ -0,0 +1,27 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils; + +import okhttp3.OkHttpClient; + +public class OkClient { + + private static OkHttpClient client = new OkHttpClient(); + + public OkHttpClient getHttpClient(){ + return client; + } + + 
String bowlingJson(String player1, String player2) { + return "{'winCondition':'HIGH_SCORE'," + + "'name':'Bowling'," + + "'round':4," + + "'lastSaved':1367702411696," + + "'dateStarted':1367702378785," + + "'players':[" + + "{'name':'" + player1 + "','history':[10,8,6,7,8],'color':-13388315,'total':39}," + + "{'name':'" + player2 + "','history':[6,10,5,10,10],'color':-48060,'total':41}" + + "]}"; + } + + + +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/mail/AsyncGetMailFilesCallback.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/mail/AsyncGetMailFilesCallback.java new file mode 100644 index 0000000..9414163 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/mail/AsyncGetMailFilesCallback.java @@ -0,0 +1,364 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils.mail; + +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil; +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo; +import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.ParseException; +import org.apache.http.client.utils.HttpClientUtils; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + +/** + * 用于处理 NTC_COLLECT_MAIL_LOG + * 异步httpclient回调对象---GET文件 + * 执行成功,开始将get到的文件post至zx + * 执行失败,重试3次 + */ +public class AsyncGetMailFilesCallback implements FutureCallback { + private static 
Logger LOG = Logger.getLogger(AsyncGetMailFilesCallback.class); + + private KafkaDB kafkaDBAsyncGet; + + private String postFileUrl; + private String postMsgUrl; + private String getFileUrl;//此处初始用于存放eml_file_url,多文件的url存放在attachmentsUrl,先处理eml_file_url成功后再处理attachmentsUrl + private AsyncPostMailFilesCallback asyncPostMailFilesCallback; + private ConfigInfo configInfo; + private String sendMsg;//用于get文件失败后发往kafka + private int count;//重试次数,初始为0 + + //为MAIL新增 + private int dealUrlCount;//已处理计数,初始为0 + private int urlCount;//需要处理的总数 + private LinkedList attachmentsUrl;//需要处理的attachmentsUrl + + //初始调用时count=0,dealUrlCount=0,urlCount=1+attachmentsUrl.size() + //后续调用count=0,dealUrlCount=0按照情况增加,urlCount不变 + public AsyncGetMailFilesCallback(ConfigInfo configInfo, String getFileUrl, String sendMsg, AsyncPostMailFilesCallback asyncPostMailFilesCallback, int count, int urlCount, int dealUrlCount, LinkedList attachmentsUrl) { + this.configInfo = configInfo; + this.postFileUrl = configInfo.getPostFileUrl();//通过configInfo赋值 + this.postMsgUrl = configInfo.getPostMsgUrl();//通过configInfo赋值 + this.getFileUrl = getFileUrl;//初次存储的是eml_file_url,后续每次存储的都是当次需要请求的文件路径 + this.sendMsg = sendMsg; + this.asyncPostMailFilesCallback = asyncPostMailFilesCallback; + this.count = count;//建议初始为0 + this.urlCount = urlCount; + this.dealUrlCount = dealUrlCount;//建议初始为0 + this.attachmentsUrl = attachmentsUrl; + +// //初始化入kafka程序 +// kafkaDBAsyncGet = KafkaDB.getInstance(); + } + + public String getPostFileUrl() { + return postFileUrl; + } + + public void setPostFileUrl(String postFileUrl) { + this.postFileUrl = postFileUrl; + } + + public String getPostMsgUrl() { + return postMsgUrl; + } + + public void setPostMsgUrl(String postMsgUrl) { + this.postMsgUrl = postMsgUrl; + } + + public String getGetFileUrl() { + return getFileUrl; + } + + public void setGetFileUrl(String getFileUrl) { + this.getFileUrl = getFileUrl; + } + + public AsyncPostMailFilesCallback getAsyncPostMailFilesCallback() { + return 
asyncPostMailFilesCallback; + } + + public void setAsyncPostMailFilesCallback(AsyncPostMailFilesCallback asyncPostMailFilesCallback) { + this.asyncPostMailFilesCallback = asyncPostMailFilesCallback; + } + + public ConfigInfo getConfigInfo() { + return configInfo; + } + + public void setConfigInfo(ConfigInfo configInfo) { + this.configInfo = configInfo; + } + + public String getSendMsg() { + return sendMsg; + } + + public void setSendMsg(String sendMsg) { + this.sendMsg = sendMsg; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public int getDealUrlCount() { + return dealUrlCount; + } + + public void setDealUrlCount(int dealUrlCount) { + this.dealUrlCount = dealUrlCount; + } + + public int getUrlCount() { + return urlCount; + } + + public void setUrlCount(int urlCount) { + this.urlCount = urlCount; + } + + public LinkedList getAttachmentsUrl() { + return attachmentsUrl; + } + + public void setAttachmentsUrl(LinkedList attachmentsUrl) { + this.attachmentsUrl = attachmentsUrl; + } + + @Override + public String toString() { + return "AsyncGetMailFilesCallback{" + + "kafkaDBAsyncGet=" + kafkaDBAsyncGet + + ", postFileUrl='" + postFileUrl + '\'' + + ", postMsgUrl='" + postMsgUrl + '\'' + + ", getFileUrl='" + getFileUrl + '\'' + + ", asyncPostMailFilesCallback=" + asyncPostMailFilesCallback + + ", configInfo=" + configInfo + + ", sendMsg='" + sendMsg + '\'' + + ", count=" + count + + ", dealUrlCount=" + dealUrlCount + + ", urlCount=" + urlCount + + ", attachmentsUrl=" + attachmentsUrl + + '}'; + } + + /** + * 请求完成后调用该函数 + */ + @Override + public void completed(HttpResponse response) { + try { +// LOG.warn("response:{}", getHttpContent(response)); + //拉取文件成功后,将文件转为流 +// InputStream result = IOUtils.toBufferedInputStream(response.getEntity().getContent()); + //新版使用字节数组 + byte[] result = IOUtils.toByteArray(response.getEntity().getContent()); + int statusCode = response.getStatusLine().getStatusCode(); + 
if (statusCode == 200) { + dealUrlCount++; +// asyncPostMailFilesCallback.setResultIs(result);//添加获取的文件流,用于post文件失败时重试使用--------老版 + if (dealUrlCount >= 2) {//说明拉取的是attachments的file +// LinkedList attachmentsResultIsList = asyncPostMailFilesCallback.getAttachmentsResultIsList(); + LinkedList attachmentsResultIsList = asyncPostMailFilesCallback.getAttachmentsResultIsList(); + if (attachmentsResultIsList != null && attachmentsResultIsList.size() > 0) {//说明已经存有其他流 + attachmentsResultIsList.add(result); + } else {//说明还没有存储流,new一个新的存放流 +// attachmentsResultIsList = new LinkedList(); + attachmentsResultIsList = new LinkedList(); + attachmentsResultIsList.add(result); + } + asyncPostMailFilesCallback.setAttachmentsResultIsList(attachmentsResultIsList); + LOG.info("AsyncGetMailFilesCallback getAttachmentsFile completed,response status:{" + statusCode + "}."); + } else if (dealUrlCount == 1) {//说明拉取的是eml_file的流 + asyncPostMailFilesCallback.setEmlFileResultIs(result); + LOG.info("AsyncGetMailFilesCallback getEmlFile completed,response status:{" + statusCode + "}."); + } + + //开始发送文件或者获取余下的文件 + if (dealUrlCount == urlCount) { + //文件全部获取成功,以流的形式开始发往总线,首先发送eml_file的流,后续的attachments的流进入到asyncPostMailFilesCallback内部进行发送并获取返回的id +// InputStream emlFileResultIs = asyncPostMailFilesCallback.getEmlFileResultIs(); + byte[] emlFileResultIs = asyncPostMailFilesCallback.getEmlFileResultIs(); + if (emlFileResultIs != null) { + //文件全部获取成功时,流才表示readyPostFile + AvroMonitorTimerTask.fileReadyPostSum++;//加上Eml_file + AvroMonitorTimerTask.fileReadyPostSum += asyncPostMailFilesCallback.getAttachmentsResultIsList().size();//加上attachments_file + + //文件全部获取成功时,获取成功的流才计算ReadyPostFileBytes +// ByteArrayOutputStream byteArrayOutputStream = AvroMonitorTimerTask.getByteArrayOutputStream(emlFileResultIs); +// emlFileResultIs = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());//重新填充被消耗的emlFileResultIs + + AvroMonitorTimerTask.fileBytesReadyPostSum += emlFileResultIs.length;//加上Eml_file字节数 + 
+// LinkedList byteArrayOutputStreamsList = AvroMonitorTimerTask.getByteArrayOutputStream(asyncPostMailFilesCallback.getAttachmentsResultIsList()); +// +// //重新填充被消耗的attachmentsResultIsList +// asyncPostMailFilesCallback.getAttachmentsResultIsList().clear(); +// for (int i = 0; i < byteArrayOutputStreamsList.size(); i++) { +// asyncPostMailFilesCallback.getAttachmentsResultIsList().add(new ByteArrayInputStream(byteArrayOutputStreamsList.get(i).toByteArray())); +// } + + for (int i = 0; i < asyncPostMailFilesCallback.getAttachmentsResultIsList().size(); i++) { + AvroMonitorTimerTask.fileBytesReadyPostSum += asyncPostMailFilesCallback.getAttachmentsResultIsList().get(i).length;//加上attachments_file字节数 + } + + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), emlFileResultIs, asyncPostMailFilesCallback);//失败时进入到的这个asyncPostMailFilesCallback内部是有文件流数据的 + } else { + LOG.error("dealUrlCount == urlCount but emlFileResultIs is null,message is===>>>" + sendMsg); + } + } else if (dealUrlCount < urlCount) { + //说明文件没有全部获取成功,继续获取 + String attachmentUrl = attachmentsUrl.get(dealUrlCount - 1);//获取(dealUrlCount - 1)索引位置的url + AsyncGetMailFilesCallback asyncGetMailFilesCallback = new AsyncGetMailFilesCallback(configInfo, attachmentUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl);//asyncPostMailFilesCallback也包含configInfo + HttpClientUtil.httpAsyncGetFile(attachmentUrl, asyncGetMailFilesCallback);//回调时都是最新的数据 + } + } else if (statusCode == 404) { + //404-重试,注意此次重试大概率由于文件未生成 + retryFor404(); + } else { +// LOG.info("AsyncGetMailFilesCallback getFiles failed,response status:{" + statusCode + "},this msg is===>" + sendMsg + "<==="); + LOG.info("AsyncGetMailFilesCallback getFiles failed,response status:{" + statusCode + "},dealUrlCount is {" + dealUrlCount + "},this msg is===>" + sendMsg + "<==="); + } + + HttpClientUtils.closeQuietly(response); + } catch (Exception e) { + e.printStackTrace(); + } + } + + /** + * 由于404引起的重试 + */ + 
private void retryFor404() { + try { + count++; + if (count > 1) { + LOG.warn("AsyncGetMailFilesCallback getFile is failed,retry count=" + count); + LOG.info("Now AsyncGetMailFilesCallback dealUrlCount is===>{" + dealUrlCount + "}"); + } + if (count > 4) { + LOG.error("AsyncGetMailFilesCallback getFile is failed and already retry 3 times.This failed message is===>>>" + sendMsg + "<<<==="); + } else { + Map map = JSONObject.parseObject(sendMsg, Map.class); + int found_time = (int) map.get("found_time"); + long now_time = System.currentTimeMillis() / 1000; + long timeDiffer = now_time - found_time; + if (timeDiffer >= 20) {//直接重试 + //上一次请求失败的url存储在getFileUrl中 + AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl);//注意此处count不再为0 + HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback); + } else {//sleep后重试 + Thread.sleep((20 - timeDiffer) * 1000); + + //上一次请求失败的url存储在getFileUrl中 + AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl);//注意此处count不再为0 + HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback); + } + } + } catch (Exception e2) { + LOG.error("AsyncGetMailFilesCallback getFile retry is error===>>>" + e2); + } + } + + /** + * 请求取消后调用该函数 + */ + @Override + public void cancelled() { + LOG.warn("request is cancelled..."); + } + + /** + * 请求失败后调用该函数 + */ + @Override + public void failed(Exception e) { +// LOG.warn("AsyncGetMailFilesCallback getFile is failed,message is===>>>" + sendMsg + "<<<==="); + retryForFailed(e); + } + + /** + * 由于请求失败引起的重试 + * + * @param e + */ + private void retryForFailed(Exception e) { + try { + count++; + if (count > 1) { + LOG.warn("AsyncGetMailFilesCallback getFile is failed,retry count=" + count); + 
LOG.info("Now AsyncGetMailFilesCallback dealUrlCount is===>{" + dealUrlCount + "}"); + } + if (count > 4) { + LOG.error("AsyncGetMailFilesCallback getFile is failed and already retry 3 times.error===>>>" + e + "<<<===.This failed message is===>>>" + sendMsg + "<<<==="); +// if (StringUtils.isNotBlank(configInfo.getTopicName()) && StringUtils.isNotBlank(sendMsg)) { +// //将数据发往kafka,用于后续重试 +//// kafkaDBAsyncGet.postFailMsgToKafka(configInfo.getTopicName(), sendMsg); +// LOG.error("Send to Kafka(Temp)...test!!!"); +// } + } else { + Map map = JSONObject.parseObject(sendMsg, Map.class); + int found_time = (int) map.get("found_time"); + long now_time = System.currentTimeMillis() / 1000; + long timeDiffer = now_time - found_time; + if (timeDiffer >= 20) {//直接重试 + //上一次请求失败的url存储在getFileUrl中 + AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl);//注意此处count不再为0 + HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback); + } else {//sleep后重试 + Thread.sleep((20 - timeDiffer) * 1000); + + //上一次请求失败的url存储在getFileUrl中 + AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl);//注意此处count不再为0 + HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback); + } + } + } catch (Exception e2) { + LOG.error("AsyncGetMailFilesCallback getFile retry is error===>>>" + e2); + } + } + + + protected String getHttpContent(HttpResponse response) { + HttpEntity entity = response.getEntity(); + String body = null; + + if (entity == null) { + return null; + } + + try { + body = EntityUtils.toString(entity, "utf-8"); + } catch (ParseException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } catch (IOException e) { + LOG.warn("the response's content inputstream 
is corrupt", e); + } + return body; + } + +// @Override +// public Object call() throws Exception { +// return asyncHttpClientPostFileCallback.getPostFileResBody(); +// } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/mail/AsyncPostMailFilesCallback.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/mail/AsyncPostMailFilesCallback.java new file mode 100644 index 0000000..ccf0729 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/mail/AsyncPostMailFilesCallback.java @@ -0,0 +1,386 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils.mail; + +import cn.ac.iie.cusflume.sink.CommonUtils.PutIdOnMsgByTopicUtils; +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil; +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad; +import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo; +import cn.ac.iie.cusflume.sink.bean.postFileBean.PostFileResBody; +import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody; +import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.ParseException; +import org.apache.http.client.utils.HttpClientUtils; +import org.apache.http.concurrent.FutureCallback; +import org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.LinkedList; + +/** + * 用于处理NTC_COLLECT_MAIL_LOG + * 异步httpclient回调对象---POST文件 + * 在get文件成功后开始执行post文件操作 + * post文件成功时,获取到文件id加入到message中,开始执行post消息操作, + * 为此需要message(kafka获取)以及post消息url(配置文件获取), + * 还需要User-Agent以及X-Tag用于验证 + *

+ * post失败时,重试三次 + */ +public class AsyncPostMailFilesCallback implements FutureCallback { + private static Logger LOG = Logger.getLogger(AsyncPostMailFilesCallback.class); + + private DataCenterLoad dclAsyncPost; +// private KafkaDB kafkaDBAsyncPost; + + private String postFileUrl; + private String postMsgUrl; + // private InputStream resultIs;//get到的文件流,用于post文件失败时重试---这里MAIL未使用此对象 + private ConfigInfo configInfo;//1.存储post文件,信息url(配置文件获取);2.存储入库前验证文件,信息url(配置文件获取);3.存储文件,信息SessionCookie(请求总线获取);4.存储msg作为avro入库所需的topicName,userAgent,xTag,batchSize + + private String sendMsg;//用于post成功后补全id发往zx或者post失败发往kafka + + private PostFileResBody postFileResBody;//存储post文件成功后返回的响应信息,内部包含文件id,key名为id,这个不走构造方法,用于获取返回响应体时使用,也不需要保存 + private int count; + + //为MAIL新增 + private int dealIsCount;//存放已处理的流的个数 + private int isCount;//存放流的总数 + +// private InputStream emlFileResultIs;//存放get到的emlFile文件流,用于post文件失败时重试,注意这个不走构造方法--流模式 + private byte[] emlFileResultIs;//存放get到的emlFile文件流,用于post文件失败时重试,注意这个不走构造方法---字节数组模式 + +// private LinkedList attachmentsResultIsList;//存放get到的attachments集合文件流,用于post文件失败时重试,注意这个不走构造方法---流模式 + private LinkedList attachmentsResultIsList;//存放get到的attachments集合文件流,用于post文件失败时重试,注意这个不走构造方法---字节数组模式 + + private LinkedList attachmentsIdList;//用于临时存放获取的attachmentsId + + public AsyncPostMailFilesCallback(ConfigInfo configInfo, String sendMsg, int count, int isCount, int dealIsCount, LinkedList attachmentsIdList) { + this.configInfo = configInfo; + this.postFileUrl = configInfo.getPostFileUrl(); + this.postMsgUrl = configInfo.getPostMsgUrl(); + this.sendMsg = sendMsg;//存放对应于url的数据 + this.count = count;//初始为0 + this.isCount = isCount;//初始总数为url总数 + this.dealIsCount = dealIsCount;//初始为0 + this.attachmentsIdList = attachmentsIdList;//初始为空 + + //初始化入库程序 + dclAsyncPost = new DataCenterLoad(); +// kafkaDBAsyncPost = KafkaDB.getInstance(); + } + + public String getPostFileUrl() { + return postFileUrl; + } + + public void setPostFileUrl(String postFileUrl) { + 
this.postFileUrl = postFileUrl; + } + + public String getPostMsgUrl() { + return postMsgUrl; + } + + public void setPostMsgUrl(String postMsgUrl) { + this.postMsgUrl = postMsgUrl; + } + +// public InputStream getResultIs() { +// return resultIs; +// } +// +// public void setResultIs(InputStream resultIs) { +// this.resultIs = resultIs; +// } + + public ConfigInfo getConfigInfo() { + return configInfo; + } + + public void setConfigInfo(ConfigInfo configInfo) { + this.configInfo = configInfo; + } + + public String getSendMsg() { + return sendMsg; + } + + public void setSendMsg(String sendMsg) { + this.sendMsg = sendMsg; + } + + public PostFileResBody getPostFileResBody() { + return postFileResBody; + } + + public void setPostFileResBody(PostFileResBody postFileResBody) { + this.postFileResBody = postFileResBody; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + public int getDealIsCount() { + return dealIsCount; + } + + public void setDealIsCount(int dealIsCount) { + this.dealIsCount = dealIsCount; + } + + public int getIsCount() { + return isCount; + } + + public void setIsCount(int isCount) { + this.isCount = isCount; + } + +// public InputStream getEmlFileResultIs() { + public byte[] getEmlFileResultIs() { + return emlFileResultIs; + } + +// public void setEmlFileResultIs(InputStream emlFileResultIs) { + public void setEmlFileResultIs(byte[] emlFileResultIs) { + this.emlFileResultIs = emlFileResultIs; + } + +// public LinkedList getAttachmentsResultIsList() { + public LinkedList getAttachmentsResultIsList() { + return attachmentsResultIsList; + } + +// public void setAttachmentsResultIsList(LinkedList attachmentsResultIsList) { + public void setAttachmentsResultIsList(LinkedList attachmentsResultIsList) { + this.attachmentsResultIsList = attachmentsResultIsList; + } + + public LinkedList getAttachmentsIdList() { + return attachmentsIdList; + } + + public void setAttachmentsIdList(LinkedList 
attachmentsIdList) { + this.attachmentsIdList = attachmentsIdList; + } + + @Override + public String toString() { + return "AsyncPostMailFilesCallback{" + + "postFileUrl='" + postFileUrl + '\'' + + ", postMsgUrl='" + postMsgUrl + '\'' + + ", configInfo=" + configInfo + + ", sendMsg='" + sendMsg + '\'' + + ", postFileResBody=" + postFileResBody + + ", count=" + count + + ", dealIsCount=" + dealIsCount + + ", isCount=" + isCount + + ", attachmentsIdList=" + attachmentsIdList + + '}'; + } + + /** + * 请求完成后调用该函数 + */ + @Override + public void completed(HttpResponse response) { + try { +// LOG.info("filename:" + source.getFilename() + " status: " + response.getStatusLine().getStatusCode() + " response:" + getHttpContent(response)); + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode == 200) { + dealIsCount++; + String postResBody = getHttpContent(response); + if (dealIsCount >= 2) {//说明获取的id是attachments的id + //post文件成功后可以获取文件id + if (StringUtils.isNotBlank(postResBody)) { + LOG.info("AsyncPostMailFilesCallback post attachmentFile completed,post file statuscode is:{" + statusCode + "},this attachmentFile resBody is " + postResBody); + this.postFileResBody = JSONObject.parseObject(postResBody, PostFileResBody.class); +// String attachmentsFileId = postFileResBody.getData().get("id");//获取文件id------旧 + String attachmentsFileId = postFileResBody.getMsg();//获取文件id------新-20191115-返回体格式-{"code": "200", "msg": "2019111507094164188567821505", "data": "不需要额外数据"} + if (StringUtils.isNotBlank(attachmentsFileId)) { + if (attachmentsIdList != null) {//已经存放attachmentsId,至少已经初始化attachmentsIdList + attachmentsIdList.add(attachmentsFileId); + } else {//未初始化 + this.attachmentsIdList = new LinkedList(); + attachmentsIdList.add(attachmentsFileId); + } + } else { + LOG.warn("AsyncPostMailFilesCallback post attachments_file success and postResBody(response body) is not null but attachmentsFileId is null,this postResBody is===>>>" + postResBody); + } + } else { + 
LOG.warn("AsyncPostMailFilesCallback post attachments_file success but postResBody(response body) is null."); + } + } else if (dealIsCount == 1) {//说明获取的id是eml_file + //post文件成功后补全eml_file_id + if (StringUtils.isNotBlank(postResBody)) { + LOG.info("AsyncPostMailFilesCallback post eml_file completed,post file statuscode is:{" + statusCode + "},this eml_file resBody is " + postResBody); + this.postFileResBody = JSONObject.parseObject(postResBody, PostFileResBody.class); +// String emlFileId = postFileResBody.getData().get("id");//获取文件id---------旧方法 + String emlFileId = postFileResBody.getMsg();//获取文件id---------新方法---返回体格式---{"code": "200", "msg": "2019111507094164188567821505", "data": "不需要额外数据"} +// putIdOnMsg(fileId,configInfo.getTopicName()); + if (StringUtils.isNotBlank(emlFileId)) { + this.sendMsg = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(configInfo.getTopicName(), sendMsg, emlFileId, "eml_file");//补全数据 + } else { + LOG.info("AsyncPostMailFilesCallback post eml_file completed and eml_file resBody is not blank but emlFileId is null ,this eml_file resBody is " + postResBody); + } +// //往总线发送 +// //String urlProducer, String topicName, String jsonData, int batchSize, String userAgent, String msgSessionCookie +// dclAsyncPost.avroDataLoad(configInfo.getPostMsgUrl(), configInfo.getTopicName(), sendMsg, configInfo.getBatchSize(), configInfo.getUserAgent(), configInfo.getMsgSessionCookie()); + } else { + LOG.warn("AsyncPostMailFilesCallback post eml_file success but postResBody(response body) is null."); + } + } + + //开始补全msg并推送msg至ZX,或者推送余下文件至ZX返回id + if (dealIsCount == isCount) {//说明文件已经全部推送并且id已经全部获取 + + //全部获取到id后标识文件推送完成,开始计算成功推送的文件和文件大小 + AvroMonitorTimerTask.fileSuccessSum++;//eml_file的计数 + AvroMonitorTimerTask.fileSuccessSum += attachmentsIdList.size();//attachments_file的计数 + //计算成功推送的文件的字节大小 +// ByteArrayOutputStream byteArrayOutputStream = AvroMonitorTimerTask.getByteArrayOutputStream(emlFileResultIs); +// this.emlFileResultIs = new 
ByteArrayInputStream(byteArrayOutputStream.toByteArray()); + AvroMonitorTimerTask.fileBytesSuccessSum += emlFileResultIs.length;//eml_file字节数的计数 + +// LinkedList byteArrayOutputStreamsList = AvroMonitorTimerTask.getByteArrayOutputStream(attachmentsResultIsList); +// +// //重新填充被消耗的attachmentsResultIsList +// attachmentsResultIsList.clear(); +// for (int i = 0; i < byteArrayOutputStreamsList.size(); i++) { +// attachmentsResultIsList.add(new ByteArrayInputStream(byteArrayOutputStreamsList.get(i).toByteArray())); +// } + + for (int i = 0; i < attachmentsResultIsList.size(); i++) { + AvroMonitorTimerTask.fileBytesSuccessSum += attachmentsResultIsList.get(i).length;//attachments_file字节数的计数 + } + + //将临时保存的attachmentsIdList转化为字符串补全入message + String attachmentsIdStr = attachmentsIdList.toString(); + this.sendMsg = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(configInfo.getTopicName(), sendMsg, attachmentsIdStr, "attachments");//补全数据 + + //计算推送消息 + AvroMonitorTimerTask.msgReadyPostSum++;//多个文件对应一条消息 + + //开始推送消息进入总线 + ProResBody proResBody = dclAsyncPost.avroDataLoad(configInfo.getPostMsgUrl(), configInfo.getTopicName(), sendMsg, configInfo.getBatchSize(), configInfo.getUserAgent(), configInfo.getMsgSessionCookie()); + LOG.info("Send message with many fileId to zx over,this responseBody is===>" + proResBody.toString()); + } else if (dealIsCount < isCount) {//继续发送余下文件流获取id + // 注意此处dealIsCount必然是>=1的,所以不需要考虑eml_file,因为eml_file必然已经完成,直接发送attachmentsResultIsList中的流获取id +// InputStream attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1); + byte[] attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1); + //更新AsyncPostMailFilesCallback类的内容 + AsyncPostMailFilesCallback asyncPostMailFilesCallback = updateFieldsAsyncPostMailFilesCallback(); + //继续发送余下的流获取id + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), attachmentResultIs, asyncPostMailFilesCallback); + } + } else if (statusCode == 403) {//空文件,不再重试发送 + 
LOG.info("AsyncPostMailFilesCallback post Files statusCode is 403 so get the fileIs but this minio file is empty.This message is===>>>" + sendMsg + "<<<==="); + } else { + LOG.info("AsyncPostMailFilesCallback post Files statusCode is " + statusCode + " so get the fileIs but this minio file has some problem.This message is===>>>" + sendMsg + "<<<==="); + } + + HttpClientUtils.closeQuietly(response); +// TimeTask.successsum ++; + } catch (Exception e) { + e.printStackTrace(); + } + } + + private AsyncPostMailFilesCallback updateFieldsAsyncPostMailFilesCallback() { + AsyncPostMailFilesCallback asyncHttpClientPostFileCallback = new AsyncPostMailFilesCallback(configInfo, sendMsg, count, isCount, dealIsCount, attachmentsIdList); + asyncHttpClientPostFileCallback.setEmlFileResultIs(emlFileResultIs); + asyncHttpClientPostFileCallback.setAttachmentsResultIsList(attachmentsResultIsList); + return asyncHttpClientPostFileCallback; + } + + /** + * 请求取消后调用该函数 + */ + @Override + public void cancelled() { + LOG.info("AsyncPostMailFilesCallback is cancelled... 
..."); +// LOG.error("filename: " + source.getFilename() + " cancelled"); + } + + /** + * 请求失败后调用该函数 + */ + @Override + public void failed(Exception e) { +// LOG.warn("AsyncPostMailFilesCallback postFile is failed,message is===>>>" + sendMsg + "<<<==="); + try { + count++; + if (count > 1) { + LOG.warn("AsyncPostMailFilesCallback post zxFile is failed,retry count=" + count); + LOG.info("Now AsyncPostMailFilesCallback dealIsCount is===>{" + dealIsCount + "}"); + } + if (count > 4) { + LOG.error("AsyncPostMailFilesCallback post zxFile is failed and already retry 3 times.error===>>>" + e + "<<<===.This failed message is===>>>" + sendMsg + "<<<===.Now attachmentsIdList is===>{" + attachmentsIdList.toString() + "}<==="); + } else { + if (dealIsCount == 0) {//说明失败的是emlFileResultIs,是从AsyncGetMailFilesCallback传送过来的 + if (configInfo != null && StringUtils.isNotBlank(sendMsg) && emlFileResultIs != null) { + //更新AsyncPostMailFilesCallback内部存储的信息 + AsyncPostMailFilesCallback asyncPostMailFilesCallback = updateFieldsAsyncPostMailFilesCallback();//注意此处count不再为0 + //重试时也是以上一次失败时存储的流的形式发往总线 + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), emlFileResultIs, asyncPostMailFilesCallback); + } else { + LOG.error("AsyncPostMailFilesCallback when post zxFile the emlFileResultIs is null,this msg is===>>>" + sendMsg); + } + } else if (dealIsCount >= 1) {//说明失败的是attachmentsResultIsList中的流 + //假设dealIsCount=1,说明eml更新成功并获得id,但attachments第0索引的失败了,此时重试应该使用第0索引的流,即dealIsCount-1 + //假设dealIsCount=3,说明eml,attachments第0,1索引的流都成功了,但索引2失败了,此时应该重试索引2,即dealIsCount-1 +// InputStream attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1); + byte[] attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1); + if (configInfo != null && StringUtils.isNotBlank(sendMsg) && attachmentResultIs != null) { + //更新AsyncPostMailFilesCallback类的内容 + AsyncPostMailFilesCallback asyncPostMailFilesCallback = updateFieldsAsyncPostMailFilesCallback(); + 
//继续发送余下的流获取id,这个流的索引是根据dealIsCount获取的 + HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), attachmentResultIs, asyncPostMailFilesCallback); + } else { + LOG.error("AsyncPostMailFilesCallback when post zxFile the attachmentResultIs is null,this msg is===>>>" + sendMsg); + } + } + + } + } catch (Exception e2) { + LOG.error("AsyncPostMailFilesCallback retry is error===>>>" + e2); + } + } + + // protected void getHttpContent(HttpResponse response) { + protected String getHttpContent(HttpResponse response) { + + HttpEntity entity = response.getEntity(); + String body = null; + if (entity == null) { + return null; + } + try { + body = EntityUtils.toString(entity, "utf-8"); + } catch (ParseException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } catch (IOException e) { + LOG.warn("the response's content inputstream is corrupt", e); + } + return body; + } +} + + + + + + diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/msgCallBack/AsyncPostMsgCallBack.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/msgCallBack/AsyncPostMsgCallBack.java new file mode 100644 index 0000000..da23dfd --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/msgCallBack/AsyncPostMsgCallBack.java @@ -0,0 +1,198 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils.msgCallBack; + +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil; +import cn.ac.iie.cusflume.sink.YbHttpAvroSinkFile; +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.avroUtils.MD5Utils; +import cn.ac.iie.cusflume.sink.bean.redirectBean.ResRedirBody; +import com.alibaba.fastjson.JSONObject; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.client.utils.HttpClientUtils; +import org.apache.http.concurrent.FutureCallback; +import 
org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import java.util.Arrays; +import java.util.Map; + +/** + * 被回调的对象,给异步的httpclient使用 + */ +public class AsyncPostMsgCallBack implements FutureCallback { + private static Logger logger = Logger.getLogger(AsyncPostMsgCallBack.class); + + private String postMsgUrl; + private String topicName; + private String dataJson; + private String userAgent; + private String msgSessionCookie; + private int count; + + public AsyncPostMsgCallBack(String postMsgUrl, String topicName, String dataJson, String userAgent, String msgSessionCookie, int count) { + this.postMsgUrl = postMsgUrl; + this.topicName = topicName; + this.dataJson = dataJson; + this.userAgent = userAgent; + this.msgSessionCookie = msgSessionCookie; + this.count = count; + } + + public String getPostMsgUrl() { + return postMsgUrl; + } + + public void setPostMsgUrl(String postMsgUrl) { + this.postMsgUrl = postMsgUrl; + } + + public String getTopicName() { + return topicName; + } + + public void setTopicName(String topicName) { + this.topicName = topicName; + } + + public String getDataJson() { + return dataJson; + } + + public void setDataJson(String dataJson) { + this.dataJson = dataJson; + } + + public String getUserAgent() { + return userAgent; + } + + public void setUserAgent(String userAgent) { + this.userAgent = userAgent; + } + + public String getMsgSessionCookie() { + return msgSessionCookie; + } + + public void setMsgSessionCookie(String msgSessionCookie) { + this.msgSessionCookie = msgSessionCookie; + } + + public int getCount() { + return count; + } + + public void setCount(int count) { + this.count = count; + } + + /** + * 请求完成后调用该函数 + */ + @Override + public void completed(HttpResponse response) { +// ProResBody proResBody = null; + try { + int statuCode = response.getStatusLine().getStatusCode(); + HttpEntity entity = response.getEntity(); + String ret = EntityUtils.toString(entity); + logger.info("返回的生产原始响应体String数据为:" + ret); + + /** 
+ * 不直接进行对象转换,除非数据加载不成功 + */ + Map map = JSONObject.parseObject(ret, Map.class); + int resRedirBodyCode = (int) map.get("code"); +// int resRedirBodyCode = resRedirBody.getCode(); + + logger.debug("生产数据==>" + dataJson + "<==," + + "生产数据checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," + + "返回的生产原始响应体String数据为:" + ret); + /** + * 20200818-接口细化响应码 + */ + if (statuCode == 200 && resRedirBodyCode == 200) { + logger.info("数据加载成功,返回码: " + statuCode); + logger.debug("生产数据==>" + dataJson + "<==," + + "生产数据checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," + + "数据加载成功,返回码: " + statuCode); + + AvroMonitorTimerTask.msgSuccessSum++; + EntityUtils.consume(entity); + } else { + switch (resRedirBodyCode) { + case 300: + logger.info("AsyncPostMsgCallBack==>重定向响应体-redirect-ret==>" + ret + "<==,statuCode:" + statuCode + ",resRedirBodyCode:300,当前服务节点负载过高,将向其他通信地址发送请求."); + //若不包含对应字段,则不进行对象转换,减少报错 + if (ret.contains("redirect")) { + ResRedirBody resRedirBody = JSONObject.parseObject(ret, ResRedirBody.class); + String redirectUrl = resRedirBody.getData().getRedirect(); + if (StringUtils.isNotBlank(redirectUrl)) { + YbHttpAvroSinkFile.changeUrl(redirectUrl); + } + } else { + logger.error("AsyncPostMsgCallBack==>服务端响应体中ResRedirBody.data.redirect字段不存在或格式不正确!!!"); + } + + YbHttpAvroSinkFile.redirectContents.add(dataJson); + break; + case 301: + logger.info("AsyncPostMsgCallBack==>statuCode:" + statuCode + ",resRedirBodyCode:301,当前所有服务端节点均负载过高,暂无可用资源,请等待."); + YbHttpAvroSinkFile.redirectContents.add(dataJson); + break; + case 410: + logger.info("AsyncPostMsgCallBack==>statuCode:" + statuCode + ",resRedirBodyCode:410,Cookie已过期或Cookie错误,将开始更新Cookie."); +// YbHttpAvroSinkFile.redirectContents.add(dataJson); + YbHttpAvroSinkFile.updateCookie(); + break; + case 500: + logger.info("AsyncPostMsgCallBack==>statuCode:" + statuCode + ",resRedirBodyCode:500,处理请求过程出现系统错误."); + YbHttpAvroSinkFile.updateCookie(); + break; + default: + logger.error("AsyncPostMsgCallBack==>数据加载失败,响应体:" + ret + 
"---statuCode:" + statuCode + "---resRedirBodyCode:" + resRedirBodyCode + "---失败数据为:\n" + dataJson); + AvroMonitorTimerTask.msgFailedSum++; + break; + } + EntityUtils.consume(entity); + } + + } catch (Exception e) { + logger.error("AsyncPostMsgCallBack Get response from ZX is error===>>>" + e.getMessage() + "<<<===Message is==>" + dataJson + "<==");//细化日志 + e.printStackTrace(); + } + + HttpClientUtils.closeQuietly(response); + } + + /** + * 请求取消后调用该函数 + */ + @Override + public void cancelled() { + logger.error("AsyncPostMagCallBack Request is cancelled"); + } + + /** + * 请求失败后调用该函数 + */ + @Override + public void failed(Exception e) { + count++; + logger.info("AsyncPostMagCallBack Request is Failed,This Failed data is ==>" + dataJson + "<==,Retry count=" + count); + if (count > 3) { + AvroMonitorTimerTask.msgFailedSum++; + logger.error("dataJson:" + dataJson + " send failed finally,error:" + e.toString()); + } else { + HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, dataJson, userAgent, msgSessionCookie, count);//failed失败时重试 + } + } + +} + + + + + + diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/ybBean/PutFileInfo.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/ybBean/PutFileInfo.java new file mode 100644 index 0000000..9008a32 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/HttpAsyncUtils/ybBean/PutFileInfo.java @@ -0,0 +1,31 @@ +package cn.ac.iie.cusflume.sink.HttpAsyncUtils.ybBean; + +public class PutFileInfo { + private String hostIp; + private String authorization; + + + public String getHostIp() { + return hostIp; + } + + public void setHostIp(String hostIp) { + this.hostIp = hostIp; + } + + public String getAuthorization() { + return authorization; + } + + public void setAuthorization(String authorization) { + this.authorization = authorization; + } + + @Override + public String toString() { + return "PutFileInfo{" + + "hostIp='" + hostIp 
+ '\'' + + ", authorization='" + authorization + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/YbHttpAvroSinkFile.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/YbHttpAvroSinkFile.java new file mode 100644 index 0000000..5ba13ff --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/YbHttpAvroSinkFile.java @@ -0,0 +1,493 @@ +package cn.ac.iie.cusflume.sink; + +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil; +import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask; +import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad; +import cn.ac.iie.cusflume.sink.bean.ac.Res.AcResBody; +import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo; +import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import com.google.common.base.Preconditions; +import com.google.common.base.Throwables; +import org.apache.commons.lang.StringUtils; +import org.apache.flume.*; +import org.apache.flume.conf.Configurable; +import org.apache.flume.sink.AbstractSink; +import org.apache.log4j.Logger; + +import java.util.*; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +public class YbHttpAvroSinkFile extends AbstractSink implements Configurable { + + private static Logger logger = Logger.getLogger(YbHttpAvroSinkFile.class); + + protected static ExecutorService pool = Executors.newFixedThreadPool(RealtimeCountConfig.HTTP_ASYNC_PARALLELISM); + + private static DataCenterLoad dcl; + + private static String postMsgUrl;//发送消息路径,配置文件获取,发送文件与发送消息皆需要 + + private String postFileUrl;//发送文件路径,配置文件获取,仅发送文件时需要---若只发送消息,则此路径与postMsgUrl设置相同即可 + + private static int batchSize;//配置文件获取,每次从channel中取出的数据条数 + + //http验证 + private static String checkMsgUrl;//消息认证地址,配置文件获取,发送文件与发送消息皆需要 + + private String checkFileUrl;//文件认证地址,配置文件获取,仅发送文件时需要---若只发送消息,则此路径与checkUrl设置相同即可---暂未设置配置文件获取步骤 + + private static String userAgent;//业务系统编码systemId,该字段为系统的唯一编码,配置文件获取 + + 
private static String xTag;//标签编码tag,在总线中唯一标识该标签,配置文件获取--20191217笔记--貌似现在已经不需要这个参数作为头部了 + + private static String msgSessionCookie;//消息会话标识,由响应返回 + private static String fileSessionCookie;//文件会话标识,由响应返回,仅发送文件时需要---若只发送消息,则fileSessionCookie会一直为空-即仅广东需要 + + private static String monitorSessionCookie;//状态回传会话标识,由响应返回 + + private static String monitorMsgXTag = RealtimeCountConfig.MONITOR_MSG_SINK_FILE_XTAG;//20191217更新-因为此参数为总线提供,此处固定,提取到realtime_service_config.properties配置文件中 + + private static ConfigInfo configInfo = new ConfigInfo();//用于存放文件与消息的验证,发送以及会话标识 + + //http入库 + private static String topicName;//配置文件获取,表示入库表结构为哪一个topic,后面也将根据此topic获取avro的schema + + private static boolean monitorStart = false; + + private static boolean checkTimerStart = false;//定时获取Cookie启动器 + + private static boolean redirectContentsPostStart = false;//定时post重定向数据集合 + + /** + * 用于存储由于服务器资源不足所造成的未发送数据 + */ + public static List redirectContents; + + /** + * 用于存放验证以及连接的url的各组成部分,方便调取 + */ + private static HashMap urlToolHm; + + public YbHttpAvroSinkFile() { + logger.warn("YbHttpAvroSinkFile start ... ..."); + } + + @Override + public synchronized void start() { + super.start(); + dcl = new DataCenterLoad(); + + redirectContents = new ArrayList<>();//初始化 + /** + * 拆解初始化获取的url后缀,用于填充urlToolHm,用于后续动态负载均衡中的url变更 + */ + if (StringUtils.isNotBlank(checkMsgUrl) && StringUtils.isNotBlank(postMsgUrl)) { + urlToolHm = new HashMap<>(); + makeUrlSplitMap(checkMsgUrl, "check"); + makeUrlSplitMap(postMsgUrl, "post"); + } else { + logger.error("Starting YbHttpAvroSinkFile is error==>checkMsgUrl and postMsgUrl can not be null!!!!"); + } + + logger.warn("Starting YbHttpAvroSinkFile ... 
..."); + } + + @Override + public void configure(Context context) { + try { + postMsgUrl = context.getString("postMsgUrl", ""); + Preconditions.checkNotNull("".equals(postMsgUrl), "postMsgUrl must be set!!"); + logger.info("Read Post Message URL from configuration : " + postMsgUrl); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Endpoint Message URL invalid", e); + } catch (Exception e) { + logger.error("Get postMsgUrl is error : " + e); + } + + try { + batchSize = context.getInteger("batchSize", 100); + Preconditions.checkNotNull(batchSize > 0, "batchSize must be a positive number!!"); + logger.info("Read BatchSize from configuration : " + batchSize); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("BatchSize invalid", e); + } catch (Exception e) { + logger.error("Get batchSize is error : " + e); + } + + try { + checkMsgUrl = context.getString("checkMsgUrl", ""); + Preconditions.checkNotNull("".equals(checkMsgUrl), "checkMsgUrl must be set!!"); + logger.info("Read Check Message Url from configuration : " + checkMsgUrl); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Check Message URL invalid", e); + } catch (Exception e) { + logger.error("Get checkMsgUrl is error : " + e); + } + + try { + userAgent = context.getString("userAgent", ""); + Preconditions.checkNotNull("".equals(userAgent), "userAgent must be set!!"); + logger.info("Read UserAgent from configuration : " + userAgent); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("UserAgent invalid", e); + } catch (Exception e) { + logger.error("Get userAgent is error : " + e); + } + + try { + xTag = context.getString("xTag", ""); + Preconditions.checkNotNull("".equals(xTag), "xTag must be set!!"); + logger.info("Read XTag from configuration : " + xTag); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("XTag invalid", e); + } catch (Exception e) { + logger.error("Get 
xTag is error : " + e); + } + + try { + topicName = context.getString("topicName", ""); + Preconditions.checkNotNull("".equals(topicName), "topicName must be set!!"); + logger.info("Read topicName from configuration : " + topicName); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("topicName invalid", e); + } catch (Exception e) { + logger.error("Get topicName is error : " + e); + } + + } + + + @Override + public synchronized void stop() { + super.stop(); + logger.warn("Stopping YbHttpAvroSinkFile ... ..."); + } + + @Override + public Status process() throws EventDeliveryException { + Status result = Status.READY; + Channel channel = getChannel(); + Transaction transaction = null; +// AcResBody acCheckResBody = null; +// ProResBody producerResBody = null; + try { + transaction = channel.getTransaction(); + transaction.begin(); + Event event = null; + String content = null; + List contents = new ArrayList<>(); + for (int i = 0; i < batchSize; i++) { + event = channel.take(); + if (event != null) { + content = new String(event.getBody()); + contents.add(content); + } else { + result = Status.BACKOFF; + break; + } + } + if (contents.size() > 0) { + switch (topicName) { + //作为单条发送-新分类-20191219 + /** + * 非文件消息 + */ + case "NTC-CONN-RECORD-LOG": + case "NTC-COLLECT-DNS-LOG": + case "NTC-COLLECT-SSL-LOG": + /** + * 文件消息 + */ + case "NTC-COLLECT-FILE-LOG"://发送独立出来的文件标签 + case "NTC-COLLECT-HTTP-DOC-LOG": + case "NTC-COLLECT-HTTP-AV-LOG"://schema等同于NTC-COLLECT-HTTP-DOC-LOG + case "NTC-COLLECT-FTP-DOC-LOG": + case "NTC-COLLECT-MAIL-LOG": + case "NTC-COLLECT-TELNET-LOG": + /** + * 状态消息 + */ + case "INFLUX-SAPP-BPS-STAT-LOG"://读取回写的influxDB合计数据用作状态上传 + sendMsgLog(transaction, contents);//20191209移除文件发送,仅处理消息 + break; + default: + logger.error("YbHttpAvroSinkFile can't find this topic:" + topicName + ".Please confirm this topicName is correct!!!"); + break; + } + } else { + transaction.commit(); + } + } catch (Exception e) { + try { + if 
(transaction != null) { + transaction.rollback(); + } + } catch (Exception e2) { + logger.error("Exception in rollback. Rollback might not have been successful.", e2); + } + logger.error("Failed to commit transaction.Transaction rolled back.", e); + Throwables.propagate(e); + } finally { + if (transaction != null) { + transaction.close(); + logger.debug("close Transaction"); + } + } + return result; + } + + /** + * 获取消息SessionCookie + */ + private static void getMsgSessionCookie() { + AcResBody acCheckMsgResBody; + int requestType = 10; + int statusCode = 0; + acCheckMsgResBody = acCheck(checkMsgUrl, userAgent, xTag, requestType);//getMsgSessionCookie() + if (acCheckMsgResBody != null) { + statusCode = acCheckMsgResBody.getCode(); + } + if (statusCode == 200) { + if (StringUtils.isNotBlank(acCheckMsgResBody.getSessionId())) { + logger.warn("AC msg successfully,msg sessionId is ===>" + acCheckMsgResBody.getSessionId()); + msgSessionCookie = acCheckMsgResBody.getSessionId(); + } + } else if (statusCode == 0) { + logger.error("This statusCode is 0,so AC msg from ZX acCheckMsgResBody maybe null or not be set"); + } else { + logger.error("AC msg from ZX is error,statusCode is " + statusCode + "(case)=" + acCheckMsgResBody.getCode() + "(getMethod)<==="); + logger.error("This " + statusCode + " ResponseBody(contain sessionId) is ===>" + acCheckMsgResBody.toString() + "<==="); + } + updateConfigInfo();//getMsgSessionCookie()更新 +// return producerResBody; + } + + /** + * 获取状态回传SessionCookie + */ + private static void getMonitorSessionCookie() { + AcResBody acCheckMsgMonitorResBody; + int requestType = 10; + int statusCode = 0; + acCheckMsgMonitorResBody = acCheck(checkMsgUrl, userAgent, monitorMsgXTag, requestType);//getMonitorSessionCookie() + if (acCheckMsgMonitorResBody != null) { + statusCode = acCheckMsgMonitorResBody.getCode(); + } + if (statusCode == 200) { + if (StringUtils.isNotBlank(acCheckMsgMonitorResBody.getSessionId())) { + 
logger.warn("getMonitorSessionCookie-Thread.currentThread().getName()===>" + Thread.currentThread().getName()); + logger.warn("AC msgMonitor successfully,msgMonitor sessionId is ===>" + acCheckMsgMonitorResBody.getSessionId()); + monitorSessionCookie = acCheckMsgMonitorResBody.getSessionId(); + } + } else if (statusCode == 0) { + logger.error("This statusCode is 0,so AC msgMonitor from ZX acCheckMsgMonitorResBody maybe null or not be set"); + } else { + logger.error("AC msgMonitor from ZX is error,statusCode is " + statusCode + "(case)=" + acCheckMsgMonitorResBody.getCode() + "(getMethod)<==="); + logger.error("This " + statusCode + " ResponseBody(contain sessionId) is ===>" + acCheckMsgMonitorResBody.toString() + "<==="); + } + updateConfigInfo();//getMonitorSessionCookie()更新 + } + + private static ConfigInfo updateConfigInfo() { + configInfo.setCheckMsgUrl(checkMsgUrl); + configInfo.setPostMsgUrl(postMsgUrl); + configInfo.setMsgSessionCookie(msgSessionCookie); + + configInfo.setMonitorSessionCookie(monitorSessionCookie);//缓存monitorSessionCookie + + configInfo.setTopicName(topicName); + configInfo.setUserAgent(userAgent); + configInfo.setxTag(xTag); + configInfo.setBatchSize(batchSize); + + return configInfo; + } + + /** + * 往zx发送文件数据的消息,即发送文件的message数据(结构化数据) + * 本来是作为文件消息发送,现该方法主要用于单条发送数据-20191224 + * + * @param transaction + * @param contents + */ + private void sendMsgLog(Transaction transaction, List contents) { + try { + //获取状态回传sessionID + if (StringUtils.isBlank(monitorSessionCookie)) { + getMonitorSessionCookie();//sendMsgLog-首次获取monitorSessionCookie + + if (!checkTimerStart) { + checkCookieEveryWeek();//sendMsgLog-第一次启动检测到monitorSessionCookie为空时启动任务但不进行验证,后续间隔一段时间后开始验证,每次申请monitorSessionCookie和msgSessionCookie两个Cookie + checkTimerStart = true; + logger.warn("CheckMsgAndFileCookie Timer is started......"); + } + + if (!monitorStart) {//消息定时上报 + AvroMonitorTimerTask.monitorMsg(monitorSessionCookie, postMsgUrl, "monitor-msg", 1, userAgent, 
topicName);//sendMsgLog-日志消息 + monitorStart = true; + logger.warn("MonitorMsg Timer is started......"); + } + } + + AvroMonitorTimerTask.msgTotalSum += contents.size();//消息使用,文件+消息不使用 + //检查认证是否存在 + if (StringUtils.isBlank(msgSessionCookie)) { + getMsgSessionCookie();//sendMsgLog-msgSessionCookie为空,首次接入验证 + + /** + * 开启定时扫描重定向数据集合 + */ + if (!redirectContentsPostStart) { + postRedirectDataEveryMin(); + redirectContentsPostStart = true; + logger.warn("RedirectContents Timer Post is started......"); + } + + AvroMonitorTimerTask.msgReadyPostSum += contents.size(); + for (String content : contents) { + HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, content, userAgent, msgSessionCookie, 0);//初始发送count计数为0 + } + } else {//sessionCookie不为空 + logger.info("AC msg sessionId already exists,msg sessionId is ===>" + msgSessionCookie); + AvroMonitorTimerTask.msgReadyPostSum += contents.size(); + for (String content : contents) { + pool.execute(new Runnable() { + @Override + public void run() { + try { + HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, content, userAgent, msgSessionCookie, 0);//初始发送count计数为0 + } catch (Exception e) { + logger.error("sendMsgLog multi-thread is error==>" + e + "<==,Thread is==>" + Thread.currentThread().getName() + "<==."); + } + } + }); + } + } + + } catch (Exception e) { + logger.error("YbHttpAvroSinkFile sendFileMsg is error===>" + e + "<==="); + transaction.commit(); + } finally { + if (transaction != null) { + transaction.commit(); + } + } + } + + /** + * 总线入库前验证 + * + * @param checkMsgUrl + * @param userAgent + * @param xTag + * @param requestType + * @return + */ + private static AcResBody acCheck(String checkMsgUrl, String userAgent, String xTag, int requestType) { + return dcl.zxAcCheck(checkMsgUrl, userAgent, xTag, requestType); + } + + /** + * 验证定时器,每隔一段时间验证一次-验证获取新的Cookie + */ + private void checkCookieEveryWeek() { + Timer timer = new Timer(); + timer.scheduleAtFixedRate(new TimerTask() { + @Override + public 
void run() { + try { + getMsgSessionCookie();//定时进行消息Cookie验证更新-7天 + getMonitorSessionCookie();//定时进行状态上报Cookie验证更新-7天 + } catch (Exception e) { + logger.error("CheckCookieEveryWeek to zx everyWeek is error===>>>" + e + "<==="); + } + } + }, 1000 * 60 * 60 * 24 * 7, 1000 * 60 * 60 * 24 * 7);//每隔7天执行一次 + } + + /** + * 重定向数据集上传定时器,每隔一段时间扫描并上传一次-用于上传因为服务器资源紧张暂未上传的数据 + */ + private void postRedirectDataEveryMin() { + Timer timer = new Timer(); + timer.scheduleAtFixedRate(new TimerTask() { + @Override + public void run() { + try { + if (redirectContents.size() > 0) { + List tmpListFreq = new ArrayList<>(redirectContents); + redirectContents.clear(); + AvroMonitorTimerTask.msgReadyPostSum += tmpListFreq.size(); + for (String content : tmpListFreq) { + HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, content, userAgent, msgSessionCookie, 0);//postRedirectDataEveryMin定时器-初始发送count计数为0 + } + logger.info("PostRedirectDataEveryMin post to zx RedirectData size==>" + tmpListFreq.size() + "<==."); + } + } catch (Exception e) { + logger.error("PostRedirectDataEveryMin to zx everyMin is error===>>>" + e + "<==="); + } + } + }, 1000 * 60, 1000 * 60);//每隔1分钟执行一次 + } + + /** + * 动态负载均衡变更cookie-20200818 + * + * @param redirectUrlPort + */ + public static void changeUrl(String redirectUrlPort) { + + /** + * 变更postMsgUrl与checkMsgUrl + */ + postMsgUrl = redirectUrlPort + urlToolHm.get("post_suf_path"); + checkMsgUrl = redirectUrlPort + urlToolHm.get("check_suf_path"); + + /** + * 变更url后需要重新获取cookie + */ + updateCookie(); + + logger.info("YbHttpAvroSinkFile->changeUrl->change postMsgUrl:" + postMsgUrl + ",change checkMsgUrl:" + checkMsgUrl); + } + + /** + * 动态负载均衡更新cookie + */ + public static void updateCookie() { + getMonitorSessionCookie();//动态负载均衡修改url,重新获取cookie + getMsgSessionCookie();//动态负载均衡修改url,重新获取cookie + + logger.info("YbHttpAvroSinkFile->updateCookie update cookie,postMsgUrl:" + postMsgUrl + + ",checkMsgUrl:" + checkMsgUrl + + ",获取monitorSessionCookie:" + 
monitorSessionCookie + + ",获取msgSessionCookie:" + msgSessionCookie); + } + + /** + * 用于切分url,便于后续动态负载均衡的url变更 + * + * @param oldUrlPath + * @param urlType + */ + private static void makeUrlSplitMap(String oldUrlPath, String urlType) { + String[] split = oldUrlPath.replace("http://", "").replace("https://", "").split("/", 2); + + if (split.length == 2) { + /* + 验证地址:{"check_pre_https":"https://","check_pre_http":"http://","check_old_url_port":"192.168.126.4:22451","check_suf_path":"/v1/system/connect"} + 生产地址:{"post_suf_path":"/v1/data/sendData","post_pre_http":"http://","post_old_url_port":"192.168.126.4:22451","post_pre_https":"https://"} + 一般会存放到一起:{"post_suf_path":"/v1/data/sendData","check_pre_https":"https://","post_pre_http":"http://","check_pre_http":"http://","check_old_url_port":"192.168.126.4:22451","post_old_url_port":"192.168.126.4:22451","check_suf_path":"/v1/system/connect","post_pre_https":"https://"} + */ + urlToolHm.put(urlType + "_pre_http", "http://"); + urlToolHm.put(urlType + "_pre_https", "https://"); + urlToolHm.put(urlType + "_old_url_port", split[0]); + urlToolHm.put(urlType + "_suf_path", "/" + split[1]); + } else { + logger.error("makeUrlSplitMap-->String[] split size is not correct(split.length must be 2)."); + } + + } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/AvroMonitorTimerTask.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/AvroMonitorTimerTask.java new file mode 100644 index 0000000..505407c --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/AvroMonitorTimerTask.java @@ -0,0 +1,349 @@ +package cn.ac.iie.cusflume.sink.avroUtils; + + +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil; +import cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean.*; +import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody; +import cn.ac.iie.cusflume.sink.daoUtils.InfluxdbUtils; +import 
cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import org.apache.commons.io.IOUtils; +import org.apache.log4j.Logger; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.text.SimpleDateFormat; +import java.util.*; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +public class AvroMonitorTimerTask { + + private static Logger logger = Logger.getLogger(AvroMonitorTimerTask.class); + + //消息计数 + public static long msgSuccessSum = 0; + public static long msgFailedSum = 0; + public static long msgTotalSum = 0; + public static long msgReadyPostSum = 0; + + //文件计数 + public static long fileSuccessSum = 0; + public static long fileFailedSum = 0; + public static long fileTotalSum = 0; + public static long fileReadyPostSum = 0; + + //文件字节数计数 + public static long fileBytesSuccessSum = 0; + public static long fileBytesFailedSum = 0; + public static long fileBytesTotalSum = 0; + public static long fileBytesReadyPostSum = 0; + + public static boolean startFileMonitor = false;//默认false + + /** + * 消息---Java自定义定时器 + */ + public static void monitorMsg(String monitorSessionCookie, String postMonitorUrl, String monitorCategory, int batchSize, String userAgent, String topicType) {//20200428新增 + Timer timer = new Timer(); + timer.scheduleAtFixedRate(new TimerTask() { + @Override + public void run() { + AvroMonitorTimerTask.msgTotalSum++; + AvroMonitorTimerTask.msgReadyPostSum++; + + try { + if ("yb".equals(RealtimeCountConfig.MONITOR_TYPE)) {//只有当类型为一部(yb)时才进行状态上报 + String sendMsg = getJson(RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE, RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE_FLUME, topicType);//新版-20200428 + logger.info("Send monitor message is===>>>" + sendMsg + "<<<==="); + HttpClientUtil.asyncProducerAvroToZX(postMonitorUrl, monitorCategory, sendMsg, userAgent, monitorSessionCookie, 0);//静态方法无返回值用于多线程,初始发送count计数为0 + } 
+ } catch (Exception e) { + logger.error("Send monitorMsg to zx is error===>>>" + e + "<==="); + } + /** + * 20200427移除一部"YB monitorMsg"的info日志,加入"last min"日志,方便脚本在warn状态下采集 + * 此时RealtimeCountConfig.MONITOR_TYPE填写yb,则消息上报与warn统计信息都有 + * 若填写gd,则只有warn统计信息,不会有状态上报 + */ +// if ("gd".equals(RealtimeCountConfig.MONITOR_TYPE)) {//只有当类型为广东(gd)时才进行warn类型日志计数打印,此时需要搭配外部脚本计数 + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmm"); + String timeFormat = sdf.format(new Date(System.currentTimeMillis())); + long lastMinTime = Long.parseLong(timeFormat) - 1; + //发送消息统计情况-warn类型便于脚本收集信息打印 + logger.warn("last min " + lastMinTime + " monitorMsg count==>msgSuccessSum:{ " + msgSuccessSum + " },==>msgFailedSum:{ " + msgFailedSum + " },==>msgReadyPostSum:{ " + msgReadyPostSum + " },==>msgTotalSum:{ " + msgTotalSum + " }."); +// } + //重置为0 + msgSuccessSum = 0; + msgFailedSum = 0; + msgTotalSum = 0; + msgReadyPostSum = 0; + } + }, 60000, 60000); + } + + /** + * 消息和文件-单线程版 + */ + public static void monitorMsgAndFile() { + Timer timer = new Timer(); +// InfluxdbUtils influxdbUtils = new InfluxdbUtils(); + timer.scheduleAtFixedRate(new TimerTask() { + @Override + public void run() { + SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmm"); + String timeFormat = sdf.format(new Date(System.currentTimeMillis())); + long lastMinTime = Long.parseLong(timeFormat) - 1; + + //发送消息及文件的统计情况-仅计算将要推送与成功推送的文件个数与字节数 + logger.warn("last min " + lastMinTime + " monitorMsgAndFile count==>msgSuccessSum:{ " + msgSuccessSum + " },==>msgFailedSum:{ " + msgFailedSum + " },==>msgReadyPostSum:{ " + msgReadyPostSum + " }," + + "==>fileSuccessSum:{ " + fileSuccessSum + " },==>fileReadyPostSum:{ " + fileReadyPostSum + " }," + + "==>fileBytesSuccessSum:{ " + fileBytesSuccessSum + " },==>fileBytesReadyPostSum:{ " + fileBytesReadyPostSum + " }."); + + //重置为0 + msgSuccessSum = 0;//根据响应计算-索引14 + msgFailedSum = 0;//根据响应计算-索引16 + msgReadyPostSum = 0;//直接在推送处++,多个文件对应一个消息-索引18 + + fileSuccessSum = 0;//同字节数-索引20 + 
fileReadyPostSum = 0;//同字节数-索引22 + + fileBytesSuccessSum = 0;//根据ID个数计算(id是推送成功后返回的)-索引24 + fileBytesReadyPostSum = 0;//根据流的个数计算-索引26 + } +// }, 60000, 300000);//正式 + }, 60000, 60000);//测试 + } + + private static String getJson(String systemComponentCode, String systemComponentCodeFlume, String topicType) { + SystemStatus systemStatus = new SystemStatus(); + /** + * 第一部分-设置时间-time + */ + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + String timeFormat = sdf.format(new Date(System.currentTimeMillis()));//格式2019-11-18 19:56:37 + systemStatus.setTime(timeFormat); + + /** + * 第二部分-设置系统状态-system + */ + RealTimeStatus realTimeStatus = new RealTimeStatus(); + realTimeStatus.setId(systemComponentCode); + realTimeStatus.setStatus("在线"); + + //无问题时不设置状态 + if (msgFailedSum >= 100000L) { + List rtAlarmsList = new LinkedList(); + RtAlarm rtAlarm = new RtAlarm(); + rtAlarm.setLevel("低"); + rtAlarm.setTime(timeFormat); + rtAlarm.setType("运行出现失败数据"); + rtAlarm.setDetail("系统正常运行但是出现了失败数据"); + rtAlarmsList.add(rtAlarm); + realTimeStatus.setAlarms(rtAlarmsList); + } else { + List rtAlarmsList = new LinkedList(); + realTimeStatus.setAlarms(rtAlarmsList); + } + + LinkedList rtStatValuesList = new LinkedList<>(); + RtStatValue rtStatValue = new RtStatValue(); + rtStatValue.setName("当前批次数据量"); + + LinkedList rtNameValuesList = new LinkedList<>(); + //总数 + RtNameValue totalCount = new RtNameValue(); + totalCount.setName("总数据量"); + totalCount.setValue(msgTotalSum + ""); + //成功 + RtNameValue successCount = new RtNameValue(); + successCount.setName("成功"); + successCount.setValue(msgSuccessSum + ""); + //准备发送 + RtNameValue readyPostCount = new RtNameValue(); + readyPostCount.setName("准备导入"); + readyPostCount.setValue(msgReadyPostSum + ""); + //失败 + RtNameValue failCount = new RtNameValue(); + failCount.setName("失败"); + failCount.setValue(msgFailedSum + ""); + + rtNameValuesList.add(totalCount); + rtNameValuesList.add(successCount); + rtNameValuesList.add(readyPostCount); 
+ rtNameValuesList.add(failCount); + rtStatValue.setContent(rtNameValuesList); + + rtStatValuesList.add(rtStatValue); + realTimeStatus.setStats(rtStatValuesList); + systemStatus.setSystem(realTimeStatus); + + + /** + * 第三部分-设置组件状态-components + */ + LinkedList realTimeStatusesList = new LinkedList<>(); +// RealTimeStatus realTimeStatus1 = new RealTimeStatus(); +// realTimeStatus1.setId(xTag); +// realTimeStatus1.setStatus("专用设备"); +// +// LinkedList rtAlarmsList2 = new LinkedList<>(); +// RtAlarm rtAlarm2 = new RtAlarm(); +// rtAlarm2.setLevel("Low"); +// rtAlarm2.setTime(timeFormat); +// rtAlarm2.setType("Working"); +// rtAlarm2.setDetail("Component is Working"); +// rtAlarmsList2.add(rtAlarm2); +// realTimeStatus1.setAlarms(rtAlarmsList2); +// +// LinkedList rtStatValuesList2 = new LinkedList<>(); +// RtStatValue rtStatValue2 = new RtStatValue(); +// rtStatValue2.setName("Last 5 MinS Message Data Counts"); +// +// LinkedList rtNameValuesList2 = new LinkedList<>(); +// //总数 +// RtNameValue totalComponentCount = new RtNameValue(); +// totalComponentCount.setName("TotalComponentMsg"); +// totalComponentCount.setValue(msgTotalSum + ""); +// //成功 +// RtNameValue successComponentCount = new RtNameValue(); +// successComponentCount.setName("SuccessfulComponentMsg"); +// successComponentCount.setValue(msgSuccessSum + ""); +// //准备发送 +// RtNameValue readyPostComponentCount = new RtNameValue(); +// readyPostComponentCount.setName("ReadyPostComponentMsg"); +// readyPostComponentCount.setValue(msgReadyPostSum + ""); +// //失败 +// RtNameValue failComponentCount = new RtNameValue(); +// failComponentCount.setName("FailedComponentMsg"); +// failComponentCount.setValue(msgFailedSum + ""); +// +// rtNameValuesList2.add(totalComponentCount); +// rtNameValuesList2.add(successComponentCount); +// rtNameValuesList2.add(readyPostComponentCount); +// rtNameValuesList2.add(failComponentCount); +// +// rtStatValue2.setContent(rtNameValuesList2); +// +// rtStatValuesList2.add(rtStatValue2); 
+// realTimeStatus1.setStats(rtStatValuesList2); +// realTimeStatusesList.add(realTimeStatus1); + + /** + * 20200428新增--↓--增加Flume状态 + */ + + RealTimeStatus realTimeStatus1 = new RealTimeStatus(); + realTimeStatus1.setId(systemComponentCodeFlume); + realTimeStatus1.setStatus("正常"); + + //无问题时不设置状态 + if (msgFailedSum >= 100000L) { + LinkedList rtAlarmsList2 = new LinkedList<>(); + RtAlarm rtAlarm2 = new RtAlarm(); + rtAlarm2.setLevel("低"); + rtAlarm2.setTime(timeFormat); + rtAlarm2.setType("设备处理数据出现异常"); + rtAlarm2.setDetail("设备正常运行但出现了失败数据,数据种类:" + topicType); + rtAlarmsList2.add(rtAlarm2); + realTimeStatus1.setAlarms(rtAlarmsList2); + } else { + LinkedList rtAlarmsList2 = new LinkedList<>(); + realTimeStatus1.setAlarms(rtAlarmsList2); + } + + + LinkedList rtStatValuesList2 = new LinkedList<>(); + RtStatValue rtStatValue2 = new RtStatValue(); + rtStatValue2.setName("设备处理信息"); + + LinkedList rtNameValuesList2 = new LinkedList<>(); + //数据种类-其实就是topic种类 + RtNameValue componentType = new RtNameValue(); + componentType.setName("数据种类"); + componentType.setValue(topicType); +// //成功 +// RtNameValue successComponentCount = new RtNameValue(); +// successComponentCount.setName("SuccessfulComponentMsg"); +// successComponentCount.setValue(msgSuccessSum + ""); +// //准备发送 +// RtNameValue readyPostComponentCount = new RtNameValue(); +// readyPostComponentCount.setName("ReadyPostComponentMsg"); +// readyPostComponentCount.setValue(msgReadyPostSum + ""); +// //失败 +// RtNameValue failComponentCount = new RtNameValue(); +// failComponentCount.setName("FailedComponentMsg"); +// failComponentCount.setValue(msgFailedSum + ""); + + rtNameValuesList2.add(componentType); +// rtNameValuesList2.add(successComponentCount); +// rtNameValuesList2.add(readyPostComponentCount); +// rtNameValuesList2.add(failComponentCount); + + rtStatValue2.setContent(rtNameValuesList2); + + rtStatValuesList2.add(rtStatValue2); + realTimeStatus1.setStats(rtStatValuesList2); + 
realTimeStatusesList.add(realTimeStatus1); + + /** + * 20200428新增--↑--增加Flume状态 + */ + systemStatus.setComponents(realTimeStatusesList);//中间全部注释表示不设置Component,未注释时(有代码时)表示设置Component + +// return JSONObject.toJSONString(systemStatus); + return systemStatus.toString(); + } + + public static long getFileBytesFromInputStream(InputStream fileIS) { + try { + byte[] bytes = IOUtils.toByteArray(fileIS); + return bytes.length; + } catch (Exception e) { + logger.error("GetFileBytes is error,return 0,error is===>" + e); + return 0; + } + } + + public static ByteArrayOutputStream getByteArrayOutputStream(InputStream input) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + int len; + while ((len = input.read(buffer)) > -1) { + baos.write(buffer, 0, len); + } + baos.flush(); + baos.close(); + return baos; + } + + + public static LinkedList getByteArrayOutputStream(LinkedList attachmentsResultIsList) throws IOException { + LinkedList byteArrayOutputStreamsList = new LinkedList<>(); + for (int i = 0; i < attachmentsResultIsList.size(); i++) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + byte[] buffer = new byte[1024]; + int len; + while ((len = attachmentsResultIsList.get(i).read(buffer)) > -1) { + baos.write(buffer, 0, len); + } + baos.flush(); + baos.close(); + byteArrayOutputStreamsList.add(baos); + } + + return byteArrayOutputStreamsList; + } + + public static void main(String[] args) { + msgSuccessSum = 2; + msgFailedSum = 1; + msgTotalSum = 3; + msgReadyPostSum = 3; +// String sendMsg = AvroMonitorTimerTask.getJson("userAgent", "xTag"); + String sendMsg = AvroMonitorTimerTask.getJson(RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE, RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE_FLUME, "NTC-CONN-RECORD-LOG"); + System.out.println(sendMsg); + } + +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/DataCenterLoad.java 
package cn.ac.iie.cusflume.sink.avroUtils;

import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
import cn.ac.iie.cusflume.sink.avroUtils.avroSchema.GetAvroSchemaByTopicUtils;
import cn.ac.iie.cusflume.sink.bean.ac.Res.AcResBody;
import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.log4j.Logger;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Thin facade over HttpManager for pushing data to the ZX bus and the legacy
 * data center: AC verification, Avro batch/single produce, and tab/comma
 * separated batch uploads for the DF_PZ report topics.
 */
public class DataCenterLoad {

    private static final String url = HttpManager.getInfoLoadInstance().getAddress();
    private static Logger logger = Logger.getLogger(DataCenterLoad.class);

    public DataCenterLoad() {
    }

    /**
     * AC verification against the bus before ingestion.
     *
     * @return the AC response body, or null when the underlying call threw
     */
    public AcResBody zxAcCheck(String checkMsgUrl, String userAgent, String xTag, int requestType) {
        AcResBody acResBody = null;
        try {
            acResBody = HttpManager.getInfoLoadInstance().checkAcByZx(checkMsgUrl, userAgent, xTag, requestType);
        } catch (Exception e) {
            logger.error("DataCenterLoad zxAcCheck is error,so this return acResBody maybe null,error is===>" + e + "<===");
            e.printStackTrace();
        }
        return acResBody;
    }

    /**
     * Avro batch produce to ZX (message transport). Resolves the topic schema
     * and posts the whole list in a single call; sub-batching by batchSize was
     * disabled in the original and remains disabled here.
     *
     * @param urlProducer      producer endpoint
     * @param topicName        target topic; also selects the Avro schema
     * @param jsonDataList     JSON records to post
     * @param batchSize        retained for interface compatibility (unused)
     * @param userAgent        caller identity header
     * @param msgSessionCookie message session cookie
     * @return the producer response body, or null when nothing was sent or the call threw
     */
    public ProResBody avroDataLoad(String urlProducer, String topicName, List<String> jsonDataList, int batchSize, String userAgent, String msgSessionCookie) {
        // Resolve the schema once per call, before the try, as in the original
        // (a resolution failure propagates to the caller).
        String topicAvroSchema = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topicName);
        ProResBody proResBody = null;
        try {
            if (jsonDataList != null && !jsonDataList.isEmpty()) {
                // Pass a defensive copy, matching the original's accumulate-then-clear behavior.
                proResBody = HttpManager.getInfoLoadInstance().producerAvroToZX(urlProducer, topicName, topicAvroSchema, new ArrayList<>(jsonDataList), userAgent, msgSessionCookie);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return proResBody;
    }

    /**
     * Avro single-record produce to ZX. Used for completed-file transfer and
     * for the periodic status report. Synchronous variant with a return value.
     *
     * Since 2019-12-24 the schema is resolved inside HttpManager, so an empty
     * schema string is passed here.
     */
    public ProResBody avroDataLoad(String urlProducer, String topicName, String jsonData, int batchSize, String userAgent, String msgSessionCookie) {
        String topicAvroSchema = ""; // schema resolved centrally in HttpManager (2019-12-24)
        ProResBody proResBody = null;
        try {
            proResBody = HttpManager.getInfoLoadInstance().producerAvroToZX(urlProducer, topicName, topicAvroSchema, jsonData, userAgent, msgSessionCookie);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return proResBody;
    }

    /**
     * JSON batch upload to ZX: each record is suffixed with the 5-minute-rounded
     * timestamp, joined with newlines, and flushed every batchSize records plus
     * a final flush for the remainder.
     */
    public void jsonDataLoad(String urlProducer, List<String> jsonDataList, int batchSize, String userAgent, String sessionCookie) {
        StringBuffer sb = new StringBuffer();
        String time5 = generateTimeWithInterval();
        int nums = 0;
        for (String jsonData : jsonDataList) {
            try {
                sb.append(jsonData).append("\t").append(time5).append("\n");
                nums++;
                if (nums >= batchSize) {
                    postJsonBuffer(urlProducer, sb, userAgent, sessionCookie);
                    nums = 0;
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        try {
            if (nums != 0) {
                postJsonBuffer(urlProducer, sb, userAgent, sessionCookie); // flush the remainder
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Posts the buffered records (minus the trailing newline) and resets the buffer. */
    private void postJsonBuffer(String urlProducer, StringBuffer sb, String userAgent, String sessionCookie) {
        String data = sb.substring(0, sb.length() - 1);
        logger.info("start to post data to zx---------> " + data);
        HttpManager.getInfoLoadInstance().producerJsonToZX(urlProducer, "SESSION-TEST-COMPLETED-LOG", data, userAgent, sessionCookie);
        sb.setLength(0);
    }

    /**
     * Batch upload of DF_PZ flow report rows (2019-01-09 variant). Each map key
     * is "~=~"-separated fields; the first six fields plus the map value and the
     * rounded timestamp become one CSV row, flushed every 1000 rows.
     *
     * NOTE(review): the map's generic parameters were lost in the source dump;
     * Map&lt;String, ?&gt; is assumed here — confirm against callers.
     */
    public void dfPzFlowBatchStorage(Map<String, ?> pzMap) {
        StringBuffer sb = new StringBuffer();
        String time5 = generateTimeWithInterval();
        int nums = 0;
        for (String key : pzMap.keySet()) {
            try {
                String[] options = key.split("~=~");
                // BUG FIX: the original tested `options[i] != ""` — a reference
                // comparison that is true for virtually every split result, so the
                // emptiness guard never worked. It also indexed options[5] without
                // a length check. Both are fixed by the guard below.
                if (hasNoBlank(options, 6)) {
                    String aItem = options[0] + "," + options[1] + "," + options[2] + "," + options[3] + "," + options[4] + "," + options[5] + "," + pzMap.get(key) + "," + time5;
                    sb.append(aItem).append("\n");
                    nums++;
                    if (nums >= 1000) {
                        String data = sb.substring(0, sb.length() - 1);
                        logger.info("start to post data to dc---------> " + data);
                        System.out.println("start to post data to dc---------> " + data);
                        HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);
                        sb.setLength(0);
                        nums = 0;
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        try {
            if (nums != 0) {
                String data = sb.substring(0, sb.length() - 1);
                HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);
                sb.setLength(0);
                nums = 0;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** True when the array has at least {@code count} entries and the first {@code count} are non-null and non-empty. */
    private static boolean hasNoBlank(String[] options, int count) {
        if (options.length < count) {
            return false;
        }
        for (int i = 0; i < count; i++) {
            if (options[i] == null || options[i].isEmpty()) {
                return false;
            }
        }
        return true;
    }

    /**
     * Legacy batch upload to t_xa_df_pz_report_dt: fields 2, 1, 3 of each key
     * plus the map value and rounded timestamp become one tab-separated row,
     * flushed every 1000 rows with a final flush for the remainder.
     *
     * NOTE(review): generic parameters were lost in the source dump;
     * Map&lt;String, ?&gt; is assumed — confirm against callers.
     */
    public void dfPzBatchStorage(Map<String, ?> pzMap) {
        StringBuffer sb = new StringBuffer();
        String time5 = generateTimeWithInterval();
        int nums = 0;
        for (String key : pzMap.keySet()) {
            try {
                String[] options = key.split("~=~");
                if (options.length >= 4) { // guard added: original relied on catching AIOOBE for malformed keys
                    String aItem = options[2] + "\t" + options[1] + "\t" + options[3] + "\t" + pzMap.get(key) + "\t" + time5;
                    sb.append(aItem).append("\n");
                    nums++;
                    if (nums >= 1000) {
                        String data = sb.substring(0, sb.length() - 1);
                        HttpManager.getInfoLoadInstance().postToDataCenter(url, "t_xa_df_pz_report_dt", data);
                        sb.setLength(0);
                        nums = 0;
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        try {
            if (nums != 0) {
                String data = sb.substring(0, sb.length() - 1);
                HttpManager.getInfoLoadInstance().postToDataCenter(url, "t_xa_df_pz_report_dt", data);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Formats "now + 5 minutes" rounded down to a 5-minute boundary as
     * "yyyy-MM-dd HH:mm:ss".
     */
    private String generateTimeWithInterval() {
        final long fiveMinutesMillis = 300000L;
        long stamp5 = (System.currentTimeMillis() + fiveMinutesMillis) / fiveMinutesMillis * fiveMinutesMillis;
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        return df.format(stamp5);
    }

}
cn.ac.iie.cusflume.sink.CommonUtils.GetFilePathByTopicUtils; +import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil; +import cn.ac.iie.cusflume.sink.avroUtils.avroRecord.GetAvroRecordByTopicUtils; +import cn.ac.iie.cusflume.sink.avroUtils.avroSchema.GetAvroSchemaByTopicUtils; +import cn.ac.iie.cusflume.sink.bean.ac.Req.AcReqBody; +import cn.ac.iie.cusflume.sink.bean.ac.Res.AcResBody; +import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody; +import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import com.alibaba.fastjson.JSONObject; +import com.zdjizhi.utils.StringUtil; +import org.apache.avro.Schema; +import org.apache.avro.file.DataFileWriter; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericDatumWriter; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.http.*; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.HttpRequestRetryHandler; +import org.apache.http.client.config.CookieSpecs; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.ConnectTimeoutException; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.socket.PlainConnectionSocketFactory; +import org.apache.http.conn.ssl.*; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import 
org.apache.http.impl.client.LaxRedirectStrategy; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.protocol.HttpContext; +import org.apache.http.util.EntityUtils; +import org.apache.log4j.Logger; + +import javax.net.ssl.*; +import java.io.*; +import java.net.MalformedURLException; +import java.net.UnknownHostException; +import java.nio.charset.Charset; +import java.security.GeneralSecurityException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.text.SimpleDateFormat; +import java.util.*; + +//import org.apache.http.Header; + + +public class HttpManager { + // 创建httpclient连接池 + private PoolingHttpClientConnectionManager httpClientConnectionManager = null; + private CloseableHttpClient httpClient = null; + //类初始化时,自动实例化,饿汉单例模式 + private static final HttpManager manager = new HttpManager(); + private static Logger logger = Logger.getLogger(HttpManager.class); + + private static HashMap schemaHashMap = new HashMap();//用于存放Schema + + public static HttpManager getInfoLoadInstance() { + return manager; + } + + private HttpManager() { + //初始化httpClient + initHttpClient(); + System.setProperty("sun.net.inetaddr.ttl", "300"); + System.setProperty("sun.net.inetaddr.negative.ttl", "10"); + } + + /** + * 新版initHttpClient()---20200425注释 + */ + public void initHttpClient() { + try { + SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS); + X509TrustManager tm = new X509TrustManager() { + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { + } + + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + return null; + } + }; + ctx.init(null, new TrustManager[]{tm}, null); + SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, 
NoopHostnameVerifier.INSTANCE); + Registry socketFactoryRegistry = RegistryBuilder.create() + .register("http", PlainConnectionSocketFactory.INSTANCE) + .register("https", socketFactory) + .build(); + /** + * 新版httpClientConnectionManager + */ + //创建httpclient连接池 + httpClientConnectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry); + //设置连接池最大数量 + httpClientConnectionManager.setMaxTotal(2000); + //设置单个路由最大连接数量 + httpClientConnectionManager.setDefaultMaxPerRoute(400); + } catch (Exception e) { + e.printStackTrace(); + } + + httpClient = getHttpClient(); + } + + //请求重试机制 + HttpRequestRetryHandler myRetryHandler = new HttpRequestRetryHandler() { + @Override + public boolean retryRequest(IOException exception, int executionCount, HttpContext context) { + if (executionCount >= 2) { + // 超过两次则不再重试请求 + logger.error("http连接已重试" + executionCount + "次, 重试失败"); + return false; + } + if (exception instanceof InterruptedIOException) { + // Timeout + logger.info("InterruptedIOException, retry connection...");//新增-降低了上述的日志级别 + return true; + } + if (exception instanceof UnknownHostException) { + // Unknown host + return false; + } + if (exception instanceof ConnectTimeoutException) { + logger.error("ConnectTimeoutException, 重试连接。。。"); + // Connection refused + return true; + } + if (exception instanceof SSLException) { + // SSL handshake exception + return false; + } + HttpClientContext clientContext = HttpClientContext.adapt(context); + HttpRequest request = clientContext.getRequest(); + boolean idempotent = !(request instanceof HttpEntityEnclosingRequest); + if (idempotent) { + logger.error("request is idempotent, 重试连接。。。"); + // Retry if the request is considered idempotent + return true; + } + return false; + } + }; + + public CloseableHttpClient getHttpClient() { + // 创建全局的requestConfig + RequestConfig requestConfig = RequestConfig.custom() + .setConnectTimeout(3000) + .setSocketTimeout(3000) + //.setCookieSpec(CookieSpecs.BEST_MATCH) + .build(); + 
// 声明重定向策略对象 + LaxRedirectStrategy redirectStrategy = new LaxRedirectStrategy(); + + /** + * 原版 + */ + CloseableHttpClient httpClient = HttpClients.custom() + .setConnectionManager(httpClientConnectionManager) + .setDefaultRequestConfig(requestConfig) + .setRedirectStrategy(redirectStrategy) + .setRetryHandler(myRetryHandler) + .build(); + + return httpClient; + } + + public String getAddress() { + //ArrayList addrs = new ArrayList(); +// String[] addrs = "http://10.208.133.172:10080,http://10.208.133.173:10080".split(","); + String[] addrs = "http://10.208.133.172:10080,http://10.208.133.173:10080".split(","); + + Random rnd = new Random(); + Integer addrIndex = rnd.nextInt(addrs.length); + return addrs[addrIndex].trim(); + } + + /** + * 生产AVRO数据入ZX(批次)--数据不包含schema + * + * @param urlProducer + * @param topic + * @param schemaStr + * @param avroBatchList + * @param userAgent + * @param msgSessionCookie + * @return + */ + public ProResBody producerAvroToZX(String urlProducer, String topic, String schemaStr, List avroBatchList, String userAgent, String msgSessionCookie) { + CloseableHttpResponse response = null; + HttpPost httpPost = null; + urlProducer = urlProducer.trim(); + ProResBody proResBody = null; + byte[] resultArray = null;//用于存放avro的二进制流 +// JsonAvroConverter converter = new JsonAvroConverter(); + try { + // get schema + Schema schemaAvro = new Schema.Parser().parse(schemaStr); + ByteArrayOutputStream outAvro = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null); + DatumWriter fileWriter = new SpecificDatumWriter(schemaAvro); + outAvro.reset(); + for (String dataJson : avroBatchList) { + GenericRecord recordAvro = new GenericData.Record(schemaAvro); +// GenericRecord recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaAvro); +// recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaStr);//批量 + recordAvro = 
GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaAvro);//批量 + + logger.info("封装AvroRecord后数据为===>" + recordAvro.toString() + "<===封装AvroRecord后数据"); + + fileWriter.write(recordAvro, encoder); + } + + if (encoder != null) { + encoder.flush(); + } + + if (outAvro != null) { + resultArray = outAvro.toByteArray(); + + outAvro.flush(); + outAvro.close(); + outAvro = null; + } + + httpPost = new HttpPost(urlProducer); + // set header + httpPost.addHeader("User-Agent", userAgent); + httpPost.addHeader("Cookie", msgSessionCookie); + + try { + String md5Avro = MD5Utils.md5Encode(resultArray); + httpPost.addHeader("Checksum", md5Avro); + logger.info("请求端Checksum MD5 avro 加密为:" + md5Avro); + } catch (Exception e) { + logger.error("MD5Utils.md5Encode Method is error,this data is " + resultArray); + e.printStackTrace(); + } +// httpPost.addHeader("Content-Type", "binary/octet-stream"); +// httpPost.addHeader("Content-Type", "application/avro+json;charset=UTF-8"); + httpPost.addHeader("Content-Type", "binary/octet-stream"); + + //新增调度标签--------------20191210----------↓----------- + //注意批次发送这里不需要传FilePath头部 +// Map map = JSONObject.parseObject(avroBatchList.get(0), Map.class); +// String xTag = (String) map.get("x_tag"); + httpPost.addHeader("X-Tag", getXTAG(avroBatchList.get(0), topic));//批次,第一条,因为必然存在至少一条 +// httpPost.addHeader("FilePath", "-");//因为无文件,所以设置为空;//20191216新增注释:批次发送的不设置FilePath头部 + //新增调度标签--------------20191210----------↑----------- + + ByteArrayEntity payload = new ByteArrayEntity(resultArray); + payload.setContentEncoding("utf-8"); + //payload.setContentType("text/xml; charset=UTF-8"); + // anti avro + httpPost.setEntity(payload); + + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往下 + logger.info("加载内容字节数组长度: " + resultArray.length); + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往上 + + //执行请求 + response = httpClient.execute(httpPost); + try { + int statuCode = 
response.getStatusLine().getStatusCode(); + HttpEntity entity = response.getEntity(); + String ret = EntityUtils.toString(entity); + logger.info("返回的生产原始响应体String数据为:" + ret); + proResBody = JSONObject.parseObject(ret, ProResBody.class); + logger.info("封装入对象的生产响应体ProResBody为:" + JSONObject.toJSONString(proResBody)); + if (statuCode == 200) { + logger.info("数据加载成功, 返回码: " + statuCode); + AvroMonitorTimerTask.msgSuccessSum += avroBatchList.size(); + EntityUtils.consume(entity); + } else { + EntityUtils.consume(entity); +// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + records.get(0).toString());//单条测试用 + logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + avroBatchList);//测试用 + AvroMonitorTimerTask.msgFailedSum += avroBatchList.size(); +// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + "\n"); + } + } catch (Exception e) { + logger.error("Get response from ZX is error===>>>" + e + "<<<==="); + e.printStackTrace(); + } + } catch (MalformedURLException e) { + //执行URL url = new URL()的异常 + e.printStackTrace(); + } catch (ClientProtocolException e) { + // 执行httpClient.execute(httpGet)的异常 + e.printStackTrace(); + } catch (IOException e) { + // 执行httpClient.execute(httpGet)的异常 + logger.error("producerAvroToZX is IOException===>>>" + e + "<<<==="); + e.printStackTrace(); + } catch (Exception e) { + //handle response here... 
try other servers + logger.error("producerAvroToZX is Exception===>>>" + e + "<<<==="); + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + httpPost.abort(); + } + return proResBody; + } + + /** + * 生产AVRO数据入ZX(单条)--数据不包含schema + * 带返回值,用于同步请求(非多线程) + * + * @param urlProducer + * @param topic + * @param schemaStr + * @param dataJson + * @param userAgent + * @param msgSessionCookie + * @return + */ + public ProResBody producerAvroToZX(String urlProducer, String topic, String schemaStr, String dataJson, String userAgent, String msgSessionCookie) { + CloseableHttpResponse response = null; + HttpPost httpPost = null; + urlProducer = urlProducer.trim(); + ProResBody proResBody = null; + byte[] resultArray = null;//用于存放avro的二进制流 +// JsonAvroConverter converter = new JsonAvroConverter(); + try { + // get schema + +// Schema schemaDataAvro = new Schema.Parser().parse(schemaStr);//旧版-20191224移除,改用hm获取 + Schema schemaDataAvro = getSchemaFromHashMap(topic);//新版-20191224-使用缓存优化性能 + ByteArrayOutputStream outAvro = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null); + DatumWriter fileWriter = new SpecificDatumWriter(schemaDataAvro); + outAvro.reset(); + + GenericRecord recordAvro = new GenericData.Record(schemaDataAvro); +// GenericRecord recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaAvro); +// recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaStr);//单条 + recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaDataAvro);//单条 + logger.info("封装数据对象或文件标签AvroRecord后数据为===>" + recordAvro.toString() + "<===封装数据对象或文件标签AvroRecord后数据为"); + fileWriter.write(recordAvro, encoder); + + if (encoder != null) { + encoder.flush(); + } + + if (outAvro != null) { + resultArray = outAvro.toByteArray(); + + outAvro.flush(); + 
outAvro.close(); + outAvro = null; + } + logger.info("数据对象或文件标签resultArray长度为:" + resultArray.length); + + if (!("NTC-COLLECT-FILE-LOG".equals(topic))) { + //传入的resultArray为数据对象的数组,返回的是拼接的整体的数组 + resultArray = getTagRecordAndMergeAllArray(topic, dataJson, resultArray); + } else { + //说明为NTC-COLLECT-FILE-LOG,此时不需要进行d_tag的获取,因为数据里不包含,前面处理的resultArray就作为NTC-COLLECT-FILE-LOG的d_tag使用 + resultArray = alreadyGetFileTagRecordSoOnlyGetMergeAllArray(topic, resultArray); + } + + httpPost = new HttpPost(urlProducer); + // set header + httpPost.addHeader("User-Agent", userAgent); + + //一部状态回传需要此Cookie打开 +// if ("monitor-msg".equals(topic)) { +// httpPost.addHeader("Cookie", msgSessionCookie);//广东文件消息测试时加上Cookie会验证不通过,即那边显示为两个Cookie,不加Cookie则验证通过 +// } + + httpPost.addHeader("Cookie", msgSessionCookie);//不设置Cookie时,广东测试出现报错,打开广东的Cookie设置测试一下,经测试,可用 + + try { + String md5Avro = MD5Utils.md5Encode(resultArray); + httpPost.addHeader("Checksum", md5Avro); + logger.info("请求端Checksum MD5 avro 加密为:" + md5Avro); + } catch (Exception e) { + logger.error("MD5Utils.md5Encode Method is error,this data is " + resultArray); + e.printStackTrace(); + } +// httpPost.addHeader("Content-Type", "binary/octet-stream"); +// httpPost.addHeader("Content-Type", "application/avro+json;charset=UTF-8"); + httpPost.addHeader("Content-Type", "binary/octet-stream"); + + //设置调度标签要求的参数,文件消息需要设置FilePath--------------------↓--------------------20191210新增 + httpPost.addHeader("X-Tag", getXTAG(dataJson, topic));//从json中获取-单条 + + String filePathByTopicName = GetFilePathByTopicUtils.getFilePathByTopicName(dataJson, topic); + if (!("-".equals(filePathByTopicName)) && !("[\"null\"]".equals(filePathByTopicName))) { + httpPost.addHeader("FilePath", filePathByTopicName);//从json中获取 + } + //设置调度标签要求的参数,文件消息需要设置FilePath-------------------↑---------------------20191210新增 + + //尝试消除 Invalid cookie header: "Set-Cookie: SESSION=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/". 
Invalid 'expires' attribute: Thu, 01 Jan 1970 00:00:00 GMT + RequestConfig defaultConfig = RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build(); + httpPost.setConfig(defaultConfig); + + //测试用 + Header[] allHeaders = httpPost.getAllHeaders(); + for (int i = 0; i < allHeaders.length; i++) { + logger.info("allHeaders[" + i + "].getName()====>>>" + allHeaders[i].getName() + "##### allHeaders[" + i + "].getValue()=======>>>" + allHeaders[i].getValue()); + } + + ByteArrayEntity payload = new ByteArrayEntity(resultArray); + payload.setContentEncoding("utf-8"); + //payload.setContentType("text/xml; charset=UTF-8"); + // anti avro + httpPost.setEntity(payload); + + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往下 + logger.info("最终加载内容字节数组长度: " + resultArray.length); + //%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往上 + + //执行请求 + response = httpClient.execute(httpPost); + try { + int statuCode = response.getStatusLine().getStatusCode(); + HttpEntity entity = response.getEntity(); + String ret = EntityUtils.toString(entity); + logger.info("返回的生产原始响应体String数据为:" + ret); + proResBody = JSONObject.parseObject(ret, ProResBody.class); + logger.info("封装入对象的生产响应体ProResBody为:" + JSONObject.toJSONString(proResBody)); + if (statuCode == 200) { + logger.info("数据加载成功, 返回码: " + statuCode); + AvroMonitorTimerTask.msgSuccessSum++; + EntityUtils.consume(entity); + } else { + EntityUtils.consume(entity); +// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + records.get(0).toString());//单条测试用 + logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + dataJson);//测试用 + AvroMonitorTimerTask.msgFailedSum++; +// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + "\n"); + } + } catch (Exception e) { + logger.error("Get response from ZX is error===>>>" + e + "<<<==="); + e.printStackTrace(); + } + } catch (MalformedURLException e) { + //执行URL url = new URL()的异常 + 
e.printStackTrace(); + } catch (ClientProtocolException e) { + // 执行httpClient.execute(httpGet)的异常 + e.printStackTrace(); + } catch (IOException e) { + // 执行httpClient.execute(httpGet)的异常 + logger.error("producerAvroToZX is IOException===>>>" + e + "<<<==="); + e.printStackTrace(); + } catch (Exception e) { + //handle response here... try other servers + logger.error("producerAvroToZX is Exception===>>>" + e + "<<<==="); + logger.error("message is " + dataJson); + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + httpPost.abort(); + } + return proResBody; + } + + /** + * 使用hm缓存Schema + * + * @param topic + * @return + */ + private Schema getSchemaFromHashMap(String topic) { + if (schemaHashMap.containsKey(topic)) { + return schemaHashMap.get(topic); + } else { + String schemaStr = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topic); + Schema parseSchema = new Schema.Parser().parse(schemaStr); + schemaHashMap.put(topic, parseSchema); + return parseSchema; + } + } + + /** + * 获取X-Tag用于设置请求头 + * + * @param dataJson + * @return + */ + private String getXTAG(String dataJson, String topic) { + if ("monitor-msg".equals(topic)) { + return RealtimeCountConfig.MONITOR_NOFILE_MSG_X_TAG; + } + + Map map = JSONObject.parseObject(dataJson, Map.class); +// String xTag = (String) map.get("x_tag"); + Object x_tag = map.get("x_tag"); + if (x_tag != null) { + String xTag = x_tag.toString(); + if (StringUtil.isNotBlank(xTag)) { + return xTag; + } else { + return "-"; + } + } else { + return "-"; + } + } + + //我自己修改的部分-20190910-JSON版本 + public void producerJsonToZX(String urlProducer, String topic, String data, String userAgent, String MsgSessionCookie) { + CloseableHttpResponse response = null; + HttpPost httpPost = null; + urlProducer = urlProducer.trim(); + ProResBody proResBody; + try { + httpPost = new HttpPost(urlProducer); +// 
httpPost.addHeader("Connection","keep-alive");//可能影响入库 +// httpPost.addHeader("Accept-Encoding", "gzip, deflate");//可能影响入库 + //httpPost.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36"); + httpPost.addHeader("User-Agent", userAgent); + httpPost.addHeader("Cookie", MsgSessionCookie); + try { + httpPost.addHeader("Checksum", MD5Utils.md5Encode(data)); + logger.info("请求端Checksum MD5 加密为:" + MD5Utils.md5Encode(data)); + } catch (Exception e) { + logger.error("MD5Utils.md5Encode Method is error,this data is " + data); + e.printStackTrace(); + } + httpPost.addHeader("Content-Type", "binary/octet-stream"); + httpPost.addHeader("Topic", topic); + StringEntity payload = new StringEntity(data, Charset.forName("utf-8"));//原版 +// StringEntity payload = new StringEntity(data);//自己修改测试版20190219 + //payload.setContentType("text/xml; charset=UTF-8"); + payload.setContentEncoding("utf-8");//原版,测试中暂不使用20190219 + httpPost.setEntity(payload); + logger.info("加载内容: " + data); + //执行请求 + response = httpClient.execute(httpPost); + try { + int statuCode = response.getStatusLine().getStatusCode(); + HttpEntity entity = response.getEntity(); + String ret = EntityUtils.toString(entity); + logger.info("原本的返回的响应体String数据为:" + ret); +// ret = ret.replace("\\\"", "\"") +// .replace("\"{", "{") +// .replace("}\"}", "}}");//注意这里和AC时不一样 + proResBody = JSONObject.parseObject(ret, ProResBody.class); + logger.info("封装入对象的响应体ProResBody:" + proResBody.toString()); + if (statuCode == 200) { + logger.info("数据加载成功, 返回码: " + statuCode); + EntityUtils.consume(entity); + } else { + EntityUtils.consume(entity); + logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + data); + } + } catch (Exception e) { + e.printStackTrace(); + } + + } catch (MalformedURLException e) { + //执行URL url = new URL()的异常 + e.printStackTrace(); + } catch (ClientProtocolException e) { + // 执行httpClient.execute(httpGet)的异常 + 
e.printStackTrace(); + } catch (IOException e) { + // 执行httpClient.execute(httpGet)的异常 + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + httpPost.abort(); + } + } + + /** + * ZX认证,获取sessionId + * + * @param checkMsgUrl + * @param userAgent + * @param xTag + * @param requestType + * @return + */ + public AcResBody checkAcByZx(String checkMsgUrl, String userAgent, String xTag, int requestType) { + CloseableHttpResponse response = null; + HttpPost httpPost = null; + AcResBody acResBody = null; + String url = checkMsgUrl.trim(); + + //bean类型 + AcReqBody acReqBody = new AcReqBody(); + acReqBody.setRequestType(requestType); + String data = JSONObject.toJSONString(acReqBody);//data为json版本 + + try { + httpPost = new HttpPost(url); + httpPost.addHeader("User-Agent", userAgent); + httpPost.addHeader("X-Tag", xTag);//根据最新文档,目前已经不需要此头-20191217 + httpPost.addHeader("Content-Type", "application/json"); + StringEntity payload = new StringEntity(data, Charset.forName("utf-8")); + //payload.setContentType("text/xml; charset=UTF-8"); + payload.setContentEncoding("utf-8"); + httpPost.setEntity(payload); + logger.info("入库验证加载内容请求体: " + data); + //执行请求 + response = httpClient.execute(httpPost); + try { + int statuCode = response.getStatusLine().getStatusCode(); + HttpEntity entity = response.getEntity(); + String ret = EntityUtils.toString(entity); + logger.info("返回的验证原始响应体的String数据===>" + ret);//{"code":200,"msg":"请求成功","data":{"status":0,"reason":"审核通过"}} + acResBody = JSONObject.parseObject(ret, AcResBody.class); + + Header[] allHeaders = response.getAllHeaders(); + for (Header header : allHeaders) { + if ("Set-Cookie".equals(header.getName())) { + acResBody.setSessionId(header.getValue().split(";")[0]); + } + } + + logger.info("封装的验证响应体对象AcResBody数据为===>" + acResBody.toString()); + if (statuCode == 200) { + logger.info("数据加载成功, 返回码: " + statuCode); + EntityUtils.consume(entity); + 
} else { + EntityUtils.consume(entity); + logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + data); + } + } catch (Exception e) { + e.printStackTrace(); + } + } catch (MalformedURLException e) { + //执行URL url = new URL()的异常 + e.printStackTrace(); + } catch (ClientProtocolException e) { + // 执行httpClient.execute(httpGet)的异常 + e.printStackTrace(); + } catch (IOException e) { + // 执行httpClient.execute(httpGet)的异常 + e.printStackTrace(); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + httpPost.abort(); + } + return acResBody; + } + + //老版 + public void postToDataCenter(String url, String topic, String data) { + CloseableHttpResponse response = null; + HttpPost httpPost = null; + url = url.trim(); + try { + httpPost = new HttpPost(url); +// httpPost.addHeader("Connection","keep-alive");//可能影响入库 +// httpPost.addHeader("Accept-Encoding", "gzip, deflate");//可能影响入库 + //httpPost.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36"); + +// httpPost.addHeader("User", RealtimeCountConfig.DATACENTER_USERNAME); +// httpPost.addHeader("Password", RealtimeCountConfig.DATACENTER_PASSWORD); + httpPost.addHeader("Topic", topic); + httpPost.addHeader("Schema-Version", "2");//测试新增20190219,原版没有 + httpPost.addHeader("Format", "csv"); +// httpPost.addHeader("Row-Split", "\\n"); +// httpPost.addHeader("Field-Split", "\\t"); + httpPost.addHeader("Row-Split", "\\n"); + httpPost.addHeader("Field-Split", ","); +// StringEntity payload = new StringEntity(data, Charset.forName("utf-8"));//原版 + StringEntity payload = new StringEntity(data);//自己修改测试版20190219 + //payload.setContentType("text/xml; charset=UTF-8"); +// payload.setContentEncoding("utf-8");//原版,测试中暂不使用20190219 + httpPost.setEntity(payload); + logger.info("数据中心加载内容: " + data); + //执行请求 + response = httpClient.execute(httpPost); + try { + int statuCode 
= response.getStatusLine().getStatusCode(); + //Header[] headers = response.getAllHeaders(); + //logger.info("<>:"); + //System.out.println("<>:"); + //for(int i=0; i avroBatchList) { + long countThisListStr = 0L; + for (String str : avroBatchList) { + countThisListStr += str.getBytes().length; + } + return countThisListStr; + } + + /** + * 获取数据中的日志标签并将所有相关数据字节数组化后拼接返回 + * + * @param topic + * @param dataJson + * @param dataResultArray + * @return + */ + private byte[] getTagRecordAndMergeAllArray(String topic, String dataJson, byte[] dataResultArray) { + byte[] dTagByteArray = null;//用于存放数据标签的avro的二进制流 + String tagTopicName = "log-tag";//除NTC-COLLECT-FILE-LOG外都是log-tag,因为都是在文件消息内部的d_tag获取的 + if ("monitor-msg".equals(topic)) { + tagTopicName = "status-tag"; + } + try { + // get schema + //---------20191224移除,改用hm获取-----------↓------ +// String schemaTag = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(tagTopicName);//单条-"log-tag"或者"status-tag" +// Schema schemaDTagAvro = new Schema.Parser().parse(schemaTag);//旧版 + //---------20191224移除-----------↑------ + + Schema schemaDTagAvro = getSchemaFromHashMap(tagTopicName);//新版-20191224-使用缓存优化性能 + ByteArrayOutputStream outAvro = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null); + DatumWriter fileWriter = new SpecificDatumWriter(schemaDTagAvro); + outAvro.reset(); + + GenericRecord recordAvro = new GenericData.Record(schemaDTagAvro); + + //以下为临时,为了增加一个状态的临时d_tag,因为d_tag前端目前(20191222)传不了-----------↓-------------- + if (!("monitor-msg".equals(topic))) { + //说明当前消息不是状态回传 + Map map = JSONObject.parseObject(dataJson, Map.class); + Object d_tag = map.get("d_tag"); + if (d_tag != null) { + dataJson = d_tag.toString();//获取消息上的d_tag用于序列化成数据标签 + } + } else { + //说明为状态,目前前端没有,所以先自己创建一个状态的d_tag + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + String timeFormat = sdf.format(new Date(System.currentTimeMillis()));//格式2019-11-18 19:56:37 + dataJson = 
"{\"tag_version\":\"1.0\",\"data_subtype\":16387,\"data_type\":4,\"producer_id\":24832,\"timestamp\":\"yyyy-MM-dd HH:mm:ss\"}"; + Map map = JSONObject.parseObject(dataJson, Map.class); + map.put("timestamp", timeFormat); + dataJson = JSONObject.toJSONString(map); + } + //以下为临时,为了增加一个状态的临时d_tag,因为d_tag前端目前(20191222)传不了-----------↑-------------- + +// recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(tagTopicName, recordAvro, dataJson, schemaTag);//单条-"log-tag"或者"status-tag" + recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(tagTopicName, recordAvro, dataJson, schemaDTagAvro);//单条-"log-tag"或者"status-tag" + logger.info("封装日志标签或状态标签AvroRecord后数据为===>" + recordAvro.toString() + "<===封装日志标签或状态标签AvroRecord后数据为"); + fileWriter.write(recordAvro, encoder); + + if (encoder != null) { + encoder.flush(); + } + + if (outAvro != null) { + dTagByteArray = outAvro.toByteArray(); + + outAvro.flush(); + outAvro.close(); + outAvro = null; + } + logger.info("日志标签dTagByteArray长度为:" + dTagByteArray.length); + + //获取数据对象类型编码,写成字节数组 + byte[] dataObjectTypeByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic), 2);//长度为2 + + //将数据标签长度,写成字节数组 + byte[] dataTagLengthByteArray = sumHex(dTagByteArray.length, 2);//长度为2 + + //数据总长度(16+数据标签长度+数据对象长度),写成字节数组 + byte[] dataSumLengthByteArray = sumHex(16 + dTagByteArray.length + dataResultArray.length, 4);//长度为4 + + //数据标签SchemaID,写成字节数组 + byte[] dataTagSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic), 4);//长度为4 + + //数据对象SchemaID.写成字节数组 + byte[] dataObjectSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic), 4);//长度为4 + + //拼接这些字节数组并返回,顺序:数据对象类型+数据标签长度+数据总长度+数据标签SchemaID+数据对象SchemaID+数据标签数组+数据对象数组 + return byteMerger(dataObjectTypeByteArray, dataTagLengthByteArray, dataSumLengthByteArray, dataTagSchemaIDByteArray, dataObjectSchemaIDByteArray, dTagByteArray, dataResultArray); + } catch 
(Exception e) { + logger.error("HttpManager getTagRecordAndMergeAllArray is error===>" + e + "<==="); + e.printStackTrace(); + return dataResultArray;//报错时返回数据对象数组,即本身不变 + } + } + + /** + * 由于已经获取了文件标签,所以只需要将所有相关数据字节数组化后拼接返回 + */ + private byte[] alreadyGetFileTagRecordSoOnlyGetMergeAllArray(String topic, byte[] dataResultArray) { + try { + //获取数据对象类型编码,写成字节数组 + byte[] dataObjectTypeByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic), 2);//长度为2 + + //将数据标签长度,写成字节数组 + byte[] dataTagLengthByteArray = sumHex(dataResultArray.length, 2);//长度为2,由于是NTC-COLLECT-FILE-LOG.传来的数据对象其实就是数据标签 + + //数据总长度(16+数据标签长度+数据对象长度),写成字节数组 + byte[] dataSumLengthByteArray = sumHex(16 + dataResultArray.length, 4);//长度为4 + + //数据标签SchemaID,写成字节数组 + byte[] dataTagSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic), 4);//长度为4 + + //数据对象SchemaID.写成字节数组 + byte[] dataObjectSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic), 4);//长度为4 + + //拼接这些字节数组,顺序:数据对象类型+数据标签长度+数据总长度+数据标签SchemaID+数据对象SchemaID+数据标签数组+数据对象数组 + return byteMerger(dataObjectTypeByteArray, dataTagLengthByteArray, dataSumLengthByteArray, dataTagSchemaIDByteArray, dataObjectSchemaIDByteArray, dataResultArray); + } catch (Exception e) { + logger.error("HttpManager alreadyGetFileTagRecordSoOnlyGetMergeAllArray is error===>" + e + "<==="); + e.printStackTrace(); + return dataResultArray;//报错时返回数据对象数组,即本身不变 + } + } + + /** + * 描述:把一个整数转为指定位数byte数组 + * + * @param tu5 数字 + * @param length 位数 + * @return byte[] + */ + public static byte[] sumHex(int tu5, int length) { + byte[] bytes5 = new byte[length]; + while (length > 0) { + length--; +// bytes5[length] = (byte)(tu5 >> 8*(bytes5.length-length-1) & 0xFF);//大端 + bytes5[bytes5.length - length - 1] = (byte) (tu5 >> 8 * (bytes5.length - length - 1) & 0xFF);//小端 + } + return bytes5; + } + + /** + * 合并字节数组 + * + * @param byteList + * @return + */ + private 
static byte[] byteMerger(byte[]... byteList) { + int lengthByte = 0; + for (int i = 0; i < byteList.length; i++) { + lengthByte += byteList[i].length; + } + byte[] allByte = new byte[lengthByte]; + int countLength = 0; + for (int i = 0; i < byteList.length; i++) { + byte[] b = byteList[i]; + System.arraycopy(b, 0, allByte, countLength, b.length); + countLength += b.length; + } + return allByte; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/MD5Utils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/MD5Utils.java new file mode 100644 index 0000000..6ccfc60 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/MD5Utils.java @@ -0,0 +1,102 @@ +package cn.ac.iie.cusflume.sink.avroUtils; + +import org.apache.log4j.Logger; + +import java.security.MessageDigest; + +/** + * 描述:转换MD5工具类 + * + * @author Administrator + * @create 2018-08-13 15:11 + */ +public class MD5Utils { + private static Logger logger = Logger.getLogger(MD5Utils.class); + + // public static String md5Encode(String msg) throws Exception { + public static String md5Encode(byte[] msgBytes) throws Exception { + try { +// byte[] msgBytes = msg.getBytes("utf-8"); + /* + * 声明使用Md5算法,获得MessaDigest对象 + */ + MessageDigest md5 = MessageDigest.getInstance("MD5"); + /* + * 使用指定的字节更新摘要 + */ + md5.update(msgBytes); + /* + * 完成哈希计算,获得密文 + */ + byte[] digest = md5.digest(); + /* + * 以上两行代码等同于 + * byte[] digest = md5.digest(msgBytes); + */ + return byteArr2hexString(digest); + } catch (Exception e) { + logger.error("Error in conversion MD5! 
This msgBytes is " + msgBytes); +// e.printStackTrace(); + return ""; + } + } + + public static String md5Encode(String msg) throws Exception { + try { + byte[] msgBytes = msg.getBytes("utf-8"); + /* + * 声明使用Md5算法,获得MessaDigest对象 + */ + MessageDigest md5 = MessageDigest.getInstance("MD5"); + /* + * 使用指定的字节更新摘要 + */ + md5.update(msgBytes); + /* + * 完成哈希计算,获得密文 + */ + byte[] digest = md5.digest(); + /* + * 以上两行代码等同于 + * byte[] digest = md5.digest(msgBytes); + */ + return byteArr2hexString(digest); + } catch (Exception e) { + logger.error("Error in conversion MD5! This msg is " + msg); +// e.printStackTrace(); + return ""; + } + } + + /** + * 将byte数组转化为16进制字符串形式 + * + * @param bys 字节数组 + * @return 字符串 + */ + public static String byteArr2hexString(byte[] bys) { + StringBuffer hexVal = new StringBuffer(); + int val = 0; + for (byte by : bys) { + //将byte转化为int 如果byte是一个负数就必须要和16进制的0xff做一次与运算 + val = ((int) by) & 0xff; + if (val < 16) { + hexVal.append("0"); + } + hexVal.append(Integer.toHexString(val)); + } + + return hexVal.toString(); + + } + + +// public static void main(String[] args) { +// try { +// String bbb = MD5Utils.md5Encode("aaa"); +// System.out.println(bbb); +// } catch (Exception e) { +// e.printStackTrace(); +// } +// } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RealTimeStatus.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RealTimeStatus.java new file mode 100644 index 0000000..150952b --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RealTimeStatus.java @@ -0,0 +1,287 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean; + +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class RealTimeStatus extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { 
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"RealTimeStatus\",\"fields\":[{\"name\":\"id\",\"type\":\"string\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"alarms\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtAlarm\",\"fields\":[{\"name\":\"level\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"},{\"name\":\"type\",\"type\":\"string\"},{\"name\":\"detail\",\"type\":\"string\"}]}}},{\"name\":\"stats\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtStatValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"content\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtNameValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}]}}}]}}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public CharSequence id; + @Deprecated public CharSequence status; + @Deprecated public java.util.List alarms; + @Deprecated public java.util.List stats; + + /** + * Default constructor. + */ + public RealTimeStatus() {} + + /** + * All-args constructor. + */ + public RealTimeStatus(CharSequence id, CharSequence status, java.util.List alarms, java.util.List stats) { + this.id = id; + this.status = status; + this.alarms = alarms; + this.stats = stats; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return id; + case 1: return status; + case 2: return alarms; + case 3: return stats; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. 
+ @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: id = (CharSequence)value$; break; + case 1: status = (CharSequence)value$; break; + case 2: alarms = (java.util.List)value$; break; + case 3: stats = (java.util.List)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'id' field. + */ + public CharSequence getId() { + return id; + } + + /** + * Sets the value of the 'id' field. + * @param value the value to set. + */ + public void setId(CharSequence value) { + this.id = value; + } + + /** + * Gets the value of the 'status' field. + */ + public CharSequence getStatus() { + return status; + } + + /** + * Sets the value of the 'status' field. + * @param value the value to set. + */ + public void setStatus(CharSequence value) { + this.status = value; + } + + /** + * Gets the value of the 'alarms' field. + */ + public java.util.List getAlarms() { + return alarms; + } + + /** + * Sets the value of the 'alarms' field. + * @param value the value to set. + */ + public void setAlarms(java.util.List value) { + this.alarms = value; + } + + /** + * Gets the value of the 'stats' field. + */ + public java.util.List getStats() { + return stats; + } + + /** + * Sets the value of the 'stats' field. + * @param value the value to set. + */ + public void setStats(java.util.List value) { + this.stats = value; + } + + /** Creates a new RealTimeStatus RecordBuilder */ + public static Builder newBuilder() { + return new Builder(); + } + + /** Creates a new RealTimeStatus RecordBuilder by copying an existing Builder */ + public static Builder newBuilder(Builder other) { + return new Builder(other); + } + + /** Creates a new RealTimeStatus RecordBuilder by copying an existing RealTimeStatus instance */ + public static Builder newBuilder(RealTimeStatus other) { + return new Builder(other); + } + + /** + * RecordBuilder for RealTimeStatus instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence id; + private CharSequence status; + private java.util.List alarms; + private java.util.List stats; + + /** Creates a new Builder */ + private Builder() { + super(RealTimeStatus.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing RealTimeStatus instance */ + private Builder(RealTimeStatus other) { + super(RealTimeStatus.SCHEMA$); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.status)) { + this.status = data().deepCopy(fields()[1].schema(), other.status); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.alarms)) { + this.alarms = data().deepCopy(fields()[2].schema(), other.alarms); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.stats)) { + this.stats = data().deepCopy(fields()[3].schema(), other.stats); + fieldSetFlags()[3] = true; + } + } + + /** Gets the value of the 'id' field */ + public CharSequence getId() { + return id; + } + + /** Sets the value of the 'id' field */ + public Builder setId(CharSequence value) { + validate(fields()[0], value); + this.id = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'id' field has been set */ + public boolean hasId() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'id' field */ + public Builder clearId() { + id = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'status' field */ + public CharSequence getStatus() { + return status; + } + + /** Sets the value of the 'status' field */ + public Builder setStatus(CharSequence value) { + validate(fields()[1], value); + this.status 
= value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'status' field has been set */ + public boolean hasStatus() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'status' field */ + public Builder clearStatus() { + status = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'alarms' field */ + public java.util.List getAlarms() { + return alarms; + } + + /** Sets the value of the 'alarms' field */ + public Builder setAlarms(java.util.List value) { + validate(fields()[2], value); + this.alarms = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'alarms' field has been set */ + public boolean hasAlarms() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'alarms' field */ + public Builder clearAlarms() { + alarms = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'stats' field */ + public java.util.List getStats() { + return stats; + } + + /** Sets the value of the 'stats' field */ + public Builder setStats(java.util.List value) { + validate(fields()[3], value); + this.stats = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'stats' field has been set */ + public boolean hasStats() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'stats' field */ + public Builder clearStats() { + stats = null; + fieldSetFlags()[3] = false; + return this; + } + + @Override + public RealTimeStatus build() { + try { + RealTimeStatus record = new RealTimeStatus(); + record.id = fieldSetFlags()[0] ? this.id : (CharSequence) defaultValue(fields()[0]); + record.status = fieldSetFlags()[1] ? this.status : (CharSequence) defaultValue(fields()[1]); + record.alarms = fieldSetFlags()[2] ? this.alarms : (java.util.List) defaultValue(fields()[2]); + record.stats = fieldSetFlags()[3] ? 
this.stats : (java.util.List) defaultValue(fields()[3]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtAlarm.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtAlarm.java new file mode 100644 index 0000000..4e449db --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtAlarm.java @@ -0,0 +1,287 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean; + +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class RtAlarm extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"RtAlarm\",\"fields\":[{\"name\":\"level\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"},{\"name\":\"type\",\"type\":\"string\"},{\"name\":\"detail\",\"type\":\"string\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public CharSequence level; + @Deprecated public CharSequence time; + @Deprecated public CharSequence type; + @Deprecated public CharSequence detail; + + /** + * Default constructor. + */ + public RtAlarm() {} + + /** + * All-args constructor. + */ + public RtAlarm(CharSequence level, CharSequence time, CharSequence type, CharSequence detail) { + this.level = level; + this.time = time; + this.type = type; + this.detail = detail; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. 
+ public Object get(int field$) { + switch (field$) { + case 0: return level; + case 1: return time; + case 2: return type; + case 3: return detail; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: level = (CharSequence)value$; break; + case 1: time = (CharSequence)value$; break; + case 2: type = (CharSequence)value$; break; + case 3: detail = (CharSequence)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'level' field. + */ + public CharSequence getLevel() { + return level; + } + + /** + * Sets the value of the 'level' field. + * @param value the value to set. + */ + public void setLevel(CharSequence value) { + this.level = value; + } + + /** + * Gets the value of the 'time' field. + */ + public CharSequence getTime() { + return time; + } + + /** + * Sets the value of the 'time' field. + * @param value the value to set. + */ + public void setTime(CharSequence value) { + this.time = value; + } + + /** + * Gets the value of the 'type' field. + */ + public CharSequence getType() { + return type; + } + + /** + * Sets the value of the 'type' field. + * @param value the value to set. + */ + public void setType(CharSequence value) { + this.type = value; + } + + /** + * Gets the value of the 'detail' field. + */ + public CharSequence getDetail() { + return detail; + } + + /** + * Sets the value of the 'detail' field. + * @param value the value to set. 
+ */ + public void setDetail(CharSequence value) { + this.detail = value; + } + + /** Creates a new RtAlarm RecordBuilder */ + public static Builder newBuilder() { + return new Builder(); + } + + /** Creates a new RtAlarm RecordBuilder by copying an existing Builder */ + public static Builder newBuilder(Builder other) { + return new Builder(other); + } + + /** Creates a new RtAlarm RecordBuilder by copying an existing RtAlarm instance */ + public static Builder newBuilder(RtAlarm other) { + return new Builder(other); + } + + /** + * RecordBuilder for RtAlarm instances. + */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence level; + private CharSequence time; + private CharSequence type; + private CharSequence detail; + + /** Creates a new Builder */ + private Builder() { + super(RtAlarm.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing RtAlarm instance */ + private Builder(RtAlarm other) { + super(RtAlarm.SCHEMA$); + if (isValidValue(fields()[0], other.level)) { + this.level = data().deepCopy(fields()[0].schema(), other.level); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.time)) { + this.time = data().deepCopy(fields()[1].schema(), other.time); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.type)) { + this.type = data().deepCopy(fields()[2].schema(), other.type); + fieldSetFlags()[2] = true; + } + if (isValidValue(fields()[3], other.detail)) { + this.detail = data().deepCopy(fields()[3].schema(), other.detail); + fieldSetFlags()[3] = true; + } + } + + /** Gets the value of the 'level' field */ + public CharSequence getLevel() { + return level; + } + + /** Sets the value of the 'level' field */ + public Builder setLevel(CharSequence value) { + validate(fields()[0], value); + 
this.level = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'level' field has been set */ + public boolean hasLevel() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'level' field */ + public Builder clearLevel() { + level = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'time' field */ + public CharSequence getTime() { + return time; + } + + /** Sets the value of the 'time' field */ + public Builder setTime(CharSequence value) { + validate(fields()[1], value); + this.time = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'time' field has been set */ + public boolean hasTime() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'time' field */ + public Builder clearTime() { + time = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'type' field */ + public CharSequence getType() { + return type; + } + + /** Sets the value of the 'type' field */ + public Builder setType(CharSequence value) { + validate(fields()[2], value); + this.type = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'type' field has been set */ + public boolean hasType() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'type' field */ + public Builder clearType() { + type = null; + fieldSetFlags()[2] = false; + return this; + } + + /** Gets the value of the 'detail' field */ + public CharSequence getDetail() { + return detail; + } + + /** Sets the value of the 'detail' field */ + public Builder setDetail(CharSequence value) { + validate(fields()[3], value); + this.detail = value; + fieldSetFlags()[3] = true; + return this; + } + + /** Checks whether the 'detail' field has been set */ + public boolean hasDetail() { + return fieldSetFlags()[3]; + } + + /** Clears the value of the 'detail' field */ + public Builder clearDetail() { + detail = null; + fieldSetFlags()[3] = false; 
+ return this; + } + + @Override + public RtAlarm build() { + try { + RtAlarm record = new RtAlarm(); + record.level = fieldSetFlags()[0] ? this.level : (CharSequence) defaultValue(fields()[0]); + record.time = fieldSetFlags()[1] ? this.time : (CharSequence) defaultValue(fields()[1]); + record.type = fieldSetFlags()[2] ? this.type : (CharSequence) defaultValue(fields()[2]); + record.detail = fieldSetFlags()[3] ? this.detail : (CharSequence) defaultValue(fields()[3]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtNameValue.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtNameValue.java new file mode 100644 index 0000000..53cc6bf --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtNameValue.java @@ -0,0 +1,187 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean; + +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class RtNameValue extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"RtNameValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public CharSequence name; + @Deprecated public CharSequence value; + + /** + * Default constructor. + */ + public RtNameValue() {} + + /** + * All-args constructor. 
+ */ + public RtNameValue(CharSequence name, CharSequence value) { + this.name = name; + this.value = value; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return value; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: value = (CharSequence)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'value' field. + */ + public CharSequence getValue() { + return value; + } + + /** + * Sets the value of the 'value' field. + * @param value the value to set. + */ + public void setValue(CharSequence value) { + this.value = value; + } + + /** Creates a new RtNameValue RecordBuilder */ + public static Builder newBuilder() { + return new Builder(); + } + + /** Creates a new RtNameValue RecordBuilder by copying an existing Builder */ + public static Builder newBuilder(Builder other) { + return new Builder(other); + } + + /** Creates a new RtNameValue RecordBuilder by copying an existing RtNameValue instance */ + public static Builder newBuilder(RtNameValue other) { + return new Builder(other); + } + + /** + * RecordBuilder for RtNameValue instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private CharSequence value; + + /** Creates a new Builder */ + private Builder() { + super(RtNameValue.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing RtNameValue instance */ + private Builder(RtNameValue other) { + super(RtNameValue.SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.value)) { + this.value = data().deepCopy(fields()[1].schema(), other.value); + fieldSetFlags()[1] = true; + } + } + + /** Gets the value of the 'name' field */ + public CharSequence getName() { + return name; + } + + /** Sets the value of the 'name' field */ + public Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'name' field has been set */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'name' field */ + public Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'value' field */ + public CharSequence getValue() { + return value; + } + + /** Sets the value of the 'value' field */ + public Builder setValue(CharSequence value) { + validate(fields()[1], value); + this.value = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'value' field has been set */ + public boolean hasValue() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'value' field */ + public Builder clearValue() { + value = null; + fieldSetFlags()[1] = false; + return this; + } + + @Override + public 
RtNameValue build() { + try { + RtNameValue record = new RtNameValue(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.value = fieldSetFlags()[1] ? this.value : (CharSequence) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtStatValue.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtStatValue.java new file mode 100644 index 0000000..7d41fe5 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/RtStatValue.java @@ -0,0 +1,187 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean; + +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class RtStatValue extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"RtStatValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"content\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtNameValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}]}}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public CharSequence name; + @Deprecated public java.util.List content; + + /** + * Default constructor. + */ + public RtStatValue() {} + + /** + * All-args constructor. + */ + public RtStatValue(CharSequence name, java.util.List content) { + this.name = name; + this.content = content; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. 
Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return name; + case 1: return content; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. + @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: name = (CharSequence)value$; break; + case 1: content = (java.util.List)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'name' field. + */ + public CharSequence getName() { + return name; + } + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(CharSequence value) { + this.name = value; + } + + /** + * Gets the value of the 'content' field. + */ + public java.util.List getContent() { + return content; + } + + /** + * Sets the value of the 'content' field. + * @param value the value to set. + */ + public void setContent(java.util.List value) { + this.content = value; + } + + /** Creates a new RtStatValue RecordBuilder */ + public static Builder newBuilder() { + return new Builder(); + } + + /** Creates a new RtStatValue RecordBuilder by copying an existing Builder */ + public static Builder newBuilder(Builder other) { + return new Builder(other); + } + + /** Creates a new RtStatValue RecordBuilder by copying an existing RtStatValue instance */ + public static Builder newBuilder(RtStatValue other) { + return new Builder(other); + } + + /** + * RecordBuilder for RtStatValue instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence name; + private java.util.List content; + + /** Creates a new Builder */ + private Builder() { + super(RtStatValue.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing RtStatValue instance */ + private Builder(RtStatValue other) { + super(RtStatValue.SCHEMA$); + if (isValidValue(fields()[0], other.name)) { + this.name = data().deepCopy(fields()[0].schema(), other.name); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.content)) { + this.content = data().deepCopy(fields()[1].schema(), other.content); + fieldSetFlags()[1] = true; + } + } + + /** Gets the value of the 'name' field */ + public CharSequence getName() { + return name; + } + + /** Sets the value of the 'name' field */ + public Builder setName(CharSequence value) { + validate(fields()[0], value); + this.name = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'name' field has been set */ + public boolean hasName() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'name' field */ + public Builder clearName() { + name = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'content' field */ + public java.util.List getContent() { + return content; + } + + /** Sets the value of the 'content' field */ + public Builder setContent(java.util.List value) { + validate(fields()[1], value); + this.content = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'content' field has been set */ + public boolean hasContent() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'content' field */ + public Builder clearContent() { + content = null; + fieldSetFlags()[1] = false; + return this; 
+ } + + @Override + public RtStatValue build() { + try { + RtStatValue record = new RtStatValue(); + record.name = fieldSetFlags()[0] ? this.name : (CharSequence) defaultValue(fields()[0]); + record.content = fieldSetFlags()[1] ? this.content : (java.util.List) defaultValue(fields()[1]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/SystemStatus.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/SystemStatus.java new file mode 100644 index 0000000..c4266e0 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroMonitorBean/SystemStatus.java @@ -0,0 +1,237 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean; + +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +@SuppressWarnings("all") +@org.apache.avro.specific.AvroGenerated +public class SystemStatus extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"SystemStatus\",\"fields\":[{\"name\":\"time\",\"type\":\"string\"},{\"name\":\"system\",\"type\":{\"type\":\"record\",\"name\":\"RealTimeStatus\",\"fields\":[{\"name\":\"id\",\"type\":\"string\"},{\"name\":\"status\",\"type\":\"string\"},{\"name\":\"alarms\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtAlarm\",\"fields\":[{\"name\":\"level\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"},{\"name\":\"type\",\"type\":\"string\"},{\"name\":\"detail\",\"type\":\"string\"}]}}},{\"name\":\"stats\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtStatValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"content\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RtNameValue\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"value\",\"type\":\"string\"}]}}}]}}}]}},{\"name\":\"components\",\"type\":{\"type\":\"array\",\"items\":\"RealTimeStatus\"}}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + @Deprecated public CharSequence time; + @Deprecated public RealTimeStatus system; + @Deprecated public java.util.List components; + + /** + * Default constructor. + */ + public SystemStatus() {} + + /** + * All-args constructor. + */ + public SystemStatus(CharSequence time, RealTimeStatus system, java.util.List components) { + this.time = time; + this.system = system; + this.components = components; + } + + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + // Used by DatumWriter. Applications should not call. + public Object get(int field$) { + switch (field$) { + case 0: return time; + case 1: return system; + case 2: return components; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + // Used by DatumReader. Applications should not call. 
+ @SuppressWarnings(value="unchecked") + public void put(int field$, Object value$) { + switch (field$) { + case 0: time = (CharSequence)value$; break; + case 1: system = (RealTimeStatus)value$; break; + case 2: components = (java.util.List)value$; break; + default: throw new org.apache.avro.AvroRuntimeException("Bad index"); + } + } + + /** + * Gets the value of the 'time' field. + */ + public CharSequence getTime() { + return time; + } + + /** + * Sets the value of the 'time' field. + * @param value the value to set. + */ + public void setTime(CharSequence value) { + this.time = value; + } + + /** + * Gets the value of the 'system' field. + */ + public RealTimeStatus getSystem() { + return system; + } + + /** + * Sets the value of the 'system' field. + * @param value the value to set. + */ + public void setSystem(RealTimeStatus value) { + this.system = value; + } + + /** + * Gets the value of the 'components' field. + */ + public java.util.List getComponents() { + return components; + } + + /** + * Sets the value of the 'components' field. + * @param value the value to set. + */ + public void setComponents(java.util.List value) { + this.components = value; + } + + /** Creates a new SystemStatus RecordBuilder */ + public static Builder newBuilder() { + return new Builder(); + } + + /** Creates a new SystemStatus RecordBuilder by copying an existing Builder */ + public static Builder newBuilder(Builder other) { + return new Builder(other); + } + + /** Creates a new SystemStatus RecordBuilder by copying an existing SystemStatus instance */ + public static Builder newBuilder(SystemStatus other) { + return new Builder(other); + } + + /** + * RecordBuilder for SystemStatus instances. 
+ */ + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private CharSequence time; + private RealTimeStatus system; + private java.util.List components; + + /** Creates a new Builder */ + private Builder() { + super(SystemStatus.SCHEMA$); + } + + /** Creates a Builder by copying an existing Builder */ + private Builder(Builder other) { + super(other); + } + + /** Creates a Builder by copying an existing SystemStatus instance */ + private Builder(SystemStatus other) { + super(SystemStatus.SCHEMA$); + if (isValidValue(fields()[0], other.time)) { + this.time = data().deepCopy(fields()[0].schema(), other.time); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.system)) { + this.system = data().deepCopy(fields()[1].schema(), other.system); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.components)) { + this.components = data().deepCopy(fields()[2].schema(), other.components); + fieldSetFlags()[2] = true; + } + } + + /** Gets the value of the 'time' field */ + public CharSequence getTime() { + return time; + } + + /** Sets the value of the 'time' field */ + public Builder setTime(CharSequence value) { + validate(fields()[0], value); + this.time = value; + fieldSetFlags()[0] = true; + return this; + } + + /** Checks whether the 'time' field has been set */ + public boolean hasTime() { + return fieldSetFlags()[0]; + } + + /** Clears the value of the 'time' field */ + public Builder clearTime() { + time = null; + fieldSetFlags()[0] = false; + return this; + } + + /** Gets the value of the 'system' field */ + public RealTimeStatus getSystem() { + return system; + } + + /** Sets the value of the 'system' field */ + public Builder setSystem(RealTimeStatus value) { + validate(fields()[1], value); + this.system = value; + fieldSetFlags()[1] = true; + return this; + } + + /** Checks whether the 'system' field has been set */ + public boolean 
hasSystem() { + return fieldSetFlags()[1]; + } + + /** Clears the value of the 'system' field */ + public Builder clearSystem() { + system = null; + fieldSetFlags()[1] = false; + return this; + } + + /** Gets the value of the 'components' field */ + public java.util.List getComponents() { + return components; + } + + /** Sets the value of the 'components' field */ + public Builder setComponents(java.util.List value) { + validate(fields()[2], value); + this.components = value; + fieldSetFlags()[2] = true; + return this; + } + + /** Checks whether the 'components' field has been set */ + public boolean hasComponents() { + return fieldSetFlags()[2]; + } + + /** Clears the value of the 'components' field */ + public Builder clearComponents() { + components = null; + fieldSetFlags()[2] = false; + return this; + } + + @Override + public SystemStatus build() { + try { + SystemStatus record = new SystemStatus(); + record.time = fieldSetFlags()[0] ? this.time : (CharSequence) defaultValue(fields()[0]); + record.system = fieldSetFlags()[1] ? this.system : (RealTimeStatus) defaultValue(fields()[1]); + record.components = fieldSetFlags()[2] ? 
this.components : (java.util.List) defaultValue(fields()[2]); + return record; + } catch (Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroRecord/GetAvroRecordByTopicUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroRecord/GetAvroRecordByTopicUtils.java new file mode 100644 index 0000000..bb07c72 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroRecord/GetAvroRecordByTopicUtils.java @@ -0,0 +1,198 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroRecord; + +import cn.ac.iie.cusflume.sink.CommonUtils.DecodeUtils; +import cn.ac.iie.cusflume.sink.bean.dataBean.NTC_COLLECT_DNS_LOG; +import cn.ac.iie.cusflume.sink.bean.dataBean.NTC_COLLECT_HTTP_LOG; +import cn.ac.iie.cusflume.sink.bean.dataBean.NTC_COLLECT_SSL_LOG; +import cn.ac.iie.cusflume.sink.bean.fileBean.FILE_TAG_BEAN; +import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_MAIL_LOG; +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.zdjizhi.utils.StringUtil; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericRecord; +import org.apache.commons.lang.StringUtils; +import org.apache.log4j.Logger; +import tech.allegro.schema.json2avro.converter.JsonAvroConverter; + +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + + +public class GetAvroRecordByTopicUtils { + private static Logger logger = Logger.getLogger(GetAvroRecordByTopicUtils.class); + + private static JsonAvroConverter converter = new JsonAvroConverter(); + + public static GenericRecord getAvroRecordByTopicName(String topic, GenericRecord recordAvro, String dataJson, Schema schemaStr) { + switch (topic) { + case "NTC-CONN-RECORD-LOG": + return getConnRecordAvroRecord(recordAvro, dataJson, schemaStr); +// case 
"NTC-COLLECT-HTTP-LOG": +// return getCollHttpAvroRecord(recordAvro, dataJson); + case "NTC-COLLECT-SSL-LOG": + return getCollSslAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-DNS-LOG": + return getCollDnsAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-HTTP-DOC-LOG": + return getCollectHttpDocPostAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-HTTP-AV-LOG": + return getCollectHttpAvPostAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-FTP-DOC-LOG": + return getCollFtpDocAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-MAIL-LOG": + return getCollMailAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-FILE-LOG": + return getCollFileAvroRecord(recordAvro, dataJson, schemaStr); + case "log-tag": + return getLogTagAvroRecord(recordAvro, dataJson, schemaStr); + case "monitor-msg"://一部状态数据对象avro序列化 + return getMonitorMsgAvroRecord(recordAvro, dataJson, schemaStr); + case "status-tag"://数据字典-状态数据标签序列化 + return getStatusTagAvroRecord(recordAvro, dataJson, schemaStr); + case "INFLUX-SAPP-BPS-STAT-LOG"://20200110广东SAPP状态上传-广东状态数据对象avro序列化 + return getInfluxStatusAvroRecord(recordAvro, dataJson, schemaStr); + case "NTC-COLLECT-TELNET-LOG": + return getCollTelnetAvroRecord(recordAvro, dataJson, schemaStr); + default: + logger.error("There is no corresponding topic! 
topic name is :" + topic); + break; + } + return null; + } + + private static GenericRecord getMonitorMsgAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + + return recordAvro; + } + + private static GenericRecord getCollFtpDocAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + return recordAvro; + } + + private static GenericRecord getCollTelnetAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + return recordAvro; + } + + private static GenericRecord getCollectHttpDocPostAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + + return recordAvro; + } + + private static GenericRecord getCollectHttpAvPostAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + return recordAvro; + } + + private static GenericRecord getCollDnsAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + NTC_COLLECT_DNS_LOG ntcCollectDnsLog = JSONObject.parseObject(dataJson, NTC_COLLECT_DNS_LOG.class);//用于json字段压成字符串 + recordAvro = converter.convertToGenericDataRecord(JSONObject.toJSONString(ntcCollectDnsLog).getBytes(), schemaStr); + return recordAvro; + } + + private static GenericRecord getCollSslAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + NTC_COLLECT_SSL_LOG ntcCollectSslLog = JSONObject.parseObject(dataJson, NTC_COLLECT_SSL_LOG.class);//用于json字段压成字符串 + recordAvro = converter.convertToGenericDataRecord(JSONObject.toJSONString(ntcCollectSslLog).getBytes(), schemaStr); + return recordAvro; + } + + private 
static GenericRecord getCollHttpAvroRecord(GenericRecord recordAvro, String dataJson) { + NTC_COLLECT_HTTP_LOG ntcCollectHttpLog = JSONObject.parseObject(dataJson, NTC_COLLECT_HTTP_LOG.class); + + String k = ntcCollectHttpLog.getK(); + //String v = ntcCollectHttpLog.getV(); + //解析k,获取四元组,http_seq,proxy_flag,isn + String[] splitK = k.split("#"); + //String quad = splitK[0];//四元组集合,例如192.168.11.156.53608>112.80.255.122.80,>左边为源ip和源端口,>右边为目的ip和目的端口 + String http_seq = splitK[1]; + String proxy_flag = splitK[2]; + ntcCollectHttpLog.setHttp_seq(Integer.parseInt(http_seq)); + ntcCollectHttpLog.setProxy_flag(Integer.parseInt(proxy_flag)); + + return recordAvro; + } + + private static GenericRecord getCollMailAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + NTC_COLLECT_MAIL_LOG ntcCollectMailLog = JSONObject.parseObject(dataJson, NTC_COLLECT_MAIL_LOG.class); + + if (StringUtil.isNotBlank(ntcCollectMailLog.getSubject())) { + String subjectCharset = JSONObject.parseObject(dataJson).getString("subject_charset"); + ntcCollectMailLog.setSubject(DecodeUtils.base64Str(ntcCollectMailLog.getSubject(), subjectCharset)); + } + + recordAvro = converter.convertToGenericDataRecord(JSONObject.toJSONString(ntcCollectMailLog).getBytes(), schemaStr); + + return recordAvro; + } + + private static GenericRecord getConnRecordAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + + return recordAvro; + } + + + private static GenericRecord getCollFileAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + FILE_TAG_BEAN fileTagBean = JSONObject.parseObject(dataJson, FILE_TAG_BEAN.class); + + if (StringUtil.isNotBlank(fileTagBean.getFile_name())) { + String fileNameCharset = JSONObject.parseObject(dataJson).getString("file_name_charset"); + fileTagBean.setFile_name(DecodeUtils.base64Str(fileTagBean.getFile_name(), fileNameCharset)); + } + + 
recordAvro = converter.convertToGenericDataRecord(JSONObject.toJSONString(fileTagBean).getBytes(), schemaStr); + + return recordAvro; + } + + /** + * 消息内日志标签序列化 + * + * @param recordAvro + * @param dataJson + * @param schemaStr + * @return + */ + private static GenericRecord getLogTagAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + //20200228新增-用于解决前端long类型精度丢失,前端以字符串写入,这一步将字符串转为数字 + LOG_D_TAG_BEAN log_d_tag_bean = JSONObject.parseObject(dataJson, LOG_D_TAG_BEAN.class); + recordAvro = converter.convertToGenericDataRecord(JSONObject.toJSONString(log_d_tag_bean).getBytes(), schemaStr);//20200228新修改,解决前端精度丢失问题 + + return recordAvro; + } + + /** + * 消息内状态标签序列化 + * + * @param recordAvro + * @param dataJson + * @param schemaStr + * @return + */ + private static GenericRecord getInfluxStatusAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + + return recordAvro; + } + + /** + * 消息内状态标签序列化 + * + * @param recordAvro + * @param dataJson + * @param schemaStr + * @return + */ + private static GenericRecord getStatusTagAvroRecord(GenericRecord recordAvro, String dataJson, Schema schemaStr) { + recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaStr); + + return recordAvro; + } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroSchema/GetAvroSchemaByTopicUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroSchema/GetAvroSchemaByTopicUtils.java new file mode 100644 index 0000000..4aa2ef1 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/avroUtils/avroSchema/GetAvroSchemaByTopicUtils.java @@ -0,0 +1,48 @@ +package cn.ac.iie.cusflume.sink.avroUtils.avroSchema; + +import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig; +import org.apache.log4j.Logger; + + +public class GetAvroSchemaByTopicUtils { + private static 
Logger logger = Logger.getLogger(GetAvroSchemaByTopicUtils.class); + + public static String getAvroSchemaByTopicName(String topic) { + switch (topic) { + case "NTC-CONN-RECORD-LOG": + return RealtimeCountConfig.SCHEMA_NTC_CONN_RECORD_LOG; + case "NTC-COLLECT-SSL-LOG"://server_cert直接作为json放在String类型下 + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_SSL_LOG; + case "NTC-COLLECT-DNS-LOG": + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_DNS_LOG; + case "NTC-COLLECT-MAIL-LOG": + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_MAIL_LOG; + case "NTC-COLLECT-HTTP-DOC-LOG": + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_HTTP_DOC_LOG; + case "NTC-COLLECT-HTTP-AV-LOG": + /** + * 20200810新增,数据Schema等同于NTC-COLLECT-HTTP-DOC-LOG + */ + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_HTTP_AV_LOG; + case "NTC-COLLECT-FTP-DOC-LOG": + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_FTP_DOC_LOG; + case "NTC-COLLECT-TELNET-LOG": + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_TELNET_LOG; + case "log-tag"://20191219新增-日志标签 + return RealtimeCountConfig.SCHEMA_LOG_TAG; + case "NTC-COLLECT-FILE-LOG": + return RealtimeCountConfig.SCHEMA_NTC_COLLECT_FILE_LOG; + case "monitor-msg"://一部状态上传对应的数据对象-格式为总线规定 + return RealtimeCountConfig.SCHEMA_MONITOR_MSG; + case "status-tag"://状态数据标签-搭配状态数据对象使用-出自数据字典 + return RealtimeCountConfig.SCHEMA_STATUS_TAG; + case "INFLUX-SAPP-BPS-STAT-LOG"://20200110广东SAPP状态上传-即广东状态上传对应的数据对象-格式自己定义 + return RealtimeCountConfig.SCHEMA_INFLUX_SAPP_BPS_STAT_LOG; + default: + logger.error("There is no corresponding topic! 
topic name is :" + topic); + break; + } + return null; + } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/CommonLog_Msg_File.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/CommonLog_Msg_File.java new file mode 100644 index 0000000..1cac5d8 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/CommonLog_Msg_File.java @@ -0,0 +1,254 @@ +//package cn.ac.iie.cusflume.sink.bean; +// +//public class CommonLog_Msg_File { +// private int cfg_id; +// private int found_time; +// private int recv_time; +// private String trans_proto; +// private int addr_type; +// private String d_ip; +// private String s_ip; +// private int d_port; +// private int s_port; +// private int entrance_id; +// private int stream_dir; +// private String cap_ip; +// private String addr_list; +// private String server_locate; +// private String client_locate; +// private String s_asn; +// private String d_asn; +// private String user_region; +// private int service; +// private String scene_file; +// private String scene_file_id; +// +// public int getCfg_id() { +// return cfg_id; +// } +// +// public void setCfg_id(int cfg_id) { +// this.cfg_id = cfg_id; +// } +// +// public int getFound_time() { +// return found_time; +// } +// +// public void setFound_time(int found_time) { +// this.found_time = found_time; +// } +// +// public int getRecv_time() { +// return recv_time; +// } +// +// public void setRecv_time(int recv_time) { +// this.recv_time = recv_time; +// } +// +// public String getTrans_proto() { +// return trans_proto; +// } +// +// public void setTrans_proto(String trans_proto) { +// this.trans_proto = trans_proto; +// } +// +// public int getAddr_type() { +// return addr_type; +// } +// +// public void setAddr_type(int addr_type) { +// this.addr_type = addr_type; +// } +// +// public String getD_ip() { +// return d_ip; +// } +// +// public void setD_ip(String d_ip) { +// this.d_ip = d_ip; +// } 
+// +// public String getS_ip() { +// return s_ip; +// } +// +// public void setS_ip(String s_ip) { +// this.s_ip = s_ip; +// } +// +// public int getD_port() { +// return d_port; +// } +// +// public void setD_port(int d_port) { +// this.d_port = d_port; +// } +// +// public int getS_port() { +// return s_port; +// } +// +// public void setS_port(int s_port) { +// this.s_port = s_port; +// } +// +// public int getEntrance_id() { +// return entrance_id; +// } +// +// public void setEntrance_id(int entrance_id) { +// this.entrance_id = entrance_id; +// } +// +// public int getStream_dir() { +// return stream_dir; +// } +// +// public void setStream_dir(int stream_dir) { +// this.stream_dir = stream_dir; +// } +// +// public String getCap_ip() { +// return cap_ip; +// } +// +// public void setCap_ip(String cap_ip) { +// this.cap_ip = cap_ip; +// } +// +// public String getAddr_list() { +// return addr_list; +// } +// +// public void setAddr_list(String addr_list) { +// this.addr_list = addr_list; +// } +// +// public String getServer_locate() { +// return server_locate; +// } +// +// public void setServer_locate(String server_locate) { +// this.server_locate = server_locate; +// } +// +// public String getClient_locate() { +// return client_locate; +// } +// +// public void setClient_locate(String client_locate) { +// this.client_locate = client_locate; +// } +// +// public String getS_asn() { +// return s_asn; +// } +// +// public void setS_asn(String s_asn) { +// this.s_asn = s_asn; +// } +// +// public String getD_asn() { +// return d_asn; +// } +// +// public void setD_asn(String d_asn) { +// this.d_asn = d_asn; +// } +// +// public String getUser_region() { +// return user_region; +// } +// +// public void setUser_region(String user_region) { +// this.user_region = user_region; +// } +// +// public int getService() { +// return service; +// } +// +// public void setService(int service) { +// this.service = service; +// } +// +// public String getScene_file() { 
+// return scene_file; +// } +// +// public void setScene_file(String scene_file) { +// this.scene_file = scene_file; +// } +// +// public String getScene_file_id() { +// return scene_file_id; +// } +// +// public void setScene_file_id(String scene_file_id) { +// this.scene_file_id = scene_file_id; +// } +// +// @Override +// public String toString() { +// return cfg_id + "#" + +// found_time + "#" + +// recv_time + "#" + +// trans_proto + "#" + +// addr_type + "#" + +// d_ip + "#" + +// s_ip + "#" + +// d_port + "#" + +// s_port + "#" + +// entrance_id + "#" + +//// encap_type + "#" + +//// direction + "#" + +//// inner_smac + "#" + +//// inner_dmac + "#" + +// stream_dir + "#" + +// cap_ip + "#" + +// addr_list + "#" + +// server_locate + "#" + +// client_locate + "#" + +// s_asn + "#" + +// d_asn + "#" + +//// s_subscribe_id + "#" + +//// d_subscribe_id + "#" + +// user_region + "#" + +// service + "#" + +// scene_file + "#" + +// scene_file_id + "#"; +// } +// +// public String field() { +// return cfg_id + "#" + +// found_time + "#" + +// recv_time + "#" + +// trans_proto + "#" + +// addr_type + "#" + +// d_ip + "#" + +// s_ip + "#" + +// d_port + "#" + +// s_port + "#" + +// entrance_id + "#" + +//// encap_type + "#" + +//// direction + "#" + +//// inner_smac + "#" + +//// inner_dmac + "#" + +// stream_dir + "#" + +// cap_ip + "#" + +// addr_list + "#" + +// server_locate + "#" + +// client_locate + "#" + +// s_asn + "#" + +// d_asn + "#" + +//// s_subscribe_id + "#" + +//// d_subscribe_id + "#" + +// user_region + "#" + +// service + "#" + +// scene_file + "#" + +// scene_file_id + "#"; +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Req/AcReqBody.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Req/AcReqBody.java new file mode 100644 index 0000000..ed7b04d --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Req/AcReqBody.java @@ -0,0 +1,21 @@ 
+package cn.ac.iie.cusflume.sink.bean.ac.Req; + +public class AcReqBody { + + private int requestType;//接入请求类型-10(生产消息)、20(从最开始位置开始消费),21(从上次消费位置之后开始消费,即获取总线中缓存的该标签自上次消费之后的增量消息)、22(从最新开始消费,即获取总线中缓存的该标签的最后一个消息) + + public int getRequestType() { + return requestType; + } + + public void setRequestType(int requestType) { + this.requestType = requestType; + } + + @Override + public String toString() { + return "AcReqBody{" + + "requestType=" + requestType + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Res/AcResBody.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Res/AcResBody.java new file mode 100644 index 0000000..853b712 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Res/AcResBody.java @@ -0,0 +1,51 @@ +package cn.ac.iie.cusflume.sink.bean.ac.Res; + +public class AcResBody { + private int code;//请求响应状态code-200,400,500,老版字段名称status,新版code + private String msg;//请求响应结果msg-请求成功,请求失败,服务器异常-老版字段名称message,新版msg + private String sessionId;//会话标识Set-Cookie,即返回的响应头内包含的sessionId,注意sessionId存在于响应头中,是我手动添加进去的 +// private AcResData data;//数据 + private String data;//数据 + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public String getSessionId() { + return sessionId; + } + + public void setSessionId(String sessionId) { + this.sessionId = sessionId; + } + + public String getData() { + return data; + } + + public void setData(String data) { + this.data = data; + } + + @Override + public String toString() { + return "AcResBody{" + + "code=" + code + + ", msg='" + msg + '\'' + + ", sessionId='" + sessionId + '\'' + + ", data='" + data + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Res/AcResData.java 
b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Res/AcResData.java new file mode 100644 index 0000000..41a1ca1 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/ac/Res/AcResData.java @@ -0,0 +1,33 @@ +package cn.ac.iie.cusflume.sink.bean.ac.Res; + +/** + * 暂未使用,AcResBody的data字段的对象形式 + */ +public class AcResData { + private int status;//审核状态-0,1,2 + private String reason;//审核说明-审核通过,审核中,审核失败 + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + public String getReason() { + return reason; + } + + public void setReason(String reason) { + this.reason = reason; + } + + @Override + public String toString() { + return "AcResData{" + + "status=" + status + + ", reason='" + reason + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/configBean/ConfigInfo.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/configBean/ConfigInfo.java new file mode 100644 index 0000000..cd23d28 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/configBean/ConfigInfo.java @@ -0,0 +1,142 @@ +package cn.ac.iie.cusflume.sink.bean.configBean; + +/** + * 存放与ZX交互的url以及验证标识---只存放长期固定不变的数据 + * 1.存储post文件,信息url(配置文件获取); + * 2.存储入库前验证文件,信息url(配置文件获取); + * 3.存储文件,信息SessionCookie(请求总线获取); + * 4.存储msg作为avro入库所需的topicName,userAgent,xTag,batchSize + */ +public class ConfigInfo { + + private String postMsgUrl; + private String postFileUrl; + + private String checkMsgUrl; + private String checkFileUrl; + + private String msgSessionCookie; + private String fileSessionCookie; + private String monitorSessionCookie;//20200818新增,缓存monitorSessionCookie + +// private String sendMsg; + + private String topicName; + private String userAgent; + private String xTag; + private int batchSize; + + public String getPostMsgUrl() { + return postMsgUrl; + } + + public void setPostMsgUrl(String 
postMsgUrl) { + this.postMsgUrl = postMsgUrl; + } + + public String getPostFileUrl() { + return postFileUrl; + } + + public void setPostFileUrl(String postFileUrl) { + this.postFileUrl = postFileUrl; + } + + public String getCheckMsgUrl() { + return checkMsgUrl; + } + + public void setCheckMsgUrl(String checkMsgUrl) { + this.checkMsgUrl = checkMsgUrl; + } + + public String getCheckFileUrl() { + return checkFileUrl; + } + + public void setCheckFileUrl(String checkFileUrl) { + this.checkFileUrl = checkFileUrl; + } + + public String getMsgSessionCookie() { + return msgSessionCookie; + } + + public void setMsgSessionCookie(String msgSessionCookie) { + this.msgSessionCookie = msgSessionCookie; + } + + public String getFileSessionCookie() { + return fileSessionCookie; + } + + public void setFileSessionCookie(String fileSessionCookie) { + this.fileSessionCookie = fileSessionCookie; + } + + public String getMonitorSessionCookie() { + return monitorSessionCookie; + } + + public void setMonitorSessionCookie(String monitorSessionCookie) { + this.monitorSessionCookie = monitorSessionCookie; + } + + public String getTopicName() { + return topicName; + } + + public void setTopicName(String topicName) { + this.topicName = topicName; + } + + public String getUserAgent() { + return userAgent; + } + + public void setUserAgent(String userAgent) { + this.userAgent = userAgent; + } + + public String getxTag() { + return xTag; + } + + public void setxTag(String xTag) { + this.xTag = xTag; + } + + public int getBatchSize() { + return batchSize; + } + + public void setBatchSize(int batchSize) { + this.batchSize = batchSize; + } + + // public String getSendMsg() { +// return sendMsg; +// } +// +// public void setSendMsg(String sendMsg) { +// this.sendMsg = sendMsg; +// } + + + @Override + public String toString() { + return "ConfigInfo{" + + "postMsgUrl='" + postMsgUrl + '\'' + + ", postFileUrl='" + postFileUrl + '\'' + + ", checkMsgUrl='" + checkMsgUrl + '\'' + + ", checkFileUrl='" + 
checkFileUrl + '\'' + + ", msgSessionCookie='" + msgSessionCookie + '\'' + + ", fileSessionCookie='" + fileSessionCookie + '\'' + + ", monitorSessionCookie='" + monitorSessionCookie + '\'' + + ", topicName='" + topicName + '\'' + + ", userAgent='" + userAgent + '\'' + + ", xTag='" + xTag + '\'' + + ", batchSize=" + batchSize + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_DNS_field_flags.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_DNS_field_flags.java new file mode 100644 index 0000000..09beaa2 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_DNS_field_flags.java @@ -0,0 +1,78 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +public class Collect_DNS_field_flags { + + private long qr; + private long opcode; + private long aa; + private long tc; + private long rd; + private long ra; + private long z; + private long rcode; + + + public long getQr() { + return qr; + } + + public void setQr(long qr) { + this.qr = qr; + } + + public long getOpcode() { + return opcode; + } + + public void setOpcode(long opcode) { + this.opcode = opcode; + } + + public long getAa() { + return aa; + } + + public void setAa(long aa) { + this.aa = aa; + } + + public long getTc() { + return tc; + } + + public void setTc(long tc) { + this.tc = tc; + } + + public long getRd() { + return rd; + } + + public void setRd(long rd) { + this.rd = rd; + } + + public long getRa() { + return ra; + } + + public void setRa(long ra) { + this.ra = ra; + } + + public long getZ() { + return z; + } + + public void setZ(long z) { + this.z = z; + } + + public long getRcode() { + return rcode; + } + + public void setRcode(long rcode) { + this.rcode = rcode; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_DNS_field_hdr.java 
b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_DNS_field_hdr.java new file mode 100644 index 0000000..c6033d3 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_DNS_field_hdr.java @@ -0,0 +1,50 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +public class Collect_DNS_field_hdr { + + private long id; + private long qdcount; + private long ancount; + private long aucount; + private long adcount; + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public long getQdcount() { + return qdcount; + } + + public void setQdcount(long qdcount) { + this.qdcount = qdcount; + } + + public long getAncount() { + return ancount; + } + + public void setAncount(long ancount) { + this.ancount = ancount; + } + + public long getAucount() { + return aucount; + } + + public void setAucount(long aucount) { + this.aucount = aucount; + } + + public long getAdcount() { + return adcount; + } + + public void setAdcount(long adcount) { + this.adcount = adcount; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_SSL_field_server_cert.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_SSL_field_server_cert.java new file mode 100644 index 0000000..84208a7 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/Collect_SSL_field_server_cert.java @@ -0,0 +1,151 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +public class Collect_SSL_field_server_cert { + + private String version; + private String serial_number; + private String algorithm_id; + private String issuer; + private String issue_country; + private String issue_organize; + private String issue_cname; + private String sub; + private String sub_country; + private String sub_organize; + private String sub_cname; + private String start_time; + private String expire_time; 
+ private String san; + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getSerial_number() { + return serial_number; + } + + public void setSerial_number(String serial_number) { + this.serial_number = serial_number; + } + + public String getAlgorithm_id() { + return algorithm_id; + } + + public void setAlgorithm_id(String algorithm_id) { + this.algorithm_id = algorithm_id; + } + + public String getIssuer() { + return issuer; + } + + public void setIssuer(String issuer) { + this.issuer = issuer; + } + + public String getIssue_country() { + return issue_country; + } + + public void setIssue_country(String issue_country) { + this.issue_country = issue_country; + } + + public String getIssue_organize() { + return issue_organize; + } + + public void setIssue_organize(String issue_organize) { + this.issue_organize = issue_organize; + } + + public String getIssue_cname() { + return issue_cname; + } + + public void setIssue_cname(String issue_cname) { + this.issue_cname = issue_cname; + } + + public String getSub() { + return sub; + } + + public void setSub(String sub) { + this.sub = sub; + } + + public String getSub_country() { + return sub_country; + } + + public void setSub_country(String sub_country) { + this.sub_country = sub_country; + } + + public String getSub_organize() { + return sub_organize; + } + + public void setSub_organize(String sub_organize) { + this.sub_organize = sub_organize; + } + + public String getSub_cname() { + return sub_cname; + } + + public void setSub_cname(String sub_cname) { + this.sub_cname = sub_cname; + } + + public String getStart_time() { + return start_time; + } + + public void setStart_time(String start_time) { + this.start_time = start_time; + } + + public String getExpire_time() { + return expire_time; + } + + public void setExpire_time(String expire_time) { + this.expire_time = expire_time; + } + + public String getSan() { + return 
san; + } + + public void setSan(String san) { + this.san = san; + } + + @Override + public String toString() { + return "server_cert{" + + "version='" + version + '\'' + + ", serial_number='" + serial_number + '\'' + + ", algorithm_id='" + algorithm_id + '\'' + + ", issuer='" + issuer + '\'' + + ", issue_country='" + issue_country + '\'' + + ", issue_organize='" + issue_organize + '\'' + + ", issue_cname='" + issue_cname + '\'' + + ", sub='" + sub + '\'' + + ", sub_country='" + sub_country + '\'' + + ", sub_organize='" + sub_organize + '\'' + + ", sub_cname='" + sub_cname + '\'' + + ", start_time='" + start_time + '\'' + + ", expire_time='" + expire_time + '\'' + + ", san='" + san + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/CommonLog.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/CommonLog.java new file mode 100644 index 0000000..6da257e --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/CommonLog.java @@ -0,0 +1,314 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +public class CommonLog { + //后期特别新增的字段,加到前面 + private String device_id;//20200506 + //后期特别新增的字段,加到前面 + + + private int cfg_id; + private int found_time; + private int recv_time; + private String trans_proto; + private int addr_type; + private String d_ip; + private String s_ip; + private int d_port; + private int s_port; + private int service; + private int encap_type; + private int direction; + private String inner_smac; + private String inner_dmac; + private int stream_dir; + private String cap_ip; + private String addr_list; + private String server_locate; + private String client_locate; + private String s_asn; + private String d_asn; + private String s_subscribe_id; + private String d_subscribe_id; + private String user_region; + private String scene_file; + private String scene_file_id; + + public String getDevice_id() { + return device_id; + } + + 
public void setDevice_id(String device_id) { + this.device_id = device_id; + } + + public int getCfg_id() { + return cfg_id; + } + + public void setCfg_id(int cfg_id) { + this.cfg_id = cfg_id; + } + + public int getFound_time() { + return found_time; + } + + public void setFound_time(int found_time) { + this.found_time = found_time; + } + + public int getRecv_time() { + return recv_time; + } + + public void setRecv_time(int recv_time) { + this.recv_time = recv_time; + } + + public String getTrans_proto() { + return trans_proto; + } + + public void setTrans_proto(String trans_proto) { + this.trans_proto = trans_proto; + } + + public int getAddr_type() { + return addr_type; + } + + public void setAddr_type(int addr_type) { + this.addr_type = addr_type; + } + + public String getD_ip() { + return d_ip; + } + + public void setD_ip(String d_ip) { + this.d_ip = d_ip; + } + + public String getS_ip() { + return s_ip; + } + + public void setS_ip(String s_ip) { + this.s_ip = s_ip; + } + + public int getD_port() { + return d_port; + } + + public void setD_port(int d_port) { + this.d_port = d_port; + } + + public int getS_port() { + return s_port; + } + + public void setS_port(int s_port) { + this.s_port = s_port; + } + + public int getService() { + return service; + } + + public void setService(int service) { + this.service = service; + } + + public int getEncap_type() { + return encap_type; + } + + public void setEncap_type(int encap_type) { + this.encap_type = encap_type; + } + + public int getDirection() { + return direction; + } + + public void setDirection(int direction) { + this.direction = direction; + } + + public String getInner_smac() { + return inner_smac; + } + + public void setInner_smac(String inner_smac) { + this.inner_smac = inner_smac; + } + + public String getInner_dmac() { + return inner_dmac; + } + + public void setInner_dmac(String inner_dmac) { + this.inner_dmac = inner_dmac; + } + + public int getStream_dir() { + return stream_dir; + } + + public void 
setStream_dir(int stream_dir) { + this.stream_dir = stream_dir; + } + + public String getCap_ip() { + return cap_ip; + } + + public void setCap_ip(String cap_ip) { + this.cap_ip = cap_ip; + } + + public String getAddr_list() { + return addr_list; + } + + public void setAddr_list(String addr_list) { + this.addr_list = addr_list; + } + + public String getServer_locate() { + return server_locate; + } + + public void setServer_locate(String server_locate) { + this.server_locate = server_locate; + } + + public String getClient_locate() { + return client_locate; + } + + public void setClient_locate(String client_locate) { + this.client_locate = client_locate; + } + + public String getS_asn() { + return s_asn; + } + + public void setS_asn(String s_asn) { + this.s_asn = s_asn; + } + + public String getD_asn() { + return d_asn; + } + + public void setD_asn(String d_asn) { + this.d_asn = d_asn; + } + + public String getS_subscribe_id() { + return s_subscribe_id; + } + + public void setS_subscribe_id(String s_subscribe_id) { + this.s_subscribe_id = s_subscribe_id; + } + + public String getD_subscribe_id() { + return d_subscribe_id; + } + + public void setD_subscribe_id(String d_subscribe_id) { + this.d_subscribe_id = d_subscribe_id; + } + + public String getUser_region() { + return user_region; + } + + public void setUser_region(String user_region) { + this.user_region = user_region; + } + + public String getScene_file() { + return scene_file; + } + + public void setScene_file(String scene_file) { + this.scene_file = scene_file; + } + + public String getScene_file_id() { + return scene_file_id; + } + + public void setScene_file_id(String scene_file_id) { + this.scene_file_id = scene_file_id; + } + + @Override + public String toString() { + return + device_id + "#" + + cfg_id + "#" + + found_time + "#" + + recv_time + "#" + + trans_proto + "#" + + addr_type + "#" + + d_ip + "#" + + s_ip + "#" + + d_port + "#" + + s_port + "#" + + service + "#" + + encap_type + "#" + + 
direction + "#" + + inner_smac + "#" + + inner_dmac + "#" + + stream_dir + "#" + + cap_ip + "#" + + addr_list + "#" + + server_locate + "#" + + client_locate + "#" + + s_asn + "#" + + d_asn + "#" + + s_subscribe_id + "#" + + d_subscribe_id + "#" + + user_region + "#" + + scene_file + "#" + + scene_file_id + "#"; + } + + public String field() { + return + device_id + "#" + + cfg_id + "#" + + found_time + "#" + + recv_time + "#" + + trans_proto + "#" + + addr_type + "#" + + d_ip + "#" + + s_ip + "#" + + d_port + "#" + + s_port + "#" + + service + "#" + + encap_type + "#" + + direction + "#" + + inner_smac + "#" + + inner_dmac + "#" + + stream_dir + "#" + + cap_ip + "#" + + addr_list + "#" + + server_locate + "#" + + client_locate + "#" + + s_asn + "#" + + d_asn + "#" + + s_subscribe_id + "#" + + d_subscribe_id + "#" + + user_region + "#" + + scene_file + "#" + + scene_file_id + "#"; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_DNS_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_DNS_LOG.java new file mode 100644 index 0000000..b86eabe --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_DNS_LOG.java @@ -0,0 +1,194 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + +public class NTC_COLLECT_DNS_LOG extends CommonLog { + + /** + * 旧 + */ +// private int dns_id; +// private int qr; +// private int rd; +// private int ra; +// private int tc; +// private int aa; +// private int rcode; +// private String rr; +// private int qtype; +// private int qclass; +// private int opcode; +// private String qname; +// private String cname; +// private int dns_sub; + + /** + * 新 + */ + private long dns_sub; + + /** + * hdr json + */ +// private Collect_DNS_field_hdr hdr;//旧-废弃-20200507 + private String hdr;//新-修改-压成字符串-20200507 + /** + * hdr内部字段-20200507移除 + 
*/ +// private long id; +// private long qdcount; +// private long ancount; +// private long aucount; +// private long adcount; + + /** + * flags json + */ +// private Collect_DNS_field_flags flags;//旧-废弃-20200507 + private String flags;//新-修改-压成字符串-20200507 + /** + * flags内部字段-20200507移除 + */ +// private long qr; +// private long opcode; +// private long aa; +// private long tc; +// private long rd; +// private long ra; +// private long z; +// private long rcode; + + private String question; + + private String rr; + + //20200509新增 + private LOG_D_TAG_BEAN d_tag;//json版本-20200509 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + + +// public Collect_DNS_field_hdr getHdr() { +// return hdr; +// } + +// public void setHdr(Collect_DNS_field_hdr hdr) { +// this.hdr = hdr; +// //增加自赋值 +// this.id = hdr.getId(); +// this.qdcount = hdr.getQdcount(); +// this.ancount = hdr.getAncount(); +// this.aucount = hdr.getAucount(); +// this.adcount = hdr.getAdcount(); +// } + +// public Collect_DNS_field_flags getFlags() { +// return flags; +// } +// +// public void setFlags(Collect_DNS_field_flags flags) { +// this.flags = flags; +// //增加自赋值 +// this.qr = flags.getQr(); +// this.opcode = flags.getOpcode(); +// this.aa = flags.getAa(); +// this.tc = flags.getTc(); +// this.rd = flags.getRd(); +// this.ra = flags.getRa(); +// this.z = flags.getZ(); +// this.rcode = flags.getRcode(); +// } + + public long getDns_sub() { + return dns_sub; + } + + public void setDns_sub(long dns_sub) { + this.dns_sub = dns_sub; + } + + public String getHdr() { + return hdr; + } + + public void setHdr(String hdr) { + this.hdr = hdr; + } + + public String getFlags() { + return flags; + } + + public void setFlags(String flags) { + this.flags = flags; + } + + public String getQuestion() { + return question; + } + + public void setQuestion(String question) { + this.question = question; + } + + public String getRr() { + return rr; + } + + public void setRr(String rr) { + this.rr = rr; + } + + public 
LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + /** + * 旧 + */ +// @Override +// public String toString() { +// return field() + +// dns_id + "\t" + +// qr + "\t" + +// rd + "\t" + +// ra + "\t" + +// tc + "\t" + +// aa + "\t" + +// rcode + "\t" + +// rr + "\t" + +// qtype + "\t" + +// qclass + "\t" + +// opcode + "\t" + +// qname + "\t" + +// cname + "\t" + +// dns_sub + "\n"; +// } + + /** + * 新 + */ + @Override + public String toString() { + return "NTC_COLLECT_DNS_LOG{" + + "dns_sub=" + dns_sub + + ", hdr='" + hdr + '\'' + + ", flags='" + flags + '\'' + + ", question='" + question + '\'' + + ", rr='" + rr + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_HTTP_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_HTTP_LOG.java new file mode 100644 index 0000000..3e8bbfb --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_HTTP_LOG.java @@ -0,0 +1,80 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +public class NTC_COLLECT_HTTP_LOG extends CommonLog { + + private String k;//用于获取http_seq和proxy_flag + private String v; + + private int http_seq; + private int proxy_flag; + private String url; + private String referer; + private String domain; + + + public String getK() { + return k; + } + + public void setK(String k) { + this.k = k; + } + + public String getV() { + return v; + } + + public void setV(String v) { + this.v = v; + } + + public int getHttp_seq() { + return http_seq; + } + + public void setHttp_seq(int http_seq) { + this.http_seq = http_seq; + } + + public int getProxy_flag() { + return proxy_flag; + } + + public void setProxy_flag(int 
proxy_flag) { + this.proxy_flag = proxy_flag; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getReferer() { + return referer; + } + + public void setReferer(String referer) { + this.referer = referer; + } + + public String getDomain() { + return domain; + } + + public void setDomain(String domain) { + this.domain = domain; + } + + @Override + public String toString() { + return field() + + http_seq + "\t" + + proxy_flag + "\t" + + url + "\t" + + referer + "\t" + + domain + "\n"; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_MAIL_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_MAIL_LOG.java new file mode 100644 index 0000000..9e116a5 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_MAIL_LOG.java @@ -0,0 +1,70 @@ +//package cn.ac.iie.cusflume.sink.bean.dataBean; +// +//public class NTC_COLLECT_MAIL_LOG extends CommonLog { +// +// private String mail_proto; +// private String mail_from; +// private String mail_to; +// private String subject; +// private String eml_key; +// private String eml_file; +// +// public String getMail_proto() { +// return mail_proto; +// } +// +// public void setMail_proto(String mail_proto) { +// this.mail_proto = mail_proto; +// } +// +// public String getMail_from() { +// return mail_from; +// } +// +// public void setMail_from(String mail_from) { +// this.mail_from = mail_from; +// } +// +// public String getMail_to() { +// return mail_to; +// } +// +// public void setMail_to(String mail_to) { +// this.mail_to = mail_to; +// } +// +// public String getSubject() { +// return subject; +// } +// +// public void setSubject(String subject) { +// this.subject = subject; +// } +// +// public String getEml_key() { +// return eml_key; +// } +// +// public void setEml_key(String eml_key) { +// 
this.eml_key = eml_key; +// } +// +// public String getEml_file() { +// return eml_file; +// } +// +// public void setEml_file(String eml_file) { +// this.eml_file = eml_file; +// } +// +// @Override +// public String toString() { +// return field() + +// mail_proto + "\t" + +// mail_from + "\t" + +// mail_to + "\t" + +// subject + "\t" + +// eml_key + "\t" + +// eml_file + "\n"; +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_SSL_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_SSL_LOG.java new file mode 100644 index 0000000..396f7d4 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_COLLECT_SSL_LOG.java @@ -0,0 +1,175 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + +public class NTC_COLLECT_SSL_LOG extends CommonLog { + + private String c2s_pkt_num; + private String s2c_pkt_num; + private String c2s_byte_num; + private String s2c_byte_num; + + +// private String SNI;//旧-废弃-20200507 + private String sni;//新-修改-20200507 + private long create_time; + +// private long lastmtime;//旧-废弃-20200507 + private long last_time;//新-修改-20200507 + + private String server_ciphersuits; + private String client_ciphersuits; + +// private Collect_SSL_field_server_cert server_cert;//旧-废弃-20200507 + private String server_cert;//新-修改-压成字符串-20200507 + +// private String version;//旧-server_cert内部字段-废弃-20200507 + + private String client_cert; + + //20200509新增 + private LOG_D_TAG_BEAN d_tag;//json版本-20200509 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + + public String getC2s_pkt_num() { + return c2s_pkt_num; + } + + public void setC2s_pkt_num(String c2s_pkt_num) { + this.c2s_pkt_num = c2s_pkt_num; + } + + public String getS2c_pkt_num() { + return s2c_pkt_num; + } + + public void setS2c_pkt_num(String s2c_pkt_num) { + this.s2c_pkt_num = s2c_pkt_num; + } + 
+ public String getC2s_byte_num() { + return c2s_byte_num; + } + + public void setC2s_byte_num(String c2s_byte_num) { + this.c2s_byte_num = c2s_byte_num; + } + + public String getS2c_byte_num() { + return s2c_byte_num; + } + + public void setS2c_byte_num(String s2c_byte_num) { + this.s2c_byte_num = s2c_byte_num; + } + + public String getSni() { + return sni; + } + + public void setSni(String sni) { + this.sni = sni; + } + + public long getCreate_time() { + return create_time; + } + + public void setCreate_time(long create_time) { + this.create_time = create_time; + } + + public long getLast_time() { + return last_time; + } + + public void setLast_time(long last_time) { + this.last_time = last_time; + } + + public String getServer_ciphersuits() { + return server_ciphersuits; + } + + public void setServer_ciphersuits(String server_ciphersuits) { + this.server_ciphersuits = server_ciphersuits; + } + + public String getClient_ciphersuits() { + return client_ciphersuits; + } + + public void setClient_ciphersuits(String client_ciphersuits) { + this.client_ciphersuits = client_ciphersuits; + } + + public String getServer_cert() { + return server_cert; + } + + public void setServer_cert(String server_cert) { + this.server_cert = server_cert; + } + + public String getClient_cert() { + return client_cert; + } + + public void setClient_cert(String client_cert) { + this.client_cert = client_cert; + } + + public LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + // @Override +// public String toString() { +// return field() + +//// version + "\t" + +// c2s_pkt_num + "\t" + +// s2c_pkt_num + "\t" + +// c2s_byte_num + "\t" + +// s2c_byte_num + "\t" + +// sni + "\t" + +// create_time + "\t" + +// last_time + "\t" + +// server_ciphersuits + "\t" + +// client_ciphersuits + "\t" + +// +// 
server_cert + "\t" + +// client_cert + "\n"; +// } + + + @Override + public String toString() { + return "NTC_COLLECT_SSL_LOG{" + + "c2s_pkt_num='" + c2s_pkt_num + '\'' + + ", s2c_pkt_num='" + s2c_pkt_num + '\'' + + ", c2s_byte_num='" + c2s_byte_num + '\'' + + ", s2c_byte_num='" + s2c_byte_num + '\'' + + ", sni='" + sni + '\'' + + ", create_time=" + create_time + + ", last_time=" + last_time + + ", server_ciphersuits='" + server_ciphersuits + '\'' + + ", client_ciphersuits='" + client_ciphersuits + '\'' + + ", server_cert='" + server_cert + '\'' + + ", client_cert='" + client_cert + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_CONN_RECORD_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_CONN_RECORD_LOG.java new file mode 100644 index 0000000..d0704e2 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/dataBean/NTC_CONN_RECORD_LOG.java @@ -0,0 +1,86 @@ +package cn.ac.iie.cusflume.sink.bean.dataBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + +public class NTC_CONN_RECORD_LOG extends CommonLog { + private String app_label; + private String c2s_pkt_num; + private String s2c_pkt_num; + private String c2s_byte_num; + private String s2c_byte_num; + + //20191216新增 +// private String d_tag;//json字符串版本 + private LOG_D_TAG_BEAN d_tag;//json版本-20191216 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + //20191216新增 + + public String getApp_label() { + return app_label; + } + + public void setApp_label(String app_label) { + this.app_label = app_label; + } + + public String getC2s_pkt_num() { + return c2s_pkt_num; + } + + public void setC2s_pkt_num(String c2s_pkt_num) { + this.c2s_pkt_num = c2s_pkt_num; + } + + public String getS2c_pkt_num() { + return s2c_pkt_num; + } + + public void setS2c_pkt_num(String s2c_pkt_num) { + this.s2c_pkt_num = s2c_pkt_num; + 
} + + public String getC2s_byte_num() { + return c2s_byte_num; + } + + public void setC2s_byte_num(String c2s_byte_num) { + this.c2s_byte_num = c2s_byte_num; + } + + public String getS2c_byte_num() { + return s2c_byte_num; + } + + public void setS2c_byte_num(String s2c_byte_num) { + this.s2c_byte_num = s2c_byte_num; + } + + public LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + @Override + public String toString() { + return "NTC_CONN_RECORD_LOG{" + + "app_label='" + app_label + '\'' + + ", c2s_pkt_num='" + c2s_pkt_num + '\'' + + ", s2c_pkt_num='" + s2c_pkt_num + '\'' + + ", c2s_byte_num='" + c2s_byte_num + '\'' + + ", s2c_byte_num='" + s2c_byte_num + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/CommonLog_File.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/CommonLog_File.java new file mode 100644 index 0000000..dbd3f69 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/CommonLog_File.java @@ -0,0 +1,276 @@ +package cn.ac.iie.cusflume.sink.bean.fileBean; + +public class CommonLog_File { + //后期特别新增的字段,加到前面 + private String device_id;//20200506 + //后期特别新增的字段,加到前面 + + private int cfg_id; + private int found_time; + private int recv_time; + private String trans_proto; + private int addr_type; + private String d_ip; + private String s_ip; + private int d_port; + private int s_port; + private int entrance_id; + // private int encap_type; +// private int direction; +// private String inner_smac; +// private String inner_dmac; + private int stream_dir; + private String cap_ip; + private String addr_list; + private String server_locate; + private String client_locate; + private String 
s_asn; + private String d_asn; + // private String s_subscribe_id; +// private String d_subscribe_id; + private String user_region; + private int service; + private String scene_file; + private String scene_file_id; + + public String getDevice_id() { + return device_id; + } + + public void setDevice_id(String device_id) { + this.device_id = device_id; + } + + public int getCfg_id() { + return cfg_id; + } + + public void setCfg_id(int cfg_id) { + this.cfg_id = cfg_id; + } + + public int getFound_time() { + return found_time; + } + + public void setFound_time(int found_time) { + this.found_time = found_time; + } + + public int getRecv_time() { + return recv_time; + } + + public void setRecv_time(int recv_time) { + this.recv_time = recv_time; + } + + public String getTrans_proto() { + return trans_proto; + } + + public void setTrans_proto(String trans_proto) { + this.trans_proto = trans_proto; + } + + public int getAddr_type() { + return addr_type; + } + + public void setAddr_type(int addr_type) { + this.addr_type = addr_type; + } + + public String getD_ip() { + return d_ip; + } + + public void setD_ip(String d_ip) { + this.d_ip = d_ip; + } + + public String getS_ip() { + return s_ip; + } + + public void setS_ip(String s_ip) { + this.s_ip = s_ip; + } + + public int getD_port() { + return d_port; + } + + public void setD_port(int d_port) { + this.d_port = d_port; + } + + public int getS_port() { + return s_port; + } + + public void setS_port(int s_port) { + this.s_port = s_port; + } + + public int getEntrance_id() { + return entrance_id; + } + + public void setEntrance_id(int entrance_id) { + this.entrance_id = entrance_id; + } + + public int getStream_dir() { + return stream_dir; + } + + public void setStream_dir(int stream_dir) { + this.stream_dir = stream_dir; + } + + public String getCap_ip() { + return cap_ip; + } + + public void setCap_ip(String cap_ip) { + this.cap_ip = cap_ip; + } + + public String getAddr_list() { + return addr_list; + } + + public void 
setAddr_list(String addr_list) { + this.addr_list = addr_list; + } + + public String getServer_locate() { + return server_locate; + } + + public void setServer_locate(String server_locate) { + this.server_locate = server_locate; + } + + public String getClient_locate() { + return client_locate; + } + + public void setClient_locate(String client_locate) { + this.client_locate = client_locate; + } + + public String getS_asn() { + return s_asn; + } + + public void setS_asn(String s_asn) { + this.s_asn = s_asn; + } + + public String getD_asn() { + return d_asn; + } + + public void setD_asn(String d_asn) { + this.d_asn = d_asn; + } + + public String getUser_region() { + return user_region; + } + + public void setUser_region(String user_region) { + this.user_region = user_region; + } + + public int getService() { + return service; + } + + public void setService(int service) { + this.service = service; + } + + public String getScene_file() { + return scene_file; + } + + public void setScene_file(String scene_file) { + this.scene_file = scene_file; + } + + public String getScene_file_id() { + return scene_file_id; + } + + public void setScene_file_id(String scene_file_id) { + this.scene_file_id = scene_file_id; + } + + @Override + public String toString() { + return + device_id + "#" + + cfg_id + "#" + + found_time + "#" + + recv_time + "#" + + trans_proto + "#" + + addr_type + "#" + + d_ip + "#" + + s_ip + "#" + + d_port + "#" + + s_port + "#" + + entrance_id + "#" + +// encap_type + "#" + +// direction + "#" + +// inner_smac + "#" + +// inner_dmac + "#" + + stream_dir + "#" + + cap_ip + "#" + + addr_list + "#" + + server_locate + "#" + + client_locate + "#" + + s_asn + "#" + + d_asn + "#" + +// s_subscribe_id + "#" + +// d_subscribe_id + "#" + + user_region + "#" + + service + "#" + + scene_file + "#" + + scene_file_id + "#"; + } + + public String field() { + return + device_id + "#" + + cfg_id + "#" + + found_time + "#" + + recv_time + "#" + + trans_proto + "#" + + 
addr_type + "#" + + d_ip + "#" + + s_ip + "#" + + d_port + "#" + + s_port + "#" + + entrance_id + "#" + +// encap_type + "#" + +// direction + "#" + +// inner_smac + "#" + +// inner_dmac + "#" + + stream_dir + "#" + + cap_ip + "#" + + addr_list + "#" + + server_locate + "#" + + client_locate + "#" + + s_asn + "#" + + d_asn + "#" + +// s_subscribe_id + "#" + +// d_subscribe_id + "#" + + user_region + "#" + + service + "#" + + scene_file + "#" + + scene_file_id + "#"; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/FILE_TAG_BEAN.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/FILE_TAG_BEAN.java new file mode 100644 index 0000000..6e86411 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/FILE_TAG_BEAN.java @@ -0,0 +1,217 @@ +package cn.ac.iie.cusflume.sink.bean.fileBean; + +import java.util.Arrays; + +/** + * 接收NTC-COLLECT-FILE-LOG数据,是独立出来的文件标签,单独发送不与数据对象拼接 + */ +public class FILE_TAG_BEAN { +// /** +// * 旧 +// * @return +// */ +// private String tag_version; +// private long data_type; +// private long data_subtype; +// private long producer_id; +// private long data_source; +// private long task_id; +// +// private long file_id; +// private String file_path; +// private String file_name; +// private String file_name_charset;//新增-20191216 +// private double file_size; +// private String md5; +// private String encrypted; +// private String timestamp; +// private long parent_id; +//// private long parent_type; +// private long flow_id; +// +// private String x_tag;//新增-20191216 + + + /** + * 新-20191219 + * @return + */ + private String tag_version; + private int data_type; + private int data_subtype; + private int producer_id; + private int data_source; + private int[] task_id;//int数组 + + private long file_id; + private String file_path; + private String file_name; + private String file_name_charset;//新增-20191216 + private float file_size; + 
private String md5; + private String encrypted; + private String timestamp; + private long parent_id; + private long flow_id; + + private String x_tag;//新增-20191216 + + public String getTag_version() { + return tag_version; + } + + public void setTag_version(String tag_version) { + this.tag_version = tag_version; + } + + public int getData_type() { + return data_type; + } + + public void setData_type(int data_type) { + this.data_type = data_type; + } + + public int getData_subtype() { + return data_subtype; + } + + public void setData_subtype(int data_subtype) { + this.data_subtype = data_subtype; + } + + public int getProducer_id() { + return producer_id; + } + + public void setProducer_id(int producer_id) { + this.producer_id = producer_id; + } + + public int getData_source() { + return data_source; + } + + public void setData_source(int data_source) { + this.data_source = data_source; + } + + public int[] getTask_id() { + return task_id; + } + + public void setTask_id(int[] task_id) { + this.task_id = task_id; + } + + public long getFile_id() { + return file_id; + } + + public void setFile_id(long file_id) { + this.file_id = file_id; + } + + public String getFile_path() { + return file_path; + } + + public void setFile_path(String file_path) { + this.file_path = file_path; + } + + public String getFile_name() { + return file_name; + } + + public void setFile_name(String file_name) { + this.file_name = file_name; + } + + public String getFile_name_charset() { + return file_name_charset; + } + + public void setFile_name_charset(String file_name_charset) { + this.file_name_charset = file_name_charset; + } + + public float getFile_size() { + return file_size; + } + + public void setFile_size(float file_size) { + this.file_size = file_size; + } + + public String getMd5() { + return md5; + } + + public void setMd5(String md5) { + this.md5 = md5; + } + + public String getEncrypted() { + return encrypted; + } + + public void setEncrypted(String encrypted) { + 
this.encrypted = encrypted; + } + + public String getTimestamp() { + return timestamp; + } + + public void setTimestamp(String timestamp) { + this.timestamp = timestamp; + } + + public long getParent_id() { + return parent_id; + } + + public void setParent_id(long parent_id) { + this.parent_id = parent_id; + } + + public long getFlow_id() { + return flow_id; + } + + public void setFlow_id(long flow_id) { + this.flow_id = flow_id; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + @Override + public String toString() { + return "FILE_TAG_BEAN{" + + "tag_version='" + tag_version + '\'' + + ", data_type=" + data_type + + ", data_subtype=" + data_subtype + + ", producer_id=" + producer_id + + ", data_source=" + data_source + + ", task_id=" + Arrays.toString(task_id) + + ", file_id=" + file_id + + ", file_path='" + file_path + '\'' + + ", file_name='" + file_name + '\'' + + ", file_name_charset='" + file_name_charset + '\'' + + ", file_size=" + file_size + + ", md5='" + md5 + '\'' + + ", encrypted='" + encrypted + '\'' + + ", timestamp='" + timestamp + '\'' + + ", parent_id=" + parent_id + + ", flow_id=" + flow_id + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_FTP_DOC_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_FTP_DOC_LOG.java new file mode 100644 index 0000000..89b310f --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_FTP_DOC_LOG.java @@ -0,0 +1,83 @@ +package cn.ac.iie.cusflume.sink.bean.fileBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + +public class NTC_COLLECT_FTP_DOC_LOG extends CommonLog_File { + +// private String url; +// private String res_body_file; +// private String res_body_file_id; + + private String ftp_url; + private String ftp_content; +// private 
String ftp_content_id;//旧-20200506废弃 + private String content_path;//新-20200506新修改 + + //20200211新增 + private LOG_D_TAG_BEAN d_tag;//json版本 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + //20200211新增 + + public String getFtp_url() { + return ftp_url; + } + + public void setFtp_url(String ftp_url) { + this.ftp_url = ftp_url; + } + + public String getFtp_content() { + return ftp_content; + } + + public void setFtp_content(String ftp_content) { + this.ftp_content = ftp_content; + } + + public String getContent_path() { + return content_path; + } + + public void setContent_path(String content_path) { + this.content_path = content_path; + } + + public LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + +// @Override +// public String toString() { +// return "NTC_COLLECT_FTP_DOC_LOG{" + +// "ftp_url='" + ftp_url + '\'' + +// ", ftp_content='" + ftp_content + '\'' + +// ", ftp_content_id='" + content_path + '\'' + +// ", d_tag=" + d_tag + +// ", x_tag='" + x_tag + '\'' + +// '}'; +// } + + + @Override + public String toString() { + return "NTC_COLLECT_FTP_DOC_LOG{" + + "ftp_url='" + ftp_url + '\'' + + ", ftp_content='" + ftp_content + '\'' + + ", content_path='" + content_path + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_HTTP_DOC_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_HTTP_DOC_LOG.java new file mode 100644 index 0000000..fb2dd40 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_HTTP_DOC_LOG.java @@ -0,0 +1,245 @@ +package cn.ac.iie.cusflume.sink.bean.fileBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + 
+public class NTC_COLLECT_HTTP_DOC_LOG extends CommonLog_File { + + private String url; + private String referer; + private String user_agent; + private int http_seq; + private String req_line; + private String res_line; + private String cookie; + private String content_type; + + private long content_len;//20200507新增字段 + private String doc_type;//20200514新增字段 + + private String req_body_file_path;//新增-20191216 + private String res_body_file_path;//新增-20191216 + + //20191209新增 +// private String d_tag;//json字符串版本 + private LOG_D_TAG_BEAN d_tag;//json版本-20191216 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + //20191209新增 + + + private String set_cookie;//目前未使用-20191216 + + private String req_hdr_file;//原本;//目前未使用-20191216 + private String req_hdr_file_id;//目前未使用-20191216 + private String req_body_file;//原本;//目前未使用-20191216 + private String req_body_file_id;//目前未使用-20191216 + + private String res_hdr_file;//原本;//目前未使用-20191216 + private String res_hdr_file_id;//目前未使用-20191216 + private String res_body_file;//原本;//目前未使用-20191216 + private String res_body_file_id;//目前未使用-20191216 + + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getReferer() { + return referer; + } + + public void setReferer(String referer) { + this.referer = referer; + } + + public String getUser_agent() { + return user_agent; + } + + public void setUser_agent(String user_agent) { + this.user_agent = user_agent; + } + + public int getHttp_seq() { + return http_seq; + } + + public void setHttp_seq(int http_seq) { + this.http_seq = http_seq; + } + + public String getReq_line() { + return req_line; + } + + public void setReq_line(String req_line) { + this.req_line = req_line; + } + + public String getRes_line() { + return res_line; + } + + public void setRes_line(String res_line) { + this.res_line = res_line; + } + + public String getCookie() { + return cookie; + } + + public void setCookie(String cookie) { + 
this.cookie = cookie; + } + + public String getContent_type() { + return content_type; + } + + public void setContent_type(String content_type) { + this.content_type = content_type; + } + + public long getContent_len() { + return content_len; + } + + public void setContent_len(long content_len) { + this.content_len = content_len; + } + + public String getDoc_type() { + return doc_type; + } + + public void setDoc_type(String doc_type) { + this.doc_type = doc_type; + } + + public String getReq_body_file_path() { + return req_body_file_path; + } + + public void setReq_body_file_path(String req_body_file_path) { + this.req_body_file_path = req_body_file_path; + } + + public String getRes_body_file_path() { + return res_body_file_path; + } + + public void setRes_body_file_path(String res_body_file_path) { + this.res_body_file_path = res_body_file_path; + } + + public LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + public String getSet_cookie() { + return set_cookie; + } + + public void setSet_cookie(String set_cookie) { + this.set_cookie = set_cookie; + } + + public String getReq_hdr_file() { + return req_hdr_file; + } + + public void setReq_hdr_file(String req_hdr_file) { + this.req_hdr_file = req_hdr_file; + } + + public String getReq_hdr_file_id() { + return req_hdr_file_id; + } + + public void setReq_hdr_file_id(String req_hdr_file_id) { + this.req_hdr_file_id = req_hdr_file_id; + } + + public String getReq_body_file() { + return req_body_file; + } + + public void setReq_body_file(String req_body_file) { + this.req_body_file = req_body_file; + } + + public String getReq_body_file_id() { + return req_body_file_id; + } + + public void setReq_body_file_id(String req_body_file_id) { + this.req_body_file_id = req_body_file_id; + } + + public String getRes_hdr_file() { + return 
res_hdr_file; + } + + public void setRes_hdr_file(String res_hdr_file) { + this.res_hdr_file = res_hdr_file; + } + + public String getRes_hdr_file_id() { + return res_hdr_file_id; + } + + public void setRes_hdr_file_id(String res_hdr_file_id) { + this.res_hdr_file_id = res_hdr_file_id; + } + + public String getRes_body_file() { + return res_body_file; + } + + public void setRes_body_file(String res_body_file) { + this.res_body_file = res_body_file; + } + + public String getRes_body_file_id() { + return res_body_file_id; + } + + public void setRes_body_file_id(String res_body_file_id) { + this.res_body_file_id = res_body_file_id; + } + + @Override + public String toString() { + return "NTC_COLLECT_HTTP_DOC_LOG{" + + "url='" + url + '\'' + + ", referer='" + referer + '\'' + + ", user_agent='" + user_agent + '\'' + + ", http_seq=" + http_seq + + ", req_line='" + req_line + '\'' + + ", res_line='" + res_line + '\'' + + ", cookie='" + cookie + '\'' + + ", content_type='" + content_type + '\'' + + ", content_len=" + content_len + + ", doc_type='" + doc_type + '\'' + + ", req_body_file_path='" + req_body_file_path + '\'' + + ", res_body_file_path='" + res_body_file_path + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_MAIL_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_MAIL_LOG.java new file mode 100644 index 0000000..d80f67a --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_MAIL_LOG.java @@ -0,0 +1,191 @@ +package cn.ac.iie.cusflume.sink.bean.fileBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + +import java.util.Arrays; + +public class NTC_COLLECT_MAIL_LOG extends CommonLog_File{ + + private String mail_proto; + private String mail_from; + private String mail_to; + private String mail_cc;//新增-20191216 + private String 
mail_date;//新增-20200304 + private String subject; + + private String eml_file_path;//新增-20191216 + private String attachments_path;//新增-20191216 + private String subject_charset; + + //20191209新增 +// private String d_tag;//json字符串版本 + private LOG_D_TAG_BEAN d_tag;//json版本-20191216 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + //20191209新增 + + private String eml_file;//目前未使用-20191216 + private String eml_file_id;//目前未使用-20191216 + + private String[] attachments;//目前未使用-20191216 + private String attachments_id;//目前未使用-20191216 + + + + + public String getMail_proto() { + return mail_proto; + } + + public void setMail_proto(String mail_proto) { + this.mail_proto = mail_proto; + } + + public String getMail_from() { + return mail_from; + } + + public void setMail_from(String mail_from) { + this.mail_from = mail_from; + } + + public String getMail_to() { + return mail_to; + } + + public void setMail_to(String mail_to) { + this.mail_to = mail_to; + } + + public String getSubject() { + return subject; + } + + public void setSubject(String subject) { + this.subject = subject; + } + + public String getEml_file() { + return eml_file; + } + + public void setEml_file(String eml_file) { + this.eml_file = eml_file; + } + + public String getEml_file_id() { + return eml_file_id; + } + + public void setEml_file_id(String eml_file_id) { + this.eml_file_id = eml_file_id; + } + + public String[] getAttachments() { + return attachments; + } + + public void setAttachments(String[] attachments) { + this.attachments = attachments; + } + + public String getAttachments_id() { + return attachments_id; + } + + public void setAttachments_id(String attachments_id) { + this.attachments_id = attachments_id; + } + + public String getSubject_charset() { + return subject_charset; + } + + public void setSubject_charset(String subject_charset) { + this.subject_charset = subject_charset; + } + + public LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN 
d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + public String getMail_cc() { + return mail_cc; + } + + public void setMail_cc(String mail_cc) { + this.mail_cc = mail_cc; + } + + public String getEml_file_path() { + return eml_file_path; + } + + public void setEml_file_path(String eml_file_path) { + this.eml_file_path = eml_file_path; + } + + public String getAttachments_path() { + return attachments_path; + } + + public void setAttachments_path(String attachments_path) { + this.attachments_path = attachments_path; + } + + public String getMail_date() { + return mail_date; + } + + public void setMail_date(String mail_date) { + this.mail_date = mail_date; + } + + // @Override +// public String toString() { +// return "NTC_COLLECT_MAIL_LOG{" + +// "mail_proto='" + mail_proto + '\'' + +// ", mail_from='" + mail_from + '\'' + +// ", mail_to='" + mail_to + '\'' + +// ", subject='" + subject + '\'' + +// ", eml_file='" + eml_file + '\'' + +// ", eml_file_id='" + eml_file_id + '\'' + +// ", attachments=" + Arrays.toString(attachments) + +// ", attachments_id='" + attachments_id + '\'' + +// ", subject_charset='" + subject_charset + '\'' + +// '}'; +// } + + + @Override + public String toString() { + return "NTC_COLLECT_MAIL_LOG{" + + "mail_proto='" + mail_proto + '\'' + + ", mail_from='" + mail_from + '\'' + + ", mail_to='" + mail_to + '\'' + + ", mail_cc='" + mail_cc + '\'' + + ", mail_date='" + mail_date + '\'' + + ", subject='" + subject + '\'' + + ", eml_file_path='" + eml_file_path + '\'' + + ", attachments_path='" + attachments_path + '\'' + + ", subject_charset='" + subject_charset + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + ", eml_file='" + eml_file + '\'' + + ", eml_file_id='" + eml_file_id + '\'' + + ", attachments=" + Arrays.toString(attachments) + + ", attachments_id='" + attachments_id + '\'' + + '}'; + } +} diff --git 
a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_TELNET_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_TELNET_LOG.java new file mode 100644 index 0000000..f6a898b --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/fileBean/NTC_COLLECT_TELNET_LOG.java @@ -0,0 +1,83 @@ +package cn.ac.iie.cusflume.sink.bean.fileBean; + +import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; + +/** + * TELNET 采集日志-20200325 + */ +public class NTC_COLLECT_TELNET_LOG extends CommonLog_File { + + private String username; + private String password; +// private String telnet_content_path;//旧-20200506废弃 + private String content_path;//新-20200506新修改 + + //20200211新增 + private LOG_D_TAG_BEAN d_tag;//json版本 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + //20200211新增 + + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public String getContent_path() { + return content_path; + } + + public void setContent_path(String content_path) { + this.content_path = content_path; + } + + public LOG_D_TAG_BEAN getD_tag() { + return d_tag; + } + + public void setD_tag(LOG_D_TAG_BEAN d_tag) { + this.d_tag = d_tag; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + +// @Override +// public String toString() { +// return "NTC_COLLECT_TELNET_LOG{" + +// "username='" + username + '\'' + +// ", password='" + password + '\'' + +// ", telnet_content_path='" + content_path + '\'' + +// ", d_tag=" + d_tag + +// ", x_tag='" + x_tag + '\'' + +// '}'; +// } + + + @Override + public String toString() { + return "NTC_COLLECT_TELNET_LOG{" + + "username='" + username + '\'' + + ", password='" + 
password + '\'' + + ", content_path='" + content_path + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/postFileBean/PostFileResBody.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/postFileBean/PostFileResBody.java new file mode 100644 index 0000000..7fe4426 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/postFileBean/PostFileResBody.java @@ -0,0 +1,46 @@ +package cn.ac.iie.cusflume.sink.bean.postFileBean; + +import java.util.Map; + +/** + * 推送文件成功后的返回体 + */ +public class PostFileResBody { + private String msg; + private int code; +// private Map data;//{"id":"123456"}----旧 + private String data;//{"id":"123456"}----新-20191115 + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public String getData() { + return data; + } + + public void setData(String data) { + this.data = data; + } + + @Override + public String toString() { + return "PostFileResBody{" + + "msg='" + msg + '\'' + + ", code=" + code + + ", data='" + data + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/producer/Res/ProResBody.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/producer/Res/ProResBody.java new file mode 100644 index 0000000..6f58ca1 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/producer/Res/ProResBody.java @@ -0,0 +1,41 @@ +package cn.ac.iie.cusflume.sink.bean.producer.Res; + +public class ProResBody { + private int code;//老版status,新版code + private String msg;//老版message,新版msg +// private ProResData data; + private String data; + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public 
String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public String getData() { + return data; + } + + public void setData(String data) { + this.data = data; + } + + @Override + public String toString() { + return "ProResBody{" + + "code=" + code + + ", msg='" + msg + '\'' + + ", data='" + data + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/producer/Res/ProResData.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/producer/Res/ProResData.java new file mode 100644 index 0000000..dc2301d --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/producer/Res/ProResData.java @@ -0,0 +1,23 @@ +package cn.ac.iie.cusflume.sink.bean.producer.Res; + +/** + * 暂未使用,ProResBody中data的对象形式 + */ +public class ProResData { + private String checksum; + + public String getChecksum() { + return checksum; + } + + public void setChecksum(String checksum) { + this.checksum = checksum; + } + + @Override + public String toString() { + return "ProResData{" + + "checksum='" + checksum + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/quoteStrFileBean/QUOTE_LOG_D_TAG_BEAN.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/quoteStrFileBean/QUOTE_LOG_D_TAG_BEAN.java new file mode 100644 index 0000000..0ca0308 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/quoteStrFileBean/QUOTE_LOG_D_TAG_BEAN.java @@ -0,0 +1,118 @@ +//package cn.ac.iie.cusflume.sink.bean.quoteStrFileBean; +// +//import java.util.Arrays; +// +///** +// * 存储在日志消息内部的d_tag字段,实际为日志标签,需要提取出来进行avro序列化后和数据对象拼成字节数组一同发送 +// */ +//public class QUOTE_LOG_D_TAG_BEAN { +// +// private String tag_version; +// private int data_type; +// private int data_subtype; +// private int producer_id; +// private int data_source; +// private int[] task_id; +// private long data_id; +// private 
long flow_id; +// +// private long[] file_id_list; +//// private String[] file_path_list; +// private String file_path_list; +// +// public String getTag_version() { +// return tag_version; +// } +// +// public void setTag_version(String tag_version) { +// this.tag_version = tag_version; +// } +// +// public int getData_type() { +// return data_type; +// } +// +// public void setData_type(int data_type) { +// this.data_type = data_type; +// } +// +// public int getData_subtype() { +// return data_subtype; +// } +// +// public void setData_subtype(int data_subtype) { +// this.data_subtype = data_subtype; +// } +// +// public int getProducer_id() { +// return producer_id; +// } +// +// public void setProducer_id(int producer_id) { +// this.producer_id = producer_id; +// } +// +// public int getData_source() { +// return data_source; +// } +// +// public void setData_source(int data_source) { +// this.data_source = data_source; +// } +// +// public int[] getTask_id() { +// return task_id; +// } +// +// public void setTask_id(int[] task_id) { +// this.task_id = task_id; +// } +// +// public long getData_id() { +// return data_id; +// } +// +// public void setData_id(long data_id) { +// this.data_id = data_id; +// } +// +// public long getFlow_id() { +// return flow_id; +// } +// +// public void setFlow_id(long flow_id) { +// this.flow_id = flow_id; +// } +// +// public long[] getFile_id_list() { +// return file_id_list; +// } +// +// public void setFile_id_list(long[] file_id_list) { +// this.file_id_list = file_id_list; +// } +// +// public String getFile_path_list() { +// return file_path_list; +// } +// +// public void setFile_path_list(String file_path_list) { +// this.file_path_list = file_path_list; +// } +// +// @Override +// public String toString() { +// return "QUOTE_LOG_D_TAG_BEAN{" + +// "tag_version='" + tag_version + '\'' + +// ", data_type=" + data_type + +// ", data_subtype=" + data_subtype + +// ", producer_id=" + producer_id + +// ", data_source=" + 
data_source + +// ", task_id=" + Arrays.toString(task_id) + +// ", data_id=" + data_id + +// ", flow_id=" + flow_id + +// ", file_id_list=" + Arrays.toString(file_id_list) + +// ", file_path_list='" + file_path_list + '\'' + +// '}'; +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/quoteStrFileBean/QUOTE_NTC_COLLECT_MAIL_LOG.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/quoteStrFileBean/QUOTE_NTC_COLLECT_MAIL_LOG.java new file mode 100644 index 0000000..426c3ae --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/quoteStrFileBean/QUOTE_NTC_COLLECT_MAIL_LOG.java @@ -0,0 +1,163 @@ +//package cn.ac.iie.cusflume.sink.bean.quoteStrFileBean; +// +//import cn.ac.iie.cusflume.sink.bean.fileBean.CommonLog_File; +//import cn.ac.iie.cusflume.sink.bean.tagBean.LOG_D_TAG_BEAN; +// +//import java.util.Arrays; +// +//public class QUOTE_NTC_COLLECT_MAIL_LOG extends CommonLog_File{ +// +// private String mail_proto; +// private String mail_from; +// private String mail_to; +// private String mail_cc;//新增-20191216 +// private String subject; +// +// private String eml_file_path;//新增-20191216 +// private String attachments_path;//新增-20191216 +// private String subject_charset; +// +// //20191209新增 +//// private String d_tag;//json字符串版本 +// private QUOTE_LOG_D_TAG_BEAN d_tag;//json版本-20191216 +// private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 +// //20191209新增 +// +// private String eml_file;//目前未使用-20191216 +// private String eml_file_id;//目前未使用-20191216 +// +// private String[] attachments;//目前未使用-20191216 +// private String attachments_id;//目前未使用-20191216 +// +// public String getMail_proto() { +// return mail_proto; +// } +// +// public void setMail_proto(String mail_proto) { +// this.mail_proto = mail_proto; +// } +// +// public String getMail_from() { +// return mail_from; +// } +// +// public void setMail_from(String mail_from) { +// this.mail_from = mail_from; +// } +// 
+// public String getMail_to() { +// return mail_to; +// } +// +// public void setMail_to(String mail_to) { +// this.mail_to = mail_to; +// } +// +// public String getMail_cc() { +// return mail_cc; +// } +// +// public void setMail_cc(String mail_cc) { +// this.mail_cc = mail_cc; +// } +// +// public String getSubject() { +// return subject; +// } +// +// public void setSubject(String subject) { +// this.subject = subject; +// } +// +// public String getEml_file_path() { +// return eml_file_path; +// } +// +// public void setEml_file_path(String eml_file_path) { +// this.eml_file_path = eml_file_path; +// } +// +// public String getAttachments_path() { +// return attachments_path; +// } +// +// public void setAttachments_path(String attachments_path) { +// this.attachments_path = attachments_path; +// } +// +// public String getSubject_charset() { +// return subject_charset; +// } +// +// public void setSubject_charset(String subject_charset) { +// this.subject_charset = subject_charset; +// } +// +// public QUOTE_LOG_D_TAG_BEAN getD_tag() { +// return d_tag; +// } +// +// public void setD_tag(QUOTE_LOG_D_TAG_BEAN d_tag) { +// this.d_tag = d_tag; +// } +// +// public String getX_tag() { +// return x_tag; +// } +// +// public void setX_tag(String x_tag) { +// this.x_tag = x_tag; +// } +// +// public String getEml_file() { +// return eml_file; +// } +// +// public void setEml_file(String eml_file) { +// this.eml_file = eml_file; +// } +// +// public String getEml_file_id() { +// return eml_file_id; +// } +// +// public void setEml_file_id(String eml_file_id) { +// this.eml_file_id = eml_file_id; +// } +// +// public String[] getAttachments() { +// return attachments; +// } +// +// public void setAttachments(String[] attachments) { +// this.attachments = attachments; +// } +// +// public String getAttachments_id() { +// return attachments_id; +// } +// +// public void setAttachments_id(String attachments_id) { +// this.attachments_id = attachments_id; +// } +// +// 
@Override +// public String toString() { +// return "QUOTE_NTC_COLLECT_MAIL_LOG{" + +// "mail_proto='" + mail_proto + '\'' + +// ", mail_from='" + mail_from + '\'' + +// ", mail_to='" + mail_to + '\'' + +// ", mail_cc='" + mail_cc + '\'' + +// ", subject='" + subject + '\'' + +// ", eml_file_path='" + eml_file_path + '\'' + +// ", attachments_path='" + attachments_path + '\'' + +// ", subject_charset='" + subject_charset + '\'' + +// ", d_tag=" + d_tag + +// ", x_tag='" + x_tag + '\'' + +// ", eml_file='" + eml_file + '\'' + +// ", eml_file_id='" + eml_file_id + '\'' + +// ", attachments=" + Arrays.toString(attachments) + +// ", attachments_id='" + attachments_id + '\'' + +// '}'; +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/redirectBean/ResRedirBody.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/redirectBean/ResRedirBody.java new file mode 100644 index 0000000..ad7e266 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/redirectBean/ResRedirBody.java @@ -0,0 +1,41 @@ +package cn.ac.iie.cusflume.sink.bean.redirectBean; + +public class ResRedirBody { + private int code; + private String msg; + private ResRedirBodyData data; + + public int getCode() { + return code; + } + + public void setCode(int code) { + this.code = code; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + public ResRedirBodyData getData() { + return data; + } + + public void setData(ResRedirBodyData data) { + this.data = data; + } + + + @Override + public String toString() { + return "ResRedirBody{" + + "code=" + code + + ", msg='" + msg + '\'' + + ", data=" + data + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/redirectBean/ResRedirBodyData.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/redirectBean/ResRedirBodyData.java new file mode 100644 index 
0000000..57008fa --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/redirectBean/ResRedirBodyData.java @@ -0,0 +1,23 @@ +package cn.ac.iie.cusflume.sink.bean.redirectBean; + +/** + * ProReirBodyData中data的对象形式-20200818新增 + */ +public class ResRedirBodyData { + private String redirect; + + public String getRedirect() { + return redirect; + } + + public void setRedirect(String redirect) { + this.redirect = redirect; + } + + @Override + public String toString() { + return "ResRedirBodyData{" + + "redirect='" + redirect + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/LOG_D_TAG_BEAN.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/LOG_D_TAG_BEAN.java new file mode 100644 index 0000000..33c7228 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/LOG_D_TAG_BEAN.java @@ -0,0 +1,117 @@ +package cn.ac.iie.cusflume.sink.bean.tagBean; + +import java.util.Arrays; + +/** + * 存储在日志消息内部的d_tag字段,实际为日志标签,需要提取出来进行avro序列化后和数据对象拼成字节数组一同发送 + */ +public class LOG_D_TAG_BEAN { + + private String tag_version; + private int data_type; + private int data_subtype; + private int producer_id; + private int data_source; + private int[] task_id; + private long data_id; + private long flow_id; + + private long[] file_id_list; + private String[] file_path_list; + + public String getTag_version() { + return tag_version; + } + + public void setTag_version(String tag_version) { + this.tag_version = tag_version; + } + + public int getData_type() { + return data_type; + } + + public void setData_type(int data_type) { + this.data_type = data_type; + } + + public int getData_subtype() { + return data_subtype; + } + + public void setData_subtype(int data_subtype) { + this.data_subtype = data_subtype; + } + + public int getProducer_id() { + return producer_id; + } + + public void setProducer_id(int producer_id) { + this.producer_id = 
producer_id; + } + + public int getData_source() { + return data_source; + } + + public void setData_source(int data_source) { + this.data_source = data_source; + } + + public int[] getTask_id() { + return task_id; + } + + public void setTask_id(int[] task_id) { + this.task_id = task_id; + } + + public long getData_id() { + return data_id; + } + + public void setData_id(long data_id) { + this.data_id = data_id; + } + + public long getFlow_id() { + return flow_id; + } + + public void setFlow_id(long flow_id) { + this.flow_id = flow_id; + } + + public long[] getFile_id_list() { + return file_id_list; + } + + public void setFile_id_list(long[] file_id_list) { + this.file_id_list = file_id_list; + } + + public String[] getFile_path_list() { + return file_path_list; + } + + public void setFile_path_list(String[] file_path_list) { + this.file_path_list = file_path_list; + } + + @Override + public String toString() { + return "LOG_D_TAG_BEAN{" + + "tag_version='" + tag_version + '\'' + + ", data_type=" + data_type + + ", data_subtype=" + data_subtype + + ", producer_id=" + producer_id + + ", data_source=" + data_source + + ", task_id=" + Arrays.toString(task_id) + + ", data_id=" + data_id + + ", flow_id=" + flow_id + + ", file_id_list=" + Arrays.toString(file_id_list) + + ", file_path_list=" + Arrays.toString(file_path_list) + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/AvroCompileTagBean/file_tag.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/AvroCompileTagBean/file_tag.java new file mode 100644 index 0000000..10c74b2 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/AvroCompileTagBean/file_tag.java @@ -0,0 +1,829 @@ +//package cn.ac.iie.cusflume.sink.bean.tagBean.noUse.AvroCompileTagBean; +// +///** +// * Autogenerated by Avro +// * +// * DO NOT EDIT DIRECTLY +// */ +//@SuppressWarnings("all") 
+//@org.apache.avro.specific.AvroGenerated +//public class file_tag extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { +// public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"file_tag\",\"fields\":[{\"name\":\"tag_version\",\"type\":\"string\",\"default\":\"\"},{\"name\":\"data_type\",\"type\":\"int\",\"default\":0},{\"name\":\"data_subtype\",\"type\":\"int\",\"default\":0},{\"name\":\"producer_id\",\"type\":\"int\",\"default\":0},{\"name\":\"data_source\",\"type\":\"int\",\"default\":0},{\"name\":\"task_id\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"file_id\",\"type\":\"long\",\"default\":0},{\"name\":\"file_path\",\"type\":\"string\",\"default\":\"\"},{\"name\":\"file_name\",\"type\":\"string\",\"default\":\"\"},{\"name\":\"file_size\",\"type\":\"float\",\"default\":0.0},{\"name\":\"md5\",\"type\":\"string\",\"default\":\"\"},{\"name\":\"encrypted\",\"type\":\"string\",\"default\":\"\"},{\"name\":\"timestamp\",\"type\":\"string\",\"default\":\"\"},{\"name\":\"parent_id\",\"type\":\"long\",\"default\":0},{\"name\":\"flow_id\",\"type\":\"long\",\"default\":0}]}"); +// public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } +// @Deprecated public CharSequence tag_version; +// @Deprecated public int data_type; +// @Deprecated public int data_subtype; +// @Deprecated public int producer_id; +// @Deprecated public int data_source; +// @Deprecated public java.util.List task_id; +// @Deprecated public long file_id; +// @Deprecated public CharSequence file_path; +// @Deprecated public CharSequence file_name; +// @Deprecated public float file_size; +// @Deprecated public CharSequence md5; +// @Deprecated public CharSequence encrypted; +// @Deprecated public CharSequence timestamp; +// @Deprecated public long parent_id; +// @Deprecated public long flow_id; +// +// /** +// * Default constructor. 
+// */ +// public file_tag() {} +// +// /** +// * All-args constructor. +// */ +// public file_tag(CharSequence tag_version, Integer data_type, Integer data_subtype, Integer producer_id, Integer data_source, java.util.List task_id, Long file_id, CharSequence file_path, CharSequence file_name, Float file_size, CharSequence md5, CharSequence encrypted, CharSequence timestamp, Long parent_id, Long flow_id) { +// this.tag_version = tag_version; +// this.data_type = data_type; +// this.data_subtype = data_subtype; +// this.producer_id = producer_id; +// this.data_source = data_source; +// this.task_id = task_id; +// this.file_id = file_id; +// this.file_path = file_path; +// this.file_name = file_name; +// this.file_size = file_size; +// this.md5 = md5; +// this.encrypted = encrypted; +// this.timestamp = timestamp; +// this.parent_id = parent_id; +// this.flow_id = flow_id; +// } +// +// public org.apache.avro.Schema getSchema() { return SCHEMA$; } +// // Used by DatumWriter. Applications should not call. +// public Object get(int field$) { +// switch (field$) { +// case 0: return tag_version; +// case 1: return data_type; +// case 2: return data_subtype; +// case 3: return producer_id; +// case 4: return data_source; +// case 5: return task_id; +// case 6: return file_id; +// case 7: return file_path; +// case 8: return file_name; +// case 9: return file_size; +// case 10: return md5; +// case 11: return encrypted; +// case 12: return timestamp; +// case 13: return parent_id; +// case 14: return flow_id; +// default: throw new org.apache.avro.AvroRuntimeException("Bad index"); +// } +// } +// // Used by DatumReader. Applications should not call. 
+// @SuppressWarnings(value="unchecked") +// public void put(int field$, Object value$) { +// switch (field$) { +// case 0: tag_version = (CharSequence)value$; break; +// case 1: data_type = (Integer)value$; break; +// case 2: data_subtype = (Integer)value$; break; +// case 3: producer_id = (Integer)value$; break; +// case 4: data_source = (Integer)value$; break; +// case 5: task_id = (java.util.List)value$; break; +// case 6: file_id = (Long)value$; break; +// case 7: file_path = (CharSequence)value$; break; +// case 8: file_name = (CharSequence)value$; break; +// case 9: file_size = (Float)value$; break; +// case 10: md5 = (CharSequence)value$; break; +// case 11: encrypted = (CharSequence)value$; break; +// case 12: timestamp = (CharSequence)value$; break; +// case 13: parent_id = (Long)value$; break; +// case 14: flow_id = (Long)value$; break; +// default: throw new org.apache.avro.AvroRuntimeException("Bad index"); +// } +// } +// +// /** +// * Gets the value of the 'tag_version' field. +// */ +// public CharSequence getTagVersion() { +// return tag_version; +// } +// +// /** +// * Sets the value of the 'tag_version' field. +// * @param value the value to set. +// */ +// public void setTagVersion(CharSequence value) { +// this.tag_version = value; +// } +// +// /** +// * Gets the value of the 'data_type' field. +// */ +// public Integer getDataType() { +// return data_type; +// } +// +// /** +// * Sets the value of the 'data_type' field. +// * @param value the value to set. +// */ +// public void setDataType(Integer value) { +// this.data_type = value; +// } +// +// /** +// * Gets the value of the 'data_subtype' field. +// */ +// public Integer getDataSubtype() { +// return data_subtype; +// } +// +// /** +// * Sets the value of the 'data_subtype' field. +// * @param value the value to set. +// */ +// public void setDataSubtype(Integer value) { +// this.data_subtype = value; +// } +// +// /** +// * Gets the value of the 'producer_id' field. 
+// */ +// public Integer getProducerId() { +// return producer_id; +// } +// +// /** +// * Sets the value of the 'producer_id' field. +// * @param value the value to set. +// */ +// public void setProducerId(Integer value) { +// this.producer_id = value; +// } +// +// /** +// * Gets the value of the 'data_source' field. +// */ +// public Integer getDataSource() { +// return data_source; +// } +// +// /** +// * Sets the value of the 'data_source' field. +// * @param value the value to set. +// */ +// public void setDataSource(Integer value) { +// this.data_source = value; +// } +// +// /** +// * Gets the value of the 'task_id' field. +// */ +// public java.util.List getTaskId() { +// return task_id; +// } +// +// /** +// * Sets the value of the 'task_id' field. +// * @param value the value to set. +// */ +// public void setTaskId(java.util.List value) { +// this.task_id = value; +// } +// +// /** +// * Gets the value of the 'file_id' field. +// */ +// public Long getFileId() { +// return file_id; +// } +// +// /** +// * Sets the value of the 'file_id' field. +// * @param value the value to set. +// */ +// public void setFileId(Long value) { +// this.file_id = value; +// } +// +// /** +// * Gets the value of the 'file_path' field. +// */ +// public CharSequence getFilePath() { +// return file_path; +// } +// +// /** +// * Sets the value of the 'file_path' field. +// * @param value the value to set. +// */ +// public void setFilePath(CharSequence value) { +// this.file_path = value; +// } +// +// /** +// * Gets the value of the 'file_name' field. +// */ +// public CharSequence getFileName() { +// return file_name; +// } +// +// /** +// * Sets the value of the 'file_name' field. +// * @param value the value to set. +// */ +// public void setFileName(CharSequence value) { +// this.file_name = value; +// } +// +// /** +// * Gets the value of the 'file_size' field. 
+// */ +// public Float getFileSize() { +// return file_size; +// } +// +// /** +// * Sets the value of the 'file_size' field. +// * @param value the value to set. +// */ +// public void setFileSize(Float value) { +// this.file_size = value; +// } +// +// /** +// * Gets the value of the 'md5' field. +// */ +// public CharSequence getMd5() { +// return md5; +// } +// +// /** +// * Sets the value of the 'md5' field. +// * @param value the value to set. +// */ +// public void setMd5(CharSequence value) { +// this.md5 = value; +// } +// +// /** +// * Gets the value of the 'encrypted' field. +// */ +// public CharSequence getEncrypted() { +// return encrypted; +// } +// +// /** +// * Sets the value of the 'encrypted' field. +// * @param value the value to set. +// */ +// public void setEncrypted(CharSequence value) { +// this.encrypted = value; +// } +// +// /** +// * Gets the value of the 'timestamp' field. +// */ +// public CharSequence getTimestamp() { +// return timestamp; +// } +// +// /** +// * Sets the value of the 'timestamp' field. +// * @param value the value to set. +// */ +// public void setTimestamp(CharSequence value) { +// this.timestamp = value; +// } +// +// /** +// * Gets the value of the 'parent_id' field. +// */ +// public Long getParentId() { +// return parent_id; +// } +// +// /** +// * Sets the value of the 'parent_id' field. +// * @param value the value to set. +// */ +// public void setParentId(Long value) { +// this.parent_id = value; +// } +// +// /** +// * Gets the value of the 'flow_id' field. +// */ +// public Long getFlowId() { +// return flow_id; +// } +// +// /** +// * Sets the value of the 'flow_id' field. +// * @param value the value to set. 
+// */ +// public void setFlowId(Long value) { +// this.flow_id = value; +// } +// +// /** Creates a new file_tag RecordBuilder */ +// public static Builder newBuilder() { +// return new Builder(); +// } +// +// /** Creates a new file_tag RecordBuilder by copying an existing Builder */ +// public static Builder newBuilder(Builder other) { +// return new Builder(other); +// } +// +// /** Creates a new file_tag RecordBuilder by copying an existing file_tag instance */ +// public static Builder newBuilder(file_tag other) { +// return new Builder(other); +// } +// +// /** +// * RecordBuilder for file_tag instances. +// */ +// public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase +// implements org.apache.avro.data.RecordBuilder { +// +// private CharSequence tag_version; +// private int data_type; +// private int data_subtype; +// private int producer_id; +// private int data_source; +// private java.util.List task_id; +// private long file_id; +// private CharSequence file_path; +// private CharSequence file_name; +// private float file_size; +// private CharSequence md5; +// private CharSequence encrypted; +// private CharSequence timestamp; +// private long parent_id; +// private long flow_id; +// +// /** Creates a new Builder */ +// private Builder() { +// super(file_tag.SCHEMA$); +// } +// +// /** Creates a Builder by copying an existing Builder */ +// private Builder(Builder other) { +// super(other); +// } +// +// /** Creates a Builder by copying an existing file_tag instance */ +// private Builder(file_tag other) { +// super(file_tag.SCHEMA$); +// if (isValidValue(fields()[0], other.tag_version)) { +// this.tag_version = data().deepCopy(fields()[0].schema(), other.tag_version); +// fieldSetFlags()[0] = true; +// } +// if (isValidValue(fields()[1], other.data_type)) { +// this.data_type = data().deepCopy(fields()[1].schema(), other.data_type); +// fieldSetFlags()[1] = true; +// } +// if (isValidValue(fields()[2], 
other.data_subtype)) { +// this.data_subtype = data().deepCopy(fields()[2].schema(), other.data_subtype); +// fieldSetFlags()[2] = true; +// } +// if (isValidValue(fields()[3], other.producer_id)) { +// this.producer_id = data().deepCopy(fields()[3].schema(), other.producer_id); +// fieldSetFlags()[3] = true; +// } +// if (isValidValue(fields()[4], other.data_source)) { +// this.data_source = data().deepCopy(fields()[4].schema(), other.data_source); +// fieldSetFlags()[4] = true; +// } +// if (isValidValue(fields()[5], other.task_id)) { +// this.task_id = data().deepCopy(fields()[5].schema(), other.task_id); +// fieldSetFlags()[5] = true; +// } +// if (isValidValue(fields()[6], other.file_id)) { +// this.file_id = data().deepCopy(fields()[6].schema(), other.file_id); +// fieldSetFlags()[6] = true; +// } +// if (isValidValue(fields()[7], other.file_path)) { +// this.file_path = data().deepCopy(fields()[7].schema(), other.file_path); +// fieldSetFlags()[7] = true; +// } +// if (isValidValue(fields()[8], other.file_name)) { +// this.file_name = data().deepCopy(fields()[8].schema(), other.file_name); +// fieldSetFlags()[8] = true; +// } +// if (isValidValue(fields()[9], other.file_size)) { +// this.file_size = data().deepCopy(fields()[9].schema(), other.file_size); +// fieldSetFlags()[9] = true; +// } +// if (isValidValue(fields()[10], other.md5)) { +// this.md5 = data().deepCopy(fields()[10].schema(), other.md5); +// fieldSetFlags()[10] = true; +// } +// if (isValidValue(fields()[11], other.encrypted)) { +// this.encrypted = data().deepCopy(fields()[11].schema(), other.encrypted); +// fieldSetFlags()[11] = true; +// } +// if (isValidValue(fields()[12], other.timestamp)) { +// this.timestamp = data().deepCopy(fields()[12].schema(), other.timestamp); +// fieldSetFlags()[12] = true; +// } +// if (isValidValue(fields()[13], other.parent_id)) { +// this.parent_id = data().deepCopy(fields()[13].schema(), other.parent_id); +// fieldSetFlags()[13] = true; +// } +// if 
(isValidValue(fields()[14], other.flow_id)) { +// this.flow_id = data().deepCopy(fields()[14].schema(), other.flow_id); +// fieldSetFlags()[14] = true; +// } +// } +// +// /** Gets the value of the 'tag_version' field */ +// public CharSequence getTagVersion() { +// return tag_version; +// } +// +// /** Sets the value of the 'tag_version' field */ +// public Builder setTagVersion(CharSequence value) { +// validate(fields()[0], value); +// this.tag_version = value; +// fieldSetFlags()[0] = true; +// return this; +// } +// +// /** Checks whether the 'tag_version' field has been set */ +// public boolean hasTagVersion() { +// return fieldSetFlags()[0]; +// } +// +// /** Clears the value of the 'tag_version' field */ +// public Builder clearTagVersion() { +// tag_version = null; +// fieldSetFlags()[0] = false; +// return this; +// } +// +// /** Gets the value of the 'data_type' field */ +// public Integer getDataType() { +// return data_type; +// } +// +// /** Sets the value of the 'data_type' field */ +// public Builder setDataType(int value) { +// validate(fields()[1], value); +// this.data_type = value; +// fieldSetFlags()[1] = true; +// return this; +// } +// +// /** Checks whether the 'data_type' field has been set */ +// public boolean hasDataType() { +// return fieldSetFlags()[1]; +// } +// +// /** Clears the value of the 'data_type' field */ +// public Builder clearDataType() { +// fieldSetFlags()[1] = false; +// return this; +// } +// +// /** Gets the value of the 'data_subtype' field */ +// public Integer getDataSubtype() { +// return data_subtype; +// } +// +// /** Sets the value of the 'data_subtype' field */ +// public Builder setDataSubtype(int value) { +// validate(fields()[2], value); +// this.data_subtype = value; +// fieldSetFlags()[2] = true; +// return this; +// } +// +// /** Checks whether the 'data_subtype' field has been set */ +// public boolean hasDataSubtype() { +// return fieldSetFlags()[2]; +// } +// +// /** Clears the value of the 
'data_subtype' field */ +// public Builder clearDataSubtype() { +// fieldSetFlags()[2] = false; +// return this; +// } +// +// /** Gets the value of the 'producer_id' field */ +// public Integer getProducerId() { +// return producer_id; +// } +// +// /** Sets the value of the 'producer_id' field */ +// public Builder setProducerId(int value) { +// validate(fields()[3], value); +// this.producer_id = value; +// fieldSetFlags()[3] = true; +// return this; +// } +// +// /** Checks whether the 'producer_id' field has been set */ +// public boolean hasProducerId() { +// return fieldSetFlags()[3]; +// } +// +// /** Clears the value of the 'producer_id' field */ +// public Builder clearProducerId() { +// fieldSetFlags()[3] = false; +// return this; +// } +// +// /** Gets the value of the 'data_source' field */ +// public Integer getDataSource() { +// return data_source; +// } +// +// /** Sets the value of the 'data_source' field */ +// public Builder setDataSource(int value) { +// validate(fields()[4], value); +// this.data_source = value; +// fieldSetFlags()[4] = true; +// return this; +// } +// +// /** Checks whether the 'data_source' field has been set */ +// public boolean hasDataSource() { +// return fieldSetFlags()[4]; +// } +// +// /** Clears the value of the 'data_source' field */ +// public Builder clearDataSource() { +// fieldSetFlags()[4] = false; +// return this; +// } +// +// /** Gets the value of the 'task_id' field */ +// public java.util.List getTaskId() { +// return task_id; +// } +// +// /** Sets the value of the 'task_id' field */ +// public Builder setTaskId(java.util.List value) { +// validate(fields()[5], value); +// this.task_id = value; +// fieldSetFlags()[5] = true; +// return this; +// } +// +// /** Checks whether the 'task_id' field has been set */ +// public boolean hasTaskId() { +// return fieldSetFlags()[5]; +// } +// +// /** Clears the value of the 'task_id' field */ +// public Builder clearTaskId() { +// task_id = null; +// 
fieldSetFlags()[5] = false; +// return this; +// } +// +// /** Gets the value of the 'file_id' field */ +// public Long getFileId() { +// return file_id; +// } +// +// /** Sets the value of the 'file_id' field */ +// public Builder setFileId(long value) { +// validate(fields()[6], value); +// this.file_id = value; +// fieldSetFlags()[6] = true; +// return this; +// } +// +// /** Checks whether the 'file_id' field has been set */ +// public boolean hasFileId() { +// return fieldSetFlags()[6]; +// } +// +// /** Clears the value of the 'file_id' field */ +// public Builder clearFileId() { +// fieldSetFlags()[6] = false; +// return this; +// } +// +// /** Gets the value of the 'file_path' field */ +// public CharSequence getFilePath() { +// return file_path; +// } +// +// /** Sets the value of the 'file_path' field */ +// public Builder setFilePath(CharSequence value) { +// validate(fields()[7], value); +// this.file_path = value; +// fieldSetFlags()[7] = true; +// return this; +// } +// +// /** Checks whether the 'file_path' field has been set */ +// public boolean hasFilePath() { +// return fieldSetFlags()[7]; +// } +// +// /** Clears the value of the 'file_path' field */ +// public Builder clearFilePath() { +// file_path = null; +// fieldSetFlags()[7] = false; +// return this; +// } +// +// /** Gets the value of the 'file_name' field */ +// public CharSequence getFileName() { +// return file_name; +// } +// +// /** Sets the value of the 'file_name' field */ +// public Builder setFileName(CharSequence value) { +// validate(fields()[8], value); +// this.file_name = value; +// fieldSetFlags()[8] = true; +// return this; +// } +// +// /** Checks whether the 'file_name' field has been set */ +// public boolean hasFileName() { +// return fieldSetFlags()[8]; +// } +// +// /** Clears the value of the 'file_name' field */ +// public Builder clearFileName() { +// file_name = null; +// fieldSetFlags()[8] = false; +// return this; +// } +// +// /** Gets the value of the 
'file_size' field */ +// public Float getFileSize() { +// return file_size; +// } +// +// /** Sets the value of the 'file_size' field */ +// public Builder setFileSize(float value) { +// validate(fields()[9], value); +// this.file_size = value; +// fieldSetFlags()[9] = true; +// return this; +// } +// +// /** Checks whether the 'file_size' field has been set */ +// public boolean hasFileSize() { +// return fieldSetFlags()[9]; +// } +// +// /** Clears the value of the 'file_size' field */ +// public Builder clearFileSize() { +// fieldSetFlags()[9] = false; +// return this; +// } +// +// /** Gets the value of the 'md5' field */ +// public CharSequence getMd5() { +// return md5; +// } +// +// /** Sets the value of the 'md5' field */ +// public Builder setMd5(CharSequence value) { +// validate(fields()[10], value); +// this.md5 = value; +// fieldSetFlags()[10] = true; +// return this; +// } +// +// /** Checks whether the 'md5' field has been set */ +// public boolean hasMd5() { +// return fieldSetFlags()[10]; +// } +// +// /** Clears the value of the 'md5' field */ +// public Builder clearMd5() { +// md5 = null; +// fieldSetFlags()[10] = false; +// return this; +// } +// +// /** Gets the value of the 'encrypted' field */ +// public CharSequence getEncrypted() { +// return encrypted; +// } +// +// /** Sets the value of the 'encrypted' field */ +// public Builder setEncrypted(CharSequence value) { +// validate(fields()[11], value); +// this.encrypted = value; +// fieldSetFlags()[11] = true; +// return this; +// } +// +// /** Checks whether the 'encrypted' field has been set */ +// public boolean hasEncrypted() { +// return fieldSetFlags()[11]; +// } +// +// /** Clears the value of the 'encrypted' field */ +// public Builder clearEncrypted() { +// encrypted = null; +// fieldSetFlags()[11] = false; +// return this; +// } +// +// /** Gets the value of the 'timestamp' field */ +// public CharSequence getTimestamp() { +// return timestamp; +// } +// +// /** Sets the value of 
the 'timestamp' field */ +// public Builder setTimestamp(CharSequence value) { +// validate(fields()[12], value); +// this.timestamp = value; +// fieldSetFlags()[12] = true; +// return this; +// } +// +// /** Checks whether the 'timestamp' field has been set */ +// public boolean hasTimestamp() { +// return fieldSetFlags()[12]; +// } +// +// /** Clears the value of the 'timestamp' field */ +// public Builder clearTimestamp() { +// timestamp = null; +// fieldSetFlags()[12] = false; +// return this; +// } +// +// /** Gets the value of the 'parent_id' field */ +// public Long getParentId() { +// return parent_id; +// } +// +// /** Sets the value of the 'parent_id' field */ +// public Builder setParentId(long value) { +// validate(fields()[13], value); +// this.parent_id = value; +// fieldSetFlags()[13] = true; +// return this; +// } +// +// /** Checks whether the 'parent_id' field has been set */ +// public boolean hasParentId() { +// return fieldSetFlags()[13]; +// } +// +// /** Clears the value of the 'parent_id' field */ +// public Builder clearParentId() { +// fieldSetFlags()[13] = false; +// return this; +// } +// +// /** Gets the value of the 'flow_id' field */ +// public Long getFlowId() { +// return flow_id; +// } +// +// /** Sets the value of the 'flow_id' field */ +// public Builder setFlowId(long value) { +// validate(fields()[14], value); +// this.flow_id = value; +// fieldSetFlags()[14] = true; +// return this; +// } +// +// /** Checks whether the 'flow_id' field has been set */ +// public boolean hasFlowId() { +// return fieldSetFlags()[14]; +// } +// +// /** Clears the value of the 'flow_id' field */ +// public Builder clearFlowId() { +// fieldSetFlags()[14] = false; +// return this; +// } +// +// @Override +// public file_tag build() { +// try { +// file_tag record = new file_tag(); +// record.tag_version = fieldSetFlags()[0] ? this.tag_version : (CharSequence) defaultValue(fields()[0]); +// record.data_type = fieldSetFlags()[1] ? 
this.data_type : (Integer) defaultValue(fields()[1]); +// record.data_subtype = fieldSetFlags()[2] ? this.data_subtype : (Integer) defaultValue(fields()[2]); +// record.producer_id = fieldSetFlags()[3] ? this.producer_id : (Integer) defaultValue(fields()[3]); +// record.data_source = fieldSetFlags()[4] ? this.data_source : (Integer) defaultValue(fields()[4]); +// record.task_id = fieldSetFlags()[5] ? this.task_id : (java.util.List) defaultValue(fields()[5]); +// record.file_id = fieldSetFlags()[6] ? this.file_id : (Long) defaultValue(fields()[6]); +// record.file_path = fieldSetFlags()[7] ? this.file_path : (CharSequence) defaultValue(fields()[7]); +// record.file_name = fieldSetFlags()[8] ? this.file_name : (CharSequence) defaultValue(fields()[8]); +// record.file_size = fieldSetFlags()[9] ? this.file_size : (Float) defaultValue(fields()[9]); +// record.md5 = fieldSetFlags()[10] ? this.md5 : (CharSequence) defaultValue(fields()[10]); +// record.encrypted = fieldSetFlags()[11] ? this.encrypted : (CharSequence) defaultValue(fields()[11]); +// record.timestamp = fieldSetFlags()[12] ? this.timestamp : (CharSequence) defaultValue(fields()[12]); +// record.parent_id = fieldSetFlags()[13] ? this.parent_id : (Long) defaultValue(fields()[13]); +// record.flow_id = fieldSetFlags()[14] ? 
this.flow_id : (Long) defaultValue(fields()[14]); +// return record; +// } catch (Exception e) { +// throw new org.apache.avro.AvroRuntimeException(e); +// } +// } +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/AvroCompileTagBean/file_tag_old.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/AvroCompileTagBean/file_tag_old.java new file mode 100644 index 0000000..075030b --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/AvroCompileTagBean/file_tag_old.java @@ -0,0 +1,829 @@ +//package cn.ac.iie.cusflume.sink.bean.tagBean.noUse.AvroCompileTagBean; +// +///** +// * Autogenerated by Avro +// * +// * DO NOT EDIT DIRECTLY +// */ +//@SuppressWarnings("all") +//@org.apache.avro.specific.AvroGenerated +//public class file_tag extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { +// public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"file_tag\",\"fields\":[{\"name\":\"tag_version\",\"type\":\"string\"},{\"name\":\"data_type\",\"type\":\"int\"},{\"name\":\"data_subtype\",\"type\":\"int\"},{\"name\":\"producer_id\",\"type\":\"int\"},{\"name\":\"data_source\",\"type\":\"int\"},{\"name\":\"task_id\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"file_id\",\"type\":\"long\"},{\"name\":\"file_path\",\"type\":\"string\"},{\"name\":\"file_name\",\"type\":\"string\"},{\"name\":\"file_size\",\"type\":\"float\"},{\"name\":\"md5\",\"type\":\"string\"},{\"name\":\"encrypted\",\"type\":\"string\"},{\"name\":\"timestamp\",\"type\":\"string\"},{\"name\":\"parent_id\",\"type\":\"long\"},{\"name\":\"flow_id\",\"type\":\"long\"}]}"); +// public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } +// @Deprecated public CharSequence tag_version; +// @Deprecated public int data_type; +// @Deprecated 
public int data_subtype; +// @Deprecated public int producer_id; +// @Deprecated public int data_source; +// @Deprecated public java.util.List task_id; +// @Deprecated public long file_id; +// @Deprecated public CharSequence file_path; +// @Deprecated public CharSequence file_name; +// @Deprecated public float file_size; +// @Deprecated public CharSequence md5; +// @Deprecated public CharSequence encrypted; +// @Deprecated public CharSequence timestamp; +// @Deprecated public long parent_id; +// @Deprecated public long flow_id; +// +// /** +// * Default constructor. +// */ +// public file_tag() {} +// +// /** +// * All-args constructor. +// */ +// public file_tag(CharSequence tag_version, Integer data_type, Integer data_subtype, Integer producer_id, Integer data_source, java.util.List task_id, Long file_id, CharSequence file_path, CharSequence file_name, Float file_size, CharSequence md5, CharSequence encrypted, CharSequence timestamp, Long parent_id, Long flow_id) { +// this.tag_version = tag_version; +// this.data_type = data_type; +// this.data_subtype = data_subtype; +// this.producer_id = producer_id; +// this.data_source = data_source; +// this.task_id = task_id; +// this.file_id = file_id; +// this.file_path = file_path; +// this.file_name = file_name; +// this.file_size = file_size; +// this.md5 = md5; +// this.encrypted = encrypted; +// this.timestamp = timestamp; +// this.parent_id = parent_id; +// this.flow_id = flow_id; +// } +// +// public org.apache.avro.Schema getSchema() { return SCHEMA$; } +// // Used by DatumWriter. Applications should not call. 
+// public Object get(int field$) { +// switch (field$) { +// case 0: return tag_version; +// case 1: return data_type; +// case 2: return data_subtype; +// case 3: return producer_id; +// case 4: return data_source; +// case 5: return task_id; +// case 6: return file_id; +// case 7: return file_path; +// case 8: return file_name; +// case 9: return file_size; +// case 10: return md5; +// case 11: return encrypted; +// case 12: return timestamp; +// case 13: return parent_id; +// case 14: return flow_id; +// default: throw new org.apache.avro.AvroRuntimeException("Bad index"); +// } +// } +// // Used by DatumReader. Applications should not call. +// @SuppressWarnings(value="unchecked") +// public void put(int field$, Object value$) { +// switch (field$) { +// case 0: tag_version = (CharSequence)value$; break; +// case 1: data_type = (Integer)value$; break; +// case 2: data_subtype = (Integer)value$; break; +// case 3: producer_id = (Integer)value$; break; +// case 4: data_source = (Integer)value$; break; +// case 5: task_id = (java.util.List)value$; break; +// case 6: file_id = (Long)value$; break; +// case 7: file_path = (CharSequence)value$; break; +// case 8: file_name = (CharSequence)value$; break; +// case 9: file_size = (Float)value$; break; +// case 10: md5 = (CharSequence)value$; break; +// case 11: encrypted = (CharSequence)value$; break; +// case 12: timestamp = (CharSequence)value$; break; +// case 13: parent_id = (Long)value$; break; +// case 14: flow_id = (Long)value$; break; +// default: throw new org.apache.avro.AvroRuntimeException("Bad index"); +// } +// } +// +// /** +// * Gets the value of the 'tag_version' field. +// */ +// public CharSequence getTagVersion() { +// return tag_version; +// } +// +// /** +// * Sets the value of the 'tag_version' field. +// * @param value the value to set. +// */ +// public void setTagVersion(CharSequence value) { +// this.tag_version = value; +// } +// +// /** +// * Gets the value of the 'data_type' field. 
+// */ +// public Integer getDataType() { +// return data_type; +// } +// +// /** +// * Sets the value of the 'data_type' field. +// * @param value the value to set. +// */ +// public void setDataType(Integer value) { +// this.data_type = value; +// } +// +// /** +// * Gets the value of the 'data_subtype' field. +// */ +// public Integer getDataSubtype() { +// return data_subtype; +// } +// +// /** +// * Sets the value of the 'data_subtype' field. +// * @param value the value to set. +// */ +// public void setDataSubtype(Integer value) { +// this.data_subtype = value; +// } +// +// /** +// * Gets the value of the 'producer_id' field. +// */ +// public Integer getProducerId() { +// return producer_id; +// } +// +// /** +// * Sets the value of the 'producer_id' field. +// * @param value the value to set. +// */ +// public void setProducerId(Integer value) { +// this.producer_id = value; +// } +// +// /** +// * Gets the value of the 'data_source' field. +// */ +// public Integer getDataSource() { +// return data_source; +// } +// +// /** +// * Sets the value of the 'data_source' field. +// * @param value the value to set. +// */ +// public void setDataSource(Integer value) { +// this.data_source = value; +// } +// +// /** +// * Gets the value of the 'task_id' field. +// */ +// public java.util.List getTaskId() { +// return task_id; +// } +// +// /** +// * Sets the value of the 'task_id' field. +// * @param value the value to set. +// */ +// public void setTaskId(java.util.List value) { +// this.task_id = value; +// } +// +// /** +// * Gets the value of the 'file_id' field. +// */ +// public Long getFileId() { +// return file_id; +// } +// +// /** +// * Sets the value of the 'file_id' field. +// * @param value the value to set. +// */ +// public void setFileId(Long value) { +// this.file_id = value; +// } +// +// /** +// * Gets the value of the 'file_path' field. 
+// */ +// public CharSequence getFilePath() { +// return file_path; +// } +// +// /** +// * Sets the value of the 'file_path' field. +// * @param value the value to set. +// */ +// public void setFilePath(CharSequence value) { +// this.file_path = value; +// } +// +// /** +// * Gets the value of the 'file_name' field. +// */ +// public CharSequence getFileName() { +// return file_name; +// } +// +// /** +// * Sets the value of the 'file_name' field. +// * @param value the value to set. +// */ +// public void setFileName(CharSequence value) { +// this.file_name = value; +// } +// +// /** +// * Gets the value of the 'file_size' field. +// */ +// public Float getFileSize() { +// return file_size; +// } +// +// /** +// * Sets the value of the 'file_size' field. +// * @param value the value to set. +// */ +// public void setFileSize(Float value) { +// this.file_size = value; +// } +// +// /** +// * Gets the value of the 'md5' field. +// */ +// public CharSequence getMd5() { +// return md5; +// } +// +// /** +// * Sets the value of the 'md5' field. +// * @param value the value to set. +// */ +// public void setMd5(CharSequence value) { +// this.md5 = value; +// } +// +// /** +// * Gets the value of the 'encrypted' field. +// */ +// public CharSequence getEncrypted() { +// return encrypted; +// } +// +// /** +// * Sets the value of the 'encrypted' field. +// * @param value the value to set. +// */ +// public void setEncrypted(CharSequence value) { +// this.encrypted = value; +// } +// +// /** +// * Gets the value of the 'timestamp' field. +// */ +// public CharSequence getTimestamp() { +// return timestamp; +// } +// +// /** +// * Sets the value of the 'timestamp' field. +// * @param value the value to set. +// */ +// public void setTimestamp(CharSequence value) { +// this.timestamp = value; +// } +// +// /** +// * Gets the value of the 'parent_id' field. 
+// */ +// public Long getParentId() { +// return parent_id; +// } +// +// /** +// * Sets the value of the 'parent_id' field. +// * @param value the value to set. +// */ +// public void setParentId(Long value) { +// this.parent_id = value; +// } +// +// /** +// * Gets the value of the 'flow_id' field. +// */ +// public Long getFlowId() { +// return flow_id; +// } +// +// /** +// * Sets the value of the 'flow_id' field. +// * @param value the value to set. +// */ +// public void setFlowId(Long value) { +// this.flow_id = value; +// } +// +// /** Creates a new file_tag RecordBuilder */ +// public static Builder newBuilder() { +// return new Builder(); +// } +// +// /** Creates a new file_tag RecordBuilder by copying an existing Builder */ +// public static Builder newBuilder(Builder other) { +// return new Builder(other); +// } +// +// /** Creates a new file_tag RecordBuilder by copying an existing file_tag instance */ +// public static Builder newBuilder(file_tag other) { +// return new Builder(other); +// } +// +// /** +// * RecordBuilder for file_tag instances. 
+// */ +// public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase +// implements org.apache.avro.data.RecordBuilder { +// +// private CharSequence tag_version; +// private int data_type; +// private int data_subtype; +// private int producer_id; +// private int data_source; +// private java.util.List task_id; +// private long file_id; +// private CharSequence file_path; +// private CharSequence file_name; +// private float file_size; +// private CharSequence md5; +// private CharSequence encrypted; +// private CharSequence timestamp; +// private long parent_id; +// private long flow_id; +// +// /** Creates a new Builder */ +// private Builder() { +// super(file_tag.SCHEMA$); +// } +// +// /** Creates a Builder by copying an existing Builder */ +// private Builder(Builder other) { +// super(other); +// } +// +// /** Creates a Builder by copying an existing file_tag instance */ +// private Builder(file_tag other) { +// super(file_tag.SCHEMA$); +// if (isValidValue(fields()[0], other.tag_version)) { +// this.tag_version = data().deepCopy(fields()[0].schema(), other.tag_version); +// fieldSetFlags()[0] = true; +// } +// if (isValidValue(fields()[1], other.data_type)) { +// this.data_type = data().deepCopy(fields()[1].schema(), other.data_type); +// fieldSetFlags()[1] = true; +// } +// if (isValidValue(fields()[2], other.data_subtype)) { +// this.data_subtype = data().deepCopy(fields()[2].schema(), other.data_subtype); +// fieldSetFlags()[2] = true; +// } +// if (isValidValue(fields()[3], other.producer_id)) { +// this.producer_id = data().deepCopy(fields()[3].schema(), other.producer_id); +// fieldSetFlags()[3] = true; +// } +// if (isValidValue(fields()[4], other.data_source)) { +// this.data_source = data().deepCopy(fields()[4].schema(), other.data_source); +// fieldSetFlags()[4] = true; +// } +// if (isValidValue(fields()[5], other.task_id)) { +// this.task_id = data().deepCopy(fields()[5].schema(), other.task_id); +// 
fieldSetFlags()[5] = true; +// } +// if (isValidValue(fields()[6], other.file_id)) { +// this.file_id = data().deepCopy(fields()[6].schema(), other.file_id); +// fieldSetFlags()[6] = true; +// } +// if (isValidValue(fields()[7], other.file_path)) { +// this.file_path = data().deepCopy(fields()[7].schema(), other.file_path); +// fieldSetFlags()[7] = true; +// } +// if (isValidValue(fields()[8], other.file_name)) { +// this.file_name = data().deepCopy(fields()[8].schema(), other.file_name); +// fieldSetFlags()[8] = true; +// } +// if (isValidValue(fields()[9], other.file_size)) { +// this.file_size = data().deepCopy(fields()[9].schema(), other.file_size); +// fieldSetFlags()[9] = true; +// } +// if (isValidValue(fields()[10], other.md5)) { +// this.md5 = data().deepCopy(fields()[10].schema(), other.md5); +// fieldSetFlags()[10] = true; +// } +// if (isValidValue(fields()[11], other.encrypted)) { +// this.encrypted = data().deepCopy(fields()[11].schema(), other.encrypted); +// fieldSetFlags()[11] = true; +// } +// if (isValidValue(fields()[12], other.timestamp)) { +// this.timestamp = data().deepCopy(fields()[12].schema(), other.timestamp); +// fieldSetFlags()[12] = true; +// } +// if (isValidValue(fields()[13], other.parent_id)) { +// this.parent_id = data().deepCopy(fields()[13].schema(), other.parent_id); +// fieldSetFlags()[13] = true; +// } +// if (isValidValue(fields()[14], other.flow_id)) { +// this.flow_id = data().deepCopy(fields()[14].schema(), other.flow_id); +// fieldSetFlags()[14] = true; +// } +// } +// +// /** Gets the value of the 'tag_version' field */ +// public CharSequence getTagVersion() { +// return tag_version; +// } +// +// /** Sets the value of the 'tag_version' field */ +// public Builder setTagVersion(CharSequence value) { +// validate(fields()[0], value); +// this.tag_version = value; +// fieldSetFlags()[0] = true; +// return this; +// } +// +// /** Checks whether the 'tag_version' field has been set */ +// public boolean hasTagVersion() { 
+// return fieldSetFlags()[0]; +// } +// +// /** Clears the value of the 'tag_version' field */ +// public Builder clearTagVersion() { +// tag_version = null; +// fieldSetFlags()[0] = false; +// return this; +// } +// +// /** Gets the value of the 'data_type' field */ +// public Integer getDataType() { +// return data_type; +// } +// +// /** Sets the value of the 'data_type' field */ +// public Builder setDataType(int value) { +// validate(fields()[1], value); +// this.data_type = value; +// fieldSetFlags()[1] = true; +// return this; +// } +// +// /** Checks whether the 'data_type' field has been set */ +// public boolean hasDataType() { +// return fieldSetFlags()[1]; +// } +// +// /** Clears the value of the 'data_type' field */ +// public Builder clearDataType() { +// fieldSetFlags()[1] = false; +// return this; +// } +// +// /** Gets the value of the 'data_subtype' field */ +// public Integer getDataSubtype() { +// return data_subtype; +// } +// +// /** Sets the value of the 'data_subtype' field */ +// public Builder setDataSubtype(int value) { +// validate(fields()[2], value); +// this.data_subtype = value; +// fieldSetFlags()[2] = true; +// return this; +// } +// +// /** Checks whether the 'data_subtype' field has been set */ +// public boolean hasDataSubtype() { +// return fieldSetFlags()[2]; +// } +// +// /** Clears the value of the 'data_subtype' field */ +// public Builder clearDataSubtype() { +// fieldSetFlags()[2] = false; +// return this; +// } +// +// /** Gets the value of the 'producer_id' field */ +// public Integer getProducerId() { +// return producer_id; +// } +// +// /** Sets the value of the 'producer_id' field */ +// public Builder setProducerId(int value) { +// validate(fields()[3], value); +// this.producer_id = value; +// fieldSetFlags()[3] = true; +// return this; +// } +// +// /** Checks whether the 'producer_id' field has been set */ +// public boolean hasProducerId() { +// return fieldSetFlags()[3]; +// } +// +// /** Clears the value of 
the 'producer_id' field */ +// public Builder clearProducerId() { +// fieldSetFlags()[3] = false; +// return this; +// } +// +// /** Gets the value of the 'data_source' field */ +// public Integer getDataSource() { +// return data_source; +// } +// +// /** Sets the value of the 'data_source' field */ +// public Builder setDataSource(int value) { +// validate(fields()[4], value); +// this.data_source = value; +// fieldSetFlags()[4] = true; +// return this; +// } +// +// /** Checks whether the 'data_source' field has been set */ +// public boolean hasDataSource() { +// return fieldSetFlags()[4]; +// } +// +// /** Clears the value of the 'data_source' field */ +// public Builder clearDataSource() { +// fieldSetFlags()[4] = false; +// return this; +// } +// +// /** Gets the value of the 'task_id' field */ +// public java.util.List getTaskId() { +// return task_id; +// } +// +// /** Sets the value of the 'task_id' field */ +// public Builder setTaskId(java.util.List value) { +// validate(fields()[5], value); +// this.task_id = value; +// fieldSetFlags()[5] = true; +// return this; +// } +// +// /** Checks whether the 'task_id' field has been set */ +// public boolean hasTaskId() { +// return fieldSetFlags()[5]; +// } +// +// /** Clears the value of the 'task_id' field */ +// public Builder clearTaskId() { +// task_id = null; +// fieldSetFlags()[5] = false; +// return this; +// } +// +// /** Gets the value of the 'file_id' field */ +// public Long getFileId() { +// return file_id; +// } +// +// /** Sets the value of the 'file_id' field */ +// public Builder setFileId(long value) { +// validate(fields()[6], value); +// this.file_id = value; +// fieldSetFlags()[6] = true; +// return this; +// } +// +// /** Checks whether the 'file_id' field has been set */ +// public boolean hasFileId() { +// return fieldSetFlags()[6]; +// } +// +// /** Clears the value of the 'file_id' field */ +// public Builder clearFileId() { +// fieldSetFlags()[6] = false; +// return this; +// } +// 
+// /** Gets the value of the 'file_path' field */ +// public CharSequence getFilePath() { +// return file_path; +// } +// +// /** Sets the value of the 'file_path' field */ +// public Builder setFilePath(CharSequence value) { +// validate(fields()[7], value); +// this.file_path = value; +// fieldSetFlags()[7] = true; +// return this; +// } +// +// /** Checks whether the 'file_path' field has been set */ +// public boolean hasFilePath() { +// return fieldSetFlags()[7]; +// } +// +// /** Clears the value of the 'file_path' field */ +// public Builder clearFilePath() { +// file_path = null; +// fieldSetFlags()[7] = false; +// return this; +// } +// +// /** Gets the value of the 'file_name' field */ +// public CharSequence getFileName() { +// return file_name; +// } +// +// /** Sets the value of the 'file_name' field */ +// public Builder setFileName(CharSequence value) { +// validate(fields()[8], value); +// this.file_name = value; +// fieldSetFlags()[8] = true; +// return this; +// } +// +// /** Checks whether the 'file_name' field has been set */ +// public boolean hasFileName() { +// return fieldSetFlags()[8]; +// } +// +// /** Clears the value of the 'file_name' field */ +// public Builder clearFileName() { +// file_name = null; +// fieldSetFlags()[8] = false; +// return this; +// } +// +// /** Gets the value of the 'file_size' field */ +// public Float getFileSize() { +// return file_size; +// } +// +// /** Sets the value of the 'file_size' field */ +// public Builder setFileSize(float value) { +// validate(fields()[9], value); +// this.file_size = value; +// fieldSetFlags()[9] = true; +// return this; +// } +// +// /** Checks whether the 'file_size' field has been set */ +// public boolean hasFileSize() { +// return fieldSetFlags()[9]; +// } +// +// /** Clears the value of the 'file_size' field */ +// public Builder clearFileSize() { +// fieldSetFlags()[9] = false; +// return this; +// } +// +// /** Gets the value of the 'md5' field */ +// public CharSequence 
getMd5() { +// return md5; +// } +// +// /** Sets the value of the 'md5' field */ +// public Builder setMd5(CharSequence value) { +// validate(fields()[10], value); +// this.md5 = value; +// fieldSetFlags()[10] = true; +// return this; +// } +// +// /** Checks whether the 'md5' field has been set */ +// public boolean hasMd5() { +// return fieldSetFlags()[10]; +// } +// +// /** Clears the value of the 'md5' field */ +// public Builder clearMd5() { +// md5 = null; +// fieldSetFlags()[10] = false; +// return this; +// } +// +// /** Gets the value of the 'encrypted' field */ +// public CharSequence getEncrypted() { +// return encrypted; +// } +// +// /** Sets the value of the 'encrypted' field */ +// public Builder setEncrypted(CharSequence value) { +// validate(fields()[11], value); +// this.encrypted = value; +// fieldSetFlags()[11] = true; +// return this; +// } +// +// /** Checks whether the 'encrypted' field has been set */ +// public boolean hasEncrypted() { +// return fieldSetFlags()[11]; +// } +// +// /** Clears the value of the 'encrypted' field */ +// public Builder clearEncrypted() { +// encrypted = null; +// fieldSetFlags()[11] = false; +// return this; +// } +// +// /** Gets the value of the 'timestamp' field */ +// public CharSequence getTimestamp() { +// return timestamp; +// } +// +// /** Sets the value of the 'timestamp' field */ +// public Builder setTimestamp(CharSequence value) { +// validate(fields()[12], value); +// this.timestamp = value; +// fieldSetFlags()[12] = true; +// return this; +// } +// +// /** Checks whether the 'timestamp' field has been set */ +// public boolean hasTimestamp() { +// return fieldSetFlags()[12]; +// } +// +// /** Clears the value of the 'timestamp' field */ +// public Builder clearTimestamp() { +// timestamp = null; +// fieldSetFlags()[12] = false; +// return this; +// } +// +// /** Gets the value of the 'parent_id' field */ +// public Long getParentId() { +// return parent_id; +// } +// +// /** Sets the value of the 
'parent_id' field */ +// public Builder setParentId(long value) { +// validate(fields()[13], value); +// this.parent_id = value; +// fieldSetFlags()[13] = true; +// return this; +// } +// +// /** Checks whether the 'parent_id' field has been set */ +// public boolean hasParentId() { +// return fieldSetFlags()[13]; +// } +// +// /** Clears the value of the 'parent_id' field */ +// public Builder clearParentId() { +// fieldSetFlags()[13] = false; +// return this; +// } +// +// /** Gets the value of the 'flow_id' field */ +// public Long getFlowId() { +// return flow_id; +// } +// +// /** Sets the value of the 'flow_id' field */ +// public Builder setFlowId(long value) { +// validate(fields()[14], value); +// this.flow_id = value; +// fieldSetFlags()[14] = true; +// return this; +// } +// +// /** Checks whether the 'flow_id' field has been set */ +// public boolean hasFlowId() { +// return fieldSetFlags()[14]; +// } +// +// /** Clears the value of the 'flow_id' field */ +// public Builder clearFlowId() { +// fieldSetFlags()[14] = false; +// return this; +// } +// +// @Override +// public file_tag build() { +// try { +// file_tag record = new file_tag(); +// record.tag_version = fieldSetFlags()[0] ? this.tag_version : (CharSequence) defaultValue(fields()[0]); +// record.data_type = fieldSetFlags()[1] ? this.data_type : (Integer) defaultValue(fields()[1]); +// record.data_subtype = fieldSetFlags()[2] ? this.data_subtype : (Integer) defaultValue(fields()[2]); +// record.producer_id = fieldSetFlags()[3] ? this.producer_id : (Integer) defaultValue(fields()[3]); +// record.data_source = fieldSetFlags()[4] ? this.data_source : (Integer) defaultValue(fields()[4]); +// record.task_id = fieldSetFlags()[5] ? this.task_id : (java.util.List) defaultValue(fields()[5]); +// record.file_id = fieldSetFlags()[6] ? this.file_id : (Long) defaultValue(fields()[6]); +// record.file_path = fieldSetFlags()[7] ? 
this.file_path : (CharSequence) defaultValue(fields()[7]); +// record.file_name = fieldSetFlags()[8] ? this.file_name : (CharSequence) defaultValue(fields()[8]); +// record.file_size = fieldSetFlags()[9] ? this.file_size : (Float) defaultValue(fields()[9]); +// record.md5 = fieldSetFlags()[10] ? this.md5 : (CharSequence) defaultValue(fields()[10]); +// record.encrypted = fieldSetFlags()[11] ? this.encrypted : (CharSequence) defaultValue(fields()[11]); +// record.timestamp = fieldSetFlags()[12] ? this.timestamp : (CharSequence) defaultValue(fields()[12]); +// record.parent_id = fieldSetFlags()[13] ? this.parent_id : (Long) defaultValue(fields()[13]); +// record.flow_id = fieldSetFlags()[14] ? this.flow_id : (Long) defaultValue(fields()[14]); +// return record; +// } catch (Exception e) { +// throw new org.apache.avro.AvroRuntimeException(e); +// } +// } +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/fileTagBean/FILE_X_TAG_BEAN.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/fileTagBean/FILE_X_TAG_BEAN.java new file mode 100644 index 0000000..afe2299 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/fileTagBean/FILE_X_TAG_BEAN.java @@ -0,0 +1,78 @@ +//package cn.ac.iie.cusflume.sink.bean.tagBean.noUse.fileTagBean; +// +//import java.util.Arrays; +// +//public class FILE_X_TAG_BEAN { +// +// private String tag_version; +// private String data_type; +// private String data_subtype; +// private String producer_id; +// // private int producer_id; +// private String data_source; +// private String[] task_id; +// +//// public static void main(String[] args) { +//// System.out.println("{\"tag_version\":\"1.0\",\"data_type\" : \"log\", \"data_subtype\" : \"mail\",\"provider\" : \"011\",\"data_source\":\"056\",\"task_name\":[\"task1\",\"task2\",\"task3\"]}"); +//// } +// +// public String getTag_version() { +// return tag_version; 
+// } +// +// public void setTag_version(String tag_version) { +// this.tag_version = tag_version; +// } +// +// public String getData_type() { +// return data_type; +// } +// +// public void setData_type(String data_type) { +// this.data_type = data_type; +// } +// +// public String getData_subtype() { +// return data_subtype; +// } +// +// public void setData_subtype(String data_subtype) { +// this.data_subtype = data_subtype; +// } +// +// public String getProducer_id() { +// return producer_id; +// } +// +// public void setProducer_id(String producer_id) { +// this.producer_id = producer_id; +// } +// +// public String getData_source() { +// return data_source; +// } +// +// public void setData_source(String data_source) { +// this.data_source = data_source; +// } +// +// public String[] getTask_id() { +// return task_id; +// } +// +// public void setTask_id(String[] task_id) { +// this.task_id = task_id; +// } +// +// @Override +// public String toString() { +// return "FILE_X_TAG_BEAN{" + +// "tag_version='" + tag_version + '\'' + +// ", data_type='" + data_type + '\'' + +// ", data_subtype='" + data_subtype + '\'' + +// ", producer_id='" + producer_id + '\'' + +// ", data_source='" + data_source + '\'' + +// ", task_id=" + Arrays.toString(task_id) + +// '}'; +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/logTagBean/LOG_X_TAG_BEAN.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/logTagBean/LOG_X_TAG_BEAN.java new file mode 100644 index 0000000..bdabd4e --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/tagBean/noUse/logTagBean/LOG_X_TAG_BEAN.java @@ -0,0 +1,77 @@ +//package cn.ac.iie.cusflume.sink.bean.tagBean.noUse.logTagBean; +// +//import java.util.Arrays; +// +//public class LOG_X_TAG_BEAN { +// +// private String tag_version; +// private String data_type; +// private String data_subtype; +// private String producer_id; +// 
private String data_source; +// private String[] task_id; +// +// // public static void main(String[] args) { +//// System.out.println("{\"tag_version\":\"1.0\",\"data_type\" : \"log\", \"data_subtype\" : \"mail\",\"provider\" : \"011\",\"data_source\":\"056\",\"task_name\":[\"task1\",\"task2\",\"task3\"]}"); +//// } +// +// public String getTag_version() { +// return tag_version; +// } +// +// public void setTag_version(String tag_version) { +// this.tag_version = tag_version; +// } +// +// public String getData_type() { +// return data_type; +// } +// +// public void setData_type(String data_type) { +// this.data_type = data_type; +// } +// +// public String getData_subtype() { +// return data_subtype; +// } +// +// public void setData_subtype(String data_subtype) { +// this.data_subtype = data_subtype; +// } +// +// public String getProducer_id() { +// return producer_id; +// } +// +// public void setProducer_id(String producer_id) { +// this.producer_id = producer_id; +// } +// +// public String getData_source() { +// return data_source; +// } +// +// public void setData_source(String data_source) { +// this.data_source = data_source; +// } +// +// public String[] getTask_id() { +// return task_id; +// } +// +// public void setTask_id(String[] task_id) { +// this.task_id = task_id; +// } +// +// @Override +// public String toString() { +// return "LOG_X_TAG_BEAN{" + +// "tag_version='" + tag_version + '\'' + +// ", data_type='" + data_type + '\'' + +// ", data_subtype='" + data_subtype + '\'' + +// ", producer_id='" + producer_id + '\'' + +// ", data_source='" + data_source + '\'' + +// ", task_id=" + Arrays.toString(task_id) + +// '}'; +// } +//} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/FILE_TAG_BEAN_TEST.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/FILE_TAG_BEAN_TEST.java new file mode 100644 index 0000000..1f9c255 --- /dev/null +++ 
b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/FILE_TAG_BEAN_TEST.java @@ -0,0 +1,191 @@ +package cn.ac.iie.cusflume.sink.bean.testBean; + +/** + * 接收NTC-COLLECT-FILE-LOG数据,是独立出来的文件标签 + * 处理方式按照非文件成批处理,不设置FilePath头部 + */ +public class FILE_TAG_BEAN_TEST { + /** + * 旧 + * @return + */ + private String tag_version; + private long data_type; + private long data_subtype; + private long producer_id; + private long data_source; + private long task_id; + + private long file_id; + private String file_path; + private String file_name; + private String file_name_charset;//新增-20191216 + private double file_size; + private String md5; + private String encrypted; + private String timestamp; + private long parent_id; +// private long parent_type; + private long flow_id; + + private String x_tag;//新增-20191216 + + public long getData_type() { + return data_type; + } + + public void setData_type(long data_type) { + this.data_type = data_type; + } + + public String getTag_version() { + return tag_version; + } + + public void setTag_version(String tag_version) { + this.tag_version = tag_version; + } + + public long getData_subtype() { + return data_subtype; + } + + public void setData_subtype(long data_subtype) { + this.data_subtype = data_subtype; + } + + public long getProducer_id() { + return producer_id; + } + + public void setProducer_id(long producer_id) { + this.producer_id = producer_id; + } + + public long getData_source() { + return data_source; + } + + public void setData_source(long data_source) { + this.data_source = data_source; + } + + public long getTask_id() { + return task_id; + } + + public void setTask_id(long task_id) { + this.task_id = task_id; + } + + public long getFile_id() { + return file_id; + } + + public void setFile_id(long file_id) { + this.file_id = file_id; + } + + public String getFile_path() { + return file_path; + } + + public void setFile_path(String file_path) { + this.file_path = file_path; + } + + public String 
getFile_name() { + return file_name; + } + + public void setFile_name(String file_name) { + this.file_name = file_name; + } + + public double getFile_size() { + return file_size; + } + + public void setFile_size(double file_size) { + this.file_size = file_size; + } + + public String getMd5() { + return md5; + } + + public void setMd5(String md5) { + this.md5 = md5; + } + + public String getEncrypted() { + return encrypted; + } + + public void setEncrypted(String encrypted) { + this.encrypted = encrypted; + } + + public String getTimestamp() { + return timestamp; + } + + public void setTimestamp(String timestamp) { + this.timestamp = timestamp; + } + + public long getParent_id() { + return parent_id; + } + + public void setParent_id(long parent_id) { + this.parent_id = parent_id; + } + + public long getFlow_id() { + return flow_id; + } + + public void setFlow_id(long flow_id) { + this.flow_id = flow_id; + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + public String getFile_name_charset() { + return file_name_charset; + } + + public void setFile_name_charset(String file_name_charset) { + this.file_name_charset = file_name_charset; + } + + @Override + public String toString() { + return "FILE_TAG_BEAN{" + + "tag_version='" + tag_version + '\'' + + ", data_type=" + data_type + + ", data_subtype=" + data_subtype + + ", producer_id=" + producer_id + + ", data_source=" + data_source + + ", task_id=" + task_id + + ", file_id=" + file_id + + ", file_path='" + file_path + '\'' + + ", file_name='" + file_name + '\'' + + ", file_name_charset='" + file_name_charset + '\'' + + ", file_size=" + file_size + + ", md5='" + md5 + '\'' + + ", encrypted='" + encrypted + '\'' + + ", timestamp='" + timestamp + '\'' + + ", parent_id=" + parent_id + + ", flow_id=" + flow_id + + ", x_tag='" + x_tag + '\'' + + '}'; + } +} diff --git 
a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/LOG_D_TAG_BEAN_TEST.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/LOG_D_TAG_BEAN_TEST.java new file mode 100644 index 0000000..e517d64 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/LOG_D_TAG_BEAN_TEST.java @@ -0,0 +1,114 @@ +package cn.ac.iie.cusflume.sink.bean.testBean; + +import java.util.Arrays; + +public class LOG_D_TAG_BEAN_TEST { + + private String tag_version; + private int data_type; + private int data_subtype; + private int producer_id; + private int data_source; + private int[] task_id; + private long data_id; + private long flow_id; + + private long[] file_id_list; + private String[] file_path_list; + + public String getTag_version() { + return tag_version; + } + + public void setTag_version(String tag_version) { + this.tag_version = tag_version; + } + + public int getData_type() { + return data_type; + } + + public void setData_type(int data_type) { + this.data_type = data_type; + } + + public int getData_subtype() { + return data_subtype; + } + + public void setData_subtype(int data_subtype) { + this.data_subtype = data_subtype; + } + + public int getProducer_id() { + return producer_id; + } + + public void setProducer_id(int producer_id) { + this.producer_id = producer_id; + } + + public int getData_source() { + return data_source; + } + + public void setData_source(int data_source) { + this.data_source = data_source; + } + + public int[] getTask_id() { + return task_id; + } + + public void setTask_id(int[] task_id) { + this.task_id = task_id; + } + + public long getData_id() { + return data_id; + } + + public void setData_id(long data_id) { + this.data_id = data_id; + } + + public long getFlow_id() { + return flow_id; + } + + public void setFlow_id(long flow_id) { + this.flow_id = flow_id; + } + + public long[] getFile_id_list() { + return file_id_list; + } + + public void 
setFile_id_list(long[] file_id_list) { + this.file_id_list = file_id_list; + } + + public String[] getFile_path_list() { + return file_path_list; + } + + public void setFile_path_list(String[] file_path_list) { + this.file_path_list = file_path_list; + } + + @Override + public String toString() { + return "LOG_D_TAG_BEAN_TEST{" + + "tag_version='" + tag_version + '\'' + + ", data_type=" + data_type + + ", data_subtype=" + data_subtype + + ", producer_id=" + producer_id + + ", data_source=" + data_source + + ", task_id=" + Arrays.toString(task_id) + + ", data_id=" + data_id + + ", flow_id=" + flow_id + + ", file_id_list=" + Arrays.toString(file_id_list) + + ", file_path_list=" + Arrays.toString(file_path_list) + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/NTC_COLLECT_MAIL_LOG_TEST.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/NTC_COLLECT_MAIL_LOG_TEST.java new file mode 100644 index 0000000..4db95d3 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/bean/testBean/NTC_COLLECT_MAIL_LOG_TEST.java @@ -0,0 +1,289 @@ +package cn.ac.iie.cusflume.sink.bean.testBean; + +import cn.ac.iie.cusflume.sink.bean.fileBean.CommonLog_File; + +import java.util.Arrays; + +public class NTC_COLLECT_MAIL_LOG_TEST extends CommonLog_File{ + + private String mail_proto; + private String mail_from; + private String mail_to; + private String mail_cc;//新增-20191216 + private String subject; + + private String eml_file_path;//新增-20191216 + private String attachments_path;//新增-20191216 + private String subject_charset; + + //20191209新增 +// private String d_tag;//json字符串版本 + private LOG_D_TAG_BEAN_TEST d_tag;//json版本-20191216 + private String x_tag;//json字符串版本,就算前端传入的为json也会强转为字符串 + + //d_tag字段----↓ + private String tag_version; + private int data_type; + private int data_subtype; + private int producer_id; + private int data_source; + private int[] task_id; + private long 
data_id; + private long flow_id; + + private long[] file_id_list; + private String[] file_path_list; + //d_tag字段----↑ + + //20191209新增 + + private String eml_file;//目前未使用-20191216 + private String eml_file_id;//目前未使用-20191216 + + private String[] attachments;//目前未使用-20191216 + private String attachments_id;//目前未使用-20191216 + + + + + public String getMail_proto() { + return mail_proto; + } + + public void setMail_proto(String mail_proto) { + this.mail_proto = mail_proto; + } + + public String getMail_from() { + return mail_from; + } + + public void setMail_from(String mail_from) { + this.mail_from = mail_from; + } + + public String getMail_to() { + return mail_to; + } + + public void setMail_to(String mail_to) { + this.mail_to = mail_to; + } + + public String getSubject() { + return subject; + } + + public void setSubject(String subject) { + this.subject = subject; + } + + public String getEml_file() { + return eml_file; + } + + public void setEml_file(String eml_file) { + this.eml_file = eml_file; + } + + public String getEml_file_id() { + return eml_file_id; + } + + public void setEml_file_id(String eml_file_id) { + this.eml_file_id = eml_file_id; + } + + public String[] getAttachments() { + return attachments; + } + + public void setAttachments(String[] attachments) { + this.attachments = attachments; + } + + public String getAttachments_id() { + return attachments_id; + } + + public void setAttachments_id(String attachments_id) { + this.attachments_id = attachments_id; + } + + public String getSubject_charset() { + return subject_charset; + } + + public void setSubject_charset(String subject_charset) { + this.subject_charset = subject_charset; + } + + public LOG_D_TAG_BEAN_TEST getD_tag() { + return d_tag; + } + + //给Mail表层字段重新赋值 + public void setD_tag(LOG_D_TAG_BEAN_TEST d_tag) { + this.d_tag = d_tag; + this.tag_version=d_tag.getTag_version(); + this.data_type=d_tag.getData_type(); + this.data_subtype=d_tag.getData_subtype(); + 
this.producer_id=d_tag.getProducer_id(); + this.data_source=d_tag.getData_source(); + this.task_id=d_tag.getTask_id(); + this.data_id=d_tag.getData_id(); + this.flow_id=d_tag.getFlow_id(); + + this.file_id_list=d_tag.getFile_id_list(); + this.file_path_list=d_tag.getFile_path_list(); + } + + public String getX_tag() { + return x_tag; + } + + public void setX_tag(String x_tag) { + this.x_tag = x_tag; + } + + public String getMail_cc() { + return mail_cc; + } + + public void setMail_cc(String mail_cc) { + this.mail_cc = mail_cc; + } + + public String getEml_file_path() { + return eml_file_path; + } + + public void setEml_file_path(String eml_file_path) { + this.eml_file_path = eml_file_path; + } + + public String getAttachments_path() { + return attachments_path; + } + + public void setAttachments_path(String attachments_path) { + this.attachments_path = attachments_path; + } + + public String getTag_version() { + return tag_version; + } + + public void setTag_version(String tag_version) { + this.tag_version = tag_version; + } + + public int getData_type() { + return data_type; + } + + public void setData_type(int data_type) { + this.data_type = data_type; + } + + public int getData_subtype() { + return data_subtype; + } + + public void setData_subtype(int data_subtype) { + this.data_subtype = data_subtype; + } + + public int getProducer_id() { + return producer_id; + } + + public void setProducer_id(int producer_id) { + this.producer_id = producer_id; + } + + public int getData_source() { + return data_source; + } + + public void setData_source(int data_source) { + this.data_source = data_source; + } + + public int[] getTask_id() { + return task_id; + } + + public void setTask_id(int[] task_id) { + this.task_id = task_id; + } + + public long getData_id() { + return data_id; + } + + public void setData_id(long data_id) { + this.data_id = data_id; + } + + public long getFlow_id() { + return flow_id; + } + + public void setFlow_id(long flow_id) { + this.flow_id = 
flow_id; + } + + public long[] getFile_id_list() { + return file_id_list; + } + + public void setFile_id_list(long[] file_id_list) { + this.file_id_list = file_id_list; + } + + public String[] getFile_path_list() { + return file_path_list; + } + + public void setFile_path_list(String[] file_path_list) { + this.file_path_list = file_path_list; + } + + + // @Override +// public String toString() { +// return "NTC_COLLECT_MAIL_LOG{" + +// "mail_proto='" + mail_proto + '\'' + +// ", mail_from='" + mail_from + '\'' + +// ", mail_to='" + mail_to + '\'' + +// ", subject='" + subject + '\'' + +// ", eml_file='" + eml_file + '\'' + +// ", eml_file_id='" + eml_file_id + '\'' + +// ", attachments=" + Arrays.toString(attachments) + +// ", attachments_id='" + attachments_id + '\'' + +// ", subject_charset='" + subject_charset + '\'' + +// '}'; +// } + + + @Override + public String toString() { + return "NTC_COLLECT_MAIL_LOG{" + + "mail_proto='" + mail_proto + '\'' + + ", mail_from='" + mail_from + '\'' + + ", mail_to='" + mail_to + '\'' + + ", mail_cc='" + mail_cc + '\'' + + ", subject='" + subject + '\'' + + ", eml_file_path='" + eml_file_path + '\'' + + ", attachments_path='" + attachments_path + '\'' + + ", subject_charset='" + subject_charset + '\'' + + ", d_tag=" + d_tag + + ", x_tag='" + x_tag + '\'' + + ", eml_file='" + eml_file + '\'' + + ", eml_file_id='" + eml_file_id + '\'' + + ", attachments=" + Arrays.toString(attachments) + + ", attachments_id='" + attachments_id + '\'' + + '}'; + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/InfluxdbUtils.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/InfluxdbUtils.java new file mode 100644 index 0000000..7097476 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/InfluxdbUtils.java @@ -0,0 +1,127 @@ +package cn.ac.iie.cusflume.sink.daoUtils; + +import org.apache.log4j.Logger; +import org.influxdb.InfluxDB; +import 
org.influxdb.InfluxDBFactory; +import org.influxdb.dto.Point; +import org.influxdb.dto.Pong; + +import java.net.InetAddress; + + +public class InfluxdbUtils { + private static Logger logger = Logger.getLogger(InfluxdbUtils.class); + + public static InfluxDB client; + + //全量版 +// public static Boolean insert(long successMsgSum, long failMsgSum, long readyPostMsgSum, long totalMsgSum, +// long successFileSum, long failFileSum, long readyPostFileSum, long totalFileSum, +// long successFileBytesSum, long failFileBytesSum, long readyPostFileBytesSum, long totalFileBytesSum) { + //精简版 + public static Boolean insert(long successMsgSum, long failMsgSum, long readyPostMsgSum, +// public Boolean insert(long successMsgSum, long failMsgSum, long readyPostMsgSum, +// long totalMsgSum, + long successFileSum, +// long failFileSum, + long readyPostFileSum, +// long totalFileSum, + long successFileBytesSum, +// long failFileBytesSum, + long readyPostFileBytesSum +// long totalFileBytesSum + ) { + InetAddress addrThis = null; + try { + addrThis = InetAddress.getLocalHost(); + String ip = addrThis.getHostAddress(); + /** + * 消息计数 + */ + Point point1 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "success").tag("action", RealtimeCountConfig.INFLUX_ACTION_MSG) + .field("sum", successMsgSum) + .build(); + Point point2 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "fail").tag("action", RealtimeCountConfig.INFLUX_ACTION_MSG) + .field("sum", failMsgSum) + .build(); + Point point3 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "ready_post").tag("action", RealtimeCountConfig.INFLUX_ACTION_MSG) + .field("sum", readyPostMsgSum) + .build(); +// Point point4 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) +// .tag("hostname", ip).tag("state", "receive").tag("action", RealtimeCountConfig.INFLUX_ACTION_MSG) +// .field("sum", totalMsgSum) +// .build(); + + /** + * 
文件计数 + */ + Point point5 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "success").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE) + .field("sum", successFileSum) + .build(); +// Point point6 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) +// .tag("hostname", ip).tag("state", "fail").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE) +// .field("sum", failFileSum) +// .build(); + Point point7 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "ready_post").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE) + .field("sum", readyPostFileSum) + .build(); +// Point point8 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) +// .tag("hostname", ip).tag("state", "receive").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE) +// .field("sum", totalFileSum) +// .build(); + + /** + * 文件大小计数 + */ + Point point9 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "success").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE_BYTES) + .field("sum", successFileBytesSum) + .build(); +// Point point10 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) +// .tag("hostname", ip).tag("state", "fail").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE_BYTES) +// .field("sum", failFileBytesSum) +// .build(); + Point point11 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) + .tag("hostname", ip).tag("state", "ready_post").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE_BYTES) + .field("sum", readyPostFileBytesSum) + .build(); +// Point point12 = Point.measurement(RealtimeCountConfig.INFLUX_TABLE) +// .tag("hostname", ip).tag("state", "receive").tag("action", RealtimeCountConfig.INFLUX_ACTION_FILE_BYTES) +// .field("sum", totalFileBytesSum) +// .build(); + + client = InfluxDBFactory.connect(RealtimeCountConfig.INFLUX_ADDRESS, RealtimeCountConfig.INFLUX_USERNAME, RealtimeCountConfig.INFLUX_PASSWORD); + Pong pong = client.ping(); + 
if (pong != null) { + logger.info("Connect InfluxDB Success!"); + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point1); + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point2); + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point3); +// client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point4); + + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point5); +// client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point6); + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point7); +// client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point8); + + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point9); +// client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point10); + client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point11); +// client.write(RealtimeCountConfig.INFLUX_DATABASE, "", point12); + return true; + } else { + logger.error("Connect InfluxDB Failed!"); + return false; + } + + } catch (Exception e) { + e.printStackTrace(); + return false; + } + } +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/KafkaDB.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/KafkaDB.java new file mode 100644 index 0000000..bc41445 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/KafkaDB.java @@ -0,0 +1,103 @@ +package cn.ac.iie.cusflume.sink.daoUtils; + +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.Producer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.log4j.Logger; + +import java.text.SimpleDateFormat; +import java.util.Properties; + +/** + * 发送数据的kafka工具类 + */ +public class KafkaDB { + private static Logger logger = Logger.getLogger(KafkaDB.class); + + /** + * kafka生产者,用于向kafka中发送消息 + */ + private static Producer producer; + + /** + * kafka生产者适配器(单例),用来代理kafka生产者发送消息 + */ + private static KafkaDB kafkaDB; + + private KafkaDB() { + 
getProducer(); + } + + public static KafkaDB getInstance() { + if (kafkaDB == null) { + kafkaDB = new KafkaDB(); + } + return kafkaDB; + } + + //将无法处理的数据发往kafka,由另外一个程序处理. +// public void postFailMsgToKafka(String topicName, String jsonStr, Long timeSend) { + + /** + * @param topicName 处理失败的数据发往topic的名称 + * @param jsonStr + */ + public void postFailMsgToKafka(String topicName, String jsonStr) { + try { + switch (topicName) { + case "NTC-COLLECT-MAIL-LOG": + producer.send(new ProducerRecord<>(RealtimeCountConfig.FAIL_NTC_COLLECT_MAIL_LOG, jsonStr)); + break; + case "NTC-COLLECT-HTTP-DOC-LOG": + producer.send(new ProducerRecord<>(RealtimeCountConfig.FAIL_NTC_COLLECT_HTTP_DOC_LOG, jsonStr)); + break; + case "NTC-COLLECT-FTP-DOC-LOG": + producer.send(new ProducerRecord<>(RealtimeCountConfig.FAIL_NTC_COLLECT_FTP_DOC_LOG, jsonStr)); + break; + default: + logger.warn("KafkaDB postFailMsgToKafka kafkaTopicName is unknown!===>>>" + topicName + "<<<=== And this message is===>>>" + jsonStr + "<<<==="); + break; + } + } catch (Exception e) { + logger.error("KafkaDB postFailMsgToKafka send to kafka error!===>>>" + e); + e.printStackTrace(); + } + producer.flush(); + } + + /** + * 根据kafka生产者配置信息初始化kafka消息生产者,只初始化一次 + */ + private void getProducer() { + Properties properties = new Properties(); + properties.put("bootstrap.servers", RealtimeCountConfig.BOOTSTRAP_OUTPUT_SERVERS); + properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + properties.put("acks", "1"); + properties.put("linger.ms", "2"); + properties.put("request.timeout.ms", 20000); + properties.put("batch.size", 262144); + properties.put("buffer.memory", 33554432); + producer = new KafkaProducer(properties); + } + + //1分钟 + private static String generateTimeWithInterval() { + Long stamp = System.currentTimeMillis() + 60000L; + Long stamp1 = stamp / 60000 * 60000; + SimpleDateFormat df 
= new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + return df.format(stamp1); + } + + //专属于GetNtcOriBoltDC的时间戳计时规整器 +// private String generateTimeWithIntervalForGetNtcOriBoltDC() { +// Long timeControllerLon = RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_FREQ_SECS * 1000L; +// int timeControllerInt = RealtimeCountConfig.TOPOLOGY_TICK_TUPLE_FREQ_SECS * 1000; +// Long stamp = System.currentTimeMillis() + timeControllerLon; +// Long stamp1 = stamp / timeControllerInt * timeControllerInt; +// SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); +//// return stamp5 / 1000; +// return df.format(stamp1); +// } + +} diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/RealtimeCountConfig.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/RealtimeCountConfig.java new file mode 100644 index 0000000..050c86d --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/RealtimeCountConfig.java @@ -0,0 +1,108 @@ +package cn.ac.iie.cusflume.sink.daoUtils; + +import java.io.Serializable; + +public class RealtimeCountConfig implements Serializable { + + private static final long serialVersionUID = -8649024767966235184L; + + /** + * realtime_service_config.properties + */ + public static final String BOOTSTRAP_SERVERS = RealtimeCountConfigurations.getStringProperty(0, "bootstrap.servers"); + public static final String BOOTSTRAP_OUTPUT_SERVERS = RealtimeCountConfigurations.getStringProperty(0, "bootstrap.output.servers"); + public static final Integer BATCH_INSERT_NUM = RealtimeCountConfigurations.getIntProperty(0, "batch.insert.num"); + public static final String GROUP_ID_PREFIX = RealtimeCountConfigurations.getStringProperty(0, "group.id.prefix");//groupid前缀 + public static final String GROUP_ID_SUFFIX = RealtimeCountConfigurations.getStringProperty(0, "group.id.suffix");//groupid后缀 + public static final String AUTO_OFFSET_RESET = RealtimeCountConfigurations.getStringProperty(0, 
"auto.offset.reset"); + + public static final String FAIL_NTC_COLLECT_MAIL_LOG = RealtimeCountConfigurations.getStringProperty(0, "fail.ntc.collect.mail.log"); + public static final String FAIL_NTC_COLLECT_HTTP_DOC_LOG = RealtimeCountConfigurations.getStringProperty(0, "fail.ntc.collect.http.doc.log"); + public static final String FAIL_NTC_COLLECT_FTP_DOC_LOG = RealtimeCountConfigurations.getStringProperty(0, "fail.ntc.collect.ftp.doc.log"); + + /** + * 状态X-Tag + */ + public static final String MONITOR_NOFILE_MSG_X_TAG = RealtimeCountConfigurations.getStringProperty(0, "monitor.nofile.msg.xtag"); + + /** + * 放到YbHttpAvroSinkFile文件中的monitorMsgXTag配置 + */ + public static final String MONITOR_MSG_SINK_FILE_XTAG = RealtimeCountConfigurations.getStringProperty(0, "monitor.msg.sink.file.xtag"); + + /** + * 监控器类型:一部(yb):状态上报;广东(gd):日志打印,然后靠脚本收集到influxdb上传(外部脚本完成) + */ + public static final String MONITOR_TYPE = RealtimeCountConfigurations.getStringProperty(0, "monitor.type"); + + /** + * 状态上报所需参数-仅一部-系统组件编码-目前24832-20191222 + */ + public static final String MONITOR_SYSTEM_COMPONENT_CODE = RealtimeCountConfigurations.getStringProperty(0, "monitor.system.component.code"); + public static final String MONITOR_SYSTEM_COMPONENT_CODE_FLUME = RealtimeCountConfigurations.getStringProperty(0, "monitor.system.component.code.flume"); + + //监控用influxDb配置 + public static final String INFLUX_TABLE = RealtimeCountConfigurations.getStringProperty(0, "influx.table"); + public static final String INFLUX_ACTION_MSG = RealtimeCountConfigurations.getStringProperty(0, "influx.action.msg"); + public static final String INFLUX_ACTION_FILE = RealtimeCountConfigurations.getStringProperty(0, "influx.action.file"); + public static final String INFLUX_ACTION_FILE_BYTES = RealtimeCountConfigurations.getStringProperty(0, "influx.action.file.bytes"); + public static final String INFLUX_ADDRESS = RealtimeCountConfigurations.getStringProperty(0, "influx.address"); + public static final String 
INFLUX_USERNAME = RealtimeCountConfigurations.getStringProperty(0, "influx.username"); + public static final String INFLUX_PASSWORD = RealtimeCountConfigurations.getStringProperty(0, "influx.password"); + public static final String INFLUX_DATABASE = RealtimeCountConfigurations.getStringProperty(0, "influx.database"); + + /** + * 注册获取的数据对象SchemaID + */ + public static final Integer SCHEMA_ID_NTC_COLLECT_FILE_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-collect-file-log"); + public static final Integer SCHEMA_ID_NTC_CONN_RECORD_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-conn-record-log"); + public static final Integer SCHEMA_ID_NTC_COLLECT_HTTP_DOC_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-collect-http-doc-log"); + public static final Integer SCHEMA_ID_NTC_COLLECT_HTTP_AV_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-collect-http-av-log"); + public static final Integer SCHEMA_ID_NTC_COLLECT_MAIL_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-collect-mail-log"); + public static final Integer SCHEMA_ID_NTC_COLLECT_FTP_DOC_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-collect-ftp-doc-log"); + public static final Integer SCHEMA_ID_NTC_COLLECT_TELNET_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.ntc-collect-telnet-log"); + //一部状态上报数据对象 + public static final Integer SCHEMA_ID_MONITOR_MSG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.monitor-msg"); + //广东状态上报数据对象 + public static final Integer SCHEMA_ID_INFLUX_SAPP_BPS_STAT_LOG = RealtimeCountConfigurations.getIntProperty(0, "schema.id.influx-sapp-bps-stat-log"); + + + /** + * flume_config.properties + */ + //HTTP配置信息 + public static final Integer HTTP_ASYNC_PARALLELISM = RealtimeCountConfigurations.getIntProperty(1, "http.async.parallelism"); + public static final Integer HTTP_ASYNC_SOCKETTIMEOUT = RealtimeCountConfigurations.getIntProperty(1, 
"http.async.socketTimeout"); + public static final Integer HTTP_ASYNC_CONNECTTIMEOUT = RealtimeCountConfigurations.getIntProperty(1, "http.async.connectTimeout"); + public static final Integer HTTP_ASYNC_POOLSIZE = RealtimeCountConfigurations.getIntProperty(1, "http.async.poolSize"); + public static final Integer HTTP_ASYNC_MAXPERROUTE = RealtimeCountConfigurations.getIntProperty(1, "http.async.maxPerRoute"); + public static final Integer HTTP_ASYNC_CONNECTIONREQUESTTIMEOUT = RealtimeCountConfigurations.getIntProperty(1, "http.async.connectionRequestTimeout"); + + //序列化Schema信息 + /** + * 携手前端定义 + */ + //纯消息 + public static final String SCHEMA_NTC_CONN_RECORD_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-conn-record-log"); + public static final String SCHEMA_NTC_COLLECT_SSL_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-ssl-log"); + public static final String SCHEMA_NTC_COLLECT_DNS_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-dns-log"); + //文件消息 + public static final String SCHEMA_NTC_COLLECT_MAIL_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-mail-log"); + public static final String SCHEMA_NTC_COLLECT_HTTP_DOC_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-http-doc-log"); + public static final String SCHEMA_NTC_COLLECT_HTTP_AV_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-http-av-log"); + public static final String SCHEMA_NTC_COLLECT_FTP_DOC_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-ftp-doc-log"); + public static final String SCHEMA_NTC_COLLECT_TELNET_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-telnet-log"); + //特殊 + public static final String SCHEMA_NTC_COLLECT_FILE_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.ntc-collect-file-log"); + public static final String SCHEMA_LOG_TAG = 
RealtimeCountConfigurations.getStringProperty(1, "schema.log-tag"); + /** + * 完全自己定义(和前端无关)-修改不频繁 + */ + public static final String SCHEMA_INFLUX_SAPP_BPS_STAT_LOG = RealtimeCountConfigurations.getStringProperty(1, "schema.influx-sapp-bps-stat-log"); + /** + * 总线定义或数据字典定义(不需要频繁修改) + */ + public static final String SCHEMA_MONITOR_MSG = RealtimeCountConfigurations.getStringProperty(1, "schema.monitor-msg"); + public static final String SCHEMA_STATUS_TAG = RealtimeCountConfigurations.getStringProperty(1, "schema.status-tag"); + +} \ No newline at end of file diff --git a/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/RealtimeCountConfigurations.java b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/RealtimeCountConfigurations.java new file mode 100644 index 0000000..1398b25 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/java/cn/ac/iie/cusflume/sink/daoUtils/RealtimeCountConfigurations.java @@ -0,0 +1,80 @@ +package cn.ac.iie.cusflume.sink.daoUtils; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.Properties; + +//import com.nis.util.StringUtil; + + +public final class RealtimeCountConfigurations { + + private static Properties propCommon = new Properties();//0 + private static Properties propService = new Properties();//1 + + public static String getStringProperty(Integer type, String key) { + if(type == 0){ + return propCommon.getProperty(key); + } else if(type == 1){ + return propService.getProperty(key); + } else { + return null; + } + } + + + public static Integer getIntProperty(Integer type, String key) { + if(type == 0){ + return Integer.parseInt(propCommon.getProperty(key)); + } else if(type == 1){ + return Integer.parseInt(propService.getProperty(key)); + } else { + return null; + } + } + + public static Long getLongProperty(Integer type, String key) { + if(type == 0){ + return Long.parseLong(propCommon.getProperty(key)); + } else 
if(type == 1){ + return Long.parseLong(propService.getProperty(key)); + } else { + return null; + } + } + + public static Boolean getBooleanProperty(Integer type, String key) { + if(type == 0){ + return propCommon.getProperty(key).toLowerCase().trim().equals("true"); + } else if(type == 1){ + return propService.getProperty(key).toLowerCase().trim().equals("true"); + } else { + return null; + } + } + + static { + try { + InputStream resourceAsStream_realtime_service = RealtimeCountConfigurations.class.getClassLoader().getResourceAsStream("realtime_service_config.properties");//0,存放kafka相关配置 + if (resourceAsStream_realtime_service != null) { + propCommon.load(new InputStreamReader(resourceAsStream_realtime_service, StandardCharsets.UTF_8)); + } + + InputStream resourceAsStream_config = RealtimeCountConfigurations.class.getClassLoader().getResourceAsStream("flume_config.properties");//1,存放flume自身相关配置(包括多线程) + if (resourceAsStream_config != null) { + propService.load(new InputStreamReader(resourceAsStream_config, StandardCharsets.UTF_8)); + } + + /*prop.load(new FileInputStream(System.getProperty("user.dir") + + File.separator + "config"+File.separator + "config.properties"));*/ + System.out.println("realtime_service_config.properties加载成功"); + System.out.println("flume_config.properties加载成功"); + + } catch (Exception e) { + propCommon = null; + propService = null; + System.err.println("RealtimeCountConfigurations配置文件加载失败"); + } + } +} diff --git a/yb_http_avro_sink_file/src/main/resources/flume_config.properties b/yb_http_avro_sink_file/src/main/resources/flume_config.properties new file mode 100644 index 0000000..f5eacf4 --- /dev/null +++ b/yb_http_avro_sink_file/src/main/resources/flume_config.properties @@ -0,0 +1,52 @@ +#异步Http客户端-并行度线程池 +http.async.parallelism=10 + +#异步Http客户端-等待数据超时时间,根据业务调整 +http.async.socketTimeout=60000 + +#异步Http客户端-连接超时时间 +http.async.connectTimeout=60000 + +#异步Http客户端-连接池最大连接数 +http.async.poolSize=5000 + +#异步Http客户端-每个主机的并发最多只有1500 
+http.async.maxPerRoute=2500 + +#异步Http客户端-从连接池中后去连接的timeout时间 +http.async.connectionRequestTimeout=90000 + + +#Schema配置信息 +#纯消息-和前端定义相关联 +schema.ntc-conn-record-log={"type":"record","name":"connrecord","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"app_label","type":"string","doc":"label:应用识别标签","default":""},{"name":"c2s_pkt_num","type":"string","doc":"label:c2s包数","default":""},{"name":"s2c_pkt_num","type":"string","doc":"label:s2c包数","default":""},{"name":"c2s_byte_num","type":"string","doc":"label:c2s字节数","default":""},{"name":"s2c_byte_num","type":"string","doc":"label:s2c字节数","default":""}]} 
+schema.ntc-collect-ssl-log={"type":"record","name":"collssl","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"c2s_pkt_num","type":"string","doc":"label:c2s包数","default":""},{"name":"s2c_pkt_num","type":"string","doc":"label:s2c包数","default":""},{"name":"c2s_byte_num","type":"string","doc":"label:c2s字节数","default":""},{"name":"s2c_byte_num","type":"string","doc":"label:s2c字节数","default":""},{"name":"sni","type":"string","doc":"label:SSLservernameindication","default":""},{"name":"create_time","type":"long","doc":"label:会话创建时间unixtimestamp","default":0},{"name":"last_time","type":"long","doc":"label:会话最后修改时间unixtimestamp","default":0},{"name":"server_ciphersuits","type":"string","doc":"label:服务端ciphersuits","default":""},{"name":"
client_ciphersuits","type":"string","doc":"label:客户端ciphersuits","default":""},{"name":"server_cert","type":"string","doc":"label:服务端证书信息","default":""},{"name":"client_cert","type":"string","doc":"label:客户端证书信息","default":""}]} +schema.ntc-collect-dns-log={"type":"record","name":"colldns","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"dns_sub","type":"long","doc":"label:DNS种类","default":0},{"name":"hdr","type":"string","doc":"label:hdr信息","default":""},{"name":"flags","type":"string","doc":"label:标志","default":""},{"name":"question","type":"string","doc":"label:问题计数","default":""},{"name":"rr","type":"string","doc":"label:资源记录","default":""}]} +#文件消息-和前端定义相关联 
+schema.ntc-collect-mail-log={"type":"record","name":"collmail","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"mail_proto","type":"string","doc":"label:邮件协议","default":""},{"name":"mail_from","type":"string","doc":"label:邮件发件人","default":""},{"name":"mail_to","type":"string","doc":"label:邮件收件人","default":""},{"name":"mail_cc","type":"string","doc":"label:邮件抄送人","default":""},{"name":"mail_date","type":"string","doc":"label:邮件日期","default":""},{"name":"subject","type":"string","doc":"label:邮件主题","default":""},{"name":"eml_file_path","type":"string","doc":"label:邮件EML文件转储地址","default":""},{"name":"attachments_path","type":"string","doc":"label:附件转储地址列表","default":""}]} 
+schema.ntc-collect-http-doc-log={"type":"record","name":"collhttpdoc","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"url","type":"string","doc":"label:HTTP_url地址","default":""},{"name":"referer","type":"string","doc":"label:HTTP访问referer","default":""},{"name":"user_agent","type":"string","doc":"label:HTTP访问user_agent","default":""},{"name":"http_seq","type":"int","doc":"label:HTTP请求会话序列号","default":0},{"name":"req_line","type":"string","doc":"label:请求行","default":""},{"name":"res_line","type":"string","doc":"label:应答行","default":""},{"name":"cookie","type":"string","doc":"label:cookie值","default":""},{"name":"content_type","type":"string","doc":"label:content_type值","default":""},{"name":"content_len","type":"long","doc":"lab
el:content_len值","default":0},{"name":"doc_type","type":"string","doc":"label:文件类型","default":""},{"name":"req_body_file_path","type":"string","doc":"label:请求体转储文件地址","default":""},{"name":"res_body_file_path","type":"string","doc":"label:应答体转储文件地址","default":""}]} +schema.ntc-collect-http-av-log={"type":"record","name":"collhttpav","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"url","type":"string","doc":"label:HTTP_url地址","default":""},{"name":"referer","type":"string","doc":"label:HTTP访问referer","default":""},{"name":"user_agent","type":"string","doc":"label:HTTP访问user_agent","default":""},{"name":"http_seq","type":"int","doc":"label:HTTP请求会话序列号","default":0},{"name":"req_line","type":"string","doc":"label:请求行","default":""}
,{"name":"res_line","type":"string","doc":"label:应答行","default":""},{"name":"cookie","type":"string","doc":"label:cookie值","default":""},{"name":"content_type","type":"string","doc":"label:content_type值","default":""},{"name":"content_len","type":"long","doc":"label:content_len值","default":0},{"name":"doc_type","type":"string","doc":"label:文件类型","default":""},{"name":"req_body_file_path","type":"string","doc":"label:请求体转储文件地址","default":""},{"name":"res_body_file_path","type":"string","doc":"label:应答体转储文件地址","default":""}]} +schema.ntc-collect-ftp-doc-log={"type":"record","name":"collftpdoc","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"ftp_url","type":"string","doc":"label:ftp_url地址","default":""},{"name":"content_path","type
":"string","doc":"label:FTP文件转储文件地址","default":""}]} +schema.ntc-collect-telnet-log={"type":"record","name":"colltelnet","fields":[{"name":"cfg_id","type":"int","doc":"label:配置ID","default":0},{"name":"found_time","type":"int","doc":"label:发现时间","default":0},{"name":"recv_time","type":"int","doc":"label:接收时间","default":0},{"name":"trans_proto","type":"string","doc":"label:传输层协议","default":""},{"name":"addr_type","type":"int","doc":"label:地址类型","default":0},{"name":"d_ip","type":"string","doc":"label:目的IP","default":""},{"name":"s_ip","type":"string","doc":"label:源IP","default":""},{"name":"d_port","type":"int","doc":"label:目的端口","default":0},{"name":"s_port","type":"int","doc":"label:源端口","default":0},{"name":"device_id","type":"string","doc":"label:设备ID","default":""},{"name":"stream_dir","type":"int","doc":"label:流类型","default":0},{"name":"cap_ip","type":"string","doc":"label:捕包机IP","default":""},{"name":"addr_list","type":"string","doc":"label:嵌套地址列表","default":""},{"name":"server_locate","type":"string","doc":"label:服务端地址定位信息","default":""},{"name":"client_locate","type":"string","doc":"label:客户端地址定位信息","default":""},{"name":"s_asn","type":"string","doc":"label:客户端ASN","default":""},{"name":"d_asn","type":"string","doc":"label:服务端ASN","default":""},{"name":"user_region","type":"string","doc":"label:用户自定义","default":""},{"name":"scene_file_id","type":"string","doc":"label:现场日志转储地址","default":""},{"name":"username","type":"string","doc":"label:用户名","default":""},{"name":"password","type":"string","doc":"label:密码","default":""},{"name":"content_path","type":"string","doc":"label:telnet内容转储文件地址","default":""}]} +#特殊-和前端定义相关联 
+schema.ntc-collect-file-log={"type":"record","name":"file_tag","fields":[{"type":"string","name":"tag_version","default":""},{"type":"int","name":"data_type","default":0},{"type":"int","name":"data_subtype","default":0},{"type":"int","name":"producer_id","default":0},{"type":"int","name":"data_source","default":0},{"type":{"type":"array","items":"int"},"name":"task_id","default":[]},{"type":"long","name":"file_id","default":0},{"type":"string","name":"file_path","default":""},{"type":"string","name":"file_name","default":""},{"type":"float","name":"file_size","default":0.0},{"type":"string","name":"md5","default":""},{"type":"string","name":"encrypted","default":""},{"type":"string","name":"timestamp","default":""},{"type":"long","name":"parent_id","default":0},{"type":"long","name":"flow_id","default":0}]} +schema.log-tag={"type":"record","name":"log_tag","fields":[{"type":"string","name":"tag_version","default":""},{"type":"int","name":"data_type","default":0},{"type":"int","name":"data_subtype","default":0},{"type":"int","name":"producer_id","default":0},{"type":"int","name":"data_source","default":0},{"type":{"type":"array","items":"int"},"name":"task_id","default":[]},{"type":"long","name":"data_id","default":0},{"type":"long","name":"flow_id","default":0},{"type":{"type":"array","items":"long"},"name":"file_id_list","default":[]},{"type":{"type":"array","items":"string"},"name":"file_path_list","default":[]}]} + +#完全自己定义(和前端无关) 
+schema.influx-sapp-bps-stat-log={"type":"record","name":"influxsappbpsstat","doc":"label:sapp流量统计状态","fields":[{"name":"internet_ethernet_bps","type":"string","doc":"label:互联网口的实时流量bps","default":"0"},{"name":"govern_ethernet_bps","type":"string","doc":"label:局域网内的实时流量bps","default":"0"},{"name":"internet_udp_bps","type":"string","doc":"label:互联网口的udp实时流量bps","default":"0"},{"name":"govern_udp_bps","type":"string","doc":"label:局域网内的udp实时流量bps","default":"0"},{"name":"internet_tcp_bps","type":"string","doc":"label:互联网口的tcp实时流量bps","default":"0"},{"name":"govern_tcp_bps","type":"string","doc":"label:局域网内的tcp实时流量bps","default":"0"},{"name":"internet_ftp_bps","type":"string","doc":"label:互联网口的ftp实时流量bps","default":"0"},{"name":"govern_ftp_bps","type":"string","doc":"label:局域网内的ftp实时流量bps","default":"0"},{"name":"internet_http_bps","type":"string","doc":"label:互联网口的http实时流量bps","default":"0"},{"name":"govern_http_bps","type":"string","doc":"label:局域网内的http实时流量bps","default":"0"},{"name":"internet_mail_bps","type":"string","doc":"label:互联网口的Emailstmp实时流量bps","default":"0"},{"name":"govern_mail_bps","type":"string","doc":"label:局域网内的Emailstmp实时流量bps","default":"0"}]} + +#总线定义或数据字典定义 
+schema.monitor-msg={"type":"record","name":"SystemStatus","doc":"label:状态记录;title:{.system.id}{.system.status}","fields":[{"name":"time","type":"string","doc":"label:时间;view:datetime"},{"name":"system","doc":"label:系统状态","type":{"type":"record","name":"RealTimeStatus","doc":"label:系统实时状态","fields":[{"name":"id","type":"string","doc":"label:id"},{"name":"status","type":"string","doc":"label:状态"},{"name":"alarms","doc":"label:当前告警列表;view:table","type":{"type":"array","items":{"type":"record","name":"RtAlarm","fields":[{"name":"level","type":"string","doc":"label:级别"},{"name":"time","type":"string","doc":"label:时间;view:datetime"},{"name":"type","type":"string","doc":"label:类型"},{"name":"detail","type":"string","doc":"label:详情"}]}}},{"name":"stats","doc":"label:统计数据列表","type":{"type":"array","items":{"type":"record","name":"RtStatValue","fields":[{"name":"name","type":"string","doc":"label:名称;view:template;template:{.}"},{"name":"content","doc":"label:数据详情;view:inline","type":{"type":"array","items":{"type":"record","name":"RtNameValue","doc":"label:状态名称;view:template;template:{.name}:{.value}","fields":[{"name":"name","type":"string"},{"name":"value","type":"string"}]}}}]}}}]}},{"name":"components","doc":"label:组件状态","type":{"type":"array","items":"RealTimeStatus"}}]} +schema.status-tag={"type":"record","name":"status_tag","fields":[{"type":"string","name":"tag_version"},{"type":"int","name":"data_type"},{"type":"int","name":"data_subtype"},{"type":"int","name":"producer_id"},{"type":"string","name":"timestamp"}]} + + + + + + + + + + + + diff --git a/yb_http_avro_sink_file/src/main/resources/realtime_service_config.properties b/yb_http_avro_sink_file/src/main/resources/realtime_service_config.properties new file mode 100644 index 0000000..b4d124a --- /dev/null +++ b/yb_http_avro_sink_file/src/main/resources/realtime_service_config.properties @@ -0,0 +1,95 @@ +#A版:4, B版:2, C版:1 +active.system=2 + +#插入类型 只kafka:1 只hdfs:2 kafka和hdfs:3 +insert.type=2 + +#批量插入数据库条数 
+batch.insert.num=1000 + +#kafka消费group id +#group.id=df-ip-port-log-1812291718 + +#group.id共用前置字符串 +group.id.prefix=1911111503 + +#group.id共用后置字符串 +group.id.suffix=1911141842 + +#管理kafka地址 +#数据源kafka地址 +#bootstrap.servers=192.168.40.119:9092,192.168.40.122:9092,192.168.40.123:9092 +bootstrap.servers=192.168.10.35:9092 + +#数据输出kafka地址 +#bootstrap.output.servers=192.168.40.119:9092,192.168.40.122:9092,192.168.40.123:9092 +bootstrap.output.servers=192.168.10.35:9092 + +#kafka broker下的topic名称 +fail.ntc.collect.mail.log=FAIL-NTC-COLLECT-MAIL-LOG +fail.ntc.collect.http.doc.log=FAIL-NTC-COLLECT-HTTP-DOC-LOG +fail.ntc.collect.ftp.doc.log=FAIL-NTC-COLLECT-FTP-DOC-LOG + + +#从kafka哪里开始读:earliest/latest +auto.offset.reset=latest +#kafka一次fetch请求,从一个broker中取得的records最大大小(默认52428800) +fetch.max.bytes=524288000 +#kafka每次单分区最大拉取字节数(默认1048576) +max.partition.fetch.bytes=104857600 +#kafka最大拉取间隔,超过这个时间不拉取会退出消费者组(默认300000) +max.poll.interval.ms=300000 +#kafka最大拉取量(默认500) +max.poll.records=50000 +#kafka的Consumer session过期时间,这个值必须设置在broker configuration中的group.min.session.timeout.ms 与 group.max.session.timeout.ms之间(默认10000,代码里未变更,依旧写的10000L) +session.timeout.ms=60000 + + +influx.table=filePostStatus +influx.action.msg=fileMsgCount +influx.action.file=fileCount +influx.action.file.bytes=fileBytesCount +influx.address=http://192.168.10.35:8086 +influx.username=admin +influx.password=telegraf +influx.database=telegraf + + +#用于状态上传的X_Tag,对应getXTAG()方法 +#yb-一部 +monitor.nofile.msg.xtag={"task_id":["1"],"data_type":"4","data_source":"8","producer_id":"24832","tag_version":"1.0","data_subtype":"16387"} +#gd-广东 +#monitor.nofile.msg.xtag={"tag_version":"1.0","data_subtype":"16387","data_type":"4","producer_id":"24832","task_id":["1025"]} + +#放到YbHttpAvroSinkFile文件中的monitorMsgXTag配置,目前来看作用是验证获取状态的Cookie +monitor.msg.sink.file.xtag=tag_acc_567acb520844000 + +#监控器类型:一部(yb):状态上报+日志打印;广东(gd):日志打印,然后靠外部脚本收集到influxdb上传 +#monitor.type=yb +monitor.type=gd + +#用于状态的系统组件编码 +#系统编码 
+monitor.system.component.code=24832 +#组件编码-flume +monitor.system.component.code.flume=24833 + + +#注册获取的数据对象SchemaID +#一部-YB +#schema.id.ntc-collect-file-log=0 +#schema.id.ntc-conn-record-log=43 +#schema.id.ntc-collect-http-doc-log=45 +#schema.id.ntc-collect-mail-log=44 +#schema.id.monitor-msg=22 +#广东-GD +schema.id.ntc-collect-file-log=0 +schema.id.ntc-conn-record-log=43 +schema.id.ntc-collect-http-doc-log=45 +schema.id.ntc-collect-http-av-log=45 +schema.id.ntc-collect-mail-log=44 +schema.id.ntc-collect-ftp-doc-log=6 +schema.id.ntc-collect-telnet-log=6 +schema.id.monitor-msg=22 +schema.id.influx-sapp-bps-stat-log=22 + diff --git a/yb_http_avro_sink_file/yb_http_avro_sink_file.iml b/yb_http_avro_sink_file/yb_http_avro_sink_file.iml new file mode 100644 index 0000000..bd4c50f --- /dev/null +++ b/yb_http_avro_sink_file/yb_http_avro_sink_file.iml @@ -0,0 +1,84 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file