ZX Flume yb_flume_cus_sink_file Initial commit 202010091620

8  .idea/.gitignore  generated  vendored  Normal file
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/../../../../../../../../../:\dev_code\testForFlume\flumeWork\YiBuDaGongCheng\multi-thread\for-lxk-git\yb_flume_cus_sink_file\.idea/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/

7  .idea/codeStyles/Project.xml  generated  Normal file
@@ -0,0 +1,7 @@
<component name="ProjectCodeStyleConfiguration">
  <code_scheme name="Project" version="173">
    <ScalaCodeStyleSettings>
      <option name="MULTILINE_STRING_CLOSING_QUOTES_ON_NEW_LINE" value="true" />
    </ScalaCodeStyleSettings>
  </code_scheme>
</component>

5  .idea/codeStyles/codeStyleConfig.xml  generated  Normal file
@@ -0,0 +1,5 @@
<component name="ProjectCodeStyleConfiguration">
  <state>
    <option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
  </state>
</component>

17  .idea/compiler.xml  generated  Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="CompilerConfiguration">
    <annotationProcessing>
      <profile name="Maven default annotation processors profile" enabled="true">
        <sourceOutputDir name="target/generated-sources/annotations" />
        <sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
        <outputRelativeToContentRoot value="true" />
        <module name="yb_http_avro_sink_file" />
      </profile>
    </annotationProcessing>
    <bytecodeTargetLevel>
      <module name="yb_flume_cus_sink_file" target="1.5" />
      <module name="yb_http_avro_sink_file" target="1.8" />
    </bytecodeTargetLevel>
  </component>
</project>

7  .idea/encodings.xml  generated  Normal file
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Encoding">
    <file url="file://$PROJECT_DIR$/yb_http_avro_sink_file/src/main/java" charset="UTF-8" />
    <file url="file://$PROJECT_DIR$/yb_http_avro_sink_file/src/main/resources" charset="UTF-8" />
  </component>
</project>

30  .idea/jarRepositories.xml  generated  Normal file
@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="RemoteRepositoriesConfiguration">
    <remote-repository>
      <option name="id" value="nexus" />
      <option name="name" value="Team Nexus Repository" />
      <option name="url" value="http://192.168.40.125:8099/content/groups/public" />
    </remote-repository>
    <remote-repository>
      <option name="id" value="central" />
      <option name="name" value="Central Repository" />
      <option name="url" value="http://repo1.maven.org/maven2/" />
    </remote-repository>
    <remote-repository>
      <option name="id" value="ebi" />
      <option name="name" value="www.ebi.ac.uk" />
      <option name="url" value="http://www.ebi.ac.uk/intact/maven/nexus/content/groups/public/" />
    </remote-repository>
    <remote-repository>
      <option name="id" value="central" />
      <option name="name" value="Maven Central repository" />
      <option name="url" value="https://repo1.maven.org/maven2" />
    </remote-repository>
    <remote-repository>
      <option name="id" value="jboss.community" />
      <option name="name" value="JBoss Community repository" />
      <option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
    </remote-repository>
  </component>
</project>

13  .idea/libraries/Maven__com_101tec_zkclient_0_10.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.101tec:zkclient:0.10">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/101tec/zkclient/0.10/zkclient-0.10.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/101tec/zkclient/0.10/zkclient-0.10-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/101tec/zkclient/0.10/zkclient-0.10-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_alibaba_fastjson_1_2_47.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.alibaba:fastjson:1.2.47">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/1.2.47/fastjson-1.2.47.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/1.2.47/fastjson-1.2.47-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/alibaba/fastjson/1.2.47/fastjson-1.2.47-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_fasterxml_jackson_core_jackson_annotations_2_9_5.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.9.5">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-annotations/2.9.5/jackson-annotations-2.9.5-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_fasterxml_jackson_core_jackson_core_2_9_5.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.fasterxml.jackson.core:jackson-core:2.9.5">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-core/2.9.5/jackson-core-2.9.5-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_fasterxml_jackson_core_jackson_databind_2_9_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.fasterxml.jackson.core:jackson-databind:2.9.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.9.1/jackson-databind-2.9.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.9.1/jackson-databind-2.9.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/fasterxml/jackson/core/jackson-databind/2.9.1/jackson-databind-2.9.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_google_code_gson_gson_2_2_2.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.google.code.gson:gson:2.2.2">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.2.2/gson-2.2.2.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.2.2/gson-2.2.2-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/google/code/gson/gson/2.2.2/gson-2.2.2-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_google_guava_guava_18_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.google.guava:guava:18.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/google/guava/guava/18.0/guava-18.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/google/guava/guava/18.0/guava-18.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/google/guava/guava/18.0/guava-18.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_maxmind_db_maxmind_db_1_2_2.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.maxmind.db:maxmind-db:1.2.2">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/db/maxmind-db/1.2.2/maxmind-db-1.2.2.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/db/maxmind-db/1.2.2/maxmind-db-1.2.2-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/db/maxmind-db/1.2.2/maxmind-db-1.2.2-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_maxmind_geoip2_geoip2_2_12_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.maxmind.geoip2:geoip2:2.12.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/geoip2/geoip2/2.12.0/geoip2-2.12.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/geoip2/geoip2/2.12.0/geoip2-2.12.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/geoip2/geoip2/2.12.0/geoip2-2.12.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_maxmind_geoip_geoip_api_1_3_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.maxmind.geoip:geoip-api:1.3.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/geoip/geoip-api/1.3.1/geoip-api-1.3.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/geoip/geoip-api/1.3.1/geoip-api-1.3.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/maxmind/geoip/geoip-api/1.3.1/geoip-api-1.3.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_squareup_okhttp3_okhttp_3_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.squareup.okhttp3:okhttp:3.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okhttp3/okhttp/3.9.0/okhttp-3.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okhttp3/okhttp/3.9.0/okhttp-3.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okhttp3/okhttp/3.9.0/okhttp-3.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_squareup_okhttp_okhttp_2_4_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.squareup.okhttp:okhttp:2.4.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okhttp/okhttp/2.4.0/okhttp-2.4.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okhttp/okhttp/2.4.0/okhttp-2.4.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okhttp/okhttp/2.4.0/okhttp-2.4.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_squareup_okio_okio_1_13_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.squareup.okio:okio:1.13.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okio/okio/1.13.0/okio-1.13.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okio/okio/1.13.0/okio-1.13.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/okio/okio/1.13.0/okio-1.13.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_squareup_retrofit_retrofit_1_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.squareup.retrofit:retrofit:1.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/retrofit/retrofit/1.9.0/retrofit-1.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/retrofit/retrofit/1.9.0/retrofit-1.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/squareup/retrofit/retrofit/1.9.0/retrofit-1.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_thoughtworks_paranamer_paranamer_2_3.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.thoughtworks.paranamer:paranamer:2.3">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_yammer_metrics_metrics_core_2_2_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.yammer.metrics:metrics-core:2.2.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/yammer/metrics/metrics-core/2.2.0/metrics-core-2.2.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__com_zdjizhi_galaxy_1_0_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: com.zdjizhi:galaxy:1.0.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/com/zdjizhi/galaxy/1.0.1/galaxy-1.0.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/com/zdjizhi/galaxy/1.0.1/galaxy-1.0.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/com/zdjizhi/galaxy/1.0.1/galaxy-1.0.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__commons_cli_commons_cli_1_2.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: commons-cli:commons-cli:1.2">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-cli/commons-cli/1.2/commons-cli-1.2.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/commons-cli/commons-cli/1.2/commons-cli-1.2-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-cli/commons-cli/1.2/commons-cli-1.2-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__commons_codec_commons_codec_1_8.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: commons-codec:commons-codec:1.8">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.8/commons-codec-1.8.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.8/commons-codec-1.8-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-codec/commons-codec/1.8/commons-codec-1.8-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__commons_collections_commons_collections_3_2_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: commons-collections:commons-collections:3.2.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__commons_io_commons_io_2_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: commons-io:commons-io:2.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-io/commons-io/2.4/commons-io-2.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/commons-io/commons-io/2.4/commons-io-2.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-io/commons-io/2.4/commons-io-2.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__commons_lang_commons_lang_2_6.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: commons-lang:commons-lang:2.6">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-lang/commons-lang/2.6/commons-lang-2.6.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/commons-lang/commons-lang/2.6/commons-lang-2.6-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-lang/commons-lang/2.6/commons-lang-2.6-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__commons_logging_commons_logging_1_2.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: commons-logging:commons-logging:1.2">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging/1.2/commons-logging-1.2.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging/1.2/commons-logging-1.2-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/commons-logging/commons-logging/1.2/commons-logging-1.2-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__io_netty_netty_3_10_6_Final.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: io.netty:netty:3.10.6.Final">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/io/netty/netty/3.10.6.Final/netty-3.10.6.Final.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/io/netty/netty/3.10.6.Final/netty-3.10.6.Final-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/io/netty/netty/3.10.6.Final/netty-3.10.6.Final-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__javax_servlet_javax_servlet_api_3_1_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: javax.servlet:javax.servlet-api:3.1.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__joda_time_joda_time_2_9_9.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: joda-time:joda-time:2.9.9">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/joda-time/joda-time/2.9.9/joda-time-2.9.9.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/joda-time/joda-time/2.9.9/joda-time-2.9.9-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/joda-time/joda-time/2.9.9/joda-time-2.9.9-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__junit_junit_3_8_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: junit:junit:3.8.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/junit/junit/3.8.1/junit-3.8.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/junit/junit/3.8.1/junit-3.8.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/junit/junit/3.8.1/junit-3.8.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__log4j_log4j_1_2_14.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: log4j:log4j:1.2.14">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/log4j/log4j/1.2.14/log4j-1.2.14.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/log4j/log4j/1.2.14/log4j-1.2.14-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/log4j/log4j/1.2.14/log4j-1.2.14-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__net_sf_jopt_simple_jopt_simple_5_0_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: net.sf.jopt-simple:jopt-simple:5.0.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/net/sf/jopt-simple/jopt-simple/5.0.4/jopt-simple-5.0.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_avro_avro_1_7_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.avro:avro:1.7.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro/1.7.4/avro-1.7.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro/1.7.4/avro-1.7.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro/1.7.4/avro-1.7.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_avro_avro_ipc_1_7_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.avro:avro-ipc:1.7.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-ipc/1.7.4/avro-ipc-1.7.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-ipc/1.7.4/avro-ipc-1.7.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-ipc/1.7.4/avro-ipc-1.7.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_avro_avro_tools_1_7_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.avro:avro-tools:1.7.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-tools/1.7.4/avro-tools-1.7.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-tools/1.7.4/avro-tools-1.7.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/avro/avro-tools/1.7.4/avro-tools-1.7.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_commons_commons_compress_1_4_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.commons:commons-compress:1.4.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_commons_commons_lang3_3_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.commons:commons-lang3:3.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-lang3/3.4/commons-lang3-3.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-lang3/3.4/commons-lang3-3.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/commons/commons-lang3/3.4/commons-lang3-3.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_flume_flume_ng_auth_1_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.flume:flume-ng-auth:1.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-auth/1.9.0/flume-ng-auth-1.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-auth/1.9.0/flume-ng-auth-1.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-auth/1.9.0/flume-ng-auth-1.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_flume_flume_ng_configfilters_flume_ng_config_filter_api_1_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.flume.flume-ng-configfilters:flume-ng-config-filter-api:1.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-configfilters/flume-ng-config-filter-api/1.9.0/flume-ng-config-filter-api-1.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-configfilters/flume-ng-config-filter-api/1.9.0/flume-ng-config-filter-api-1.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-configfilters/flume-ng-config-filter-api/1.9.0/flume-ng-config-filter-api-1.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_flume_flume_ng_configuration_1_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.flume:flume-ng-configuration:1.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-configuration/1.9.0/flume-ng-configuration-1.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-configuration/1.9.0/flume-ng-configuration-1.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-configuration/1.9.0/flume-ng-configuration-1.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_flume_flume_ng_core_1_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.flume:flume-ng-core:1.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-core/1.9.0/flume-ng-core-1.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-core/1.9.0/flume-ng-core-1.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-core/1.9.0/flume-ng-core-1.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_flume_flume_ng_sdk_1_9_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.flume:flume-ng-sdk:1.9.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-sdk/1.9.0/flume-ng-sdk-1.9.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-sdk/1.9.0/flume-ng-sdk-1.9.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/flume/flume-ng-sdk/1.9.0/flume-ng-sdk-1.9.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_httpcomponents_httpasyncclient_4_1_3.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.httpcomponents:httpasyncclient:4.1.3">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_httpcomponents_httpclient_4_5_2.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.httpcomponents:httpclient:4.5.2">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_httpcomponents_httpcore_4_4_6.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.httpcomponents:httpcore:4.4.6">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpcore/4.4.6/httpcore-4.4.6.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpcore/4.4.6/httpcore-4.4.6-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpcore/4.4.6/httpcore-4.4.6-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_httpcomponents_httpcore_nio_4_4_6.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.httpcomponents:httpcore-nio:4.4.6">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_httpcomponents_httpmime_4_3_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.httpcomponents:httpmime:4.3.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpmime/4.3.1/httpmime-4.3.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpmime/4.3.1/httpmime-4.3.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/httpcomponents/httpmime/4.3.1/httpmime-4.3.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_kafka_kafka_2_11_1_0_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.kafka:kafka_2.11:1.0.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka_2.11/1.0.0/kafka_2.11-1.0.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka_2.11/1.0.0/kafka_2.11-1.0.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka_2.11/1.0.0/kafka_2.11-1.0.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_kafka_kafka_clients_1_0_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.kafka:kafka-clients:1.0.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka-clients/1.0.0/kafka-clients-1.0.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka-clients/1.0.0/kafka-clients-1.0.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/kafka/kafka-clients/1.0.0/kafka-clients-1.0.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_mina_mina_core_2_0_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.mina:mina-core:2.0.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/mina/mina-core/2.0.4/mina-core-2.0.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/mina/mina-core/2.0.4/mina-core-2.0.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/mina/mina-core/2.0.4/mina-core-2.0.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_thrift_libthrift_0_9_3.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.thrift:libthrift:0.9.3">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/thrift/libthrift/0.9.3/libthrift-0.9.3-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_velocity_velocity_1_7.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.velocity:velocity:1.7">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/velocity/velocity/1.7/velocity-1.7.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/velocity/velocity/1.7/velocity-1.7-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/velocity/velocity/1.7/velocity-1.7-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_apache_zookeeper_zookeeper_3_4_10.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.apache.zookeeper:zookeeper:3.4.10">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/zookeeper/zookeeper/3.4.10/zookeeper-3.4.10.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/zookeeper/zookeeper/3.4.10/zookeeper-3.4.10-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/apache/zookeeper/zookeeper/3.4.10/zookeeper-3.4.10-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_codehaus_jackson_jackson_core_asl_1_8_8.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.codehaus.jackson:jackson-core-asl:1.8.8">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-core-asl/1.8.8/jackson-core-asl-1.8.8.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-core-asl/1.8.8/jackson-core-asl-1.8.8-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-core-asl/1.8.8/jackson-core-asl-1.8.8-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_codehaus_jackson_jackson_mapper_asl_1_8_8.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.8.8">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-mapper-asl/1.8.8/jackson-mapper-asl-1.8.8.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-mapper-asl/1.8.8/jackson-mapper-asl-1.8.8-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/codehaus/jackson/jackson-mapper-asl/1.8.8/jackson-mapper-asl-1.8.8-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_http_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-http:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-http/9.4.6.v20170531/jetty-http-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-http/9.4.6.v20170531/jetty-http-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-http/9.4.6.v20170531/jetty-http-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_io_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-io:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-io/9.4.6.v20170531/jetty-io-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-io/9.4.6.v20170531/jetty-io-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-io/9.4.6.v20170531/jetty-io-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_jmx_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-jmx:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-jmx/9.4.6.v20170531/jetty-jmx-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-jmx/9.4.6.v20170531/jetty-jmx-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-jmx/9.4.6.v20170531/jetty-jmx-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_security_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-security:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-security/9.4.6.v20170531/jetty-security-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-security/9.4.6.v20170531/jetty-security-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-security/9.4.6.v20170531/jetty-security-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_server_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-server:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-server/9.4.6.v20170531/jetty-server-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-server/9.4.6.v20170531/jetty-server-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-server/9.4.6.v20170531/jetty-server-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_servlet_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-servlet:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-servlet/9.4.6.v20170531/jetty-servlet-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-servlet/9.4.6.v20170531/jetty-servlet-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-servlet/9.4.6.v20170531/jetty-servlet-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_eclipse_jetty_jetty_util_9_4_6_v20170531.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.eclipse.jetty:jetty-util:9.4.6.v20170531">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-util/9.4.6.v20170531/jetty-util-9.4.6.v20170531.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-util/9.4.6.v20170531/jetty-util-9.4.6.v20170531-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/eclipse/jetty/jetty-util/9.4.6.v20170531/jetty-util-9.4.6.v20170531-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_influxdb_influxdb_java_2_1.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.influxdb:influxdb-java:2.1">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/influxdb/influxdb-java/2.1/influxdb-java-2.1.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/influxdb/influxdb-java/2.1/influxdb-java-2.1-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/influxdb/influxdb-java/2.1/influxdb-java-2.1-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_lz4_lz4_java_1_4.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.lz4:lz4-java:1.4">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/lz4/lz4-java/1.4/lz4-java-1.4.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/lz4/lz4-java/1.4/lz4-java-1.4-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/lz4/lz4-java/1.4/lz4-java-1.4-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_mockito_mockito_all_1_10_19.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.mockito:mockito-all:1.10.19">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/mockito/mockito-all/1.10.19/mockito-all-1.10.19-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_mortbay_jetty_jetty_6_1_26.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.mortbay.jetty:jetty:6.1.26">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_mortbay_jetty_jetty_util_6_1_26.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.mortbay.jetty:jetty-util:6.1.26">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_scala_lang_scala_library_2_11_11.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.scala-lang:scala-library:2.11.11">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-library/2.11.11/scala-library-2.11.11.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-library/2.11.11/scala-library-2.11.11-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/scala-lang/scala-library/2.11.11/scala-library-2.11.11-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_slf4j_slf4j_api_1_7_25.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.slf4j:slf4j-api:1.7.25">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25-sources.jar!/" />
    </SOURCES>
  </library>
</component>

13  .idea/libraries/Maven__org_tukaani_xz_1_0.xml  generated  Normal file
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="Maven: org.tukaani:xz:1.0">
    <CLASSES>
      <root url="jar://$MAVEN_REPOSITORY$/org/tukaani/xz/1.0/xz-1.0.jar!/" />
    </CLASSES>
    <JAVADOC>
      <root url="jar://$MAVEN_REPOSITORY$/org/tukaani/xz/1.0/xz-1.0-javadoc.jar!/" />
    </JAVADOC>
    <SOURCES>
      <root url="jar://$MAVEN_REPOSITORY$/org/tukaani/xz/1.0/xz-1.0-sources.jar!/" />
    </SOURCES>
  </library>
</component>
13
.idea/libraries/Maven__org_xerial_snappy_snappy_java_1_1_4.xml
generated
Normal file
13
.idea/libraries/Maven__org_xerial_snappy_snappy_java_1_1_4.xml
generated
Normal file
@@ -0,0 +1,13 @@
|
||||
<component name="libraryTable">
|
||||
<library name="Maven: org.xerial.snappy:snappy-java:1.1.4">
|
||||
<CLASSES>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4.jar!/" />
|
||||
</CLASSES>
|
||||
<JAVADOC>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4-javadoc.jar!/" />
|
||||
</JAVADOC>
|
||||
<SOURCES>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/xerial/snappy/snappy-java/1.1.4/snappy-java-1.1.4-sources.jar!/" />
|
||||
</SOURCES>
|
||||
</library>
|
||||
</component>
|
||||
13
.idea/libraries/Maven__tech_allegro_schema_json2avro_converter_0_2_5.xml
generated
Normal file
13
.idea/libraries/Maven__tech_allegro_schema_json2avro_converter_0_2_5.xml
generated
Normal file
@@ -0,0 +1,13 @@
|
||||
<component name="libraryTable">
|
||||
<library name="Maven: tech.allegro.schema.json2avro:converter:0.2.5">
|
||||
<CLASSES>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/tech/allegro/schema/json2avro/converter/0.2.5/converter-0.2.5.jar!/" />
|
||||
</CLASSES>
|
||||
<JAVADOC>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/tech/allegro/schema/json2avro/converter/0.2.5/converter-0.2.5-javadoc.jar!/" />
|
||||
</JAVADOC>
|
||||
<SOURCES>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/tech/allegro/schema/json2avro/converter/0.2.5/converter-0.2.5-sources.jar!/" />
|
||||
</SOURCES>
|
||||
</library>
|
||||
</component>
|
||||
11
.idea/misc.xml
generated
Normal file
@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="MavenProjectsManager">
    <option name="originalFiles">
      <list>
        <option value="$PROJECT_DIR$/pom.xml" />
      </list>
    </option>
  </component>
  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK" />
</project>
9
.idea/modules.xml
generated
Normal file
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/yb_flume_cus_sink_file.iml" filepath="$PROJECT_DIR$/yb_flume_cus_sink_file.iml" />
      <module fileurl="file://$PROJECT_DIR$/yb_http_avro_sink_file/yb_http_avro_sink_file.iml" filepath="$PROJECT_DIR$/yb_http_avro_sink_file/yb_http_avro_sink_file.iml" />
    </modules>
  </component>
</project>
16
pom.xml
Normal file
@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>cn.ac.iie</groupId>
    <artifactId>yb_flume_cus_sink_file</artifactId>
    <packaging>pom</packaging>
    <version>1.0-SNAPSHOT</version>
    <modules>
        <module>yb_http_avro_sink_file</module>
    </modules>


</project>
12
yb_flume_cus_sink_file.iml
Normal file
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4">
  <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_5">
    <output url="file://$MODULE_DIR$/target/classes" />
    <output-test url="file://$MODULE_DIR$/target/test-classes" />
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/target" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
399
yb_http_avro_sink_file/pom.xml
Normal file
@@ -0,0 +1,399 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>yb_flume_cus_sink_file</artifactId>
        <groupId>cn.ac.iie</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>yb_http_avro_sink_file</artifactId>

    <repositories>
        <repository>
            <id>nexus</id>
            <name>Team Nexus Repository</name>
            <url>http://192.168.40.125:8099/content/groups/public</url>
        </repository>

        <repository>
            <id>ebi</id>
            <name>www.ebi.ac.uk</name>
            <url>http://www.ebi.ac.uk/intact/maven/nexus/content/groups/public/</url>
        </repository>
    </repositories>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flume.version>1.9.0</flume.version>
        <kafka.version>1.0.0</kafka.version>
    </properties>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.4.1</version>
                <configuration>
                    <createDependencyReducedPom>true</createDependencyReducedPom>
                </configuration>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <transformers>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <mainClass>cn.ac.iie.cusflume.sink.YbHttpAvroSinkFile</mainClass>
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>exec-maven-plugin</artifactId>
                <version>1.2.1</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>exec</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <executable>java</executable>
                    <includeProjectDependencies>true</includeProjectDependencies>
                    <includePluginDependencies>false</includePluginDependencies>
                    <classpathScope>compile</classpathScope>
                    <mainClass>cn.ac.iie.cusflume.sink.YbHttpAvroSinkFile</mainClass>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.3.2</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
        <resources>
            <!--<resource>-->
            <!--<directory>config</directory>-->
            <!--<includes>-->
            <!--<include>realtime_service_config.properties</include>-->
            <!--<include>flume_config.properties</include>-->
            <!--<include>db.properties</include>-->
            <!--<include>db_pz.properties</include>-->
            <!--<include>clickhouse.properties</include>-->
            <!--<!–<include>**/*.properties</include>–>-->
            <!--<!–<include>**/*.xml</include>–>-->
            <!--</includes>-->
            <!--<filtering>false</filtering>-->
            <!--</resource>-->
            <resource>
                <directory>src/main/resources</directory>
                <includes>
                    <!--<include>log4j.properties</include>-->
                    <include>realtime_service_config.properties</include>
                    <include>flume_config.properties</include>
                </includes>
                <filtering>false</filtering>
            </resource>
        </resources>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.apache.flume</groupId>
            <artifactId>flume-ng-core</artifactId>
            <version>${flume.version}</version>
            <scope>provided</scope>
            <!--<exclusions>-->
            <!--<exclusion>-->
            <!--<groupId>org.apache.thrift</groupId>-->
            <!--<artifactId>libthrift</artifactId>-->
            <!--</exclusion>-->
            <!--<exclusion>-->
            <!--<groupId>com.google.guava</groupId>-->
            <!--<artifactId>guava</artifactId>-->
            <!--</exclusion>-->
            <!--</exclusions>-->
        </dependency>

        <dependency>
            <groupId>com.zdjizhi</groupId>
            <artifactId>galaxy</artifactId>
            <version>1.0.1</version>
            <exclusions>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-over-slf4j</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.47</version>
        </dependency>

        <!-- Official Flume jars -->
        <dependency>
            <groupId>org.apache.flume</groupId>
            <artifactId>flume-ng-sdk</artifactId>
            <version>${flume.version}</version>
            <!--<exclusions>-->
            <!--<exclusion>-->
            <!--<groupId>org.apache.thrift</groupId>-->
            <!--<artifactId>libthrift</artifactId>-->
            <!--</exclusion>-->
            <!--</exclusions>-->
        </dependency>

        <dependency>
            <groupId>org.apache.flume</groupId>
            <artifactId>flume-ng-configuration</artifactId>
            <version>${flume.version}</version>
            <!--<exclusions>-->
            <!--<exclusion>-->
            <!--<groupId>com.google.guava</groupId>-->
            <!--<artifactId>guava</artifactId>-->
            <!--</exclusion>-->
            <!--</exclusions>-->
        </dependency>

        <!--<dependency>-->
        <!--<groupId>org.slf4j</groupId>-->
        <!--<artifactId>slf4j-api</artifactId>-->
        <!--</dependency>-->

        <!--<dependency>-->
        <!--<groupId>com.google.guava</groupId>-->
        <!--<artifactId>guava</artifactId>-->
        <!--<!–<version>${guava.version}</version>–>-->
        <!--<version>18.0</version>-->
        <!--</dependency>-->

        <!-- https://mvnrepository.com/artifact/commons-lang/commons-lang -->
        <dependency>
            <groupId>commons-lang</groupId>
            <artifactId>commons-lang</artifactId>
            <version>2.6</version>
        </dependency>


        <!--<dependency>-->
        <!--<groupId>junit</groupId>-->
        <!--<artifactId>junit</artifactId>-->
        <!--<scope>test</scope>-->
        <!--</dependency>-->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>3.8.1</version>
            <scope>test</scope>
        </dependency>

        <!--<dependency>-->
        <!--<groupId>org.slf4j</groupId>-->
        <!--<artifactId>slf4j-log4j12</artifactId>-->
        <!--<scope>test</scope>-->
        <!--</dependency>-->

        <!-- https://mvnrepository.com/artifact/org.mockito/mockito-all -->
        <dependency>
            <groupId>org.mockito</groupId>
            <artifactId>mockito-all</artifactId>
            <version>1.10.19</version>
            <scope>test</scope>
        </dependency>

        <!-- 2019-09-28: removed because it is probably unused, to avoid jar conflicts -->
        <!--<dependency>-->
        <!--<groupId>com.github.tomakehurst</groupId>-->
        <!--<artifactId>wiremock</artifactId>-->
        <!--<version>1.53</version>-->
        <!--<scope>test</scope>-->
        <!--</dependency>-->

        <!--<dependency>-->
        <!--<groupId>org.apache.httpcomponents</groupId>-->
        <!--<artifactId>httpclient</artifactId>-->
        <!--<scope>test</scope>-->
        <!--</dependency>-->

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpcore</artifactId>
            <version>4.4.6</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpmime</artifactId>
            <version>4.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpasyncclient</artifactId>
            <version>4.1.3</version>
        </dependency>

        <!-- okhttp3 -->
        <dependency>
            <groupId>com.squareup.okhttp3</groupId>
            <artifactId>okhttp</artifactId>
            <version>3.9.0</version>
        </dependency>

        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.4</version>
        </dependency>

        <dependency>
            <groupId>tech.allegro.schema.json2avro</groupId>
            <artifactId>converter</artifactId>
            <version>0.2.5</version>
        </dependency>

        <dependency>
            <groupId>org.apache.avro</groupId>
            <artifactId>avro-tools</artifactId>
            <version>1.7.4</version>
        </dependency>

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.11</artifactId>
            <version>${kafka.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-over-slf4j</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>${kafka.version}</version>
            <exclusions>
                <exclusion>
                    <artifactId>slf4j-log4j12</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
                <exclusion>
                    <artifactId>log4j-over-slf4j</artifactId>
                    <groupId>org.slf4j</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <!--<!–Scheduled tasks–>-->
        <!--<!– quartz –>-->
        <!--<dependency>-->
        <!--<groupId>org.quartz-scheduler</groupId>-->
        <!--<artifactId>quartz</artifactId>-->
        <!--<version>2.3.2</version>-->
        <!--</dependency>-->
        <!-- Scheduler core package -->
        <!--https://mvnrepository.com/artifact/org.springframework/spring-context-support-->
        <!--<dependency>-->
        <!--<groupId>org.springframework</groupId>-->
        <!--<artifactId>spring-context-support</artifactId>-->
        <!--<version>5.2.1.RELEASE</version>-->
        <!--</dependency>-->

        <dependency>
            <groupId>org.influxdb</groupId>
            <artifactId>influxdb-java</artifactId>
            <version>2.1</version>
        </dependency>

        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>18.0</version>
        </dependency>

        <!--<dependency>-->
        <!--<groupId>org.influxdb</groupId>-->
        <!--<artifactId>influxdb-java</artifactId>-->
        <!--<!–<version>2.15</version>–>-->
        <!--<!–<version>2.10</version>–>-->
        <!--<version>2.1</version>-->
        <!--</dependency>-->


        <!--<dependency>-->
        <!--<groupId>com.squareup.retrofit2</groupId>-->
        <!--<artifactId>converter-gson</artifactId>-->
        <!--<version>2.2.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.squareup.retrofit2</groupId>-->
        <!--<artifactId>converter-moshi</artifactId>-->
        <!--<version>2.1.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.squareup.retrofit2</groupId>-->
        <!--<artifactId>retrofit</artifactId>-->
        <!--<version>2.2.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.squareup.okhttp3</groupId>-->
        <!--<artifactId>logging-interceptor</artifactId>-->
        <!--<version>3.8.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.squareup.okio</groupId>-->
        <!--<artifactId>okio</artifactId>-->
        <!--<version>1.11.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.google.code.gson</groupId>-->
        <!--<artifactId>gson</artifactId>-->
        <!--<version>2.8.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.squareup.moshi</groupId>-->
        <!--<artifactId>moshi</artifactId>-->
        <!--<version>1.1.0</version>-->
        <!--</dependency>-->
        <!--<dependency>-->
        <!--<groupId>com.google.collections</groupId>-->
        <!--<artifactId>google-collections</artifactId>-->
        <!--<version>1.0-rc1</version>-->
        <!--</dependency>-->

    </dependencies>

</project>
@@ -0,0 +1,39 @@
package cn.ac.iie.cusflume.sink.CommonUtils;


import com.zdjizhi.utils.StringUtil;
import org.apache.log4j.Logger;

import java.util.Base64;


public class DecodeUtils {
    private static Logger logger = Logger.getLogger(DecodeUtils.class);

    public static String base64Str(String encodedText, String subjectCharset) {
        Base64.Decoder decoder = Base64.getDecoder();
        String sub;
        try {
            if (StringUtil.isBlank(subjectCharset)) {
                sub = new String(decoder.decode(encodedText), "UTF-8");
            } else if ("ISO8859-16".equals(subjectCharset)) {
                sub = new String(decoder.decode(encodedText), "ISO8859-1");
            } else {
                sub = new String(decoder.decode(encodedText), subjectCharset);
            }
            return sub;
        } catch (Exception e) {
            logger.warn("This encodedText===>" + encodedText + "<===, " +
                    "This subjectCharset===>" + subjectCharset + "<===, " +
                    "Transform base64 String failed===>" + e.getMessage() + "<===.");
            return "";
            // e.printStackTrace();
        }
    }

    // public static void main(String[] args) {
    //     String s = base64Str("eWJqQlRURmdVclNRbmJJLnR4dA==", " ");
    //     System.out.println(s);
    // }

}
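A minimal usage sketch for DecodeUtils.base64Str follows; the input string and the demo class are invented for illustration only, and a blank charset argument falls back to UTF-8 as the code above shows.

// Hypothetical sketch, not part of the commit.
public class DecodeUtilsDemo {
    public static void main(String[] args) {
        // "aGVsbG8=" is Base64 for "hello"; the blank charset triggers the UTF-8 branch.
        String decoded = DecodeUtils.base64Str("aGVsbG8=", "");
        System.out.println(decoded); // "hello" on success, "" if decoding fails
    }
}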
@@ -0,0 +1,144 @@
package cn.ac.iie.cusflume.sink.CommonUtils;

import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig;
import org.apache.log4j.Logger;


public class GetDataDictionaryCodeByTopicUtils {
    private static Logger logger = Logger.getLogger(GetDataDictionaryCodeByTopicUtils.class);

    /**
     * Get the type code of the data object itself.
     *
     * @param topic
     * @return
     */
    public static int getDataObjectTypeCodeByTopicName(String topic) {
        switch (topic) {
            /**
             * Non-file messages - log
             */
            case "NTC-CONN-RECORD-LOG":
            case "NTC-COLLECT-HTTP-LOG":
            case "NTC-COLLECT-SSL-LOG":
            case "NTC-COLLECT-DNS-LOG":
            case "NTC-COLLECT-FILE-LOG": // no data object here; data object type 1 (log) is used
                return 1;
            /**
             * File messages - file
             */
            case "NTC-COLLECT-FTP-DOC-LOG":
            case "NTC-COLLECT-HTTP-DOC-LOG":
            case "NTC-COLLECT-HTTP-AV-LOG": // added 2020-08-10, equivalent to NTC-COLLECT-HTTP-DOC-LOG
            case "NTC-COLLECT-MAIL-LOG":
            case "NTC-COLLECT-TELNET-LOG": // added 2020-03-25
                return 2;
            /**
             * Status messages (monitoring messages)
             * status
             */
            case "monitor-msg":
            case "INFLUX-SAPP-BPS-STAT-LOG": // added 2020-01-13 - Guangdong traffic status upload
                return 4;
            default:
                logger.error("GetDataDictionaryCodeByTopicUtils--->getDataObjectTypeCodeByTopicName There is an unknown topic! topic name is :" + topic);
                break;
        }
        return 0; // 0 marks an error
    }

    /**
     * Get the schema ID of the data tag.
     * NTC-COLLECT-FILE-LOG uses the file tag, 76.
     * All other logs read their own d_tag and should use the log tag, 75.
     * Status reports (needed by Yibu) are treated as status tags for now, tentatively 78.
     * @param topic
     * @return
     */
    public static int getDataTagSchemaIDByTopicName(String topic) {
        switch (topic) {
            /**
             * File tag - file_tag
             */
            case "NTC-COLLECT-FILE-LOG": // no data object here; data object type 1 (log) is used, but the data tag schema ID is the file tag, 76
                return 76;
            /**
             * Log tag - log_tag
             */
            case "NTC-CONN-RECORD-LOG":
            case "NTC-COLLECT-HTTP-LOG":
            case "NTC-COLLECT-SSL-LOG":
            case "NTC-COLLECT-DNS-LOG":
            case "NTC-COLLECT-FTP-DOC-LOG":
            case "NTC-COLLECT-HTTP-DOC-LOG":
            case "NTC-COLLECT-HTTP-AV-LOG": // added 2020-08-10, equivalent to NTC-COLLECT-HTTP-DOC-LOG
            case "NTC-COLLECT-MAIL-LOG":
            case "NTC-COLLECT-TELNET-LOG": // added 2020-03-25
                return 75;
            /**
             * Status tag - status_tag
             */
            case "monitor-msg":
            case "INFLUX-SAPP-BPS-STAT-LOG": // added 2020-01-13 - Guangdong traffic status upload
                return 78; // unclear whether status reports should use the log tag or the status tag; 78 is the status tag schema ID
            default:
                logger.error("GetDataDictionaryCodeByTopicUtils--->getDataTagSchemaIDByTopicName There is an unknown topic! topic name is :" + topic);
                break;
        }
        return 0; // 0 marks an error
    }

    /**
     * Get the schema ID of the data object - must be registered with and obtained from the bus.
     *
     * @param topic
     * @return
     */
    public static int getDataObjectSchemaIDByTopicName(String topic) {
        switch (topic) {
            /**
             * Non-file messages - log
             */
            case "NTC-COLLECT-FILE-LOG": // the file tag is sent on its own and handled as a special message; by convention the schema ID here is 0, i.e. there is no data object part
                return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_FILE_LOG;
            case "NTC-CONN-RECORD-LOG":
                return RealtimeCountConfig.SCHEMA_ID_NTC_CONN_RECORD_LOG;
            case "NTC-COLLECT-HTTP-LOG":
                return 3;
            case "NTC-COLLECT-SSL-LOG":
                return 4;
            case "NTC-COLLECT-DNS-LOG":
                return 5;
            /**
             * File messages - file
             */
            case "NTC-COLLECT-FTP-DOC-LOG":
                return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_FTP_DOC_LOG;
            case "NTC-COLLECT-TELNET-LOG": // added 2020-03-25
                return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_TELNET_LOG;
            case "NTC-COLLECT-HTTP-DOC-LOG":
                return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_HTTP_DOC_LOG;
            case "NTC-COLLECT-HTTP-AV-LOG": // added 2020-08-10, equivalent to NTC-COLLECT-HTTP-DOC-LOG
                return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_HTTP_AV_LOG;
            case "NTC-COLLECT-MAIL-LOG":
                return RealtimeCountConfig.SCHEMA_ID_NTC_COLLECT_MAIL_LOG;
            /**
             * Yibu status messages (monitoring) - Yibu data object schema ID - already registered on the bus
             * status
             */
            case "monitor-msg":
                return RealtimeCountConfig.SCHEMA_ID_MONITOR_MSG; // this Avro format is the status object schema defined by the bus; data dictionary subtype code 16387, schema ID 22
            /**
             * Guangdong status messages (monitoring) - Guangdong data object schema ID
             * status
             */
            case "INFLUX-SAPP-BPS-STAT-LOG":
                return RealtimeCountConfig.SCHEMA_ID_INFLUX_SAPP_BPS_STAT_LOG;
            default:
                logger.error("GetDataDictionaryCodeByTopicUtils--->getDataObjectSchemaIDByTopicName There is an unknown topic! topic name is :" + topic);
                break;
        }
        return 0; // 0 marks an error
    }

}
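A short sketch of how a caller might combine the three lookups above for one Kafka topic; only the three static methods shown in this class are taken from the commit, the demo class itself is an assumption.

// Hypothetical sketch, not part of the commit.
public class TopicCodeDemo {
    public static void main(String[] args) {
        String topic = "NTC-COLLECT-HTTP-DOC-LOG";
        int objectType = GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic);    // 2 = file message
        int tagSchemaId = GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic);       // 75 = log tag
        int objectSchemaId = GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic); // value comes from RealtimeCountConfig
        if (objectType == 0 || tagSchemaId == 0 || objectSchemaId == 0) {
            // 0 is the error marker used by all three lookups
            System.err.println("unknown topic: " + topic);
        }
        System.out.println(objectType + " / " + tagSchemaId + " / " + objectSchemaId);
    }
}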
@@ -0,0 +1,139 @@
package cn.ac.iie.cusflume.sink.CommonUtils;

import cn.ac.iie.cusflume.sink.bean.fileBean.*;
import com.alibaba.fastjson.JSONObject;
import com.zdjizhi.utils.StringUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

import java.util.Arrays;


public class GetFilePathByTopicUtils {
    private static Logger logger = Logger.getLogger(GetFilePathByTopicUtils.class);

    public static String getFilePathByTopicName(String dataJson, String topic) {
        switch (topic) {
            /**
             * Non-file messages
             */
            case "NTC-CONN-RECORD-LOG":
                return "-";
            case "NTC-COLLECT-HTTP-LOG":
                return "-";
            case "NTC-COLLECT-SSL-LOG":
                return "-";
            case "NTC-COLLECT-DNS-LOG":
                return "-";
            /**
             * File messages
             */
            case "NTC-COLLECT-FTP-DOC-LOG":
                return getCollFtpDocFilePath(dataJson); // added 2020-02-11
            case "NTC-COLLECT-HTTP-AV-LOG": // shares the same schema as NTC-COLLECT-HTTP-DOC-LOG - 2020-09-04
                // return getCollHttpAvFilePath(dataJson);
            case "NTC-COLLECT-HTTP-DOC-LOG":
                return getCollHttpDocFilePath(dataJson);
            case "NTC-COLLECT-MAIL-LOG":
                return getCollMailFilePath(dataJson);
            case "NTC-COLLECT-FILE-LOG":
                // return getCollFileFilePath(dataJson);
                return "-"; // Mail and Http already set the FilePath request header, so it is not set again here; it can be added later if needed
            case "NTC-COLLECT-TELNET-LOG":
                return getCollTelnetFilePath(dataJson); // added 2020-03-25
            /**
             * Status messages (monitoring messages)
             */
            case "monitor-msg":
                return "-";
            case "INFLUX-SAPP-BPS-STAT-LOG":
                return "-";
            default:
                logger.error("GetFilePathByTopicUtils---There is an unknown topic! topic name is :" + topic);
                break;
        }
        return "-";
    }

    /**
     * Get the FilePath for NTC-COLLECT-MAIL-LOG.
     *
     * @param dataJson
     * @return
     */
    private static String getCollMailFilePath(String dataJson) {
        NTC_COLLECT_MAIL_LOG ntcCollectMailLog = JSONObject.parseObject(dataJson, NTC_COLLECT_MAIL_LOG.class);
        String filePathList = "[\"" + StringUtils.join(ntcCollectMailLog.getD_tag().getFile_path_list(), "\",\"") + "\"]"; // new - 2019-12-22; variant with double quotes and escaping around each element
        if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) {
            return filePathList;
        } else {
            return "-";
        }

    }

    /**
     * Get the FilePath for NTC-COLLECT-HTTP-DOC-LOG.
     * NTC-COLLECT-HTTP-AV-LOG reuses the NTC-COLLECT-HTTP-DOC-LOG data structure.
     * @param dataJson
     * @return
     */
    private static String getCollHttpDocFilePath(String dataJson) {
        NTC_COLLECT_HTTP_DOC_LOG ntcCollectHttpDocLog = JSONObject.parseObject(dataJson, NTC_COLLECT_HTTP_DOC_LOG.class);
        String filePathList = "[\"" + StringUtils.join(ntcCollectHttpDocLog.getD_tag().getFile_path_list(), "\",\"") + "\"]"; // new - 2019-12-22; variant with double quotes and escaping around each element
        if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) {
            return filePathList;
        } else {
            return "-";
        }
    }

    /**
     * Get the FilePath for NTC-COLLECT-FTP-DOC-LOG.
     *
     * @param dataJson
     * @return
     */
    private static String getCollFtpDocFilePath(String dataJson) {
        NTC_COLLECT_FTP_DOC_LOG ntcCollectFtpDocLog = JSONObject.parseObject(dataJson, NTC_COLLECT_FTP_DOC_LOG.class);
        String filePathList = "[\"" + StringUtils.join(ntcCollectFtpDocLog.getD_tag().getFile_path_list(), "\",\"") + "\"]";
        if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) {
            return filePathList;
        } else {
            return "-";
        }
    }

    /**
     * Get the FilePath for NTC-COLLECT-TELNET-LOG.
     *
     * @param dataJson
     * @return
     */
    private static String getCollTelnetFilePath(String dataJson) {
        NTC_COLLECT_TELNET_LOG ntcCollectTelnetLog = JSONObject.parseObject(dataJson, NTC_COLLECT_TELNET_LOG.class);
        String filePathList = "[\"" + StringUtils.join(ntcCollectTelnetLog.getD_tag().getFile_path_list(), "\",\"") + "\"]";
        if (!("[\"\"]".equals(filePathList)) && StringUtil.isNotBlank(filePathList)) {
            return filePathList;
        } else {
            return "-";
        }
    }

    /**
     * Get the FilePath for NTC-COLLECT-FILE-LOG.
     *
     * @param dataJson
     * @return
     */
    private static String getCollFileFilePath(String dataJson) {
        FILE_TAG_BEAN ntcCollectHttpDocLog = JSONObject.parseObject(dataJson, FILE_TAG_BEAN.class);
        String filePath = "[\"" + ntcCollectHttpDocLog.getFile_path() + "\"]";
        if (!("[\"\"]".equals(filePath)) && StringUtil.isNotBlank(filePath)) {
            return filePath;
        } else {
            return "-";
        }
    }

}
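A hedged sketch of the expected output shape follows. The input JSON is invented and it assumes the NTC_COLLECT_MAIL_LOG bean maps d_tag.file_path_list the way the helpers above read it; topics without files simply return "-".

// Hypothetical sketch, not part of the commit; the JSON shape is an assumption.
public class FilePathDemo {
    public static void main(String[] args) {
        String dataJson = "{\"d_tag\":{\"file_path_list\":[\"/data/a.eml\",\"/data/b.doc\"]}}";
        String paths = GetFilePathByTopicUtils.getFilePathByTopicName(dataJson, "NTC-COLLECT-MAIL-LOG");
        // Expected form: ["/data/a.eml","/data/b.doc"]
        System.out.println(paths);
    }
}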
@@ -0,0 +1,92 @@
package cn.ac.iie.cusflume.sink.CommonUtils;

import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_FTP_DOC_LOG;
import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_HTTP_DOC_LOG;
import cn.ac.iie.cusflume.sink.bean.fileBean.NTC_COLLECT_MAIL_LOG;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;


public class PutIdOnMsgByTopicUtils {
    private static Logger logger = Logger.getLogger(PutIdOnMsgByTopicUtils.class);

    /**
     * Used when a single record contains a single file.
     *
     * @param topic
     * @param dataJson
     * @param fileId
     * @return
     */
    public static String putIdOnMsgByTopicName(String topic, String dataJson, String fileId) {
        switch (topic) {
            // case "NTC-COLLECT-MAIL-LOG":
            //     return compCollectMail();
            case "NTC-COLLECT-HTTP-DOC-LOG":
                return compCollectHttpDoc(topic, dataJson, fileId);
            case "NTC-COLLECT-FTP-DOC-LOG":
                return compCollectFtpDoc(topic, dataJson, fileId);
            default:
                logger.error("There is no corresponding topic! topic name is :" + topic);
                break;
        }
        return null;
    }

    /**
     * Used when a single record contains several kinds of files.
     *
     * @param topic
     * @param dataJson
     * @param fileId
     * @param fileKind
     * @return
     */
    public static String putIdOnMsgByTopicName(String topic, String dataJson, String fileId, String fileKind) {
        switch (topic) {
            case "NTC-COLLECT-MAIL-LOG":
                return compCollectMail(topic, dataJson, fileId, fileKind);
            default:
                logger.error("There is no corresponding topic! topic name is :" + topic);
                break;
        }
        return null;
    }

    private static String compCollectMail(String topic, String dataJson, String fileId, String fileKind) {
        NTC_COLLECT_MAIL_LOG ntc_collect_mail_log = JSONObject.parseObject(dataJson, NTC_COLLECT_MAIL_LOG.class);
        if ("eml_file".equals(fileKind)) {
            ntc_collect_mail_log.setEml_file_id(fileId);
        } else if ("attachments".equals(fileKind)) {
            ntc_collect_mail_log.setAttachments_id(fileId);
        } else {
            logger.error("PutIdOnMsgByTopicUtils compCollectMail fileKind is unknown===>>>" + fileKind);
        }
        return JSONObject.toJSONString(ntc_collect_mail_log);
    }


    private static String compCollectFtpDoc(String topic, String dataJson, String fileId) {
        NTC_COLLECT_FTP_DOC_LOG ntc_collect_ftp_doc_log = JSONObject.parseObject(dataJson, NTC_COLLECT_FTP_DOC_LOG.class);
        // ntc_collect_ftp_doc_log.setRes_body_file_id(fileId);
        ntc_collect_ftp_doc_log.setContent_path(fileId);
        return JSONObject.toJSONString(ntc_collect_ftp_doc_log);
    }


    private static String compCollectHttpDoc(String topic, String dataJson, String fileId) {
        NTC_COLLECT_HTTP_DOC_LOG ntcCollectHttpDocLog = JSONObject.parseObject(dataJson, NTC_COLLECT_HTTP_DOC_LOG.class);
        if (StringUtils.isNotBlank(ntcCollectHttpDocLog.getReq_body_file())) {
            ntcCollectHttpDocLog.setReq_body_file_id(fileId);
        } else if (StringUtils.isNotBlank(ntcCollectHttpDocLog.getRes_body_file())) {
            ntcCollectHttpDocLog.setRes_body_file_id(fileId);
        } else {
            logger.error("PutIdOnMsgByTopicUtils there is no Req_body_file or Res_body_file in this message===>>>" + dataJson);
        }
        return JSONObject.toJSONString(ntcCollectHttpDocLog);
    }

    /**--------------------------------- removed topics ------------------------------------------------------------**/

}
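A sketch of the intended call pattern: once a file has been posted and an id comes back, the id is patched into the original Kafka message before the message itself is sent. The demo class and the sample values are assumptions; the id format is copied from the callback code later in this commit.

// Hypothetical sketch, not part of the commit.
public class PutIdDemo {
    public static void main(String[] args) {
        String original = "{\"req_body_file\":\"yb_abc.txt\"}";   // invented HTTP-DOC message
        String fileId = "2019111507094164188567821505";           // id format as logged by the post-file callback
        String patched = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(
                "NTC-COLLECT-HTTP-DOC-LOG", original, fileId);
        System.out.println(patched); // req_body_file_id is now filled in; unknown topics return null
    }
}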
@@ -0,0 +1,260 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask;
import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad;
import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo;
import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.ParseException;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;

/**
 * Async HttpClient callback object --- GET file.
 * On success, the fetched file is posted to zx.
 * On failure, retry 3 times.
 */
public class AsyncHttpClientGetFileCallback implements FutureCallback<HttpResponse> {
    private static Logger LOG = Logger.getLogger(AsyncHttpClientGetFileCallback.class);

    private KafkaDB kafkaDBAsyncGet;

    private String postFileUrl;
    private String postMsgUrl;
    private String getFileUrl;
    private AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback;
    private ConfigInfo configInfo;
    private String sendMsg; // sent to Kafka when the file GET fails
    private int count;

    // not used yet
    public AsyncHttpClientGetFileCallback(String postFileUrl, String postMsgUrl, String getFileUrl, AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback, int count) {
        this.postFileUrl = postFileUrl;
        this.postMsgUrl = postMsgUrl;
        this.getFileUrl = getFileUrl;
        this.asyncHttpClientPostFileCallback = asyncHttpClientPostFileCallback;
        this.count = count; // recommended initial value is 0
    }

    public AsyncHttpClientGetFileCallback(ConfigInfo configInfo, String getFileUrl, String sendMsg, AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback, int count) {
        this.configInfo = configInfo;
        this.postFileUrl = configInfo.getPostFileUrl(); // taken from configInfo
        this.postMsgUrl = configInfo.getPostMsgUrl();   // taken from configInfo
        this.getFileUrl = getFileUrl;
        this.sendMsg = sendMsg;
        this.asyncHttpClientPostFileCallback = asyncHttpClientPostFileCallback;
        this.count = count; // recommended initial value is 0

        // initialize the Kafka writer
        kafkaDBAsyncGet = KafkaDB.getInstance();
    }

    public String getPostFileUrl() {
        return postFileUrl;
    }

    public void setPostFileUrl(String postFileUrl) {
        this.postFileUrl = postFileUrl;
    }

    public String getPostMsgUrl() {
        return postMsgUrl;
    }

    public void setPostMsgUrl(String postMsgUrl) {
        this.postMsgUrl = postMsgUrl;
    }

    public String getGetFileUrl() {
        return getFileUrl;
    }

    public void setGetFileUrl(String getFileUrl) {
        this.getFileUrl = getFileUrl;
    }

    public AsyncHttpClientPostFileCallback getAsyncHttpClientPostFileCallback() {
        return asyncHttpClientPostFileCallback;
    }

    public void setAsyncHttpClientPostFileCallback(AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback) {
        this.asyncHttpClientPostFileCallback = asyncHttpClientPostFileCallback;
    }

    public ConfigInfo getConfigInfo() {
        return configInfo;
    }

    public void setConfigInfo(ConfigInfo configInfo) {
        this.configInfo = configInfo;
    }

    public String getSendMsg() {
        return sendMsg;
    }

    public void setSendMsg(String sendMsg) {
        this.sendMsg = sendMsg;
    }

    public int getCount() {
        return count;
    }

    public void setCount(int count) {
        this.count = count;
    }

    @Override
    public String toString() {
        return "AsyncHttpClientGetFileCallback{" +
                "postFileUrl='" + postFileUrl + '\'' +
                ", postMsgUrl='" + postMsgUrl + '\'' +
                ", getFileUrl='" + getFileUrl + '\'' +
                ", asyncHttpClientPostFileCallback=" + asyncHttpClientPostFileCallback +
                ", configInfo=" + configInfo +
                ", sendMsg='" + sendMsg + '\'' +
                ", count=" + count +
                '}';
    }

    /**
     * Called when the request completes.
     */
    @Override
    public void completed(HttpResponse response) {
        try {
            byte[] result = IOUtils.toByteArray(response.getEntity().getContent());
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == 200) {
                asyncHttpClientPostFileCallback.setResultIs(result); // keep the fetched bytes so a failed file POST can be retried
                LOG.info("AsyncHttpClientGetFileCallback completed,response status:{" + statusCode + "},get file success,post file to zx next.");

                AvroMonitorTimerTask.fileReadyPostSum++;                  // file pushes start only after the bytes are fetched: count files
                AvroMonitorTimerTask.fileBytesReadyPostSum += result.length; // and count bytes

                // once the file is fetched, stream it to the bus
                HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), result, asyncHttpClientPostFileCallback); // on failure the callback entered here already holds the file bytes
            } else if (statusCode == 404) {
                // file does not exist yet, retry
                retryFor404();
            } else {
                LOG.info("AsyncHttpClientGetFileCallback getFile failed,response status:{" + statusCode + "},this msg is===>" + sendMsg + "<===");
            }

            HttpClientUtils.closeQuietly(response);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Retry triggered by a 404, most likely because the file has not been fully generated yet.
     */
    private void retryFor404() {
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncHttpClientGetFileCallback getFile is failed,statusCode is 404,retry count=" + count);
            }
            if (count > 4) {
                LOG.error("AsyncHttpClientGetFileCallback getFile is failed and already retry 3 times,statusCode is 404,This message is===>>>" + sendMsg + "<<<===");
            } else {
                Map map = JSONObject.parseObject(sendMsg, Map.class);
                int found_time = (int) map.get("found_time");
                long now_time = System.currentTimeMillis() / 1000;
                long timeDiffer = now_time - found_time;
                if (timeDiffer >= 20) {
                    AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count); // note: count is no longer 0 here
                    HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback);
                } else {
                    Thread.sleep((20 - timeDiffer) * 1000);
                    AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count); // note: count is no longer 0 here
                    HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback);
                }
            }
        } catch (Exception e2) {
            LOG.error("AsyncHttpClientGetFileCallback retryFor404 getFile retry is error===>>>" + e2);
        }
    }

    /**
     * Called when the request is cancelled.
     */
    @Override
    public void cancelled() {
        LOG.warn("request is cancelled...");
    }

    /**
     * Called when the request fails.
     */
    @Override
    public void failed(Exception e) {
        retryForFailed(e);
    }

    /**
     * Retry triggered by a failed request.
     *
     * @param e
     */
    private void retryForFailed(Exception e) {
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncHttpClientGetFileCallback getFile is failed,retry count=" + count);
            }
            if (count > 4) {
                LOG.error("AsyncHttpClientGetFileCallback getFile is failed and already retry 3 times.error===>>>" + e + "<<<===.This message is===>>>" + sendMsg + "<<<===");
            } else {
                Map map = JSONObject.parseObject(sendMsg, Map.class);
                int found_time = (int) map.get("found_time");
                long now_time = System.currentTimeMillis() / 1000;
                long timeDiffer = now_time - found_time;
                if (timeDiffer >= 20) {
                    AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count); // note: count is no longer 0 here
                    HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback);
                } else {
                    Thread.sleep((20 - timeDiffer) * 1000);
                    AsyncHttpClientGetFileCallback asyncHttpClientGetFileCallback = new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, sendMsg, asyncHttpClientPostFileCallback, count); // note: count is no longer 0 here
                    HttpClientUtil.getFileAndPostFile(getFileUrl, asyncHttpClientGetFileCallback);
                }
            }
        } catch (Exception e2) {
            LOG.error("AsyncHttpClientGetFileCallback retryForFailed getFile retry is error===>>>" + e2);
        }
    }


    protected String getHttpContent(HttpResponse response) {
        HttpEntity entity = response.getEntity();
        String body = null;

        if (entity == null) {
            return null;
        }

        try {
            body = EntityUtils.toString(entity, "utf-8");
        } catch (ParseException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        } catch (IOException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        }
        return body;
    }
}
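A minimal sketch of how the GET-then-POST chain above appears to be wired: the POST callback is built first so the GET callback can hand it the downloaded bytes, and HttpClientUtil.getFileAndPostFile (the helper referenced in the retry paths above) kicks off the download. The demo class is an assumption; a populated ConfigInfo and the HttpClientUtil helper are taken as given.

// Hypothetical wiring sketch, not part of the commit.
public class GetThenPostDemo {
    public static void start(ConfigInfo configInfo, String getFileUrl, String kafkaMsg) {
        // count starts at 0 for both callbacks, as the constructors recommend
        AsyncHttpClientPostFileCallback postCb =
                new AsyncHttpClientPostFileCallback(configInfo, kafkaMsg, 0);
        AsyncHttpClientGetFileCallback getCb =
                new AsyncHttpClientGetFileCallback(configInfo, getFileUrl, kafkaMsg, postCb, 0);
        // On 200 the GET callback posts the bytes to zx; on 404 or failure it retries up to its count limit.
        HttpClientUtil.getFileAndPostFile(getFileUrl, getCb);
    }
}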
@@ -0,0 +1,293 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

//import com.google.api.client.util.Lists;
//import com.mesa.miniotohttp.tools.OssUtil;
//import com.mesa.miniotohttp.tools.TimeTask;

import cn.ac.iie.cusflume.sink.CommonUtils.PutIdOnMsgByTopicUtils;
import cn.ac.iie.cusflume.sink.HttpAsyncUtils.ybBean.PutFileInfo;
import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask;
import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad;
import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo;
import cn.ac.iie.cusflume.sink.bean.postFileBean.PostFileResBody;
import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody;
import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.ParseException;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;

/**
 * Async HttpClient callback object --- POST file.
 * Runs after the file GET succeeds.
 * When the file POST succeeds, the returned file id is added to the message and the message POST starts;
 * this needs the message (from Kafka) and the message POST url (from the config file),
 * plus User-Agent and X-Tag for validation.
 * <p>
 * When the POST fails, retry three times.
 */
public class AsyncHttpClientPostFileCallback implements FutureCallback<HttpResponse> {
    private static Logger LOG = Logger.getLogger(AsyncHttpClientPostFileCallback.class);

    private DataCenterLoad dclAsyncPost;
    private KafkaDB kafkaDBAsyncPost;

    private String postFileUrl;
    private String postMsgUrl;
    private byte[] resultIs; // file bytes from the GET, kept so a failed file POST can be retried (byte[] version)
    private ConfigInfo configInfo; // holds: 1. the file/message POST urls (from config); 2. the pre-ingest verification urls (from config); 3. the session cookies for file/message posting (obtained from the bus); 4. the topicName, userAgent, xTag and batchSize needed to load the msg as Avro

    private String sendMsg; // on success the id is filled in and the msg is sent to zx; on failure it is sent to Kafka

    private PostFileResBody postFileResBody; // response returned after the file POST succeeds; contains the file id under the key "id"
    private int count;

    private PutFileInfo putFileInfo; // extra information carried along when sending the file

    public AsyncHttpClientPostFileCallback(ConfigInfo configInfo, String sendMsg, int count) {
        this.configInfo = configInfo;
        this.postFileUrl = configInfo.getPostFileUrl();
        this.postMsgUrl = configInfo.getPostMsgUrl();
        this.sendMsg = sendMsg; // the payload matching the url
        this.count = count;

        // initialize the loaders
        dclAsyncPost = new DataCenterLoad();
        kafkaDBAsyncPost = KafkaDB.getInstance();
    }

    // not used yet
    public AsyncHttpClientPostFileCallback(String postFileUrl, String postMsgUrl, int count) {
        this.postFileUrl = postFileUrl;
        this.postMsgUrl = postMsgUrl;
        this.count = count;

        // initialize the loaders
        dclAsyncPost = new DataCenterLoad();
        kafkaDBAsyncPost = KafkaDB.getInstance();
    }

    // not used yet
    public AsyncHttpClientPostFileCallback(String postFileUrl, String postMsgUrl, byte[] resultIs, int count) {
        this.postFileUrl = postFileUrl;
        this.postMsgUrl = postMsgUrl;
        this.resultIs = resultIs;
        this.count = count;

        // initialize the loaders
        dclAsyncPost = new DataCenterLoad();
        kafkaDBAsyncPost = KafkaDB.getInstance();
    }

    public String getPostFileUrl() {
        return postFileUrl;
    }

    public void setPostFileUrl(String postFileUrl) {
        this.postFileUrl = postFileUrl;
    }

    public String getPostMsgUrl() {
        return postMsgUrl;
    }

    public void setPostMsgUrl(String postMsgUrl) {
        this.postMsgUrl = postMsgUrl;
    }

    public byte[] getResultIs() {
        return resultIs;
    }

    public void setResultIs(byte[] resultIs) {
        this.resultIs = resultIs;
    }

    public ConfigInfo getConfigInfo() {
        return configInfo;
    }

    public void setConfigInfo(ConfigInfo configInfo) {
        this.configInfo = configInfo;
    }

    public String getSendMsg() {
        return sendMsg;
    }

    public void setSendMsg(String sendMsg) {
        this.sendMsg = sendMsg;
    }

    public PostFileResBody getPostFileResBody() {
        return postFileResBody;
    }

    public void setPostFileResBody(PostFileResBody postFileResBody) {
        this.postFileResBody = postFileResBody;
    }

    public int getCount() {
        return count;
    }

    public void setCount(int count) {
        this.count = count;
    }

    @Override
    public String toString() {
        return "AsyncHttpClientPostFileCallback{" +
                "postFileUrl='" + postFileUrl + '\'' +
                ", postMsgUrl='" + postMsgUrl + '\'' +
                ", configInfo=" + configInfo +
                ", sendMsg='" + sendMsg + '\'' +
                ", postFileResBody=" + postFileResBody +
                ", count=" + count +
                '}';
    }

    /**
     * Called when the request completes.
     */
    @Override
    public void completed(HttpResponse response) {
        try {
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == 200) {
                LOG.info("AsyncHttpClientPostFileCallback completed,post file statuscode is:{" + statusCode + "},now start to send message to zx.");
                // after the file POST succeeds the file id can be read from the response
                String postResBody = getHttpContent(response);
                System.out.println("Post File to zx resBody====>" + postResBody);
                if (StringUtils.isNotBlank(postResBody)) {
                    this.postFileResBody = JSONObject.parseObject(postResBody, PostFileResBody.class);
                    String fileId = postFileResBody.getMsg(); // get the file id --- new, 2019-11-15 --- response body format: {"code": "200", "msg": "2019111507094164188567821505", "data": "no extra data needed"}
                    AvroMonitorTimerTask.fileSuccessSum++; // files whose id was obtained, i.e. files pushed successfully

                    AvroMonitorTimerTask.fileBytesSuccessSum += resultIs.length; // bytes of files whose id was obtained, i.e. bytes pushed successfully

                    this.sendMsg = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(configInfo.getTopicName(), sendMsg, fileId); // complete the message

                    AvroMonitorTimerTask.msgReadyPostSum++; // one file corresponds to one message
                    // send to the bus
                    ProResBody proResBody = dclAsyncPost.avroDataLoad(configInfo.getPostMsgUrl(), configInfo.getTopicName(), sendMsg, configInfo.getBatchSize(), configInfo.getUserAgent(), configInfo.getMsgSessionCookie());
                    LOG.info("Send message with fileId to zx over,this responseBody is===>" + proResBody.toString());
                } else {
                    LOG.warn("AsyncHttpClientPostFileCallback post file success but postResBody(response body) is null.");
                }
            } else if (statusCode == 403) { // empty file, do not retry
                LOG.error("AsyncHttpClientPostFileCallback post zxFile statusCode is 403 so get the fileIs but this minio file is empty.This message is===>>>" + sendMsg + "<<<===");
            } else {
                RetryAsyncHttpClientPostFileFailed(statusCode);
            }
        } catch (Exception e) {
            LOG.error("AsyncHttpClientPostFileCallback post zxFile or send finalMsg is error .This message is===>>>" + sendMsg + "<<<===");
            e.printStackTrace();
        }

        HttpClientUtils.closeQuietly(response);
    }

    /**
     * Called when the request is cancelled.
     */
    @Override
    public void cancelled() {
        LOG.info("AsyncHttpClientPostFileCallback is cancelled... ...");
    }

    /**
     * Called when the request fails.
     */
    @Override
    public void failed(Exception e) {
        // retry
        RetryAsyncHttpClientPostFileFailed(e);
    }

    /**
     * Retry when failed() is invoked.
     *
     * @param e
     */
    private void RetryAsyncHttpClientPostFileFailed(Exception e) {
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncHttpClientPostFileCallback post zxFile is failed,retry count=" + count);
            }
            if (count > 4) {
                LOG.error("AsyncHttpClientPostFileCallback post zxFile is failed and already retry 3 times.error===>>>" + e + "<<<===.This message is===>>>" + sendMsg + "<<<===");
            } else {
                if (configInfo != null && StringUtils.isNotBlank(sendMsg) && resultIs != null) {
                    // retry sending the file
                    AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback = new AsyncHttpClientPostFileCallback(configInfo, sendMsg, count); // note: count is no longer 0 here
                    // the retry also streams the previously stored bytes to the bus
                    HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), resultIs, asyncHttpClientPostFileCallback);
                }
            }
        } catch (Exception e2) {
            LOG.error("AsyncHttpClientGetFileCallback retry is error===>>>" + e2);
        }
    }

    /**
     * Retry when the stream is non-empty but the file POST status is abnormal.
     *
     * @param statusCode
     */
    private void RetryAsyncHttpClientPostFileFailed(int statusCode) {
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncHttpClientPostFileCallback post zxFile statusCode is abnormal,retry count=" + count);
            }
            if (count > 4) {
                LOG.error("AsyncHttpClientPostFileCallback post zxFile statusCode is abnormal and already retry 3 times.statusCode is{" + statusCode + "}.This message is===>>>" + sendMsg + "<<<===");
            } else {
                if (configInfo != null && StringUtils.isNotBlank(sendMsg) && resultIs != null) {
                    // retry sending the file
                    AsyncHttpClientPostFileCallback asyncHttpClientPostFileCallback = new AsyncHttpClientPostFileCallback(configInfo, sendMsg, count); // note: count is no longer 0 here
                    // the retry also streams the previously stored bytes to the bus
                    HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), resultIs, asyncHttpClientPostFileCallback);
                }
            }
        } catch (Exception e2) {
            LOG.error("AsyncHttpClientGetFileCallback retry is error===>>>" + e2);
        }
    }

    protected String getHttpContent(HttpResponse response) {

        HttpEntity entity = response.getEntity();
        String body = null;
        if (entity == null) {
            return null;
        }
        try {
            body = EntityUtils.toString(entity, "utf-8");
        } catch (ParseException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        } catch (IOException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        }
        return body;
    }
}



@@ -0,0 +1,195 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig;
import org.apache.http.Consts;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.MalformedChallengeException;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.config.ConnectionConfig;
import org.apache.http.config.Lookup;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLContexts;
import org.apache.http.impl.auth.*;
import org.apache.http.impl.client.BasicCookieStore;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager;
import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor;
import org.apache.http.impl.nio.reactor.IOReactorConfig;
import org.apache.http.nio.conn.NoopIOSessionStrategy;
import org.apache.http.nio.conn.SchemeIOSessionStrategy;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.nio.reactor.ConnectingIOReactor;
import org.apache.http.nio.reactor.IOReactorException;

import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.nio.charset.CodingErrorAction;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

/**
 * Async HTTP client object; a proxy can be configured.
 */
public class HttpAsyncClient {

    /*
    private static int socketTimeout = 60000;                // data wait timeout, 60 s; tune per workload
    private static int connectTimeout = 60000;               // connect timeout
    private static int poolSize = 5000;                      // max connections in the pool
    private static int maxPerRoute = 2500;                   // at most 1500 concurrent connections per host
    private static int connectionRequestTimeout = 90000;     // timeout for acquiring a connection from the pool
    */
    private static int socketTimeout = RealtimeCountConfig.HTTP_ASYNC_SOCKETTIMEOUT; // data wait timeout, 60 s; tune per workload
    private static int connectTimeout = RealtimeCountConfig.HTTP_ASYNC_CONNECTTIMEOUT; // connect timeout
    private static int poolSize = RealtimeCountConfig.HTTP_ASYNC_POOLSIZE; // max connections in the pool
    private static int maxPerRoute = RealtimeCountConfig.HTTP_ASYNC_MAXPERROUTE; // at most 1500 concurrent connections per host
    private static int connectionRequestTimeout = RealtimeCountConfig.HTTP_ASYNC_CONNECTIONREQUESTTIMEOUT; // timeout for acquiring a connection from the pool

    // HTTP proxy parameters
    private String host = "";
    private int port = 0;
    private String username = "";
    private String password = "";

    // async httpclient
    private CloseableHttpAsyncClient asyncHttpClient;

    // async httpclient with proxy
    private CloseableHttpAsyncClient proxyAsyncHttpClient;

    public HttpAsyncClient() {
        try {
            this.asyncHttpClient = createAsyncClient(false);
            this.proxyAsyncHttpClient = createAsyncClient(true);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

    /**
     * Newer version of createAsyncClient(boolean proxy) --- noted 2020-04-25.
     * @param proxy
     * @return
     * @throws KeyManagementException
     * @throws UnrecoverableKeyException
     * @throws NoSuchAlgorithmException
     * @throws KeyStoreException
     * @throws MalformedChallengeException
     * @throws IOReactorException
     */
    public CloseableHttpAsyncClient createAsyncClient(boolean proxy)
            throws KeyManagementException, UnrecoverableKeyException,
            NoSuchAlgorithmException, KeyStoreException,
            MalformedChallengeException, IOReactorException {

        RequestConfig requestConfig = RequestConfig.custom()
                .setConnectionRequestTimeout(connectionRequestTimeout)
                .setConnectTimeout(connectTimeout)
                .setSocketTimeout(socketTimeout).build();

        SSLContext sslcontext = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
        X509TrustManager tm = new X509TrustManager() {
            @Override
            public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
            }

            @Override
            public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
            }

            @Override
            public X509Certificate[] getAcceptedIssuers() {
                return null;
            }
        };
        sslcontext.init(null, new TrustManager[]{tm}, null);


        UsernamePasswordCredentials credentials = new UsernamePasswordCredentials(
                username, password);

        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY, credentials);

        // register the session strategies handling sockets for http and https
        Registry<SchemeIOSessionStrategy> sessionStrategyRegistry = RegistryBuilder
                .<SchemeIOSessionStrategy> create()
                .register("http", NoopIOSessionStrategy.INSTANCE)
                .register("https", new SSLIOSessionStrategy(sslcontext, NoopHostnameVerifier.INSTANCE))
                .build();

        // configure the I/O threads
        IOReactorConfig ioReactorConfig = IOReactorConfig.custom().setSoKeepAlive(false).setTcpNoDelay(true)
                .setIoThreadCount(Runtime.getRuntime().availableProcessors())
                .build();
        // set the connection pool size
        ConnectingIOReactor ioReactor;
        ioReactor = new DefaultConnectingIOReactor(ioReactorConfig);
        PoolingNHttpClientConnectionManager conMgr = new PoolingNHttpClientConnectionManager(
                ioReactor, null, sessionStrategyRegistry, null);

        if (poolSize > 0) {
            conMgr.setMaxTotal(poolSize);
        }

        if (maxPerRoute > 0) {
            conMgr.setDefaultMaxPerRoute(maxPerRoute);
        } else {
            conMgr.setDefaultMaxPerRoute(10);
        }

        ConnectionConfig connectionConfig = ConnectionConfig.custom()
                .setMalformedInputAction(CodingErrorAction.IGNORE)
                .setUnmappableInputAction(CodingErrorAction.IGNORE)
                .setCharset(Consts.UTF_8).build();

        Lookup<AuthSchemeProvider> authSchemeRegistry = RegistryBuilder
                .<AuthSchemeProvider> create()
                .register(AuthSchemes.BASIC, new BasicSchemeFactory())
                .register(AuthSchemes.DIGEST, new DigestSchemeFactory())
                .register(AuthSchemes.NTLM, new NTLMSchemeFactory())
                .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory())
                .register(AuthSchemes.KERBEROS, new KerberosSchemeFactory())
                .build();
        conMgr.setDefaultConnectionConfig(connectionConfig);

        if (proxy) {
            return HttpAsyncClients.custom().setConnectionManager(conMgr)
                    .setDefaultCredentialsProvider(credentialsProvider)
                    .setDefaultAuthSchemeRegistry(authSchemeRegistry)
.setProxy(new HttpHost(host, port))
|
||||
.setDefaultCookieStore(new BasicCookieStore())
|
||||
.setDefaultRequestConfig(requestConfig).build();
|
||||
} else {
|
||||
return HttpAsyncClients.custom().setConnectionManager(conMgr)
|
||||
.setDefaultCredentialsProvider(credentialsProvider)
|
||||
.setDefaultAuthSchemeRegistry(authSchemeRegistry)
|
||||
.setDefaultCookieStore(new BasicCookieStore()).build();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public CloseableHttpAsyncClient getAsyncHttpClient() {
|
||||
return asyncHttpClient;
|
||||
}
|
||||
|
||||
public CloseableHttpAsyncClient getProxyAsyncHttpClient() {
|
||||
return proxyAsyncHttpClient;
|
||||
}
|
||||
}
|
||||
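A minimal usage sketch, not part of the commit, showing how the pooled async client above might be driven; the URL and the callback bodies are placeholders, and the real project uses the FutureCallback implementations shown further below.

import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;

public class HttpAsyncClientUsageSketch {
    public static void main(String[] args) {
        CloseableHttpAsyncClient client = new HttpAsyncClient().getAsyncHttpClient();
        client.start(); // the async client must be started before executing requests
        client.execute(new HttpGet("http://example.com/file"), new FutureCallback<HttpResponse>() {
            @Override public void completed(HttpResponse response) { /* handle 200/404, as the mail callbacks below do */ }
            @Override public void failed(Exception ex) { /* retry, as the mail callbacks below do */ }
            @Override public void cancelled() { }
        });
    }
}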
@@ -0,0 +1,38 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

/**
 * Factory that holds the shared, pooled HTTP client instances.
 */
public class HttpClientFactory {

    private static HttpAsyncClient httpAsyncClient = new HttpAsyncClient();

    private static HttpSyncClient httpSyncClient = new HttpSyncClient();

    private static OkClient okClient = new OkClient();

    private HttpClientFactory() {
    }

    private static HttpClientFactory httpClientFactory = new HttpClientFactory();

    public static HttpClientFactory getInstance() {
        return httpClientFactory;
    }

    protected HttpAsyncClient getHttpAsyncClientPool() {
        return httpAsyncClient;
    }

    protected HttpSyncClient getHttpSyncClientPool() {
        return httpSyncClient;
    }

    protected OkClient getOkClientPool() {
        return okClient;
    }

}
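The pool getters are protected, so only classes in the same package can reach them; the diff of the real accessor (HttpClientUtil) is suppressed below, so the following is only a hypothetical illustration of how such an accessor could obtain the shared async client.

package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

// Hypothetical same-package accessor; not the project's actual HttpClientUtil.
class PooledClientAccessSketch {
    static org.apache.http.impl.nio.client.CloseableHttpAsyncClient asyncClient() {
        return HttpClientFactory.getInstance().getHttpAsyncClientPool().getAsyncHttpClient();
    }
}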
File diff suppressed because it is too large
@@ -0,0 +1,82 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.config.SocketConfig;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.ssl.SSLContexts;
import org.apache.log4j.Logger;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;

/**
 * Synchronous HTTP client; supports POST and GET and can be configured with a proxy.
 */
public class HttpSyncClient {

    private static Logger logger = Logger.getLogger(HttpSyncClient.class);

    private PoolingHttpClientConnectionManager poolConnManager;
    private final int maxTotalPool = 2000;             // maximum connections in the pool
    private final int maxConPerRoute = 200;            // maximum concurrent connections per route
    private final int socketTimeout = 20000;           // socket (data wait) timeout, 20 s; tune per workload
    private final int connectionRequestTimeout = 30000;
    private final int connectTimeout = 20000;          // connect timeout

    // synchronous httpclient
    private CloseableHttpClient httpClient;

    public HttpSyncClient() {
        try {
            this.httpClient = init();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public CloseableHttpClient init() throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException {

        SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(null,
                new TrustSelfSignedStrategy())
                .build();
        HostnameVerifier hostnameVerifier = SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER;
        SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(
                sslcontext, hostnameVerifier);
        Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
                .register("http", PlainConnectionSocketFactory.getSocketFactory())
                .register("https", sslsf)
                .build();
        poolConnManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
        // raise the pool-wide connection limit
        poolConnManager.setMaxTotal(maxTotalPool);
        // raise the default per-route connection limit
        poolConnManager.setDefaultMaxPerRoute(maxConPerRoute);
        SocketConfig socketConfig = SocketConfig.custom().setSoTimeout(socketTimeout).build();
        poolConnManager.setDefaultSocketConfig(socketConfig);

        RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(connectionRequestTimeout)
                .setConnectTimeout(connectTimeout).setSocketTimeout(socketTimeout).build();
        CloseableHttpClient httpClient = HttpClients.custom()
                .setConnectionManager(poolConnManager).setDefaultRequestConfig(requestConfig).build();
        if (poolConnManager != null && poolConnManager.getTotalStats() != null) {
            logger.info("now client pool " + poolConnManager.getTotalStats().toString());
        }
        return httpClient;
    }

    public CloseableHttpClient getHttpClient() {
        return httpClient;
    }
}
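A minimal usage sketch, assuming a caller in (or importing from) the same package; the URL is a placeholder.

import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.util.EntityUtils;

public class HttpSyncClientUsageSketch {
    public static void main(String[] args) throws Exception {
        // execute a GET through the pooled synchronous client and print status + body
        try (CloseableHttpResponse response =
                     new HttpSyncClient().getHttpClient().execute(new HttpGet("http://example.com/ping"))) {
            System.out.println(response.getStatusLine().getStatusCode());
            System.out.println(EntityUtils.toString(response.getEntity(), "utf-8"));
        }
    }
}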
@@ -0,0 +1,27 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

import okhttp3.OkHttpClient;

/**
 * Thin wrapper around a shared OkHttpClient instance.
 */
public class OkClient {

    private static OkHttpClient client = new OkHttpClient();

    public OkHttpClient getHttpClient() {
        return client;
    }

    // Builds a sample JSON payload (bowling scores) for the two given players.
    String bowlingJson(String player1, String player2) {
        return "{'winCondition':'HIGH_SCORE',"
                + "'name':'Bowling',"
                + "'round':4,"
                + "'lastSaved':1367702411696,"
                + "'dateStarted':1367702378785,"
                + "'players':["
                + "{'name':'" + player1 + "','history':[10,8,6,7,8],'color':-13388315,'total':39},"
                + "{'name':'" + player2 + "','history':[6,10,5,10,10],'color':-48060,'total':41}"
                + "]}";
    }

}
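A hypothetical caller for the OkHttp wrapper above; it assumes same-package access to the package-private bowlingJson helper, and the endpoint URL is a placeholder.

package cn.ac.iie.cusflume.sink.HttpAsyncUtils;

import okhttp3.MediaType;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public class OkClientUsageSketch {
    public static void main(String[] args) throws Exception {
        OkClient okClient = new OkClient();
        // POST the sample JSON built by bowlingJson and print the HTTP status
        RequestBody body = RequestBody.create(MediaType.parse("application/json; charset=utf-8"),
                okClient.bowlingJson("alice", "bob"));
        Request request = new Request.Builder().url("http://example.com/scores").post(body).build();
        try (Response response = okClient.getHttpClient().newCall(request).execute()) {
            System.out.println(response.code());
        }
    }
}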
@@ -0,0 +1,364 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils.mail;

import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask;
import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo;
import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.ParseException;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
 * Handles NTC_COLLECT_MAIL_LOG.
 * Async httpclient callback for GET-ing files:
 * on success, the fetched files are POSTed on to ZX;
 * on failure, the request is retried up to three times.
 */
public class AsyncGetMailFilesCallback implements FutureCallback<HttpResponse> {
    private static Logger LOG = Logger.getLogger(AsyncGetMailFilesCallback.class);

    private KafkaDB kafkaDBAsyncGet;

    private String postFileUrl;
    private String postMsgUrl;
    private String getFileUrl;                 // initially holds eml_file_url; attachment URLs live in attachmentsUrl and are processed after eml_file_url succeeds
    private AsyncPostMailFilesCallback asyncPostMailFilesCallback;
    private ConfigInfo configInfo;
    private String sendMsg;                    // sent to Kafka if fetching the file fails
    private int count;                         // retry counter, starts at 0

    // added for MAIL
    private int dealUrlCount;                  // number of URLs already processed, starts at 0
    private int urlCount;                      // total number of URLs to process
    private LinkedList<String> attachmentsUrl; // attachment URLs to be processed

    // First call: count = 0, dealUrlCount = 0, urlCount = 1 + attachmentsUrl.size().
    // Subsequent calls: count = 0, dealUrlCount grows as files are fetched, urlCount stays the same.
    public AsyncGetMailFilesCallback(ConfigInfo configInfo, String getFileUrl, String sendMsg, AsyncPostMailFilesCallback asyncPostMailFilesCallback, int count, int urlCount, int dealUrlCount, LinkedList<String> attachmentsUrl) {
        this.configInfo = configInfo;
        this.postFileUrl = configInfo.getPostFileUrl();  // taken from configInfo
        this.postMsgUrl = configInfo.getPostMsgUrl();    // taken from configInfo
        this.getFileUrl = getFileUrl;                    // eml_file_url on the first call; afterwards the URL requested in the current round
        this.sendMsg = sendMsg;
        this.asyncPostMailFilesCallback = asyncPostMailFilesCallback;
        this.count = count;                              // should start at 0
        this.urlCount = urlCount;
        this.dealUrlCount = dealUrlCount;                // should start at 0
        this.attachmentsUrl = attachmentsUrl;

        // // initialise the Kafka writer
        // kafkaDBAsyncGet = KafkaDB.getInstance();
    }

    public String getPostFileUrl() { return postFileUrl; }

    public void setPostFileUrl(String postFileUrl) { this.postFileUrl = postFileUrl; }

    public String getPostMsgUrl() { return postMsgUrl; }

    public void setPostMsgUrl(String postMsgUrl) { this.postMsgUrl = postMsgUrl; }

    public String getGetFileUrl() { return getFileUrl; }

    public void setGetFileUrl(String getFileUrl) { this.getFileUrl = getFileUrl; }

    public AsyncPostMailFilesCallback getAsyncPostMailFilesCallback() { return asyncPostMailFilesCallback; }

    public void setAsyncPostMailFilesCallback(AsyncPostMailFilesCallback asyncPostMailFilesCallback) { this.asyncPostMailFilesCallback = asyncPostMailFilesCallback; }

    public ConfigInfo getConfigInfo() { return configInfo; }

    public void setConfigInfo(ConfigInfo configInfo) { this.configInfo = configInfo; }

    public String getSendMsg() { return sendMsg; }

    public void setSendMsg(String sendMsg) { this.sendMsg = sendMsg; }

    public int getCount() { return count; }

    public void setCount(int count) { this.count = count; }

    public int getDealUrlCount() { return dealUrlCount; }

    public void setDealUrlCount(int dealUrlCount) { this.dealUrlCount = dealUrlCount; }

    public int getUrlCount() { return urlCount; }

    public void setUrlCount(int urlCount) { this.urlCount = urlCount; }

    public LinkedList<String> getAttachmentsUrl() { return attachmentsUrl; }

    public void setAttachmentsUrl(LinkedList<String> attachmentsUrl) { this.attachmentsUrl = attachmentsUrl; }

    @Override
    public String toString() {
        return "AsyncGetMailFilesCallback{" +
                "kafkaDBAsyncGet=" + kafkaDBAsyncGet +
                ", postFileUrl='" + postFileUrl + '\'' +
                ", postMsgUrl='" + postMsgUrl + '\'' +
                ", getFileUrl='" + getFileUrl + '\'' +
                ", asyncPostMailFilesCallback=" + asyncPostMailFilesCallback +
                ", configInfo=" + configInfo +
                ", sendMsg='" + sendMsg + '\'' +
                ", count=" + count +
                ", dealUrlCount=" + dealUrlCount +
                ", urlCount=" + urlCount +
                ", attachmentsUrl=" + attachmentsUrl +
                '}';
    }

    /**
     * Called when the request completes.
     */
    @Override
    public void completed(HttpResponse response) {
        try {
            // LOG.warn("response:{}", getHttpContent(response));
            // On success, read the fetched file.
            // InputStream result = IOUtils.toBufferedInputStream(response.getEntity().getContent());
            // The new version uses a byte array instead of a stream.
            byte[] result = IOUtils.toByteArray(response.getEntity().getContent());
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == 200) {
                dealUrlCount++;
                // asyncPostMailFilesCallback.setResultIs(result); // kept the fetched stream for post retries -- old version
                if (dealUrlCount >= 2) {// this round fetched one of the attachment files
                    // LinkedList<InputStream> attachmentsResultIsList = asyncPostMailFilesCallback.getAttachmentsResultIsList();
                    LinkedList<byte[]> attachmentsResultIsList = asyncPostMailFilesCallback.getAttachmentsResultIsList();
                    if (attachmentsResultIsList != null && attachmentsResultIsList.size() > 0) {// other payloads already stored
                        attachmentsResultIsList.add(result);
                    } else {// nothing stored yet, create a fresh list
                        // attachmentsResultIsList = new LinkedList<InputStream>();
                        attachmentsResultIsList = new LinkedList<byte[]>();
                        attachmentsResultIsList.add(result);
                    }
                    asyncPostMailFilesCallback.setAttachmentsResultIsList(attachmentsResultIsList);
                    LOG.info("AsyncGetMailFilesCallback getAttachmentsFile completed,response status:{" + statusCode + "}.");
                } else if (dealUrlCount == 1) {// this round fetched the eml_file payload
                    asyncPostMailFilesCallback.setEmlFileResultIs(result);
                    LOG.info("AsyncGetMailFilesCallback getEmlFile completed,response status:{" + statusCode + "}.");
                }

                // Either start posting the files or fetch the remaining ones.
                if (dealUrlCount == urlCount) {
                    // All files fetched; send them to the bus, starting with eml_file. The attachment payloads are
                    // posted from inside asyncPostMailFilesCallback, which also collects the returned ids.
                    // InputStream emlFileResultIs = asyncPostMailFilesCallback.getEmlFileResultIs();
                    byte[] emlFileResultIs = asyncPostMailFilesCallback.getEmlFileResultIs();
                    if (emlFileResultIs != null) {
                        // Only once every file is fetched do the payloads count as ready to post.
                        AvroMonitorTimerTask.fileReadyPostSum++;                                                                  // eml_file
                        AvroMonitorTimerTask.fileReadyPostSum += asyncPostMailFilesCallback.getAttachmentsResultIsList().size();  // attachment files

                        // Only successfully fetched payloads count towards ReadyPostFileBytes.
                        // ByteArrayOutputStream byteArrayOutputStream = AvroMonitorTimerTask.getByteArrayOutputStream(emlFileResultIs);
                        // emlFileResultIs = new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); // refill the consumed emlFileResultIs

                        AvroMonitorTimerTask.fileBytesReadyPostSum += emlFileResultIs.length; // eml_file bytes

                        // LinkedList<ByteArrayOutputStream> byteArrayOutputStreamsList = AvroMonitorTimerTask.getByteArrayOutputStream(asyncPostMailFilesCallback.getAttachmentsResultIsList());
                        //
                        // // refill the consumed attachmentsResultIsList
                        // asyncPostMailFilesCallback.getAttachmentsResultIsList().clear();
                        // for (int i = 0; i < byteArrayOutputStreamsList.size(); i++) {
                        //     asyncPostMailFilesCallback.getAttachmentsResultIsList().add(new ByteArrayInputStream(byteArrayOutputStreamsList.get(i).toByteArray()));
                        // }

                        for (int i = 0; i < asyncPostMailFilesCallback.getAttachmentsResultIsList().size(); i++) {
                            AvroMonitorTimerTask.fileBytesReadyPostSum += asyncPostMailFilesCallback.getAttachmentsResultIsList().get(i).length; // attachment bytes
                        }

                        HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), emlFileResultIs, asyncPostMailFilesCallback); // on failure the callback still holds the file payloads
                    } else {
                        LOG.error("dealUrlCount == urlCount but emlFileResultIs is null,message is===>>>" + sendMsg);
                    }
                } else if (dealUrlCount < urlCount) {
                    // Not all files have been fetched yet; request the next one.
                    String attachmentUrl = attachmentsUrl.get(dealUrlCount - 1); // URL at index (dealUrlCount - 1)
                    AsyncGetMailFilesCallback asyncGetMailFilesCallback = new AsyncGetMailFilesCallback(configInfo, attachmentUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl); // asyncPostMailFilesCallback also carries configInfo
                    HttpClientUtil.httpAsyncGetFile(attachmentUrl, asyncGetMailFilesCallback); // the callback always carries the latest state
                }
            } else if (statusCode == 404) {
                // 404 -- retry; most likely the file has not been generated yet
                retryFor404();
            } else {
                // LOG.info("AsyncGetMailFilesCallback getFiles failed,response status:{" + statusCode + "},this msg is===>" + sendMsg + "<===");
                LOG.info("AsyncGetMailFilesCallback getFiles failed,response status:{" + statusCode + "},dealUrlCount is {" + dealUrlCount + "},this msg is===>" + sendMsg + "<===");
            }

            HttpClientUtils.closeQuietly(response);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Retry triggered by a 404 response.
     */
    private void retryFor404() {
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncGetMailFilesCallback getFile is failed,retry count=" + count);
                LOG.info("Now AsyncGetMailFilesCallback dealUrlCount is===>{" + dealUrlCount + "}");
            }
            if (count > 4) {
                LOG.error("AsyncGetMailFilesCallback getFile is failed and already retried the maximum number of times.This failed message is===>>>" + sendMsg + "<<<===");
            } else {
                Map map = JSONObject.parseObject(sendMsg, Map.class);
                int found_time = (int) map.get("found_time");
                long now_time = System.currentTimeMillis() / 1000;
                long timeDiffer = now_time - found_time;
                if (timeDiffer >= 20) {// retry immediately
                    // the URL of the failed request is kept in getFileUrl
                    AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl); // note: count is no longer 0 here
                    HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback);
                } else {// sleep, then retry
                    Thread.sleep((20 - timeDiffer) * 1000);

                    // the URL of the failed request is kept in getFileUrl
                    AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl); // note: count is no longer 0 here
                    HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback);
                }
            }
        } catch (Exception e2) {
            LOG.error("AsyncGetMailFilesCallback getFile retry is error===>>>" + e2);
        }
    }

    /**
     * Called when the request is cancelled.
     */
    @Override
    public void cancelled() {
        LOG.warn("request is cancelled...");
    }

    /**
     * Called when the request fails.
     */
    @Override
    public void failed(Exception e) {
        // LOG.warn("AsyncGetMailFilesCallback getFile is failed,message is===>>>" + sendMsg + "<<<===");
        retryForFailed(e);
    }

    /**
     * Retry triggered by a failed request.
     *
     * @param e the failure reported by the async client
     */
    private void retryForFailed(Exception e) {
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncGetMailFilesCallback getFile is failed,retry count=" + count);
                LOG.info("Now AsyncGetMailFilesCallback dealUrlCount is===>{" + dealUrlCount + "}");
            }
            if (count > 4) {
                LOG.error("AsyncGetMailFilesCallback getFile is failed and already retried the maximum number of times.error===>>>" + e + "<<<===.This failed message is===>>>" + sendMsg + "<<<===");
                // if (StringUtils.isNotBlank(configInfo.getTopicName()) && StringUtils.isNotBlank(sendMsg)) {
                //     // send the message to Kafka so it can be retried later
                //     // kafkaDBAsyncGet.postFailMsgToKafka(configInfo.getTopicName(), sendMsg);
                //     LOG.error("Send to Kafka(Temp)...test!!!");
                // }
            } else {
                Map map = JSONObject.parseObject(sendMsg, Map.class);
                int found_time = (int) map.get("found_time");
                long now_time = System.currentTimeMillis() / 1000;
                long timeDiffer = now_time - found_time;
                if (timeDiffer >= 20) {// retry immediately
                    // the URL of the failed request is kept in getFileUrl
                    AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl); // note: count is no longer 0 here
                    HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback);
                } else {// sleep, then retry
                    Thread.sleep((20 - timeDiffer) * 1000);

                    // the URL of the failed request is kept in getFileUrl
                    AsyncGetMailFilesCallback asyncHttpClientGetFileCallback = new AsyncGetMailFilesCallback(configInfo, getFileUrl, sendMsg, asyncPostMailFilesCallback, count, urlCount, dealUrlCount, attachmentsUrl); // note: count is no longer 0 here
                    HttpClientUtil.httpAsyncGetFile(getFileUrl, asyncHttpClientGetFileCallback);
                }
            }
        } catch (Exception e2) {
            LOG.error("AsyncGetMailFilesCallback getFile retry is error===>>>" + e2);
        }
    }

    protected String getHttpContent(HttpResponse response) {
        HttpEntity entity = response.getEntity();
        String body = null;

        if (entity == null) {
            return null;
        }

        try {
            body = EntityUtils.toString(entity, "utf-8");
        } catch (ParseException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        } catch (IOException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        }
        return body;
    }

    // @Override
    // public Object call() throws Exception {
    //     return asyncHttpClientPostFileCallback.getPostFileResBody();
    // }
}
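A hypothetical bootstrap, not part of the commit, showing how one NTC-COLLECT-MAIL-LOG record might enter the fetch chain above: urlCount follows the constructor comment (1 for eml_file plus one per attachment), and the post callback is the class defined in the next file. The configInfo, sendMsg and URL arguments would come from the sink configuration and the Flume event.

import java.util.LinkedList;

class MailFetchBootstrapSketch {
    static void fetchMailFiles(ConfigInfo configInfo, String sendMsg, String emlFileUrl, LinkedList<String> attachmentUrls) {
        int urlCount = 1 + attachmentUrls.size();   // eml_file plus every attachment
        AsyncPostMailFilesCallback postCallback =
                new AsyncPostMailFilesCallback(configInfo, sendMsg, 0, urlCount, 0, new LinkedList<String>());
        AsyncGetMailFilesCallback getCallback =
                new AsyncGetMailFilesCallback(configInfo, emlFileUrl, sendMsg, postCallback, 0, urlCount, 0, attachmentUrls);
        HttpClientUtil.httpAsyncGetFile(emlFileUrl, getCallback); // eml_file is fetched first; attachments follow from completed()
    }
}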
@@ -0,0 +1,386 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils.mail;

import cn.ac.iie.cusflume.sink.CommonUtils.PutIdOnMsgByTopicUtils;
import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask;
import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad;
import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo;
import cn.ac.iie.cusflume.sink.bean.postFileBean.PostFileResBody;
import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody;
import cn.ac.iie.cusflume.sink.daoUtils.KafkaDB;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.ParseException;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;

/**
 * Handles NTC_COLLECT_MAIL_LOG.
 * Async httpclient callback for POST-ing files; posting starts once the GET succeeds.
 * When a file post succeeds, the returned file id is added to the message and the message post begins;
 * this needs the message (from Kafka), the message post URL (from config),
 * plus User-Agent and X-Tag for authentication.
 * <p>
 * On failure the post is retried up to three times.
 */
public class AsyncPostMailFilesCallback implements FutureCallback<HttpResponse> {
    private static Logger LOG = Logger.getLogger(AsyncPostMailFilesCallback.class);

    private DataCenterLoad dclAsyncPost;
    // private KafkaDB kafkaDBAsyncPost;

    private String postFileUrl;
    private String postMsgUrl;
    // private InputStream resultIs; // fetched payload kept for retries -- not used by the MAIL flow
    private ConfigInfo configInfo;   // holds: 1. post URLs for files/messages (config); 2. auth URLs (config); 3. SessionCookies (from the bus); 4. topicName, userAgent, xTag, batchSize needed to load the msg as avro

    private String sendMsg;          // on success the ids are filled in and the message is sent to ZX; on failure it is sent to Kafka

    private PostFileResBody postFileResBody; // response of a successful file post, contains the file id; not set via the constructor and not persisted
    private int count;

    // added for MAIL
    private int dealIsCount;         // number of payloads already posted
    private int isCount;             // total number of payloads

    // private InputStream emlFileResultIs; // fetched eml_file payload kept for retries; not set via the constructor -- stream version
    private byte[] emlFileResultIs;         // fetched eml_file payload kept for retries; not set via the constructor -- byte-array version

    // private LinkedList<InputStream> attachmentsResultIsList; // fetched attachment payloads kept for retries; not set via the constructor -- stream version
    private LinkedList<byte[]> attachmentsResultIsList;         // fetched attachment payloads kept for retries; not set via the constructor -- byte-array version

    private LinkedList<String> attachmentsIdList; // temporarily holds the returned attachment ids

    public AsyncPostMailFilesCallback(ConfigInfo configInfo, String sendMsg, int count, int isCount, int dealIsCount, LinkedList<String> attachmentsIdList) {
        this.configInfo = configInfo;
        this.postFileUrl = configInfo.getPostFileUrl();
        this.postMsgUrl = configInfo.getPostMsgUrl();
        this.sendMsg = sendMsg;                       // the message that belongs to the posted files
        this.count = count;                           // starts at 0
        this.isCount = isCount;                       // initial total equals the number of URLs
        this.dealIsCount = dealIsCount;               // starts at 0
        this.attachmentsIdList = attachmentsIdList;   // initially empty

        // initialise the data loader
        dclAsyncPost = new DataCenterLoad();
        // kafkaDBAsyncPost = KafkaDB.getInstance();
    }

    public String getPostFileUrl() { return postFileUrl; }

    public void setPostFileUrl(String postFileUrl) { this.postFileUrl = postFileUrl; }

    public String getPostMsgUrl() { return postMsgUrl; }

    public void setPostMsgUrl(String postMsgUrl) { this.postMsgUrl = postMsgUrl; }

    // public InputStream getResultIs() { return resultIs; }
    //
    // public void setResultIs(InputStream resultIs) { this.resultIs = resultIs; }

    public ConfigInfo getConfigInfo() { return configInfo; }

    public void setConfigInfo(ConfigInfo configInfo) { this.configInfo = configInfo; }

    public String getSendMsg() { return sendMsg; }

    public void setSendMsg(String sendMsg) { this.sendMsg = sendMsg; }

    public PostFileResBody getPostFileResBody() { return postFileResBody; }

    public void setPostFileResBody(PostFileResBody postFileResBody) { this.postFileResBody = postFileResBody; }

    public int getCount() { return count; }

    public void setCount(int count) { this.count = count; }

    public int getDealIsCount() { return dealIsCount; }

    public void setDealIsCount(int dealIsCount) { this.dealIsCount = dealIsCount; }

    public int getIsCount() { return isCount; }

    public void setIsCount(int isCount) { this.isCount = isCount; }

    // public InputStream getEmlFileResultIs() {
    public byte[] getEmlFileResultIs() { return emlFileResultIs; }

    // public void setEmlFileResultIs(InputStream emlFileResultIs) {
    public void setEmlFileResultIs(byte[] emlFileResultIs) { this.emlFileResultIs = emlFileResultIs; }

    // public LinkedList<InputStream> getAttachmentsResultIsList() {
    public LinkedList<byte[]> getAttachmentsResultIsList() { return attachmentsResultIsList; }

    // public void setAttachmentsResultIsList(LinkedList<InputStream> attachmentsResultIsList) {
    public void setAttachmentsResultIsList(LinkedList<byte[]> attachmentsResultIsList) { this.attachmentsResultIsList = attachmentsResultIsList; }

    public LinkedList<String> getAttachmentsIdList() { return attachmentsIdList; }

    public void setAttachmentsIdList(LinkedList<String> attachmentsIdList) { this.attachmentsIdList = attachmentsIdList; }

    @Override
    public String toString() {
        return "AsyncPostMailFilesCallback{" +
                "postFileUrl='" + postFileUrl + '\'' +
                ", postMsgUrl='" + postMsgUrl + '\'' +
                ", configInfo=" + configInfo +
                ", sendMsg='" + sendMsg + '\'' +
                ", postFileResBody=" + postFileResBody +
                ", count=" + count +
                ", dealIsCount=" + dealIsCount +
                ", isCount=" + isCount +
                ", attachmentsIdList=" + attachmentsIdList +
                '}';
    }

    /**
     * Called when the request completes.
     */
    @Override
    public void completed(HttpResponse response) {
        try {
            // LOG.info("filename:" + source.getFilename() + " status: " + response.getStatusLine().getStatusCode() + " response:" + getHttpContent(response));
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == 200) {
                dealIsCount++;
                String postResBody = getHttpContent(response);
                if (dealIsCount >= 2) {// the id just returned belongs to an attachment
                    // a successful file post returns the file id
                    if (StringUtils.isNotBlank(postResBody)) {
                        LOG.info("AsyncPostMailFilesCallback post attachmentFile completed,post file statuscode is:{" + statusCode + "},this attachmentFile resBody is " + postResBody);
                        this.postFileResBody = JSONObject.parseObject(postResBody, PostFileResBody.class);
                        // String attachmentsFileId = postFileResBody.getData().get("id"); // old response format
                        String attachmentsFileId = postFileResBody.getMsg(); // new format since 2019-11-15: {"code": "200", "msg": "2019111507094164188567821505", "data": "no extra data"}
                        if (StringUtils.isNotBlank(attachmentsFileId)) {
                            if (attachmentsIdList != null) {// the list is already initialised
                                attachmentsIdList.add(attachmentsFileId);
                            } else {// not initialised yet
                                this.attachmentsIdList = new LinkedList<String>();
                                attachmentsIdList.add(attachmentsFileId);
                            }
                        } else {
                            LOG.warn("AsyncPostMailFilesCallback post attachments_file success and postResBody(response body) is not null but attachmentsFileId is null,this postResBody is===>>>" + postResBody);
                        }
                    } else {
                        LOG.warn("AsyncPostMailFilesCallback post attachments_file success but postResBody(response body) is null.");
                    }
                } else if (dealIsCount == 1) {// the id just returned belongs to eml_file
                    // fill eml_file_id into the message after a successful post
                    if (StringUtils.isNotBlank(postResBody)) {
                        LOG.info("AsyncPostMailFilesCallback post eml_file completed,post file statuscode is:{" + statusCode + "},this eml_file resBody is " + postResBody);
                        this.postFileResBody = JSONObject.parseObject(postResBody, PostFileResBody.class);
                        // String emlFileId = postFileResBody.getData().get("id"); // old response format
                        String emlFileId = postFileResBody.getMsg(); // new format: {"code": "200", "msg": "2019111507094164188567821505", "data": "no extra data"}
                        // putIdOnMsg(fileId,configInfo.getTopicName());
                        if (StringUtils.isNotBlank(emlFileId)) {
                            this.sendMsg = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(configInfo.getTopicName(), sendMsg, emlFileId, "eml_file"); // complete the message
                        } else {
                            LOG.info("AsyncPostMailFilesCallback post eml_file completed and eml_file resBody is not blank but emlFileId is null ,this eml_file resBody is " + postResBody);
                        }
                        // // send to the bus
                        // // String urlProducer, String topicName, String jsonData, int batchSize, String userAgent, String msgSessionCookie
                        // dclAsyncPost.avroDataLoad(configInfo.getPostMsgUrl(), configInfo.getTopicName(), sendMsg, configInfo.getBatchSize(), configInfo.getUserAgent(), configInfo.getMsgSessionCookie());
                    } else {
                        LOG.warn("AsyncPostMailFilesCallback post eml_file success but postResBody(response body) is null.");
                    }
                }

                // Either complete the msg and push it to ZX, or push the remaining files to ZX to obtain their ids.
                if (dealIsCount == isCount) {// all files posted and all ids collected

                    // With all ids collected the file push is complete; count the successfully pushed files and their sizes.
                    AvroMonitorTimerTask.fileSuccessSum++;                          // eml_file
                    AvroMonitorTimerTask.fileSuccessSum += attachmentsIdList.size(); // attachment files
                    // count the bytes of the successfully pushed files
                    // ByteArrayOutputStream byteArrayOutputStream = AvroMonitorTimerTask.getByteArrayOutputStream(emlFileResultIs);
                    // this.emlFileResultIs = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
                    AvroMonitorTimerTask.fileBytesSuccessSum += emlFileResultIs.length; // eml_file bytes

                    // LinkedList<ByteArrayOutputStream> byteArrayOutputStreamsList = AvroMonitorTimerTask.getByteArrayOutputStream(attachmentsResultIsList);
                    //
                    // // refill the consumed attachmentsResultIsList
                    // attachmentsResultIsList.clear();
                    // for (int i = 0; i < byteArrayOutputStreamsList.size(); i++) {
                    //     attachmentsResultIsList.add(new ByteArrayInputStream(byteArrayOutputStreamsList.get(i).toByteArray()));
                    // }

                    for (int i = 0; i < attachmentsResultIsList.size(); i++) {
                        AvroMonitorTimerTask.fileBytesSuccessSum += attachmentsResultIsList.get(i).length; // attachment bytes
                    }

                    // convert the temporarily stored attachmentsIdList to a string and fill it into the message
                    String attachmentsIdStr = attachmentsIdList.toString();
                    this.sendMsg = PutIdOnMsgByTopicUtils.putIdOnMsgByTopicName(configInfo.getTopicName(), sendMsg, attachmentsIdStr, "attachments"); // complete the message

                    // count the outgoing message
                    AvroMonitorTimerTask.msgReadyPostSum++; // several files map to one message

                    // push the message onto the bus
                    ProResBody proResBody = dclAsyncPost.avroDataLoad(configInfo.getPostMsgUrl(), configInfo.getTopicName(), sendMsg, configInfo.getBatchSize(), configInfo.getUserAgent(), configInfo.getMsgSessionCookie());
                    LOG.info("Send message with many fileId to zx over,this responseBody is===>" + proResBody.toString());
                } else if (dealIsCount < isCount) {// keep posting the remaining payloads to obtain their ids
                    // dealIsCount is guaranteed to be >= 1 here, so eml_file is already done; post payloads from attachmentsResultIsList directly
                    // InputStream attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1);
                    byte[] attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1);
                    // refresh the AsyncPostMailFilesCallback state
                    AsyncPostMailFilesCallback asyncPostMailFilesCallback = updateFieldsAsyncPostMailFilesCallback();
                    // post the next payload to obtain its id
                    HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), attachmentResultIs, asyncPostMailFilesCallback);
                }
            } else if (statusCode == 403) {// empty file; do not retry
                LOG.info("AsyncPostMailFilesCallback post Files statusCode is 403 so get the fileIs but this minio file is empty.This message is===>>>" + sendMsg + "<<<===");
            } else {
                LOG.info("AsyncPostMailFilesCallback post Files statusCode is " + statusCode + " so get the fileIs but this minio file has some problem.This message is===>>>" + sendMsg + "<<<===");
            }

            HttpClientUtils.closeQuietly(response);
            // TimeTask.successsum ++;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private AsyncPostMailFilesCallback updateFieldsAsyncPostMailFilesCallback() {
        AsyncPostMailFilesCallback asyncHttpClientPostFileCallback = new AsyncPostMailFilesCallback(configInfo, sendMsg, count, isCount, dealIsCount, attachmentsIdList);
        asyncHttpClientPostFileCallback.setEmlFileResultIs(emlFileResultIs);
        asyncHttpClientPostFileCallback.setAttachmentsResultIsList(attachmentsResultIsList);
        return asyncHttpClientPostFileCallback;
    }

    /**
     * Called when the request is cancelled.
     */
    @Override
    public void cancelled() {
        LOG.info("AsyncPostMailFilesCallback is cancelled... ...");
        // LOG.error("filename: " + source.getFilename() + " cancelled");
    }

    /**
     * Called when the request fails.
     */
    @Override
    public void failed(Exception e) {
        // LOG.warn("AsyncPostMailFilesCallback postFile is failed,message is===>>>" + sendMsg + "<<<===");
        try {
            count++;
            if (count > 1) {
                LOG.warn("AsyncPostMailFilesCallback post zxFile is failed,retry count=" + count);
                LOG.info("Now AsyncPostMailFilesCallback dealIsCount is===>{" + dealIsCount + "}");
            }
            if (count > 4) {
                LOG.error("AsyncPostMailFilesCallback post zxFile is failed and already retried the maximum number of times.error===>>>" + e + "<<<===.This failed message is===>>>" + sendMsg + "<<<===.Now attachmentsIdList is===>{" + attachmentsIdList.toString() + "}<===");
            } else {
                if (dealIsCount == 0) {// the failed payload is emlFileResultIs, handed over from AsyncGetMailFilesCallback
                    if (configInfo != null && StringUtils.isNotBlank(sendMsg) && emlFileResultIs != null) {
                        // refresh the state stored in AsyncPostMailFilesCallback
                        AsyncPostMailFilesCallback asyncPostMailFilesCallback = updateFieldsAsyncPostMailFilesCallback(); // note: count is no longer 0 here
                        // the retry re-sends the payload stored at the previous failure
                        HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), emlFileResultIs, asyncPostMailFilesCallback);
                    } else {
                        LOG.error("AsyncPostMailFilesCallback when post zxFile the emlFileResultIs is null,this msg is===>>>" + sendMsg);
                    }
                } else if (dealIsCount >= 1) {// the failed payload comes from attachmentsResultIsList
                    // e.g. dealIsCount=1: eml_file succeeded but attachment index 0 failed, so retry index 0, i.e. dealIsCount-1
                    // e.g. dealIsCount=3: eml_file and attachment indexes 0 and 1 succeeded but index 2 failed, so retry index 2, i.e. dealIsCount-1
                    // InputStream attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1);
                    byte[] attachmentResultIs = attachmentsResultIsList.get(dealIsCount - 1);
                    if (configInfo != null && StringUtils.isNotBlank(sendMsg) && attachmentResultIs != null) {
                        // refresh the AsyncPostMailFilesCallback state
                        AsyncPostMailFilesCallback asyncPostMailFilesCallback = updateFieldsAsyncPostMailFilesCallback();
                        // post the failed payload again to obtain its id; the index is derived from dealIsCount
                        HttpClientUtil.httpAsyncPostFileToZx(configInfo.getPostFileUrl(), attachmentResultIs, asyncPostMailFilesCallback);
                    } else {
                        LOG.error("AsyncPostMailFilesCallback when post zxFile the attachmentResultIs is null,this msg is===>>>" + sendMsg);
                    }
                }

            }
        } catch (Exception e2) {
            LOG.error("AsyncPostMailFilesCallback retry is error===>>>" + e2);
        }
    }

    // protected void getHttpContent(HttpResponse response) {
    protected String getHttpContent(HttpResponse response) {

        HttpEntity entity = response.getEntity();
        String body = null;
        if (entity == null) {
            return null;
        }
        try {
            body = EntityUtils.toString(entity, "utf-8");
        } catch (ParseException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        } catch (IOException e) {
            LOG.warn("the response's content inputstream is corrupt", e);
        }
        return body;
    }
}
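For reference, a minimal sketch of the id extraction the class above performs, assuming the upload response shape documented in its comments ({"code": "200", "msg": "<fileId>", "data": ...}); the example body value is a placeholder.

// Hypothetical helper; the bus returns the file id in the "msg" field.
static String extractFileId(String postResBody) {
    PostFileResBody parsed = JSONObject.parseObject(postResBody, PostFileResBody.class);
    return parsed.getMsg();
}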
@@ -0,0 +1,198 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils.msgCallBack;

import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
import cn.ac.iie.cusflume.sink.YbHttpAvroSinkFile;
import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask;
import cn.ac.iie.cusflume.sink.avroUtils.MD5Utils;
import cn.ac.iie.cusflume.sink.bean.redirectBean.ResRedirBody;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;

import java.util.Arrays;
import java.util.Map;

/**
 * Callback object handed to the asynchronous httpclient for message posts.
 */
public class AsyncPostMsgCallBack implements FutureCallback<HttpResponse> {
    private static Logger logger = Logger.getLogger(AsyncPostMsgCallBack.class);

    private String postMsgUrl;
    private String topicName;
    private String dataJson;
    private String userAgent;
    private String msgSessionCookie;
    private int count;

    public AsyncPostMsgCallBack(String postMsgUrl, String topicName, String dataJson, String userAgent, String msgSessionCookie, int count) {
        this.postMsgUrl = postMsgUrl;
        this.topicName = topicName;
        this.dataJson = dataJson;
        this.userAgent = userAgent;
        this.msgSessionCookie = msgSessionCookie;
        this.count = count;
    }

    public String getPostMsgUrl() { return postMsgUrl; }

    public void setPostMsgUrl(String postMsgUrl) { this.postMsgUrl = postMsgUrl; }

    public String getTopicName() { return topicName; }

    public void setTopicName(String topicName) { this.topicName = topicName; }

    public String getDataJson() { return dataJson; }

    public void setDataJson(String dataJson) { this.dataJson = dataJson; }

    public String getUserAgent() { return userAgent; }

    public void setUserAgent(String userAgent) { this.userAgent = userAgent; }

    public String getMsgSessionCookie() { return msgSessionCookie; }

    public void setMsgSessionCookie(String msgSessionCookie) { this.msgSessionCookie = msgSessionCookie; }

    public int getCount() { return count; }

    public void setCount(int count) { this.count = count; }

    /**
     * Called when the request completes.
     */
    @Override
    public void completed(HttpResponse response) {
        // ProResBody proResBody = null;
        try {
            int statuCode = response.getStatusLine().getStatusCode();
            HttpEntity entity = response.getEntity();
            String ret = EntityUtils.toString(entity);
            logger.info("Raw produce response body string: " + ret);

            // Do not convert the body into a bean right away; only do so when the load was not successful.
            Map map = JSONObject.parseObject(ret, Map.class);
            int resRedirBodyCode = (int) map.get("code");
            // int resRedirBodyCode = resRedirBody.getCode();

            logger.debug("Produced data==>" + dataJson + "<==," +
                    "produced data checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," +
                    "raw produce response body string: " + ret);
            // 2020-08-18: the interface now returns finer-grained response codes.
            if (statuCode == 200 && resRedirBodyCode == 200) {
                logger.info("Data loaded successfully, response code: " + statuCode);
                logger.debug("Produced data==>" + dataJson + "<==," +
                        "produced data checksum==>" + MD5Utils.md5Encode(dataJson) + "<==," +
                        "data loaded successfully, response code: " + statuCode);

                AvroMonitorTimerTask.msgSuccessSum++;
                EntityUtils.consume(entity);
            } else {
                switch (resRedirBodyCode) {
                    case 300:
                        logger.info("AsyncPostMsgCallBack==>redirect response body-ret==>" + ret + "<==,statuCode:" + statuCode + ",resRedirBodyCode:300,the current service node is overloaded; requests will be sent to another endpoint.");
                        // skip the bean conversion if the field is missing, to avoid spurious errors
                        if (ret.contains("redirect")) {
                            ResRedirBody resRedirBody = JSONObject.parseObject(ret, ResRedirBody.class);
                            String redirectUrl = resRedirBody.getData().getRedirect();
                            if (StringUtils.isNotBlank(redirectUrl)) {
                                YbHttpAvroSinkFile.changeUrl(redirectUrl);
                            }
                        } else {
                            logger.error("AsyncPostMsgCallBack==>ResRedirBody.data.redirect is missing from the server response or has an unexpected format!!!");
                        }

                        YbHttpAvroSinkFile.redirectContents.add(dataJson);
                        break;
                    case 301:
                        logger.info("AsyncPostMsgCallBack==>statuCode:" + statuCode + ",resRedirBodyCode:301,all server nodes are overloaded and no resources are available; wait and retry.");
                        YbHttpAvroSinkFile.redirectContents.add(dataJson);
                        break;
                    case 410:
                        logger.info("AsyncPostMsgCallBack==>statuCode:" + statuCode + ",resRedirBodyCode:410,the cookie has expired or is invalid; refreshing the cookie.");
                        // YbHttpAvroSinkFile.redirectContents.add(dataJson);
                        YbHttpAvroSinkFile.updateCookie();
                        break;
                    case 500:
                        logger.info("AsyncPostMsgCallBack==>statuCode:" + statuCode + ",resRedirBodyCode:500,a system error occurred while handling the request.");
                        YbHttpAvroSinkFile.updateCookie();
                        break;
                    default:
                        logger.error("AsyncPostMsgCallBack==>data load failed,response body:" + ret + "---statuCode:" + statuCode + "---resRedirBodyCode:" + resRedirBodyCode + "---failed data:\n" + dataJson);
                        AvroMonitorTimerTask.msgFailedSum++;
                        break;
                }
                EntityUtils.consume(entity);
            }

        } catch (Exception e) {
            logger.error("AsyncPostMsgCallBack Get response from ZX is error===>>>" + e.getMessage() + "<<<===Message is==>" + dataJson + "<=="); // more detailed logging
            e.printStackTrace();
        }

        HttpClientUtils.closeQuietly(response);
    }

    /**
     * Called when the request is cancelled.
     */
    @Override
    public void cancelled() {
        logger.error("AsyncPostMsgCallBack Request is cancelled");
    }

    /**
     * Called when the request fails.
     */
    @Override
    public void failed(Exception e) {
        count++;
        logger.info("AsyncPostMsgCallBack Request is Failed,This Failed data is ==>" + dataJson + "<==,Retry count=" + count);
        if (count > 3) {
            AvroMonitorTimerTask.msgFailedSum++;
            logger.error("dataJson:" + dataJson + " send failed finally,error:" + e.toString());
        } else {
            HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, dataJson, userAgent, msgSessionCookie, count); // retry after a failed() callback
        }
    }

}
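A minimal sketch of the code-300 (overloaded node) branch handled above; the response body shape and the URL value are illustrative placeholders inferred from that branch.

// Hypothetical standalone version of the redirect handling in completed():
static void followRedirect(String ret) {
    // e.g. ret = {"code":300,"data":{"redirect":"http://host:port/produce"}}
    if (ret.contains("redirect")) {
        ResRedirBody body = JSONObject.parseObject(ret, ResRedirBody.class);
        String redirectUrl = body.getData().getRedirect();
        if (StringUtils.isNotBlank(redirectUrl)) {
            YbHttpAvroSinkFile.changeUrl(redirectUrl); // swap the producer URL; the payload itself is re-queued via redirectContents
        }
    }
}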
@@ -0,0 +1,31 @@
package cn.ac.iie.cusflume.sink.HttpAsyncUtils.ybBean;

public class PutFileInfo {
    private String hostIp;
    private String authorization;

    public String getHostIp() {
        return hostIp;
    }

    public void setHostIp(String hostIp) {
        this.hostIp = hostIp;
    }

    public String getAuthorization() {
        return authorization;
    }

    public void setAuthorization(String authorization) {
        this.authorization = authorization;
    }

    @Override
    public String toString() {
        return "PutFileInfo{" +
                "hostIp='" + hostIp + '\'' +
                ", authorization='" + authorization + '\'' +
                '}';
    }
}
@@ -0,0 +1,493 @@
|
||||
package cn.ac.iie.cusflume.sink;
|
||||
|
||||
import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
|
||||
import cn.ac.iie.cusflume.sink.avroUtils.AvroMonitorTimerTask;
|
||||
import cn.ac.iie.cusflume.sink.avroUtils.DataCenterLoad;
|
||||
import cn.ac.iie.cusflume.sink.bean.ac.Res.AcResBody;
|
||||
import cn.ac.iie.cusflume.sink.bean.configBean.ConfigInfo;
|
||||
import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.base.Throwables;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.flume.*;
|
||||
import org.apache.flume.conf.Configurable;
|
||||
import org.apache.flume.sink.AbstractSink;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
|
||||
public class YbHttpAvroSinkFile extends AbstractSink implements Configurable {
|
||||
|
||||
private static Logger logger = Logger.getLogger(YbHttpAvroSinkFile.class);
|
||||
|
||||
protected static ExecutorService pool = Executors.newFixedThreadPool(RealtimeCountConfig.HTTP_ASYNC_PARALLELISM);
|
||||
|
||||
private static DataCenterLoad dcl;
|
||||
|
||||
private static String postMsgUrl;//发送消息路径,配置文件获取,发送文件与发送消息皆需要
|
||||
|
||||
private String postFileUrl;//发送文件路径,配置文件获取,仅发送文件时需要---若只发送消息,则此路径与postMsgUrl设置相同即可
|
||||
|
||||
private static int batchSize;//配置文件获取,每次从channel中取出的数据条数
|
||||
|
||||
//http验证
|
||||
private static String checkMsgUrl;//消息认证地址,配置文件获取,发送文件与发送消息皆需要
|
||||
|
||||
private String checkFileUrl;//文件认证地址,配置文件获取,仅发送文件时需要---若只发送消息,则此路径与checkUrl设置相同即可---暂未设置配置文件获取步骤
|
||||
|
||||
private static String userAgent;//业务系统编码systemId,该字段为系统的唯一编码,配置文件获取
|
||||
|
||||
private static String xTag;//标签编码tag,在总线中唯一标识该标签,配置文件获取--20191217笔记--貌似现在已经不需要这个参数作为头部了
|
||||
|
||||
private static String msgSessionCookie;//消息会话标识,由响应返回
|
||||
private static String fileSessionCookie;//文件会话标识,由响应返回,仅发送文件时需要---若只发送消息,则fileSessionCookie会一直为空-即仅广东需要
|
||||
|
||||
private static String monitorSessionCookie;//状态回传会话标识,由响应返回
|
||||
|
||||
private static String monitorMsgXTag = RealtimeCountConfig.MONITOR_MSG_SINK_FILE_XTAG;//20191217更新-因为此参数为总线提供,此处固定,提取到realtime_service_config.properties配置文件中
|
||||
|
||||
private static ConfigInfo configInfo = new ConfigInfo();//用于存放文件与消息的验证,发送以及会话标识
|
||||
|
||||
//http入库
|
||||
private static String topicName;//配置文件获取,表示入库表结构为哪一个topic,后面也将根据此topic获取avro的schema
|
||||
|
||||
private static boolean monitorStart = false;
|
||||
|
||||
private static boolean checkTimerStart = false;//定时获取Cookie启动器
|
||||
|
||||
private static boolean redirectContentsPostStart = false;//定时post重定向数据集合
|
||||
|
||||
/**
|
||||
* 用于存储由于服务器资源不足所造成的未发送数据
|
||||
*/
|
||||
public static List<String> redirectContents;
|
||||
|
||||
/**
|
||||
* 用于存放验证以及连接的url的各组成部分,方便调取
|
||||
*/
|
||||
private static HashMap<String, String> urlToolHm;
|
||||
|
||||
public YbHttpAvroSinkFile() {
|
||||
logger.warn("YbHttpAvroSinkFile start ... ...");
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void start() {
|
||||
super.start();
|
||||
dcl = new DataCenterLoad();
|
||||
|
||||
redirectContents = new ArrayList<>();//初始化
|
||||
/**
|
||||
* 拆解初始化获取的url后缀,用于填充urlToolHm,用于后续动态负载均衡中的url变更
|
||||
*/
|
||||
if (StringUtils.isNotBlank(checkMsgUrl) && StringUtils.isNotBlank(postMsgUrl)) {
|
||||
urlToolHm = new HashMap<>();
|
||||
makeUrlSplitMap(checkMsgUrl, "check");
|
||||
makeUrlSplitMap(postMsgUrl, "post");
|
||||
} else {
|
||||
logger.error("Starting YbHttpAvroSinkFile is error==>checkMsgUrl and postMsgUrl can not be null!!!!");
|
||||
}
|
||||
|
||||
logger.warn("Starting YbHttpAvroSinkFile ... ...");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Context context) {
|
||||
try {
|
||||
postMsgUrl = context.getString("postMsgUrl", "");
|
||||
Preconditions.checkNotNull("".equals(postMsgUrl), "postMsgUrl must be set!!");
|
||||
logger.info("Read Post Message URL from configuration : " + postMsgUrl);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("Endpoint Message URL invalid", e);
|
||||
} catch (Exception e) {
|
||||
logger.error("Get postMsgUrl is error : " + e);
|
||||
}
|
||||
|
||||
try {
|
||||
batchSize = context.getInteger("batchSize", 100);
|
||||
Preconditions.checkNotNull(batchSize > 0, "batchSize must be a positive number!!");
|
||||
logger.info("Read BatchSize from configuration : " + batchSize);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("BatchSize invalid", e);
|
||||
} catch (Exception e) {
|
||||
logger.error("Get batchSize is error : " + e);
|
||||
}
|
||||
|
||||
try {
|
||||
checkMsgUrl = context.getString("checkMsgUrl", "");
|
||||
Preconditions.checkNotNull("".equals(checkMsgUrl), "checkMsgUrl must be set!!");
|
||||
logger.info("Read Check Message Url from configuration : " + checkMsgUrl);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("Check Message URL invalid", e);
|
||||
} catch (Exception e) {
|
||||
logger.error("Get checkMsgUrl is error : " + e);
|
||||
}
|
||||
|
||||
try {
|
||||
userAgent = context.getString("userAgent", "");
|
||||
Preconditions.checkNotNull("".equals(userAgent), "userAgent must be set!!");
|
||||
logger.info("Read UserAgent from configuration : " + userAgent);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("UserAgent invalid", e);
|
||||
} catch (Exception e) {
|
||||
logger.error("Get userAgent is error : " + e);
|
||||
}
|
||||
|
||||
try {
|
||||
xTag = context.getString("xTag", "");
|
||||
Preconditions.checkNotNull("".equals(xTag), "xTag must be set!!");
|
||||
logger.info("Read XTag from configuration : " + xTag);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("XTag invalid", e);
|
||||
} catch (Exception e) {
|
||||
logger.error("Get xTag is error : " + e);
|
||||
}
|
||||
|
||||
try {
|
||||
topicName = context.getString("topicName", "");
|
||||
Preconditions.checkNotNull("".equals(topicName), "topicName must be set!!");
|
||||
logger.info("Read topicName from configuration : " + topicName);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IllegalArgumentException("topicName invalid", e);
|
||||
} catch (Exception e) {
|
||||
logger.error("Get topicName is error : " + e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public synchronized void stop() {
|
||||
super.stop();
|
||||
logger.warn("Stopping YbHttpAvroSinkFile ... ...");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Status process() throws EventDeliveryException {
|
||||
Status result = Status.READY;
|
||||
Channel channel = getChannel();
|
||||
Transaction transaction = null;
|
||||
// AcResBody acCheckResBody = null;
|
||||
// ProResBody producerResBody = null;
|
||||
try {
|
||||
transaction = channel.getTransaction();
|
||||
transaction.begin();
|
||||
Event event = null;
|
||||
String content = null;
|
||||
List<String> contents = new ArrayList<>();
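//Drain at most batchSize events from the channel within this transaction; if channel.take()
//returns null the channel is empty, so the loop stops early and BACKOFF is returned to let
//Flume slow down its polling.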
|
||||
for (int i = 0; i < batchSize; i++) {
|
||||
event = channel.take();
|
||||
if (event != null) {
|
||||
content = new String(event.getBody());
|
||||
contents.add(content);
|
||||
} else {
|
||||
result = Status.BACKOFF;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (contents.size() > 0) {
|
||||
switch (topicName) {
|
||||
//sent as single records - new classification - 20191219
|
||||
/**
|
||||
* Non-file messages
|
||||
*/
|
||||
case "NTC-CONN-RECORD-LOG":
|
||||
case "NTC-COLLECT-DNS-LOG":
|
||||
case "NTC-COLLECT-SSL-LOG":
|
||||
/**
|
||||
* File messages
|
||||
*/
|
||||
case "NTC-COLLECT-FILE-LOG"://发送独立出来的文件标签
|
||||
case "NTC-COLLECT-HTTP-DOC-LOG":
|
||||
case "NTC-COLLECT-HTTP-AV-LOG"://schema等同于NTC-COLLECT-HTTP-DOC-LOG
|
||||
case "NTC-COLLECT-FTP-DOC-LOG":
|
||||
case "NTC-COLLECT-MAIL-LOG":
|
||||
case "NTC-COLLECT-TELNET-LOG":
|
||||
/**
|
||||
* Status messages
|
||||
*/
|
||||
case "INFLUX-SAPP-BPS-STAT-LOG"://读取回写的influxDB合计数据用作状态上传
|
||||
sendMsgLog(transaction, contents);//20191209: file sending removed, only messages are handled here
|
||||
break;
|
||||
default:
|
||||
logger.error("YbHttpAvroSinkFile can't find this topic:" + topicName + ".Please confirm this topicName is correct!!!");
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
transaction.commit();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
try {
|
||||
if (transaction != null) {
|
||||
transaction.rollback();
|
||||
}
|
||||
} catch (Exception e2) {
|
||||
logger.error("Exception in rollback. Rollback might not have been successful.", e2);
|
||||
}
|
||||
logger.error("Failed to commit transaction.Transaction rolled back.", e);
|
||||
Throwables.propagate(e);
|
||||
} finally {
|
||||
if (transaction != null) {
|
||||
transaction.close();
|
||||
logger.debug("close Transaction");
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the message SessionCookie
|
||||
*/
|
||||
private static void getMsgSessionCookie() {
|
||||
AcResBody acCheckMsgResBody;
|
||||
int requestType = 10;
|
||||
int statusCode = 0;
|
||||
acCheckMsgResBody = acCheck(checkMsgUrl, userAgent, xTag, requestType);//getMsgSessionCookie()
|
||||
if (acCheckMsgResBody != null) {
|
||||
statusCode = acCheckMsgResBody.getCode();
|
||||
}
|
||||
if (statusCode == 200) {
|
||||
if (StringUtils.isNotBlank(acCheckMsgResBody.getSessionId())) {
|
||||
logger.warn("AC msg successfully,msg sessionId is ===>" + acCheckMsgResBody.getSessionId());
|
||||
msgSessionCookie = acCheckMsgResBody.getSessionId();
|
||||
}
|
||||
} else if (statusCode == 0) {
|
||||
logger.error("This statusCode is 0,so AC msg from ZX acCheckMsgResBody maybe null or not be set");
|
||||
} else {
|
||||
logger.error("AC msg from ZX is error,statusCode is " + statusCode + "(case)=" + acCheckMsgResBody.getCode() + "(getMethod)<===");
|
||||
logger.error("This " + statusCode + " ResponseBody(contain sessionId) is ===>" + acCheckMsgResBody.toString() + "<===");
|
||||
}
|
||||
updateConfigInfo();//getMsgSessionCookie()更新
|
||||
// return producerResBody;
|
||||
}
|
||||
|
||||
/**
|
||||
* Obtain the status-report (monitor) SessionCookie
|
||||
*/
|
||||
private static void getMonitorSessionCookie() {
|
||||
AcResBody acCheckMsgMonitorResBody;
|
||||
int requestType = 10;
|
||||
int statusCode = 0;
|
||||
acCheckMsgMonitorResBody = acCheck(checkMsgUrl, userAgent, monitorMsgXTag, requestType);//getMonitorSessionCookie()
|
||||
if (acCheckMsgMonitorResBody != null) {
|
||||
statusCode = acCheckMsgMonitorResBody.getCode();
|
||||
}
|
||||
if (statusCode == 200) {
|
||||
if (StringUtils.isNotBlank(acCheckMsgMonitorResBody.getSessionId())) {
|
||||
logger.warn("getMonitorSessionCookie-Thread.currentThread().getName()===>" + Thread.currentThread().getName());
|
||||
logger.warn("AC msgMonitor successfully,msgMonitor sessionId is ===>" + acCheckMsgMonitorResBody.getSessionId());
|
||||
monitorSessionCookie = acCheckMsgMonitorResBody.getSessionId();
|
||||
}
|
||||
} else if (statusCode == 0) {
|
||||
logger.error("This statusCode is 0,so AC msgMonitor from ZX acCheckMsgMonitorResBody maybe null or not be set");
|
||||
} else {
|
||||
logger.error("AC msgMonitor from ZX is error,statusCode is " + statusCode + "(case)=" + acCheckMsgMonitorResBody.getCode() + "(getMethod)<===");
|
||||
logger.error("This " + statusCode + " ResponseBody(contain sessionId) is ===>" + acCheckMsgMonitorResBody.toString() + "<===");
|
||||
}
|
||||
updateConfigInfo();//getMonitorSessionCookie()更新
|
||||
}
|
||||
|
||||
private static ConfigInfo updateConfigInfo() {
|
||||
configInfo.setCheckMsgUrl(checkMsgUrl);
|
||||
configInfo.setPostMsgUrl(postMsgUrl);
|
||||
configInfo.setMsgSessionCookie(msgSessionCookie);
|
||||
|
||||
configInfo.setMonitorSessionCookie(monitorSessionCookie);//cache monitorSessionCookie
|
||||
|
||||
configInfo.setTopicName(topicName);
|
||||
configInfo.setUserAgent(userAgent);
|
||||
configInfo.setxTag(xTag);
|
||||
configInfo.setBatchSize(batchSize);
|
||||
|
||||
return configInfo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends the message part of file data to ZX, i.e. the structured message describing a file
|
||||
* Originally meant for file messages; now this method is mainly used to send records one at a time - 20191224
|
||||
*
|
||||
* @param transaction
|
||||
* @param contents
|
||||
*/
|
||||
private void sendMsgLog(Transaction transaction, List<String> contents) {
|
||||
try {
|
||||
//obtain the status-report sessionID
|
||||
if (StringUtils.isBlank(monitorSessionCookie)) {
|
||||
getMonitorSessionCookie();//sendMsgLog - first acquisition of monitorSessionCookie
|
||||
|
||||
if (!checkTimerStart) {
|
||||
checkCookieEveryWeek();//sendMsgLog - when monitorSessionCookie is blank at first start, the task is started without verifying immediately; verification begins after the interval and each run requests both monitorSessionCookie and msgSessionCookie
|
||||
checkTimerStart = true;
|
||||
logger.warn("CheckMsgAndFileCookie Timer is started......");
|
||||
}
|
||||
|
||||
if (!monitorStart) {//periodic message status reporting
|
||||
AvroMonitorTimerTask.monitorMsg(monitorSessionCookie, postMsgUrl, "monitor-msg", 1, userAgent, topicName);//sendMsgLog - log message reporting
|
||||
monitorStart = true;
|
||||
logger.warn("MonitorMsg Timer is started......");
|
||||
}
|
||||
}
|
||||
|
||||
AvroMonitorTimerTask.msgTotalSum += contents.size();//used in message-only mode, not in file+message mode
|
||||
//check whether authentication already exists
|
||||
if (StringUtils.isBlank(msgSessionCookie)) {
|
||||
getMsgSessionCookie();//sendMsgLog - msgSessionCookie is blank, first-access authentication
|
||||
|
||||
/**
|
||||
* Start the timer that periodically scans the redirected-data collection
|
||||
*/
|
||||
if (!redirectContentsPostStart) {
|
||||
postRedirectDataEveryMin();
|
||||
redirectContentsPostStart = true;
|
||||
logger.warn("RedirectContents Timer Post is started......");
|
||||
}
|
||||
|
||||
AvroMonitorTimerTask.msgReadyPostSum += contents.size();
|
||||
for (String content : contents) {
|
||||
HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, content, userAgent, msgSessionCookie, 0);//initial send with retry count 0
|
||||
}
|
||||
} else {//sessionCookie is not blank
|
||||
logger.info("AC msg sessionId already exists,msg sessionId is ===>" + msgSessionCookie);
|
||||
AvroMonitorTimerTask.msgReadyPostSum += contents.size();
|
||||
for (String content : contents) {
|
||||
pool.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, content, userAgent, msgSessionCookie, 0);//initial send with retry count 0
|
||||
} catch (Exception e) {
|
||||
logger.error("sendMsgLog multi-thread is error==>" + e + "<==,Thread is==>" + Thread.currentThread().getName() + "<==.");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("YbHttpAvroSinkFile sendFileMsg is error===>" + e + "<===");
|
||||
|
||||
} finally {
|
||||
if (transaction != null) {
|
||||
transaction.commit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Authentication check before writing to the bus
|
||||
*
|
||||
* @param checkMsgUrl
|
||||
* @param userAgent
|
||||
* @param xTag
|
||||
* @param requestType
|
||||
* @return
|
||||
*/
|
||||
private static AcResBody acCheck(String checkMsgUrl, String userAgent, String xTag, int requestType) {
|
||||
return dcl.zxAcCheck(checkMsgUrl, userAgent, xTag, requestType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Authentication timer: re-authenticates at a fixed interval to obtain fresh cookies
|
||||
*/
|
||||
private void checkCookieEveryWeek() {
|
||||
Timer timer = new Timer();
|
||||
timer.scheduleAtFixedRate(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
getMsgSessionCookie();//periodic refresh of the message cookie - every 7 days
|
||||
getMonitorSessionCookie();//periodic refresh of the status-report cookie - every 7 days
|
||||
} catch (Exception e) {
|
||||
logger.error("CheckCookieEveryWeek to zx everyWeek is error===>>>" + e + "<===");
|
||||
}
|
||||
}
|
||||
}, 1000 * 60 * 60 * 24 * 7, 1000 * 60 * 60 * 24 * 7);//runs once every 7 days
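//Both the initial delay and the period are 1000 * 60 * 60 * 24 * 7 ms = 604,800,000 ms = 7 days,
//so the first re-authentication also happens only one week after the timer is started.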
|
||||
}
|
||||
|
||||
/**
|
||||
* Redirected-data upload timer: periodically scans and re-posts data that was held back because the server was short on resources
|
||||
*/
|
||||
private void postRedirectDataEveryMin() {
|
||||
Timer timer = new Timer();
|
||||
timer.scheduleAtFixedRate(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
if (redirectContents.size() > 0) {
|
||||
List<String> tmpListFreq = new ArrayList<>(redirectContents);
|
||||
redirectContents.clear();
|
||||
AvroMonitorTimerTask.msgReadyPostSum += tmpListFreq.size();
|
||||
for (String content : tmpListFreq) {
|
||||
HttpClientUtil.asyncProducerAvroToZX(postMsgUrl, topicName, content, userAgent, msgSessionCookie, 0);//postRedirectDataEveryMin timer - initial send with retry count 0
|
||||
}
|
||||
logger.info("PostRedirectDataEveryMin post to zx RedirectData size==>" + tmpListFreq.size() + "<==.");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("PostRedirectDataEveryMin to zx everyMin is error===>>>" + e + "<===");
|
||||
}
|
||||
}
|
||||
}, 1000 * 60, 1000 * 60);//runs once every minute
|
||||
}
|
||||
|
||||
/**
|
||||
* Dynamic load balancing: switch the URL and cookie - 20200818
|
||||
*
|
||||
* @param redirectUrlPort
|
||||
*/
|
||||
public static void changeUrl(String redirectUrlPort) {
|
||||
|
||||
/**
|
||||
* Update postMsgUrl and checkMsgUrl
|
||||
*/
|
||||
postMsgUrl = redirectUrlPort + urlToolHm.get("post_suf_path");
|
||||
checkMsgUrl = redirectUrlPort + urlToolHm.get("check_suf_path");
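//Example (host:port is hypothetical): with redirectUrlPort = "http://10.0.0.1:22451" and the
//suffixes cached by makeUrlSplitMap(), the URLs become "http://10.0.0.1:22451/v1/data/sendData"
//and "http://10.0.0.1:22451/v1/system/connect". Only the "*_suf_path" entries are appended here,
//so redirectUrlPort is expected to already carry the scheme.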
|
||||
|
||||
/**
|
||||
* After the URL changes, the cookies must be re-acquired
|
||||
*/
|
||||
updateCookie();
|
||||
|
||||
logger.info("YbHttpAvroSinkFile->changeUrl->change postMsgUrl:" + postMsgUrl + ",change checkMsgUrl:" + checkMsgUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Dynamic load balancing: refresh the cookies
|
||||
*/
|
||||
public static void updateCookie() {
|
||||
getMonitorSessionCookie();//dynamic load balancing changed the URL, re-acquire the cookie
|
||||
getMsgSessionCookie();//dynamic load balancing changed the URL, re-acquire the cookie
|
||||
|
||||
logger.info("YbHttpAvroSinkFile->updateCookie update cookie,postMsgUrl:" + postMsgUrl
|
||||
+ ",checkMsgUrl:" + checkMsgUrl
|
||||
+ ",获取monitorSessionCookie:" + monitorSessionCookie
|
||||
+ ",获取msgSessionCookie:" + msgSessionCookie);
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a URL into host:port and path so the endpoint can be swapped later by dynamic load balancing
|
||||
*
|
||||
* @param oldUrlPath
|
||||
* @param urlType
|
||||
*/
|
||||
private static void makeUrlSplitMap(String oldUrlPath, String urlType) {
|
||||
String[] split = oldUrlPath.replace("http://", "").replace("https://", "").split("/", 2);
|
||||
|
||||
if (split.length == 2) {
|
||||
/*
|
||||
Check URL entries: {"check_pre_https":"https://","check_pre_http":"http://","check_old_url_port":"192.168.126.4:22451","check_suf_path":"/v1/system/connect"}
|
||||
Producer URL entries: {"post_suf_path":"/v1/data/sendData","post_pre_http":"http://","post_old_url_port":"192.168.126.4:22451","post_pre_https":"https://"}
|
||||
Usually both are stored together: {"post_suf_path":"/v1/data/sendData","check_pre_https":"https://","post_pre_http":"http://","check_pre_http":"http://","check_old_url_port":"192.168.126.4:22451","post_old_url_port":"192.168.126.4:22451","check_suf_path":"/v1/system/connect","post_pre_https":"https://"}
|
||||
*/
|
||||
urlToolHm.put(urlType + "_pre_http", "http://");
|
||||
urlToolHm.put(urlType + "_pre_https", "https://");
|
||||
urlToolHm.put(urlType + "_old_url_port", split[0]);
|
||||
urlToolHm.put(urlType + "_suf_path", "/" + split[1]);
|
||||
} else {
|
||||
logger.error("makeUrlSplitMap-->String[] split size is not correct(split.length must be 2).");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,349 @@
|
||||
package cn.ac.iie.cusflume.sink.avroUtils;
|
||||
|
||||
|
||||
import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
|
||||
import cn.ac.iie.cusflume.sink.avroUtils.avroMonitorBean.*;
|
||||
import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody;
|
||||
import cn.ac.iie.cusflume.sink.daoUtils.InfluxdbUtils;
|
||||
import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class AvroMonitorTimerTask {
|
||||
|
||||
private static Logger logger = Logger.getLogger(AvroMonitorTimerTask.class);
|
||||
|
||||
//message counters
|
||||
public static long msgSuccessSum = 0;
|
||||
public static long msgFailedSum = 0;
|
||||
public static long msgTotalSum = 0;
|
||||
public static long msgReadyPostSum = 0;
|
||||
|
||||
//file counters
|
||||
public static long fileSuccessSum = 0;
|
||||
public static long fileFailedSum = 0;
|
||||
public static long fileTotalSum = 0;
|
||||
public static long fileReadyPostSum = 0;
|
||||
|
||||
//file byte counters
|
||||
public static long fileBytesSuccessSum = 0;
|
||||
public static long fileBytesFailedSum = 0;
|
||||
public static long fileBytesTotalSum = 0;
|
||||
public static long fileBytesReadyPostSum = 0;
|
||||
|
||||
public static boolean startFileMonitor = false;//defaults to false
|
||||
|
||||
/**
|
||||
* Messages --- custom Java timer
|
||||
*/
|
||||
public static void monitorMsg(String monitorSessionCookie, String postMonitorUrl, String monitorCategory, int batchSize, String userAgent, String topicType) {//20200428新增
|
||||
Timer timer = new Timer();
|
||||
timer.scheduleAtFixedRate(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
AvroMonitorTimerTask.msgTotalSum++;
|
||||
AvroMonitorTimerTask.msgReadyPostSum++;
|
||||
|
||||
try {
|
||||
if ("yb".equals(RealtimeCountConfig.MONITOR_TYPE)) {//只有当类型为一部(yb)时才进行状态上报
|
||||
String sendMsg = getJson(RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE, RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE_FLUME, topicType);//新版-20200428
|
||||
logger.info("Send monitor message is===>>>" + sendMsg + "<<<===");
|
||||
HttpClientUtil.asyncProducerAvroToZX(postMonitorUrl, monitorCategory, sendMsg, userAgent, monitorSessionCookie, 0);//静态方法无返回值用于多线程,初始发送count计数为0
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Send monitorMsg to zx is error===>>>" + e + "<===");
|
||||
}
|
||||
/**
|
||||
* 20200427: removed the "YB monitorMsg" info log and added the "last min" log so scripts can collect it at warn level
|
||||
* When RealtimeCountConfig.MONITOR_TYPE is set to yb, both status reporting and the warn statistics are produced
|
||||
* When it is set to gd, only the warn statistics are produced and no status is reported
|
||||
*/
|
||||
// if ("gd".equals(RealtimeCountConfig.MONITOR_TYPE)) {//只有当类型为广东(gd)时才进行warn类型日志计数打印,此时需要搭配外部脚本计数
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmm");
|
||||
String timeFormat = sdf.format(new Date(System.currentTimeMillis()));
|
||||
long lastMinTime = Long.parseLong(timeFormat) - 1;
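//Note: subtracting 1 from the yyyyMMddHHmm number only yields the previous minute within the
//same hour; at an hour boundary (e.g. ...1300 - 1 = ...1299) the result is not a valid
//timestamp, so "last min" is only an approximate tag for this log line.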
|
||||
//message send statistics - logged at warn level so external scripts can collect them
|
||||
logger.warn("last min " + lastMinTime + " monitorMsg count==>msgSuccessSum:{ " + msgSuccessSum + " },==>msgFailedSum:{ " + msgFailedSum + " },==>msgReadyPostSum:{ " + msgReadyPostSum + " },==>msgTotalSum:{ " + msgTotalSum + " }.");
|
||||
// }
|
||||
//reset to 0
|
||||
msgSuccessSum = 0;
|
||||
msgFailedSum = 0;
|
||||
msgTotalSum = 0;
|
||||
msgReadyPostSum = 0;
|
||||
}
|
||||
}, 60000, 60000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Messages and files - single-threaded version
|
||||
*/
|
||||
public static void monitorMsgAndFile() {
|
||||
Timer timer = new Timer();
|
||||
// InfluxdbUtils influxdbUtils = new InfluxdbUtils();
|
||||
timer.scheduleAtFixedRate(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmm");
|
||||
String timeFormat = sdf.format(new Date(System.currentTimeMillis()));
|
||||
long lastMinTime = Long.parseLong(timeFormat) - 1;
|
||||
|
||||
//send statistics for messages and files - only counts the number and bytes of files about to be pushed and successfully pushed
|
||||
logger.warn("last min " + lastMinTime + " monitorMsgAndFile count==>msgSuccessSum:{ " + msgSuccessSum + " },==>msgFailedSum:{ " + msgFailedSum + " },==>msgReadyPostSum:{ " + msgReadyPostSum + " }," +
|
||||
"==>fileSuccessSum:{ " + fileSuccessSum + " },==>fileReadyPostSum:{ " + fileReadyPostSum + " }," +
|
||||
"==>fileBytesSuccessSum:{ " + fileBytesSuccessSum + " },==>fileBytesReadyPostSum:{ " + fileBytesReadyPostSum + " }.");
|
||||
|
||||
//reset to 0
|
||||
msgSuccessSum = 0;//根据响应计算-索引14
|
||||
msgFailedSum = 0;//根据响应计算-索引16
|
||||
msgReadyPostSum = 0;//直接在推送处++,多个文件对应一个消息-索引18
|
||||
|
||||
fileSuccessSum = 0;//同字节数-索引20
|
||||
fileReadyPostSum = 0;//同字节数-索引22
|
||||
|
||||
fileBytesSuccessSum = 0;//根据ID个数计算(id是推送成功后返回的)-索引24
|
||||
fileBytesReadyPostSum = 0;//根据流的个数计算-索引26
|
||||
}
|
||||
// }, 60000, 300000);//正式
|
||||
}, 60000, 60000);//测试
|
||||
}
|
||||
|
||||
private static String getJson(String systemComponentCode, String systemComponentCodeFlume, String topicType) {
|
||||
SystemStatus systemStatus = new SystemStatus();
|
||||
/**
|
||||
* Part 1 - set the time field
|
||||
*/
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
|
||||
String timeFormat = sdf.format(new Date(System.currentTimeMillis()));//格式2019-11-18 19:56:37
|
||||
systemStatus.setTime(timeFormat);
|
||||
|
||||
/**
|
||||
* Part 2 - set the system status field
|
||||
*/
|
||||
RealTimeStatus realTimeStatus = new RealTimeStatus();
|
||||
realTimeStatus.setId(systemComponentCode);
|
||||
realTimeStatus.setStatus("在线");
|
||||
|
||||
//no alarm is set when there is no problem
|
||||
if (msgFailedSum >= 100000L) {
|
||||
List<RtAlarm> rtAlarmsList = new LinkedList<RtAlarm>();
|
||||
RtAlarm rtAlarm = new RtAlarm();
|
||||
rtAlarm.setLevel("低");
|
||||
rtAlarm.setTime(timeFormat);
|
||||
rtAlarm.setType("运行出现失败数据");
|
||||
rtAlarm.setDetail("系统正常运行但是出现了失败数据");
|
||||
rtAlarmsList.add(rtAlarm);
|
||||
realTimeStatus.setAlarms(rtAlarmsList);
|
||||
} else {
|
||||
List<RtAlarm> rtAlarmsList = new LinkedList<RtAlarm>();
|
||||
realTimeStatus.setAlarms(rtAlarmsList);
|
||||
}
|
||||
|
||||
LinkedList<RtStatValue> rtStatValuesList = new LinkedList<>();
|
||||
RtStatValue rtStatValue = new RtStatValue();
|
||||
rtStatValue.setName("当前批次数据量");
|
||||
|
||||
LinkedList<RtNameValue> rtNameValuesList = new LinkedList<>();
|
||||
//总数
|
||||
RtNameValue totalCount = new RtNameValue();
|
||||
totalCount.setName("总数据量");
|
||||
totalCount.setValue(msgTotalSum + "");
|
||||
//成功
|
||||
RtNameValue successCount = new RtNameValue();
|
||||
successCount.setName("成功");
|
||||
successCount.setValue(msgSuccessSum + "");
|
||||
//准备发送
|
||||
RtNameValue readyPostCount = new RtNameValue();
|
||||
readyPostCount.setName("准备导入");
|
||||
readyPostCount.setValue(msgReadyPostSum + "");
|
||||
//失败
|
||||
RtNameValue failCount = new RtNameValue();
|
||||
failCount.setName("失败");
|
||||
failCount.setValue(msgFailedSum + "");
|
||||
|
||||
rtNameValuesList.add(totalCount);
|
||||
rtNameValuesList.add(successCount);
|
||||
rtNameValuesList.add(readyPostCount);
|
||||
rtNameValuesList.add(failCount);
|
||||
rtStatValue.setContent(rtNameValuesList);
|
||||
|
||||
rtStatValuesList.add(rtStatValue);
|
||||
realTimeStatus.setStats(rtStatValuesList);
|
||||
systemStatus.setSystem(realTimeStatus);
|
||||
|
||||
|
||||
/**
|
||||
* Part 3 - set the component status field
|
||||
*/
|
||||
LinkedList<RealTimeStatus> realTimeStatusesList = new LinkedList<>();
|
||||
// RealTimeStatus realTimeStatus1 = new RealTimeStatus();
|
||||
// realTimeStatus1.setId(xTag);
|
||||
// realTimeStatus1.setStatus("专用设备");
|
||||
//
|
||||
// LinkedList<RtAlarm> rtAlarmsList2 = new LinkedList<>();
|
||||
// RtAlarm rtAlarm2 = new RtAlarm();
|
||||
// rtAlarm2.setLevel("Low");
|
||||
// rtAlarm2.setTime(timeFormat);
|
||||
// rtAlarm2.setType("Working");
|
||||
// rtAlarm2.setDetail("Component is Working");
|
||||
// rtAlarmsList2.add(rtAlarm2);
|
||||
// realTimeStatus1.setAlarms(rtAlarmsList2);
|
||||
//
|
||||
// LinkedList<RtStatValue> rtStatValuesList2 = new LinkedList<>();
|
||||
// RtStatValue rtStatValue2 = new RtStatValue();
|
||||
// rtStatValue2.setName("Last 5 MinS Message Data Counts");
|
||||
//
|
||||
// LinkedList<RtNameValue> rtNameValuesList2 = new LinkedList<>();
|
||||
// //总数
|
||||
// RtNameValue totalComponentCount = new RtNameValue();
|
||||
// totalComponentCount.setName("TotalComponentMsg");
|
||||
// totalComponentCount.setValue(msgTotalSum + "");
|
||||
// //成功
|
||||
// RtNameValue successComponentCount = new RtNameValue();
|
||||
// successComponentCount.setName("SuccessfulComponentMsg");
|
||||
// successComponentCount.setValue(msgSuccessSum + "");
|
||||
// //准备发送
|
||||
// RtNameValue readyPostComponentCount = new RtNameValue();
|
||||
// readyPostComponentCount.setName("ReadyPostComponentMsg");
|
||||
// readyPostComponentCount.setValue(msgReadyPostSum + "");
|
||||
// //失败
|
||||
// RtNameValue failComponentCount = new RtNameValue();
|
||||
// failComponentCount.setName("FailedComponentMsg");
|
||||
// failComponentCount.setValue(msgFailedSum + "");
|
||||
//
|
||||
// rtNameValuesList2.add(totalComponentCount);
|
||||
// rtNameValuesList2.add(successComponentCount);
|
||||
// rtNameValuesList2.add(readyPostComponentCount);
|
||||
// rtNameValuesList2.add(failComponentCount);
|
||||
//
|
||||
// rtStatValue2.setContent(rtNameValuesList2);
|
||||
//
|
||||
// rtStatValuesList2.add(rtStatValue2);
|
||||
// realTimeStatus1.setStats(rtStatValuesList2);
|
||||
// realTimeStatusesList.add(realTimeStatus1);
|
||||
|
||||
/**
|
||||
* Added 20200428 --↓-- add Flume status
|
||||
*/
|
||||
|
||||
RealTimeStatus realTimeStatus1 = new RealTimeStatus();
|
||||
realTimeStatus1.setId(systemComponentCodeFlume);
|
||||
realTimeStatus1.setStatus("正常");
|
||||
|
||||
//no alarm is set when there is no problem
|
||||
if (msgFailedSum >= 100000L) {
|
||||
LinkedList<RtAlarm> rtAlarmsList2 = new LinkedList<>();
|
||||
RtAlarm rtAlarm2 = new RtAlarm();
|
||||
rtAlarm2.setLevel("低");
|
||||
rtAlarm2.setTime(timeFormat);
|
||||
rtAlarm2.setType("设备处理数据出现异常");
|
||||
rtAlarm2.setDetail("设备正常运行但出现了失败数据,数据种类:" + topicType);
|
||||
rtAlarmsList2.add(rtAlarm2);
|
||||
realTimeStatus1.setAlarms(rtAlarmsList2);
|
||||
} else {
|
||||
LinkedList<RtAlarm> rtAlarmsList2 = new LinkedList<>();
|
||||
realTimeStatus1.setAlarms(rtAlarmsList2);
|
||||
}
|
||||
|
||||
|
||||
LinkedList<RtStatValue> rtStatValuesList2 = new LinkedList<>();
|
||||
RtStatValue rtStatValue2 = new RtStatValue();
|
||||
rtStatValue2.setName("设备处理信息");
|
||||
|
||||
LinkedList<RtNameValue> rtNameValuesList2 = new LinkedList<>();
|
||||
//data category - effectively the topic type
|
||||
RtNameValue componentType = new RtNameValue();
|
||||
componentType.setName("数据种类");
|
||||
componentType.setValue(topicType);
|
||||
// //成功
|
||||
// RtNameValue successComponentCount = new RtNameValue();
|
||||
// successComponentCount.setName("SuccessfulComponentMsg");
|
||||
// successComponentCount.setValue(msgSuccessSum + "");
|
||||
// //准备发送
|
||||
// RtNameValue readyPostComponentCount = new RtNameValue();
|
||||
// readyPostComponentCount.setName("ReadyPostComponentMsg");
|
||||
// readyPostComponentCount.setValue(msgReadyPostSum + "");
|
||||
// //失败
|
||||
// RtNameValue failComponentCount = new RtNameValue();
|
||||
// failComponentCount.setName("FailedComponentMsg");
|
||||
// failComponentCount.setValue(msgFailedSum + "");
|
||||
|
||||
rtNameValuesList2.add(componentType);
|
||||
// rtNameValuesList2.add(successComponentCount);
|
||||
// rtNameValuesList2.add(readyPostComponentCount);
|
||||
// rtNameValuesList2.add(failComponentCount);
|
||||
|
||||
rtStatValue2.setContent(rtNameValuesList2);
|
||||
|
||||
rtStatValuesList2.add(rtStatValue2);
|
||||
realTimeStatus1.setStats(rtStatValuesList2);
|
||||
realTimeStatusesList.add(realTimeStatus1);
|
||||
|
||||
/**
|
||||
* Added 20200428 --↑-- add Flume status
|
||||
*/
|
||||
systemStatus.setComponents(realTimeStatusesList);//when the block in between is fully commented out no Component is set; with the code present the Component is set
|
||||
|
||||
// return JSONObject.toJSONString(systemStatus);
|
||||
return systemStatus.toString();
|
||||
}
|
||||
|
||||
public static long getFileBytesFromInputStream(InputStream fileIS) {
|
||||
try {
|
||||
byte[] bytes = IOUtils.toByteArray(fileIS);
|
||||
return bytes.length;
|
||||
} catch (Exception e) {
|
||||
logger.error("GetFileBytes is error,return 0,error is===>" + e);
|
||||
return 0;
|
||||
}
|
||||
}
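//Note: getFileBytesFromInputStream() consumes the stream: IOUtils.toByteArray() reads it to the
//end, so the caller cannot re-read the same InputStream afterwards.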
|
||||
|
||||
public static ByteArrayOutputStream getByteArrayOutputStream(InputStream input) throws IOException {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
byte[] buffer = new byte[1024];
|
||||
int len;
|
||||
while ((len = input.read(buffer)) > -1) {
|
||||
baos.write(buffer, 0, len);
|
||||
}
|
||||
baos.flush();
|
||||
baos.close();
|
||||
return baos;
|
||||
}
|
||||
|
||||
|
||||
public static LinkedList<ByteArrayOutputStream> getByteArrayOutputStream(LinkedList<InputStream> attachmentsResultIsList) throws IOException {
|
||||
LinkedList<ByteArrayOutputStream> byteArrayOutputStreamsList = new LinkedList<>();
|
||||
for (int i = 0; i < attachmentsResultIsList.size(); i++) {
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
byte[] buffer = new byte[1024];
|
||||
int len;
|
||||
while ((len = attachmentsResultIsList.get(i).read(buffer)) > -1) {
|
||||
baos.write(buffer, 0, len);
|
||||
}
|
||||
baos.flush();
|
||||
baos.close();
|
||||
byteArrayOutputStreamsList.add(baos);
|
||||
}
|
||||
|
||||
return byteArrayOutputStreamsList;
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
msgSuccessSum = 2;
|
||||
msgFailedSum = 1;
|
||||
msgTotalSum = 3;
|
||||
msgReadyPostSum = 3;
|
||||
// String sendMsg = AvroMonitorTimerTask.getJson("userAgent", "xTag");
|
||||
String sendMsg = AvroMonitorTimerTask.getJson(RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE, RealtimeCountConfig.MONITOR_SYSTEM_COMPONENT_CODE_FLUME, "NTC-CONN-RECORD-LOG");
|
||||
System.out.println(sendMsg);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,251 @@
|
||||
package cn.ac.iie.cusflume.sink.avroUtils;
|
||||
|
||||
import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
|
||||
import cn.ac.iie.cusflume.sink.avroUtils.avroSchema.GetAvroSchemaByTopicUtils;
|
||||
import cn.ac.iie.cusflume.sink.bean.ac.Res.AcResBody;
|
||||
import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody;
|
||||
import org.apache.avro.Schema;
|
||||
import org.apache.avro.file.DataFileWriter;
|
||||
import org.apache.avro.generic.GenericData;
|
||||
import org.apache.avro.generic.GenericDatumWriter;
|
||||
import org.apache.avro.generic.GenericRecord;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.entity.ByteArrayEntity;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.InputStream;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
//import org.apache.log4j.Logger;
|
||||
|
||||
|
||||
public class DataCenterLoad {
|
||||
//private static final Logger logger = Logger.getLogger(DataCenterLoad.class);
|
||||
private static final String url = HttpManager.getInfoLoadInstance().getAddress();
|
||||
private static Logger logger = Logger.getLogger(DataCenterLoad.class);
|
||||
|
||||
public DataCenterLoad() {
|
||||
}
|
||||
|
||||
//bus ingestion authentication
|
||||
public AcResBody zxAcCheck(String checkMsgUrl, String userAgent, String xTag, int requestType) {
|
||||
AcResBody acResBody = null;
|
||||
try {
|
||||
acResBody = HttpManager.getInfoLoadInstance().checkAcByZx(checkMsgUrl, userAgent, xTag, requestType);
|
||||
} catch (Exception e) {
|
||||
logger.error("DataCenterLoad zxAcCheck is error,so this return acResBody maybe null,error is===>" + e + "<===");
|
||||
e.printStackTrace();
|
||||
}
|
||||
return acResBody;
|
||||
}
|
||||
|
||||
/**
|
||||
* Avro data load into ZX for flume ZX verification --- used for message transfer (batch)
|
||||
*
|
||||
* @param urlProducer
|
||||
* @param topicName
|
||||
* @param jsonDataList
|
||||
* @param batchSize
|
||||
* @param userAgent
|
||||
* @param msgSessionCookie
|
||||
* @return
|
||||
*/
|
||||
public ProResBody avroDataLoad(String urlProducer, String topicName, List<String> jsonDataList, int batchSize, String userAgent, String msgSessionCookie) {
|
||||
String time5 = generateTimeWithInterval();
|
||||
List<String> avroBatchList = new ArrayList<String>();
|
||||
//look up the schema for this topic
|
||||
String topicAvroSchema = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topicName);//批次
|
||||
int nums = 0;
|
||||
ProResBody proResBody = null;
|
||||
try {
|
||||
for (String jsonData : jsonDataList) {
|
||||
avroBatchList.add(jsonData);
|
||||
nums++;
|
||||
// if (nums >= batchSize) {
|
||||
// //logger.info("start to post data to zx---------> " + jsonDataList);
|
||||
// HttpManager.getInfoLoadInstance().producerAvroToZX(urlProducer, topicName, topicAvroSchema, avroBatchList, userAgent, msgSessionCookie);
|
||||
// avroBatchList.clear();
|
||||
// nums = 0;
|
||||
// }
|
||||
}
|
||||
if (nums != 0) {
|
||||
proResBody = HttpManager.getInfoLoadInstance().producerAvroToZX(urlProducer, topicName, topicAvroSchema, avroBatchList, userAgent, msgSessionCookie);
|
||||
avroBatchList.clear();
|
||||
nums = 0;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return proResBody;
|
||||
}
|
||||
|
||||
/**
|
||||
* Avro data load into ZX for flume ZX verification --- used for sending after file completion (single record)
|
||||
* Also used for status reporting (single record on a timer)
|
||||
* For single records topicAvroSchema is passed in as ""
|
||||
* This variant returns a value and is used for synchronous requests
|
||||
*
|
||||
* @param urlProducer
|
||||
* @param topicName
|
||||
* @param jsonData
|
||||
* @param batchSize
|
||||
* @param userAgent
|
||||
* @param msgSessionCookie
|
||||
* @return
|
||||
*/
|
||||
public ProResBody avroDataLoad(String urlProducer, String topicName, String jsonData, int batchSize, String userAgent, String msgSessionCookie) {//原本的方法用于同步单线程
|
||||
//根据topic获取对应schema
|
||||
// String topicAvroSchema = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topicName);//单条
|
||||
String topicAvroSchema = "";//单条-20191224修改,不再从这里获取,HttpManager中统一获取
|
||||
ProResBody proResBody = null;
|
||||
try {
|
||||
proResBody = HttpManager.getInfoLoadInstance().producerAvroToZX(urlProducer, topicName, topicAvroSchema, jsonData, userAgent, msgSessionCookie);//原本的方法有返回值用于同步单线程
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return proResBody;
|
||||
}
|
||||
|
||||
//JSON data load into ZX for flume ZX verification testing
|
||||
public void jsonDataLoad(String urlProducer, List<String> jsonDataList, int batchSize, String userAgent, String sessionCookie) {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
String time5 = generateTimeWithInterval();
|
||||
int nums = 0;
|
||||
for (String jsonData : jsonDataList) {
|
||||
try {
|
||||
//use alternative line and field separators
|
||||
String aItem = jsonData + "\t" + time5;
|
||||
sb.append(aItem + "\n");
|
||||
nums++;
|
||||
if (nums >= batchSize) {
|
||||
String data = sb.substring(0, sb.length() - 1);
|
||||
//输出的topic
|
||||
logger.info("start to post data to zx---------> " + data);
|
||||
// HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);//原本的方法
|
||||
HttpManager.getInfoLoadInstance().producerJsonToZX(urlProducer, "SESSION-TEST-COMPLETED-LOG", data, userAgent, sessionCookie);
|
||||
// CSVAlarm.getInfoLoadInstance().csvDataLoad(url, "DF_PZ_FLOW_REPORT", data);//自己根据获得接口又使用的方法
|
||||
sb.setLength(0);
|
||||
nums = 0;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
try {
|
||||
if (nums != 0) {
|
||||
String data = sb.substring(0, sb.length() - 1);
|
||||
// HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);//这里topic位置需要根据实际情况修改
|
||||
HttpManager.getInfoLoadInstance().producerJsonToZX(urlProducer, "SESSION-TEST-COMPLETED-LOG", data, userAgent, sessionCookie);
|
||||
// CSVAlarm.getInfoLoadInstance().csvDataLoad(url, "DF_PZ_FLOW_REPORT", data);//自己根据获得接口又使用的方法
|
||||
sb.setLength(0);
|
||||
nums = 0;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
//new data load method, 20190109:
|
||||
public void dfPzFlowBatchStorage(Map<String, Long> pzMap) {
|
||||
//String sql = " insert into DF_PZ_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, SERVICE, SUM, REPORT_TIME) " +
|
||||
// " VALUES(SEQ_DF_PZ_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
|
||||
StringBuffer sb = new StringBuffer();
|
||||
String time5 = generateTimeWithInterval();
|
||||
int nums = 0;
|
||||
for (String key : pzMap.keySet()) {
|
||||
try {
|
||||
String[] options = key.split("~=~");
|
||||
if (options[0] != null && options[0] != "" && options[1] != null && options[1] != "" && options[2] != null && options[2] != "" && options[3] != null && options[3] != "" && options[4] != null && options[4] != "" && options[5] != null && options[5] != "") {
|
||||
//if the log name is included it sits at index 0, so start from index 1 here
|
||||
// String aItem = options[1] + "\t" + options[2] + "\t" + options[3] + "\t" + options[4] + "\t" + options[5] + "\t" + options[6] + "\t" + pzMap.get(key) + "\t" + time5;
|
||||
//if the log name is not included, start directly from index 0
|
||||
// String aItem = options[0] + "\t" + options[1] + "\t" + options[2] + "\t" + options[3] + "\t" + options[4] + "\t" + options[5] + "\t" + pzMap.get(key) + "\t" + time5;
|
||||
// sb.append(aItem + "\n");
|
||||
//use alternative line and field separators
|
||||
String aItem = options[0] + "," + options[1] + "," + options[2] + "," + options[3] + "," + options[4] + "," + options[5] + "," + pzMap.get(key) + "," + time5;
|
||||
sb.append(aItem + "\n");
|
||||
nums++;
|
||||
if (nums >= 1000) {
|
||||
String data = sb.substring(0, sb.length() - 1);
|
||||
//输出的topic
|
||||
logger.info("start to post data to dc---------> " + data);
|
||||
System.out.println("start to post data to dc---------> " + data);
|
||||
HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);//原本的方法
|
||||
// CSVAlarm.getInfoLoadInstance().csvDataLoad(url, "DF_PZ_FLOW_REPORT", data);//自己根据获得接口又使用的方法
|
||||
sb.setLength(0);
|
||||
nums = 0;
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
try {
|
||||
if (nums != 0) {
|
||||
String data = sb.substring(0, sb.length() - 1);
|
||||
HttpManager.getInfoLoadInstance().postToDataCenter(url, "DF_PZ_FLOW_REPORT", data);//这里topic位置需要根据实际情况修改
|
||||
// CSVAlarm.getInfoLoadInstance().csvDataLoad(url, "DF_PZ_FLOW_REPORT", data);//自己根据获得接口又使用的方法
|
||||
sb.setLength(0);
|
||||
nums = 0;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
//original version:
|
||||
public void dfPzBatchStorage(Map<String, Long> pzMap) {
|
||||
//String sql = " insert into DF_PZ_REPORT(STAT_ID, ACTIVE_SYS, CFG_ID, SERVICE, SUM, REPORT_TIME) " +
|
||||
// " VALUES(SEQ_DF_PZ_REPORT.NEXTVAL, ?, ?, ?, ?, ?)";
|
||||
StringBuffer sb = new StringBuffer();
|
||||
String time5 = generateTimeWithInterval();
|
||||
int nums = 0;
|
||||
for (String key : pzMap.keySet()) {
|
||||
try {
|
||||
String[] options = key.split("~=~");
|
||||
String aItem = options[2] + "\t" + options[1] + "\t" + options[3] + "\t" + pzMap.get(key) + "\t" + time5;
|
||||
sb.append(aItem + "\n");
|
||||
nums++;
|
||||
if (nums >= 1000) {
|
||||
String data = sb.substring(0, sb.length() - 1);
|
||||
HttpManager.getInfoLoadInstance().postToDataCenter(url, "t_xa_df_pz_report_dt", data);
|
||||
sb.setLength(0);
|
||||
nums = 0;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
try {
|
||||
if (nums != 0) {
|
||||
String data = sb.substring(0, sb.length() - 1);
|
||||
HttpManager.getInfoLoadInstance().postToDataCenter(url, "t_xa_df_pz_report_dt", data);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private String generateTimeWithInterval() {
|
||||
Long stamp = System.currentTimeMillis() + 300000L;
|
||||
Long stamp5 = stamp / 300000 * 300000;
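//(stamp + 300000) / 300000 * 300000 rounds the current time up to the next 5-minute boundary,
//e.g. 12:03:20 becomes 12:05:00 (a time exactly on a boundary advances to the following one).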
|
||||
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
|
||||
return df.format(stamp5);
|
||||
}
|
||||
|
||||
|
||||
// public static void main(String[] args) {
|
||||
// String topicAvroSchema = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName("NTC-CONN-RECORD-LOG");
|
||||
// System.out.println(topicAvroSchema);
|
||||
// }
|
||||
}
|
||||
@@ -0,0 +1,939 @@
|
||||
package cn.ac.iie.cusflume.sink.avroUtils;
|
||||
|
||||
import cn.ac.iie.cusflume.sink.CommonUtils.GetDataDictionaryCodeByTopicUtils;
|
||||
import cn.ac.iie.cusflume.sink.CommonUtils.GetFilePathByTopicUtils;
|
||||
import cn.ac.iie.cusflume.sink.HttpAsyncUtils.HttpClientUtil;
|
||||
import cn.ac.iie.cusflume.sink.avroUtils.avroRecord.GetAvroRecordByTopicUtils;
|
||||
import cn.ac.iie.cusflume.sink.avroUtils.avroSchema.GetAvroSchemaByTopicUtils;
|
||||
import cn.ac.iie.cusflume.sink.bean.ac.Req.AcReqBody;
|
||||
import cn.ac.iie.cusflume.sink.bean.ac.Res.AcResBody;
|
||||
import cn.ac.iie.cusflume.sink.bean.producer.Res.ProResBody;
|
||||
import cn.ac.iie.cusflume.sink.daoUtils.RealtimeCountConfig;
|
||||
import com.alibaba.fastjson.JSONObject;
|
||||
import com.zdjizhi.utils.StringUtil;
|
||||
import org.apache.avro.Schema;
|
||||
import org.apache.avro.file.DataFileWriter;
|
||||
import org.apache.avro.generic.GenericData;
|
||||
import org.apache.avro.generic.GenericDatumWriter;
|
||||
import org.apache.avro.generic.GenericRecord;
|
||||
import org.apache.avro.io.BinaryEncoder;
|
||||
import org.apache.avro.io.DatumWriter;
|
||||
import org.apache.avro.io.EncoderFactory;
|
||||
import org.apache.avro.specific.SpecificDatumWriter;
|
||||
import org.apache.http.*;
|
||||
import org.apache.http.client.ClientProtocolException;
|
||||
import org.apache.http.client.HttpRequestRetryHandler;
|
||||
import org.apache.http.client.config.CookieSpecs;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.protocol.HttpClientContext;
|
||||
import org.apache.http.config.Registry;
|
||||
import org.apache.http.config.RegistryBuilder;
|
||||
import org.apache.http.conn.ConnectTimeoutException;
|
||||
import org.apache.http.conn.socket.ConnectionSocketFactory;
|
||||
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
|
||||
import org.apache.http.conn.ssl.*;
|
||||
import org.apache.http.entity.ByteArrayEntity;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.apache.http.impl.client.LaxRedirectStrategy;
|
||||
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
|
||||
import org.apache.http.protocol.HttpContext;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import javax.net.ssl.*;
|
||||
import java.io.*;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.UnknownHostException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.security.cert.CertificateException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.*;
|
||||
|
||||
//import org.apache.http.Header;
|
||||
|
||||
|
||||
public class HttpManager {
|
||||
// httpclient connection pool
|
||||
private PoolingHttpClientConnectionManager httpClientConnectionManager = null;
|
||||
private CloseableHttpClient httpClient = null;
|
||||
//instantiated when the class is loaded - eager singleton
|
||||
private static final HttpManager manager = new HttpManager();
|
||||
private static Logger logger = Logger.getLogger(HttpManager.class);
|
||||
|
||||
private static HashMap<String, Schema> schemaHashMap = new HashMap<String, Schema>();//caches parsed Schemas
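//Note: this is a plain HashMap with no synchronization; if producerAvroToZX() is ever invoked
//from multiple threads (the sink hands sends to a thread pool), first accesses for a topic may
//race and the schema may be parsed more than once. A ConcurrentHashMap would make this cache
//safe under concurrency.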
|
||||
|
||||
public static HttpManager getInfoLoadInstance() {
|
||||
return manager;
|
||||
}
|
||||
|
||||
private HttpManager() {
|
||||
//initialize httpClient
|
||||
initHttpClient();
|
||||
System.setProperty("sun.net.inetaddr.ttl", "300");
|
||||
System.setProperty("sun.net.inetaddr.negative.ttl", "10");
|
||||
}
|
||||
|
||||
/**
|
||||
* New version of initHttpClient() --- noted 20200425
|
||||
*/
|
||||
public void initHttpClient() {
|
||||
try {
|
||||
SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
|
||||
X509TrustManager tm = new X509TrustManager() {
|
||||
@Override
|
||||
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public X509Certificate[] getAcceptedIssuers() {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
ctx.init(null, new TrustManager[]{tm}, null);
|
||||
SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
|
||||
Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
|
||||
.register("http", PlainConnectionSocketFactory.INSTANCE)
|
||||
.register("https", socketFactory)
|
||||
.build();
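//The X509TrustManager above accepts every certificate and NoopHostnameVerifier skips hostname
//checks, so HTTPS connections made through this registry are encrypted but the server identity
//is not verified.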
|
||||
/**
|
||||
* New httpClientConnectionManager
|
||||
*/
|
||||
//create the httpclient connection pool
|
||||
httpClientConnectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
|
||||
//maximum total number of pooled connections
|
||||
httpClientConnectionManager.setMaxTotal(2000);
|
||||
//maximum number of connections per route
|
||||
httpClientConnectionManager.setDefaultMaxPerRoute(400);
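//With setMaxTotal(2000) and setDefaultMaxPerRoute(400), at most 400 connections can be open to
//any single host:port and at most 2000 across all routes; the pool is shared by every request
//because HttpManager is a singleton.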
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
httpClient = getHttpClient();
|
||||
}
|
||||
|
||||
//request retry handler
|
||||
HttpRequestRetryHandler myRetryHandler = new HttpRequestRetryHandler() {
|
||||
@Override
|
||||
public boolean retryRequest(IOException exception, int executionCount, HttpContext context) {
|
||||
if (executionCount >= 2) {
|
||||
// do not retry more than twice
|
||||
logger.error("http连接已重试" + executionCount + "次, 重试失败");
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof InterruptedIOException) {
|
||||
// Timeout
|
||||
logger.info("InterruptedIOException, retry connection...");//新增-降低了上述的日志级别
|
||||
return true;
|
||||
}
|
||||
if (exception instanceof UnknownHostException) {
|
||||
// Unknown host
|
||||
return false;
|
||||
}
|
||||
if (exception instanceof ConnectTimeoutException) {
|
||||
logger.error("ConnectTimeoutException, 重试连接。。。");
|
||||
// Connection refused
|
||||
return true;
|
||||
}
|
||||
if (exception instanceof SSLException) {
|
||||
// SSL handshake exception
|
||||
return false;
|
||||
}
|
||||
HttpClientContext clientContext = HttpClientContext.adapt(context);
|
||||
HttpRequest request = clientContext.getRequest();
|
||||
boolean idempotent = !(request instanceof HttpEntityEnclosingRequest);
|
||||
if (idempotent) {
|
||||
logger.error("request is idempotent, 重试连接。。。");
|
||||
// Retry if the request is considered idempotent
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
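//Summary of the retry policy above: at most 2 attempts per request; timeouts
//(InterruptedIOException) and connect timeouts are retried, unknown hosts and SSL failures are
//not, and requests without a body (considered idempotent) are retried as well.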
|
||||
|
||||
public CloseableHttpClient getHttpClient() {
|
||||
// global requestConfig
|
||||
RequestConfig requestConfig = RequestConfig.custom()
|
||||
.setConnectTimeout(3000)
|
||||
.setSocketTimeout(3000)
|
||||
//.setCookieSpec(CookieSpecs.BEST_MATCH)
|
||||
.build();
|
||||
// redirect strategy
|
||||
LaxRedirectStrategy redirectStrategy = new LaxRedirectStrategy();
|
||||
|
||||
/**
|
||||
* Original version
|
||||
*/
|
||||
CloseableHttpClient httpClient = HttpClients.custom()
|
||||
.setConnectionManager(httpClientConnectionManager)
|
||||
.setDefaultRequestConfig(requestConfig)
|
||||
.setRedirectStrategy(redirectStrategy)
|
||||
.setRetryHandler(myRetryHandler)
|
||||
.build();
|
||||
|
||||
return httpClient;
|
||||
}
|
||||
|
||||
public String getAddress() {
|
||||
//ArrayList<String> addrs = new ArrayList<String>();
|
||||
// String[] addrs = "http://10.208.133.172:10080,http://10.208.133.173:10080".split(",");
|
||||
String[] addrs = "http://10.208.133.172:10080,http://10.208.133.173:10080".split(",");
|
||||
|
||||
Random rnd = new Random();
|
||||
Integer addrIndex = rnd.nextInt(addrs.length);
|
||||
return addrs[addrIndex].trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Produce Avro data into ZX (batch) -- the payload does not include the schema
|
||||
*
|
||||
* @param urlProducer
|
||||
* @param topic
|
||||
* @param schemaStr
|
||||
* @param avroBatchList
|
||||
* @param userAgent
|
||||
* @param msgSessionCookie
|
||||
* @return
|
||||
*/
|
||||
public ProResBody producerAvroToZX(String urlProducer, String topic, String schemaStr, List<String> avroBatchList, String userAgent, String msgSessionCookie) {
|
||||
CloseableHttpResponse response = null;
|
||||
HttpPost httpPost = null;
|
||||
urlProducer = urlProducer.trim();
|
||||
ProResBody proResBody = null;
|
||||
byte[] resultArray = null;//holds the Avro binary stream
|
||||
// JsonAvroConverter converter = new JsonAvroConverter();
|
||||
try {
|
||||
// get schema
|
||||
Schema schemaAvro = new Schema.Parser().parse(schemaStr);
|
||||
ByteArrayOutputStream outAvro = new ByteArrayOutputStream();
|
||||
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null);
|
||||
DatumWriter<GenericRecord> fileWriter = new SpecificDatumWriter<GenericRecord>(schemaAvro);
|
||||
outAvro.reset();
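//The records below are written back to back with a raw BinaryEncoder, so the payload carries no
//Avro container header or embedded schema; the receiving side must already know the schema that
//belongs to this topic, which matches the "does not include the schema" note above.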
|
||||
for (String dataJson : avroBatchList) {
|
||||
GenericRecord recordAvro = new GenericData.Record(schemaAvro);
|
||||
// GenericRecord recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaAvro);
|
||||
// recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaStr);//批量
|
||||
recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaAvro);//批量
|
||||
|
||||
logger.info("封装AvroRecord后数据为===>" + recordAvro.toString() + "<===封装AvroRecord后数据");
|
||||
|
||||
fileWriter.write(recordAvro, encoder);
|
||||
}
|
||||
|
||||
if (encoder != null) {
|
||||
encoder.flush();
|
||||
}
|
||||
|
||||
if (outAvro != null) {
|
||||
resultArray = outAvro.toByteArray();
|
||||
|
||||
outAvro.flush();
|
||||
outAvro.close();
|
||||
outAvro = null;
|
||||
}
|
||||
|
||||
httpPost = new HttpPost(urlProducer);
|
||||
// set header
|
||||
httpPost.addHeader("User-Agent", userAgent);
|
||||
httpPost.addHeader("Cookie", msgSessionCookie);
|
||||
|
||||
try {
|
||||
String md5Avro = MD5Utils.md5Encode(resultArray);
|
||||
httpPost.addHeader("Checksum", md5Avro);
|
||||
logger.info("请求端Checksum MD5 avro 加密为:" + md5Avro);
|
||||
} catch (Exception e) {
|
||||
logger.error("MD5Utils.md5Encode Method is error,this data is " + resultArray);
|
||||
e.printStackTrace();
|
||||
}
|
||||
// httpPost.addHeader("Content-Type", "binary/octet-stream");
|
||||
// httpPost.addHeader("Content-Type", "application/avro+json;charset=UTF-8");
|
||||
httpPost.addHeader("Content-Type", "binary/octet-stream");
|
||||
|
||||
//新增调度标签--------------20191210----------↓-----------
|
||||
//note: batch sends do not need the FilePath header here
|
||||
// Map map = JSONObject.parseObject(avroBatchList.get(0), Map.class);
|
||||
// String xTag = (String) map.get("x_tag");
|
||||
httpPost.addHeader("X-Tag", getXTAG(avroBatchList.get(0), topic));//批次,第一条,因为必然存在至少一条
|
||||
// httpPost.addHeader("FilePath", "-");//因为无文件,所以设置为空;//20191216新增注释:批次发送的不设置FilePath头部
|
||||
//新增调度标签--------------20191210----------↑-----------
|
||||
|
||||
ByteArrayEntity payload = new ByteArrayEntity(resultArray);
|
||||
payload.setContentEncoding("utf-8");
|
||||
//payload.setContentType("text/xml; charset=UTF-8");
|
||||
// anti avro
|
||||
httpPost.setEntity(payload);
|
||||
|
||||
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往下
|
||||
logger.info("加载内容字节数组长度: " + resultArray.length);
|
||||
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往上
|
||||
|
||||
//execute the request
|
||||
response = httpClient.execute(httpPost);
|
||||
try {
|
||||
int statuCode = response.getStatusLine().getStatusCode();
|
||||
HttpEntity entity = response.getEntity();
|
||||
String ret = EntityUtils.toString(entity);
|
||||
logger.info("返回的生产原始响应体String数据为:" + ret);
|
||||
proResBody = JSONObject.parseObject(ret, ProResBody.class);
|
||||
logger.info("封装入对象的生产响应体ProResBody为:" + JSONObject.toJSONString(proResBody));
|
||||
if (statuCode == 200) {
|
||||
logger.info("数据加载成功, 返回码: " + statuCode);
|
||||
AvroMonitorTimerTask.msgSuccessSum += avroBatchList.size();
|
||||
EntityUtils.consume(entity);
|
||||
} else {
|
||||
EntityUtils.consume(entity);
|
||||
// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + records.get(0).toString());//单条测试用
|
||||
logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + avroBatchList);//测试用
|
||||
AvroMonitorTimerTask.msgFailedSum += avroBatchList.size();
|
||||
// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + "\n");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Get response from ZX is error===>>>" + e + "<<<===");
|
||||
e.printStackTrace();
|
||||
}
|
||||
} catch (MalformedURLException e) {
|
||||
//执行URL url = new URL()的异常
|
||||
e.printStackTrace();
|
||||
} catch (ClientProtocolException e) {
|
||||
// 执行httpClient.execute(httpGet)的异常
|
||||
e.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
// 执行httpClient.execute(httpGet)的异常
|
||||
logger.error("producerAvroToZX is IOException===>>>" + e + "<<<===");
|
||||
e.printStackTrace();
|
||||
} catch (Exception e) {
|
||||
//handle response here... try other servers
|
||||
logger.error("producerAvroToZX is Exception===>>>" + e + "<<<===");
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
if (response != null) {
|
||||
try {
|
||||
response.close();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
httpPost.abort();
|
||||
}
|
||||
return proResBody;
|
||||
}
|
||||
|
||||
/**
|
||||
* Produce Avro data into ZX (single record) -- the payload does not include the schema
|
||||
* Returns a value; used for synchronous requests (not the multi-threaded path)
|
||||
*
|
||||
* @param urlProducer
|
||||
* @param topic
|
||||
* @param schemaStr
|
||||
* @param dataJson
|
||||
* @param userAgent
|
||||
* @param msgSessionCookie
|
||||
* @return
|
||||
*/
|
||||
public ProResBody producerAvroToZX(String urlProducer, String topic, String schemaStr, String dataJson, String userAgent, String msgSessionCookie) {
|
||||
CloseableHttpResponse response = null;
|
||||
HttpPost httpPost = null;
|
||||
urlProducer = urlProducer.trim();
|
||||
ProResBody proResBody = null;
|
||||
byte[] resultArray = null;//用于存放avro的二进制流
|
||||
// JsonAvroConverter converter = new JsonAvroConverter();
|
||||
try {
|
||||
// get schema
|
||||
|
||||
// Schema schemaDataAvro = new Schema.Parser().parse(schemaStr);//旧版-20191224移除,改用hm获取
|
||||
Schema schemaDataAvro = getSchemaFromHashMap(topic);//new version 20191224 - uses a cache to improve performance
|
||||
ByteArrayOutputStream outAvro = new ByteArrayOutputStream();
|
||||
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null);
|
||||
DatumWriter<GenericRecord> fileWriter = new SpecificDatumWriter<GenericRecord>(schemaDataAvro);
|
||||
outAvro.reset();
|
||||
|
||||
GenericRecord recordAvro = new GenericData.Record(schemaDataAvro);
|
||||
// GenericRecord recordAvro = converter.convertToGenericDataRecord(dataJson.getBytes(), schemaAvro);
|
||||
// recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaStr);//单条
|
||||
recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(topic, recordAvro, dataJson, schemaDataAvro);//单条
|
||||
logger.info("封装数据对象或文件标签AvroRecord后数据为===>" + recordAvro.toString() + "<===封装数据对象或文件标签AvroRecord后数据为");
|
||||
fileWriter.write(recordAvro, encoder);
|
||||
|
||||
if (encoder != null) {
|
||||
encoder.flush();
|
||||
}
|
||||
|
||||
if (outAvro != null) {
|
||||
resultArray = outAvro.toByteArray();
|
||||
|
||||
outAvro.flush();
|
||||
outAvro.close();
|
||||
outAvro = null;
|
||||
}
|
||||
logger.info("数据对象或文件标签resultArray长度为:" + resultArray.length);
|
||||
|
||||
if (!("NTC-COLLECT-FILE-LOG".equals(topic))) {
|
||||
//the resultArray passed in holds the data-object bytes; the returned array is the merged whole
|
||||
resultArray = getTagRecordAndMergeAllArray(topic, dataJson, resultArray);
|
||||
} else {
|
||||
//this is NTC-COLLECT-FILE-LOG, so no d_tag lookup is needed because the data does not contain one; the resultArray built above is used directly as the d_tag for NTC-COLLECT-FILE-LOG
|
||||
resultArray = alreadyGetFileTagRecordSoOnlyGetMergeAllArray(topic, resultArray);
|
||||
}
|
||||
|
||||
httpPost = new HttpPost(urlProducer);
|
||||
// set header
|
||||
httpPost.addHeader("User-Agent", userAgent);
|
||||
|
||||
//status reporting needs this Cookie enabled
|
||||
// if ("monitor-msg".equals(topic)) {
|
||||
// httpPost.addHeader("Cookie", msgSessionCookie);//广东文件消息测试时加上Cookie会验证不通过,即那边显示为两个Cookie,不加Cookie则验证通过
|
||||
// }
|
||||
|
||||
httpPost.addHeader("Cookie", msgSessionCookie);//不设置Cookie时,广东测试出现报错,打开广东的Cookie设置测试一下,经测试,可用
|
||||
|
||||
try {
|
||||
String md5Avro = MD5Utils.md5Encode(resultArray);
|
||||
httpPost.addHeader("Checksum", md5Avro);
|
||||
logger.info("请求端Checksum MD5 avro 加密为:" + md5Avro);
|
||||
} catch (Exception e) {
|
||||
logger.error("MD5Utils.md5Encode Method is error,this data is " + resultArray);
|
||||
e.printStackTrace();
|
||||
}
|
||||
// httpPost.addHeader("Content-Type", "binary/octet-stream");
|
||||
// httpPost.addHeader("Content-Type", "application/avro+json;charset=UTF-8");
|
||||
httpPost.addHeader("Content-Type", "binary/octet-stream");
|
||||
|
||||
//设置调度标签要求的参数,文件消息需要设置FilePath--------------------↓--------------------20191210新增
|
||||
httpPost.addHeader("X-Tag", getXTAG(dataJson, topic));//从json中获取-单条
|
||||
|
||||
String filePathByTopicName = GetFilePathByTopicUtils.getFilePathByTopicName(dataJson, topic);
|
||||
if (!("-".equals(filePathByTopicName)) && !("[\"null\"]".equals(filePathByTopicName))) {
|
||||
httpPost.addHeader("FilePath", filePathByTopicName);//从json中获取
|
||||
}
|
||||
//设置调度标签要求的参数,文件消息需要设置FilePath-------------------↑---------------------20191210新增
|
||||
|
||||
//尝试消除 Invalid cookie header: "Set-Cookie: SESSION=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/". Invalid 'expires' attribute: Thu, 01 Jan 1970 00:00:00 GMT
|
||||
RequestConfig defaultConfig = RequestConfig.custom().setCookieSpec(CookieSpecs.STANDARD).build();
|
||||
httpPost.setConfig(defaultConfig);
|
||||
|
||||
//测试用
|
||||
Header[] allHeaders = httpPost.getAllHeaders();
|
||||
for (int i = 0; i < allHeaders.length; i++) {
|
||||
logger.info("allHeaders[" + i + "].getName()====>>>" + allHeaders[i].getName() + "##### allHeaders[" + i + "].getValue()=======>>>" + allHeaders[i].getValue());
|
||||
}
|
||||
|
||||
ByteArrayEntity payload = new ByteArrayEntity(resultArray);
|
||||
payload.setContentEncoding("utf-8");
|
||||
//payload.setContentType("text/xml; charset=UTF-8");
|
||||
// anti avro
|
||||
httpPost.setEntity(payload);
|
||||
|
||||
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往下
|
||||
logger.info("最终加载内容字节数组长度: " + resultArray.length);
|
||||
//%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%往上
|
||||
|
||||
//执行请求
|
||||
response = httpClient.execute(httpPost);
|
||||
try {
|
||||
int statuCode = response.getStatusLine().getStatusCode();
|
||||
HttpEntity entity = response.getEntity();
|
||||
String ret = EntityUtils.toString(entity);
|
||||
logger.info("返回的生产原始响应体String数据为:" + ret);
|
||||
proResBody = JSONObject.parseObject(ret, ProResBody.class);
|
||||
logger.info("封装入对象的生产响应体ProResBody为:" + JSONObject.toJSONString(proResBody));
|
||||
if (statuCode == 200) {
|
||||
logger.info("数据加载成功, 返回码: " + statuCode);
|
||||
AvroMonitorTimerTask.msgSuccessSum++;
|
||||
EntityUtils.consume(entity);
|
||||
} else {
|
||||
EntityUtils.consume(entity);
|
||||
// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + records.get(0).toString());//单条测试用
|
||||
logger.error("数据加载失败: " + ret + " --- code: " + statuCode + " ---失败数据为: \n" + dataJson);//测试用
|
||||
AvroMonitorTimerTask.msgFailedSum++;
|
||||
// logger.error("数据加载失败: " + ret + " --- code: " + statuCode + "\n");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Get response from ZX is error===>>>" + e + "<<<===");
|
||||
e.printStackTrace();
|
||||
}
|
||||
} catch (MalformedURLException e) {
|
||||
//执行URL url = new URL()的异常
|
||||
e.printStackTrace();
|
||||
} catch (ClientProtocolException e) {
|
||||
// 执行httpClient.execute(httpGet)的异常
|
||||
e.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
// 执行httpClient.execute(httpGet)的异常
|
||||
logger.error("producerAvroToZX is IOException===>>>" + e + "<<<===");
|
||||
e.printStackTrace();
|
||||
} catch (Exception e) {
|
||||
//handle response here... try other servers
|
||||
logger.error("producerAvroToZX is Exception===>>>" + e + "<<<===");
|
||||
logger.error("message is " + dataJson);
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
if (response != null) {
|
||||
try {
|
||||
response.close();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
httpPost.abort();
|
||||
}
|
||||
return proResBody;
|
||||
}
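
    /**
     * Illustrative sketch only, not part of the original sink: the request above sends a
     * "Checksum" header holding the MD5 of the raw Avro body, so a receiving side would be
     * expected to recompute the digest over the body bytes and compare. The method name and
     * parameters here are hypothetical.
     */
    private static boolean checksumMatchesSketch(byte[] bodyBytes, String checksumHeader) throws Exception {
        // MD5Utils.md5Encode(byte[]) returns a lowercase hex string (see MD5Utils below)
        return MD5Utils.md5Encode(bodyBytes).equalsIgnoreCase(checksumHeader);
    }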

    /**
     * Cache the parsed Schema per topic in a HashMap so each topic's schema is fetched and
     * parsed only once.
     *
     * @param topic
     * @return
     */
    private Schema getSchemaFromHashMap(String topic) {
        if (schemaHashMap.containsKey(topic)) {
            return schemaHashMap.get(topic);
        } else {
            String schemaStr = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topic);
            Schema parseSchema = new Schema.Parser().parse(schemaStr);
            schemaHashMap.put(topic, parseSchema);
            return parseSchema;
        }
    }
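
    /**
     * Sketch of an alternative, not part of the original commit: if the sink is ever driven by
     * several threads, the plain HashMap above is not thread-safe. A ConcurrentHashMap keeps the
     * same "fetch and parse once per topic" behaviour without external locking; the field and
     * method names here are hypothetical.
     */
    private final java.util.concurrent.ConcurrentHashMap<String, Schema> schemaCacheSketch =
            new java.util.concurrent.ConcurrentHashMap<String, Schema>();

    private Schema getSchemaFromConcurrentMapSketch(String topic) {
        Schema cached = schemaCacheSketch.get(topic);
        if (cached == null) {
            Schema parsed = new Schema.Parser().parse(GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(topic));
            Schema previous = schemaCacheSketch.putIfAbsent(topic, parsed);
            cached = (previous != null) ? previous : parsed;
        }
        return cached;
    }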

    /**
     * Get the X-Tag value used for the request header.
     *
     * @param dataJson
     * @param topic
     * @return
     */
    private String getXTAG(String dataJson, String topic) {
        if ("monitor-msg".equals(topic)) {
            return RealtimeCountConfig.MONITOR_NOFILE_MSG_X_TAG;
        }

        Map map = JSONObject.parseObject(dataJson, Map.class);
        // String xTag = (String) map.get("x_tag");
        Object x_tag = map.get("x_tag");
        if (x_tag != null) {
            String xTag = x_tag.toString();
            if (StringUtil.isNotBlank(xTag)) {
                return xTag;
            } else {
                return "-";
            }
        } else {
            return "-";
        }
    }

    // My own modification - 2019-09-10 - JSON variant
    public void producerJsonToZX(String urlProducer, String topic, String data, String userAgent, String MsgSessionCookie) {
        CloseableHttpResponse response = null;
        HttpPost httpPost = null;
        urlProducer = urlProducer.trim();
        ProResBody proResBody;
        try {
            httpPost = new HttpPost(urlProducer);
            // httpPost.addHeader("Connection","keep-alive");// may affect ingestion
            // httpPost.addHeader("Accept-Encoding", "gzip, deflate");// may affect ingestion
            //httpPost.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36");
            httpPost.addHeader("User-Agent", userAgent);
            httpPost.addHeader("Cookie", MsgSessionCookie);
            try {
                httpPost.addHeader("Checksum", MD5Utils.md5Encode(data));
                logger.info("Request-side Checksum (MD5): " + MD5Utils.md5Encode(data));
            } catch (Exception e) {
                logger.error("MD5Utils.md5Encode method failed, this data is " + data);
                e.printStackTrace();
            }
            httpPost.addHeader("Content-Type", "binary/octet-stream");
            httpPost.addHeader("Topic", topic);
            StringEntity payload = new StringEntity(data, Charset.forName("utf-8"));// original version
            // StringEntity payload = new StringEntity(data);// my test variant 2019-02-19
            //payload.setContentType("text/xml; charset=UTF-8");
            payload.setContentEncoding("utf-8");// original version; temporarily not used during the 2019-02-19 tests
            httpPost.setEntity(payload);
            logger.info("Payload to load: " + data);
            // Execute the request
            response = httpClient.execute(httpPost);
            try {
                int statusCode = response.getStatusLine().getStatusCode();
                HttpEntity entity = response.getEntity();
                String ret = EntityUtils.toString(entity);
                logger.info("Raw response body (String): " + ret);
                // ret = ret.replace("\\\"", "\"")
                //         .replace("\"{", "{")
                //         .replace("}\"}", "}}");// note: this differs from the AC case
                proResBody = JSONObject.parseObject(ret, ProResBody.class);
                logger.info("Response body mapped into ProResBody: " + proResBody.toString());
                if (statusCode == 200) {
                    logger.info("Data loaded successfully, status code: " + statusCode);
                    EntityUtils.consume(entity);
                } else {
                    EntityUtils.consume(entity);
                    logger.error("Data load failed: " + ret + " --- code: " + statusCode + " --- failed data: \n" + data);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

        } catch (MalformedURLException e) {
            // thrown by URL url = new URL()
            e.printStackTrace();
        } catch (ClientProtocolException e) {
            // thrown by httpClient.execute(httpPost)
            e.printStackTrace();
        } catch (IOException e) {
            // thrown by httpClient.execute(httpPost)
            e.printStackTrace();
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            httpPost.abort();
        }
    }

    /**
     * ZX authentication: obtain the sessionId.
     *
     * @param checkMsgUrl
     * @param userAgent
     * @param xTag
     * @param requestType
     * @return
     */
    public AcResBody checkAcByZx(String checkMsgUrl, String userAgent, String xTag, int requestType) {
        CloseableHttpResponse response = null;
        HttpPost httpPost = null;
        AcResBody acResBody = null;
        String url = checkMsgUrl.trim();

        // bean type
        AcReqBody acReqBody = new AcReqBody();
        acReqBody.setRequestType(requestType);
        String data = JSONObject.toJSONString(acReqBody);// data as JSON

        try {
            httpPost = new HttpPost(url);
            httpPost.addHeader("User-Agent", userAgent);
            httpPost.addHeader("X-Tag", xTag);// per the latest spec this header is no longer required - 2019-12-17
            httpPost.addHeader("Content-Type", "application/json");
            StringEntity payload = new StringEntity(data, Charset.forName("utf-8"));
            //payload.setContentType("text/xml; charset=UTF-8");
            payload.setContentEncoding("utf-8");
            httpPost.setEntity(payload);
            logger.info("Authentication request body: " + data);
            // Execute the request
            response = httpClient.execute(httpPost);
            try {
                int statusCode = response.getStatusLine().getStatusCode();
                HttpEntity entity = response.getEntity();
                String ret = EntityUtils.toString(entity);
                logger.info("Raw authentication response body (String)===>" + ret);// e.g. {"code":200,"msg":"请求成功","data":{"status":0,"reason":"审核通过"}} ("request succeeded" / "approved")
                acResBody = JSONObject.parseObject(ret, AcResBody.class);

                Header[] allHeaders = response.getAllHeaders();
                for (Header header : allHeaders) {
                    if ("Set-Cookie".equals(header.getName())) {
                        acResBody.setSessionId(header.getValue().split(";")[0]);
                    }
                }

                logger.info("Authentication response mapped into AcResBody===>" + acResBody.toString());
                if (statusCode == 200) {
                    logger.info("Data loaded successfully, status code: " + statusCode);
                    EntityUtils.consume(entity);
                } else {
                    EntityUtils.consume(entity);
                    logger.error("Data load failed: " + ret + " --- code: " + statusCode + " --- failed data: \n" + data);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } catch (MalformedURLException e) {
            // thrown by URL url = new URL()
            e.printStackTrace();
        } catch (ClientProtocolException e) {
            // thrown by httpClient.execute(httpPost)
            e.printStackTrace();
        } catch (IOException e) {
            // thrown by httpClient.execute(httpPost)
            e.printStackTrace();
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            httpPost.abort();
        }
        return acResBody;
    }
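
    /**
     * Usage sketch only, not part of the original commit: a caller would typically authenticate
     * first and then reuse the captured session as the "Cookie" header of later producer
     * requests. It assumes AcResBody exposes a getSessionId() matching the setSessionId(...)
     * used above; the URL, user agent, tag and requestType values are placeholders.
     */
    private String obtainSessionCookieSketch() {
        AcResBody ac = checkAcByZx("http://zx-host/check", "flume-sink", "-", 1);
        // expected to be the "SESSION=..." pair taken from the Set-Cookie response header
        return (ac != null) ? ac.getSessionId() : null;
    }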

    // Old version
    public void postToDataCenter(String url, String topic, String data) {
        CloseableHttpResponse response = null;
        HttpPost httpPost = null;
        url = url.trim();
        try {
            httpPost = new HttpPost(url);
            // httpPost.addHeader("Connection","keep-alive");// may affect ingestion
            // httpPost.addHeader("Accept-Encoding", "gzip, deflate");// may affect ingestion
            //httpPost.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36");

            // httpPost.addHeader("User", RealtimeCountConfig.DATACENTER_USERNAME);
            // httpPost.addHeader("Password", RealtimeCountConfig.DATACENTER_PASSWORD);
            httpPost.addHeader("Topic", topic);
            httpPost.addHeader("Schema-Version", "2");// added for the 2019-02-19 tests; not in the original
            httpPost.addHeader("Format", "csv");
            // httpPost.addHeader("Row-Split", "\\n");
            // httpPost.addHeader("Field-Split", "\\t");
            httpPost.addHeader("Row-Split", "\\n");
            httpPost.addHeader("Field-Split", ",");
            // StringEntity payload = new StringEntity(data, Charset.forName("utf-8"));// original version
            StringEntity payload = new StringEntity(data);// my test variant 2019-02-19
            //payload.setContentType("text/xml; charset=UTF-8");
            // payload.setContentEncoding("utf-8");// original version; temporarily not used during the 2019-02-19 tests
            httpPost.setEntity(payload);
            logger.info("Payload for the data center: " + data);
            // Execute the request
            response = httpClient.execute(httpPost);
            try {
                int statusCode = response.getStatusLine().getStatusCode();
                //Header[] headers = response.getAllHeaders();
                //logger.info("<<response header>>:");
                //for (int i = 0; i < headers.length; i++) {
                //    logger.info(headers[i].getName() + " : " + headers[i].getValue());
                //    System.out.println(headers[i].getName() + " : " + headers[i].getValue());
                //}
                HttpEntity entity = response.getEntity();
                if (statusCode == 200) {
                    logger.info("Data center load succeeded, status code: " + statusCode);
                    System.out.println("Data center load succeeded, status code: " + statusCode);
                    EntityUtils.consume(entity);
                } else {
                    String ret = EntityUtils.toString(entity);
                    EntityUtils.consume(entity);
                    logger.error("Data center load failed: " + ret + " --- code: " + statusCode + " --- failed data: \n" + data);
                    System.out.println("Data center load failed: " + ret + " --- code: " + statusCode);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

        } catch (MalformedURLException e) {
            // thrown by URL url = new URL()
            e.printStackTrace();
        } catch (ClientProtocolException e) {
            // thrown by httpClient.execute(httpPost)
            e.printStackTrace();
        } catch (IOException e) {
            // thrown by httpClient.execute(httpPost)
            e.printStackTrace();
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            httpPost.abort();
            /**
             * The httpclient connections are managed by a connection pool, so the client itself
             * is not closed here.
             */
            // try {// close the connection
            //     httpClient.close();
            // } catch (IOException e) {
            //     e.printStackTrace();
            // }
        }
    }
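
    /**
     * Usage sketch only, not part of the original commit: postToDataCenter declares Format=csv
     * with "\n" as the row split and "," as the field split, so a caller would be expected to
     * join fields with commas and rows with newlines before posting. The URL, topic and sample
     * rows below are placeholders.
     */
    private void postCsvBatchSketch() {
        String csvBatch = "1001,2020-10-09 16:20:00,ok" + "\n" + "1002,2020-10-09 16:20:01,failed";
        postToDataCenter("http://datacenter-host/load", "example-topic", csvBatch);
    }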

    /**
     * Get the total number of bytes of the data in the batch.
     *
     * @param avroBatchList
     * @return
     */
    private long getStrBytes(List<String> avroBatchList) {
        long countThisListStr = 0L;
        for (String str : avroBatchList) {
            countThisListStr += str.getBytes().length;
        }
        return countThisListStr;
    }
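
    /**
     * Sketch of a variant, not part of the original commit: String.getBytes() above uses the JVM
     * default charset, so the reported size can differ between hosts for non-ASCII data. Fixing
     * the charset (UTF-8, as used elsewhere in this class) makes the count deterministic.
     */
    private long getStrBytesUtf8Sketch(List<String> avroBatchList) {
        long total = 0L;
        for (String str : avroBatchList) {
            total += str.getBytes(Charset.forName("UTF-8")).length;
        }
        return total;
    }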

    /**
     * Read the log tag out of the data, serialize all related pieces to byte arrays, and return
     * them concatenated.
     *
     * @param topic
     * @param dataJson
     * @param dataResultArray
     * @return
     */
    private byte[] getTagRecordAndMergeAllArray(String topic, String dataJson, byte[] dataResultArray) {
        byte[] dTagByteArray = null;// holds the binary Avro stream of the data tag
        String tagTopicName = "log-tag";// everything except NTC-COLLECT-FILE-LOG uses log-tag, because the d_tag is taken from inside the file message
        if ("monitor-msg".equals(topic)) {
            tagTopicName = "status-tag";
        }
        try {
            // get schema
            //---------removed 2019-12-24, replaced by the HashMap cache-----------↓------
            // String schemaTag = GetAvroSchemaByTopicUtils.getAvroSchemaByTopicName(tagTopicName);// single record - "log-tag" or "status-tag"
            // Schema schemaDTagAvro = new Schema.Parser().parse(schemaTag);// old version
            //---------removed 2019-12-24-----------↑------

            Schema schemaDTagAvro = getSchemaFromHashMap(tagTopicName);// new version - 2019-12-24 - cached to improve performance
            ByteArrayOutputStream outAvro = new ByteArrayOutputStream();
            BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outAvro, null);
            DatumWriter<GenericRecord> fileWriter = new SpecificDatumWriter<GenericRecord>(schemaDTagAvro);
            outAvro.reset();

            GenericRecord recordAvro = new GenericData.Record(schemaDTagAvro);

            // Temporary workaround to inject a d_tag for status messages, because the front end cannot send d_tag yet (2019-12-22) -----------↓--------------
            if (!("monitor-msg".equals(topic))) {
                // the current message is not a status report
                Map map = JSONObject.parseObject(dataJson, Map.class);
                Object d_tag = map.get("d_tag");
                if (d_tag != null) {
                    dataJson = d_tag.toString();// use the d_tag carried by the message to serialize the data tag
                }
            } else {
                // this is a status message; the front end does not provide a d_tag yet, so build one here
                SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                String timeFormat = sdf.format(new Date(System.currentTimeMillis()));// e.g. 2019-11-18 19:56:37
                dataJson = "{\"tag_version\":\"1.0\",\"data_subtype\":16387,\"data_type\":4,\"producer_id\":24832,\"timestamp\":\"yyyy-MM-dd HH:mm:ss\"}";
                Map map = JSONObject.parseObject(dataJson, Map.class);
                map.put("timestamp", timeFormat);
                dataJson = JSONObject.toJSONString(map);
            }
            // Temporary workaround to inject a d_tag for status messages, because the front end cannot send d_tag yet (2019-12-22) -----------↑--------------

            // recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(tagTopicName, recordAvro, dataJson, schemaTag);// single record - "log-tag" or "status-tag"
            recordAvro = GetAvroRecordByTopicUtils.getAvroRecordByTopicName(tagTopicName, recordAvro, dataJson, schemaDTagAvro);// single record - "log-tag" or "status-tag"
            logger.info("Log/status tag AvroRecord after mapping===>" + recordAvro.toString() + "<===");
            fileWriter.write(recordAvro, encoder);

            if (encoder != null) {
                encoder.flush();
            }

            if (outAvro != null) {
                dTagByteArray = outAvro.toByteArray();

                outAvro.flush();
                outAvro.close();
                outAvro = null;
            }
            logger.info("Log tag dTagByteArray length: " + dTagByteArray.length);

            // Data object type code, as a byte array
            byte[] dataObjectTypeByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic), 2);// length 2

            // Data tag length, as a byte array
            byte[] dataTagLengthByteArray = sumHex(dTagByteArray.length, 2);// length 2

            // Total data length (16 + data tag length + data object length), as a byte array
            byte[] dataSumLengthByteArray = sumHex(16 + dTagByteArray.length + dataResultArray.length, 4);// length 4

            // Data tag SchemaID, as a byte array
            byte[] dataTagSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic), 4);// length 4

            // Data object SchemaID, as a byte array
            byte[] dataObjectSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic), 4);// length 4

            // Concatenate and return, in this order: data object type + data tag length + total data length + data tag SchemaID + data object SchemaID + data tag bytes + data object bytes
            return byteMerger(dataObjectTypeByteArray, dataTagLengthByteArray, dataSumLengthByteArray, dataTagSchemaIDByteArray, dataObjectSchemaIDByteArray, dTagByteArray, dataResultArray);
        } catch (Exception e) {
            logger.error("HttpManager getTagRecordAndMergeAllArray is error===>" + e + "<===");
            e.printStackTrace();
            return dataResultArray;// on error, return the data object array itself unchanged
        }
    }
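
    /**
     * Illustrative sketch only, not part of the original commit: how the 16-byte header built by
     * getTagRecordAndMergeAllArray could be read back. sumHex writes every field little-endian,
     * so a reader must use LITTLE_ENDIAN order: 2 bytes object type, 2 bytes tag length, 4 bytes
     * total length (16 + tag + object), 4 bytes tag SchemaID, 4 bytes object SchemaID.
     */
    private static void dumpFrameHeaderSketch(byte[] frame) {
        java.nio.ByteBuffer buf = java.nio.ByteBuffer.wrap(frame).order(java.nio.ByteOrder.LITTLE_ENDIAN);
        int objectType = buf.getShort() & 0xFFFF;   // bytes 0-1
        int tagLength = buf.getShort() & 0xFFFF;    // bytes 2-3
        int totalLength = buf.getInt();             // bytes 4-7 = 16 + tag bytes + object bytes
        int tagSchemaId = buf.getInt();             // bytes 8-11
        int objectSchemaId = buf.getInt();          // bytes 12-15
        int objectLength = totalLength - 16 - tagLength;
        System.out.println("type=" + objectType + " tagLen=" + tagLength + " objLen=" + objectLength
                + " tagSchemaId=" + tagSchemaId + " objSchemaId=" + objectSchemaId);
    }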

    /**
     * The file tag has already been obtained, so only convert the related pieces to byte arrays
     * and return them concatenated.
     */
    private byte[] alreadyGetFileTagRecordSoOnlyGetMergeAllArray(String topic, byte[] dataResultArray) {
        try {
            // Data object type code, as a byte array
            byte[] dataObjectTypeByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectTypeCodeByTopicName(topic), 2);// length 2

            // Data tag length, as a byte array
            byte[] dataTagLengthByteArray = sumHex(dataResultArray.length, 2);// length 2; for NTC-COLLECT-FILE-LOG the incoming data object is in fact the data tag

            // Total data length (16 + data tag length + data object length), as a byte array
            byte[] dataSumLengthByteArray = sumHex(16 + dataResultArray.length, 4);// length 4

            // Data tag SchemaID, as a byte array
            byte[] dataTagSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataTagSchemaIDByTopicName(topic), 4);// length 4

            // Data object SchemaID, as a byte array
            byte[] dataObjectSchemaIDByteArray = sumHex(GetDataDictionaryCodeByTopicUtils.getDataObjectSchemaIDByTopicName(topic), 4);// length 4

            // Concatenate, in this order: data object type + data tag length + total data length + data tag SchemaID + data object SchemaID + data tag bytes (here identical to the data object bytes)
            return byteMerger(dataObjectTypeByteArray, dataTagLengthByteArray, dataSumLengthByteArray, dataTagSchemaIDByteArray, dataObjectSchemaIDByteArray, dataResultArray);
        } catch (Exception e) {
            logger.error("HttpManager alreadyGetFileTagRecordSoOnlyGetMergeAllArray is error===>" + e + "<===");
            e.printStackTrace();
            return dataResultArray;// on error, return the data object array itself unchanged
        }
    }

    /**
     * Convert an integer to a byte array of the given length (little-endian).
     *
     * @param tu5    the number
     * @param length the number of bytes
     * @return byte[]
     */
    public static byte[] sumHex(int tu5, int length) {
        byte[] bytes5 = new byte[length];
        while (length > 0) {
            length--;
            // bytes5[length] = (byte) (tu5 >> 8 * (bytes5.length - length - 1) & 0xFF);// big-endian
            bytes5[bytes5.length - length - 1] = (byte) (tu5 >> 8 * (bytes5.length - length - 1) & 0xFF);// little-endian
        }
        return bytes5;
    }
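
    /**
     * Worked example, illustrative only and not part of the original commit: 300 = 0x012C, so
     * sumHex(300, 2) returns { 0x2C, 0x01 } - the low byte comes first (little-endian).
     */
    public static void sumHexExampleSketch() {
        byte[] encoded = sumHex(300, 2);
        System.out.println(MD5Utils.byteArr2hexString(encoded)); // prints "2c01"
    }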

    /**
     * Merge byte arrays into one.
     *
     * @param byteList
     * @return
     */
    private static byte[] byteMerger(byte[]... byteList) {
        int lengthByte = 0;
        for (int i = 0; i < byteList.length; i++) {
            lengthByte += byteList[i].length;
        }
        byte[] allByte = new byte[lengthByte];
        int countLength = 0;
        for (int i = 0; i < byteList.length; i++) {
            byte[] b = byteList[i];
            System.arraycopy(b, 0, allByte, countLength, b.length);
            countLength += b.length;
        }
        return allByte;
    }
}
@@ -0,0 +1,102 @@
package cn.ac.iie.cusflume.sink.avroUtils;

import org.apache.log4j.Logger;

import java.security.MessageDigest;

/**
 * MD5 conversion utility class.
 *
 * @author Administrator
 * @create 2018-08-13 15:11
 */
public class MD5Utils {
    private static Logger logger = Logger.getLogger(MD5Utils.class);

    // public static String md5Encode(String msg) throws Exception {
    public static String md5Encode(byte[] msgBytes) throws Exception {
        try {
            // byte[] msgBytes = msg.getBytes("utf-8");
            // Obtain a MessageDigest instance for the MD5 algorithm
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            // Update the digest with the given bytes
            md5.update(msgBytes);
            // Finish the hash computation and get the digest
            byte[] digest = md5.digest();
            // The two calls above are equivalent to:
            // byte[] digest = md5.digest(msgBytes);
            return byteArr2hexString(digest);
        } catch (Exception e) {
            logger.error("Error in conversion MD5! This msgBytes is " + msgBytes);
            // e.printStackTrace();
            return "";
        }
    }

    public static String md5Encode(String msg) throws Exception {
        try {
            byte[] msgBytes = msg.getBytes("utf-8");
            // Obtain a MessageDigest instance for the MD5 algorithm
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            // Update the digest with the given bytes
            md5.update(msgBytes);
            // Finish the hash computation and get the digest
            byte[] digest = md5.digest();
            // The two calls above are equivalent to:
            // byte[] digest = md5.digest(msgBytes);
            return byteArr2hexString(digest);
        } catch (Exception e) {
            logger.error("Error in conversion MD5! This msg is " + msg);
            // e.printStackTrace();
            return "";
        }
    }

    /**
     * Convert a byte array to its hexadecimal string form.
     *
     * @param bys the byte array
     * @return the hex string
     */
    public static String byteArr2hexString(byte[] bys) {
        StringBuffer hexVal = new StringBuffer();
        int val = 0;
        for (byte by : bys) {
            // Convert the byte to an int; a negative byte must be ANDed with 0xff
            val = ((int) by) & 0xff;
            if (val < 16) {
                hexVal.append("0");
            }
            hexVal.append(Integer.toHexString(val));
        }

        return hexVal.toString();
    }
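
    /**
     * Quick self-check, illustrative only and not part of the original commit: the RFC 1321 test
     * vector MD5("abc") = 900150983cd24fb0d6963f7d28e17f72, so both overloads should produce that
     * value for the string "abc" / its UTF-8 bytes.
     */
    public static boolean md5SelfCheckSketch() throws Exception {
        return "900150983cd24fb0d6963f7d28e17f72".equals(md5Encode("abc"))
                && "900150983cd24fb0d6963f7d28e17f72".equals(md5Encode("abc".getBytes("utf-8")));
    }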


    // public static void main(String[] args) {
    //     try {
    //         String bbb = MD5Utils.md5Encode("aaa");
    //         System.out.println(bbb);
    //     } catch (Exception e) {
    //         e.printStackTrace();
    //     }
    // }
}
Some files were not shown because too many files have changed in this diff