GAL-578 Update groot task templates

zhanghongqing
2024-06-19 13:56:52 +08:00
parent dfa105063b
commit 2a2ad0bf36
16 changed files with 78 additions and 94 deletions
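
For reference, the template updates in this commit replace hardcoded values with {{ ... }} placeholders: kafka_source_topic, kafka_sink_topic, kafka_sink_servers, clickhouse_sink_host, job_name and topology. A minimal sketch of how one parameterized sink block might look once the placeholders are filled in follows; the topic, table and job name are taken from the old hardcoded values in this diff, while the broker and ClickHouse addresses are illustrative only, not values from this repository.

# Hypothetical rendered form of a parameterized sink block (server addresses are made up).
sinks:
  kafka_sink:
    type: kafka
    properties:
      topic: TRAFFIC-SKETCH-METRIC            # value of {{ kafka_sink_topic }}
      kafka.bootstrap.servers: broker-1:9092  # value of {{ kafka_sink_servers }}
      kafka.retries: 0
      kafka.linger.ms: 10
      kafka.request.timeout.ms: 30000
  clickhouse_sink:
    type: clickhouse
    properties:
      host: clickhouse-1:8123                 # value of {{ clickhouse_sink_host }}
      table: tsg_galaxy_v3.traffic_sketch_metric_local
      batch.size: 100000
      batch.interval: 30s
application:
  env:
    name: etl_traffic_sketch_metric           # value of {{ job_name }}
  shade.identifier: aes
  pipeline:
    object-reuse: true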

View File

@@ -19,7 +19,7 @@ sinks:
 type: kafka
 properties:
 topic: {{ kafka_sink_topic }}
-kafka.bootstrap.servers: { { kafka_sink_servers } }
+kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
 kafka.request.timeout.ms: 30000
@@ -35,7 +35,7 @@ sinks:
 clickhouse_sink:
 type: clickhouse
 properties:
-host: {{ clickhouse_servers }}
+host: {{ clickhouse_sink_host }}
 table: tsg_galaxy_v3.traffic_sketch_metric_local
 batch.size: 100000
 batch.interval: 30s
@@ -48,7 +48,6 @@ application:
 shade.identifier: aes
 pipeline:
 object-reuse: true
-topology:
 {{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: DATAPATH-TELEMETRY-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -37,7 +37,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: DATAPATH-TELEMETRY-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
@@ -71,7 +71,6 @@ application:
 shade.identifier: aes
 pipeline:
 object-reuse: true
-topology:
 {{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: DOS-EVENT
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: "{{ kafka_source_servers }}"
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -22,12 +22,30 @@ sources:
 kafka.compression.type: none
 format: json
 sinks:
+kafka_sink:
+type: kafka
+properties:
+topic: {{ kafka_sink_topic }}
+kafka.bootstrap.servers: {{ kafka_sink_servers }}
+kafka.retries: 0
+kafka.linger.ms: 10
+kafka.request.timeout.ms: 30000
+kafka.batch.size: 262144
+kafka.buffer.memory: 134217728
+kafka.max.request.size: 10485760
+kafka.compression.type: snappy
+kafka.security.protocol: SASL_PLAINTEXT
+kafka.sasl.mechanism: PLAIN
+kafka.sasl.jaas.config: 454f65ea6eef1256e3067104f82730e737b68959560966b811e7ff364116b03124917eb2b0f3596f14733aa29ebad9352644ce1a5c85991c6f01ba8a5e8f177a7ff0b2d3889a424249967b3870b50993d9644f239f0de82cdb13bdb502959e16afadffa49ef1e1d2b9c9b5113e619817
+format: json
+json.ignore.parse.errors: false
+log.failures.only: true
 clickhouse_sink:
 type: clickhouse
 properties:
-host: "{{ clickhouse_servers }}"
+host: {{ clickhouse_sink_host }}
 table: tsg_galaxy_v3.dos_event_local
 batch.size: 100000
 batch.interval: 30s
@@ -37,13 +55,10 @@ sinks:
 application:
 env:
-name: dos_event_kafka_to_clickhouse
+name: {{ job_name }}
 shade.identifier: aes
 pipeline:
 object-reuse: true # [boolean] Object Reuse, default is false
-topology:
-- name: kafka_source
-downstream: [clickhouse_sink]
-- name: clickhouse_sink
+{{ topology }}
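
The explicit topology list removed above (kafka_source feeding clickhouse_sink) is now injected through the {{ topology }} placeholder. Judging from the removed lines, a rendered value for this job would presumably look like the block below; since this diff also adds a kafka_sink, the actual rendered topology is expected to wire that sink in as well.

# Presumed rendered value of {{ topology }}, reconstructed from the removed block.
topology:
  - name: kafka_source
    downstream: [clickhouse_sink]
  - name: clickhouse_sink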

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: PROXY-EVENT
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -50,12 +50,6 @@ processing_pipelines:
 parameters:
 value_expression: recv_time
-- function: DOMAIN
-lookup_fields: [http_host, ssl_sni, dtls_sni, quic_sni]
-output_fields: [server_domain]
-parameters:
-option: FIRST_SIGNIFICANT_SUBDOMAIN
 - function: BASE64_DECODE_TO_STRING
 output_fields: [mail_subject]
 parameters:
@@ -107,7 +101,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: PROXY-EVENT
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
@@ -141,6 +135,4 @@ application:
 shade.identifier: aes
 pipeline:
 object-reuse: true
-topology:
 {{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: SESSION-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -50,12 +50,6 @@ processing_pipelines:
 parameters:
 value_expression: recv_time
-- function: DOMAIN
-lookup_fields: [http_host, ssl_sni, dtls_sni, quic_sni]
-output_fields: [server_domain]
-parameters:
-option: FIRST_SIGNIFICANT_SUBDOMAIN
 - function: BASE64_DECODE_TO_STRING
 output_fields: [mail_subject]
 parameters:
@@ -107,7 +101,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: SESSION-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: TRAFFIC-SKETCH-METRIC
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -62,12 +62,11 @@ processing_pipelines:
 parameters:
 data_center_id_num: 1
 sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: TRAFFIC-SKETCH-METRIC
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
@@ -86,7 +85,7 @@ sinks:
 clickhouse_sink:
 type: clickhouse
 properties:
-host: {{ clickhouse_servers }}
+host: {{ clickhouse_sink_host }}
 table: tsg_galaxy_v3.traffic_sketch_metric_local
 batch.size: 100000
 batch.interval: 30s
@@ -97,7 +96,7 @@ sinks:
 application:
 env: # [object] Environment Variables
-name: etl_traffic_sketch_metric # [string] Job Name
+name: {{ job_name }}
 shade.identifier: aes
 pipeline:
 object-reuse: true # [boolean] Object Reuse, default is false

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: TRANSACTION-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -107,7 +107,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: TRANSACTION-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: VOIP-CONVERSATION-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -50,12 +50,6 @@ processing_pipelines:
 parameters:
 value_expression: recv_time
-- function: DOMAIN
-lookup_fields: [http_host, ssl_sni, dtls_sni, quic_sni]
-output_fields: [server_domain]
-parameters:
-option: FIRST_SIGNIFICANT_SUBDOMAIN
 - function: BASE64_DECODE_TO_STRING
 output_fields: [mail_subject]
 parameters:
@@ -107,7 +101,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: VOIP-CONVERSATION-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10

View File

@@ -19,7 +19,7 @@ sinks:
 type: kafka
 properties:
 topic: {{ kafka_sink_topic }}
-kafka.bootstrap.servers: { { kafka_sink_servers } }
+kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
 kafka.request.timeout.ms: 30000
@@ -35,7 +35,7 @@ sinks:
 clickhouse_sink:
 type: clickhouse
 properties:
-host: {{ clickhouse_servers }}
+host: {{ clickhouse_sink_host }}
 table: tsg_galaxy_v3.traffic_sketch_metric_local
 batch.size: 100000
 batch.interval: 30s
@@ -48,7 +48,6 @@ application:
 shade.identifier: aes
 pipeline:
 object-reuse: true
-topology:
 {{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: DATAPATH-TELEMETRY-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -37,7 +37,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: DATAPATH-TELEMETRY-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
@@ -71,7 +71,6 @@ application:
 shade.identifier: aes
 pipeline:
 object-reuse: true
-topology:
 {{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: DOS-EVENT
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: "{{ kafka_source_servers }}"
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -22,12 +22,30 @@ sources:
 kafka.compression.type: none
 format: json
 sinks:
+kafka_sink:
+type: kafka
+properties:
+topic: {{ kafka_sink_topic }}
+kafka.bootstrap.servers: {{ kafka_sink_servers }}
+kafka.retries: 0
+kafka.linger.ms: 10
+kafka.request.timeout.ms: 30000
+kafka.batch.size: 262144
+kafka.buffer.memory: 134217728
+kafka.max.request.size: 10485760
+kafka.compression.type: snappy
+kafka.security.protocol: SASL_PLAINTEXT
+kafka.sasl.mechanism: PLAIN
+kafka.sasl.jaas.config: 454f65ea6eef1256e3067104f82730e737b68959560966b811e7ff364116b03124917eb2b0f3596f14733aa29ebad9352644ce1a5c85991c6f01ba8a5e8f177a7ff0b2d3889a424249967b3870b50993d9644f239f0de82cdb13bdb502959e16afadffa49ef1e1d2b9c9b5113e619817
+format: json
+json.ignore.parse.errors: false
+log.failures.only: true
 clickhouse_sink:
 type: clickhouse
 properties:
-host: "{{ clickhouse_servers }}"
+host: {{ clickhouse_sink_host }}
 table: tsg_galaxy_v3.dos_event_local
 batch.size: 100000
 batch.interval: 30s
@@ -37,13 +55,10 @@ sinks:
 application:
 env:
-name: dos_event_kafka_to_clickhouse
+name: {{ job_name }}
 shade.identifier: aes
 pipeline:
 object-reuse: true # [boolean] Object Reuse, default is false
-topology:
-- name: kafka_source
-downstream: [clickhouse_sink]
-- name: clickhouse_sink
+{{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: PROXY-EVENT
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -50,12 +50,6 @@ processing_pipelines:
 parameters:
 value_expression: recv_time
-- function: DOMAIN
-lookup_fields: [http_host, ssl_sni, dtls_sni, quic_sni]
-output_fields: [server_domain]
-parameters:
-option: FIRST_SIGNIFICANT_SUBDOMAIN
 - function: BASE64_DECODE_TO_STRING
 output_fields: [mail_subject]
 parameters:
@@ -107,7 +101,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: PROXY-EVENT
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
@@ -141,6 +135,4 @@ application:
 shade.identifier: aes
 pipeline:
 object-reuse: true
-topology:
 {{ topology }}

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: SESSION-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -50,12 +50,6 @@ processing_pipelines:
 parameters:
 value_expression: recv_time
-- function: DOMAIN
-lookup_fields: [http_host, ssl_sni, dtls_sni, quic_sni]
-output_fields: [server_domain]
-parameters:
-option: FIRST_SIGNIFICANT_SUBDOMAIN
 - function: BASE64_DECODE_TO_STRING
 output_fields: [mail_subject]
 parameters:
@@ -107,7 +101,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: SESSION-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: TRAFFIC-SKETCH-METRIC
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -62,12 +62,11 @@ processing_pipelines:
 parameters:
 data_center_id_num: 1
 sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: TRAFFIC-SKETCH-METRIC
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10
@@ -86,7 +85,7 @@ sinks:
 clickhouse_sink:
 type: clickhouse
 properties:
-host: {{ clickhouse_servers }}
+host: {{ clickhouse_sink_host }}
 table: tsg_galaxy_v3.traffic_sketch_metric_local
 batch.size: 100000
 batch.interval: 30s
@@ -97,7 +96,7 @@ sinks:
 application:
 env: # [object] Environment Variables
-name: etl_traffic_sketch_metric # [string] Job Name
+name: {{ job_name }}
 shade.identifier: aes
 pipeline:
 object-reuse: true # [boolean] Object Reuse, default is false

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: TRANSACTION-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -107,7 +107,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: TRANSACTION-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10

View File

@@ -2,7 +2,7 @@ sources:
 kafka_source:
 type: kafka
 properties:
-topic: VOIP-CONVERSATION-RECORD
+topic: {{ kafka_source_topic }}
 kafka.bootstrap.servers: {{ kafka_source_servers }}
 kafka.session.timeout.ms: 60000
 kafka.max.poll.records: 3000
@@ -50,12 +50,6 @@ processing_pipelines:
 parameters:
 value_expression: recv_time
-- function: DOMAIN
-lookup_fields: [http_host, ssl_sni, dtls_sni, quic_sni]
-output_fields: [server_domain]
-parameters:
-option: FIRST_SIGNIFICANT_SUBDOMAIN
 - function: BASE64_DECODE_TO_STRING
 output_fields: [mail_subject]
 parameters:
@@ -107,7 +101,7 @@ sinks:
 kafka_sink:
 type: kafka
 properties:
-topic: VOIP-CONVERSATION-RECORD
+topic: {{ kafka_sink_topic }}
 kafka.bootstrap.servers: {{ kafka_sink_servers }}
 kafka.retries: 0
 kafka.linger.ms: 10