增加24.09相关初始化sql及模版配置文件

This commit is contained in:
doufenghu
2024-11-08 16:49:43 +08:00
parent f20d93b792
commit 446662f03d
61 changed files with 2807 additions and 256 deletions

View File

@@ -0,0 +1,50 @@
flink.job.name={{ job_name }}
#kafka source配置
#端口说明:9092为无验证,9095为SSL,9094为SASL
source.kafka.broker={{ kafka_source_servers }}
source.kafka.topic={{ kafka_source_topic }}
source.kafka.group.id={{ kafka_source_group_id }}
#earliest表示从最早的偏移量开始消费,latest表示从最新的偏移量开始消费
source.kafka.auto.offset.reset=latest
source.kafka.session.timeout.ms=60000
#每次拉取操作从分区中获取的最大记录数
source.kafka.max.poll.records=1000
#消费者从单个分区中一次性获取的最大字节数
source.kafka.max.partition.fetch.bytes=31457280
source.kafka.enable.auto.commit=true
#kafka SASL验证用户名
source.kafka.user=olap
#kafka SASL及SSL验证密码
source.kafka.pin=galaxy2024
#SSL需要
source.kafka.tools.library={{ deploy_dir }}/flink/topology/data/
map.filter.expression=FileChunk.offset <= 1073741824
#窗口相关配置
combiner.window.parallelism={{ combiner_window_parallelism }}
#窗口大小,单位秒
combiner.window.size=10
#sink相关参数
sink.parallelism={{ hos_sink_parallelism }}
#可选hos、oss、hbase
sink.type=hos
sink.async=false
#hos sink相关配置
#访问nginx或单个hos时配置为ip:port;访问多个hos时配置为ip1:port,ip2:port...
sink.hos.endpoint={{ hos_sink_servers }}
sink.hos.bucket={{ hos_sink_bucket }}
sink.hos.token={{ hos_token }}
sink.hos.batch.size=1048576
sink.hos.batch.interval.ms=10000
#http相关配置
sink.http.client.retries.number=3
sink.http.client.max.total=20
sink.http.client.max.per.route=10
sink.http.client.connect.timeout.ms=10000
sink.http.client.request.timeout.ms=10000
sink.http.client.socket.timeout.ms=60000