Optimize Kafka authentication: remove the config item and determine the auth mode from the connection port

This commit is contained in:
qidaijie
2022-03-09 10:05:54 +08:00
parent c6f364d451
commit 956811c2d4
7 changed files with 41 additions and 36 deletions


@@ -0,0 +1,42 @@
package com.zdjizhi.utils.kafka;

import com.zdjizhi.common.StreamAggregateConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SslConfigs;

import java.util.Properties;
/**
 * @author qidaijie
 * @Package com.zdjizhi.utils.kafka
 * @Description:
 * @date 2021/6/8 13:54
 */
public class KafkaConsumer {

    private static Properties createConsumerConfig() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", StreamAggregateConfig.SOURCE_KAFKA_SERVERS);
        properties.put("group.id", StreamAggregateConfig.GROUP_ID);
        properties.put("session.timeout.ms", StreamAggregateConfig.SESSION_TIMEOUT_MS);
        properties.put("max.poll.records", StreamAggregateConfig.MAX_POLL_RECORDS);
        properties.put("max.partition.fetch.bytes", StreamAggregateConfig.MAX_PARTITION_FETCH_BYTES);
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        CertUtils.chooseCert(StreamAggregateConfig.SOURCE_KAFKA_SERVERS, properties);
        return properties;
    }

    public static FlinkKafkaConsumer<String> getKafkaConsumer() {
        FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(StreamAggregateConfig.SOURCE_KAFKA_TOPIC,
                new SimpleStringSchema(), createConsumerConfig());
        kafkaConsumer.setCommitOffsetsOnCheckpoints(true);
        kafkaConsumer.setStartFromGroupOffsets();
        return kafkaConsumer;
    }
}
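
The port-based switch described in the commit message lives in CertUtils.chooseCert, which is called above but whose implementation is not part of this hunk. Below is a minimal sketch of how such a check might look, assuming (hypothetically) that brokers listening on port 9093 use SSL and everything else stays PLAINTEXT; the keystore/truststore paths and passwords are placeholders, and the config keys come from the SslConfigs and CommonClientConfigs classes shipped with the Kafka client.

// Hypothetical sketch of CertUtils.chooseCert; the real implementation is in a file not shown here.
// Assumption: a bootstrap address ending in :9093 means the TLS listener, anything else is PLAINTEXT.
package com.zdjizhi.utils.kafka;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.SslConfigs;

import java.util.Properties;

public class CertUtils {

    private static final String SSL_PORT = "9093"; // assumed SSL listener port, not taken from the commit

    public static void chooseCert(String bootstrapServers, Properties properties) {
        // bootstrapServers looks like "host1:9093,host2:9093"; inspect the port of the first entry.
        String firstServer = bootstrapServers.split(",")[0].trim();
        String port = firstServer.substring(firstServer.lastIndexOf(':') + 1);

        if (SSL_PORT.equals(port)) {
            // TLS listener: switch the client to SSL and point it at a keystore/truststore (placeholder values).
            properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
            properties.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/path/to/truststore.jks");
            properties.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "changeit");
            properties.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "/path/to/keystore.jks");
            properties.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "changeit");
            // Skip hostname verification in case the certificates do not carry broker hostnames.
            properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
        }
        // Any other port: leave the properties untouched, i.e. PLAINTEXT.
    }
}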