galaxy-tsg-olap-storm-log-s…/src/main/java/com/zdjizhi/spout/CustomizedKafkaSpout.java
package com.zdjizhi.spout;

import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;

import java.util.Collections;
import java.util.Map;
import java.util.Properties;

/**
 * A Storm spout that consumes records from Kafka and emits each record value
 * as a single-field tuple named "source".
 *
 * @author Administrator
 */
public class CustomizedKafkaSpout extends BaseRichSpout {

    private static final long serialVersionUID = -3363788553406229592L;
    private static final Log logger = LogFactory.get();

    private KafkaConsumer<String, String> consumer;
    private SpoutOutputCollector collector = null;
    private TopologyContext context = null;

    private static Properties createConsumerConfig() {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, FlowWriteConfig.INPUT_KAFKA_SERVERS);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, FlowWriteConfig.GROUP_ID);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "60000");
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 3000);
        props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 31457280);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, FlowWriteConfig.AUTO_OFFSET_RESET);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        // Kafka rate-limiting (quota) configuration, added 2020-11-17:
        // broker-side quotas are matched against this client.id.
        props.put(ConsumerConfig.CLIENT_ID_CONFIG, FlowWriteConfig.CONSUMER_CLIENT_ID);
        return props;
    }

    @Override
    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
        this.context = context;
        Properties prop = createConsumerConfig();
        this.consumer = new KafkaConsumer<>(prop);
        this.consumer.subscribe(Collections.singletonList(FlowWriteConfig.KAFKA_TOPIC));
    }

    @Override
    public void close() {
        consumer.close();
    }

    @Override
    public void nextTuple() {
        try {
            // Block for up to 10 seconds waiting for a batch of records.
            ConsumerRecords<String, String> records = consumer.poll(10000L);
            // Throttle emission so downstream bolts are not overwhelmed.
            ThreadUtil.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME);
            for (ConsumerRecord<String, String> record : records) {
                // Emit unanchored: failed tuples are not tracked or replayed.
                this.collector.emit(new Values(record.value()));
            }
        } catch (StreamCompletionException e) {
            logger.error("KafkaSpout failed while emitting messages!", e);
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // A single output stream with one field carrying the raw Kafka record value.
        declarer.declare(new Fields("source"));
    }
}
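
For context, a minimal sketch of how this spout might be wired into a topology. Everything below except CustomizedKafkaSpout and its "source" field is an assumption for illustration: the PrintBolt class, component names, topology name, and parallelism hints are hypothetical, not taken from this repository.

package com.zdjizhi.topology;

import com.zdjizhi.spout.CustomizedKafkaSpout;
import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.TopologyBuilder;
import org.apache.storm.topology.base.BaseBasicBolt;
import org.apache.storm.tuple.Tuple;

public class FlowWriteTopologySketch {

    /** Hypothetical terminal bolt that just prints each "source" value. */
    public static class PrintBolt extends BaseBasicBolt {
        @Override
        public void execute(Tuple input, BasicOutputCollector collector) {
            System.out.println(input.getStringByField("source"));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // Terminal bolt: no output fields to declare.
        }
    }

    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        // One spout executor keeps a single consumer in the group; more
        // executors would each open their own KafkaConsumer with the same group.id.
        builder.setSpout("kafka-spout", new CustomizedKafkaSpout(), 1);
        builder.setBolt("print-bolt", new PrintBolt(), 2)
                .shuffleGrouping("kafka-spout");

        Config conf = new Config();
        conf.setNumWorkers(1);
        StormSubmitter.submitTopology("flow-write-sketch", conf, builder.createTopology());
    }
}

Note that because nextTuple() emits without a message ID, Storm's acker never tracks these tuples, so delivery is effectively at-most-once; at-least-once behavior would require emitting with the Kafka offset as the message ID and implementing ack()/fail().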