1、Kafka consumer和producer的配置类注释掉不需要了

2、添加kafka consumer和producer,消费者还需要做异常处理和生成指令
3、添加KafkaTopicConfig配置类,设置topic的Partition分区数
This commit is contained in:
Hao Miao
2024-01-30 00:00:39 +08:00
parent 11fb439e4c
commit 85410b9e53
7 changed files with 204 additions and 15 deletions

View File

@@ -0,0 +1,54 @@
package com.realtime.protection.configuration.kafka;
import com.realtime.protection.configuration.entity.alert.AlertMessage;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import java.util.HashMap;
import java.util.Map;
/**
 * Kafka consumer configuration — currently DISABLED.
 *
 * The {@code @Configuration}/{@code @EnableKafka} annotations and the entire
 * bean body are commented out, so Spring registers no beans from this class;
 * consumer setup is instead driven by application properties
 * (see {@code spring.kafka.*}). The commented code is kept as a reference for
 * a manual {@link org.springframework.kafka.core.ConsumerFactory} setup that
 * deserializes String keys and JSON {@code AlertMessage} values.
 *
 * NOTE(review): if this configuration is permanently unneeded, consider
 * deleting the class rather than keeping dead commented-out code.
 */
//@Configuration
//@EnableKafka
public class KafkaConsumerConfig {
// @Value("${spring.kafka.bootstrap-servers}")
// private String bootstrapServers;
//
// @Value("${spring.kafka.consumer.group-id}")
// private String groupId;
//
// @Autowired
// private KafkaProperties kafkaProperties;
//
// @Bean
// public Map<String, Object> consumerConfigs() {
// Map<String, Object> props = new HashMap<>();
// props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
// props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
// return props;
// }
//
// @Bean
// public ConsumerFactory<String, AlertMessage> consumerFactory() {
// return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(),
// new JsonDeserializer<>(AlertMessage.class));
// }
//
// @Bean
// public ConcurrentKafkaListenerContainerFactory<String, AlertMessage> kafkaListenerContainerFactory() {
// ConcurrentKafkaListenerContainerFactory<String, AlertMessage> factory = new ConcurrentKafkaListenerContainerFactory<>();
// factory.setConsumerFactory(consumerFactory());
// return factory;
// }
}

View File

@@ -0,0 +1,4 @@
package com.realtime.protection.configuration.kafka;
/**
 * Marker interface for messages sent over Kafka.
 *
 * Currently empty and unimplemented — presumably intended to be implemented
 * by payload types such as {@code AlertMessage} so they can be handled
 * polymorphically; TODO confirm intended use or remove if unused.
 */
public interface KafkaMessage {
}

View File

@@ -0,0 +1,42 @@
package com.realtime.protection.configuration.kafka;
import com.realtime.protection.configuration.entity.alert.AlertMessage;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.HashMap;
import java.util.Map;
/**
 * Kafka producer configuration — currently DISABLED.
 *
 * The {@code @Configuration} annotation and the entire bean body are commented
 * out, so Spring registers no beans from this class; producer setup is instead
 * driven by application properties (see {@code spring.kafka.*}). The commented
 * code is kept as a reference for a manual
 * {@link org.springframework.kafka.core.ProducerFactory} setup with String
 * keys and JSON-serialized {@code AlertMessage} values.
 *
 * NOTE(review): if this configuration is permanently unneeded, consider
 * deleting the class rather than keeping dead commented-out code.
 */
//@Configuration
public class KafkaProducerConfig {
// @Value("${spring.kafka.bootstrap-servers}")
// private String bootstrapServers;
//
// @Bean
// public Map<String, Object> producerConfigs() {
// Map<String, Object> props = new HashMap<>();
// props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
// props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
// props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
// return props;
// }
//
// @Bean
// public ProducerFactory<String, AlertMessage> producerFactory() {
// return new DefaultKafkaProducerFactory<>(producerConfigs());
// }
//
// @Bean
// public KafkaTemplate<String, AlertMessage> kafkaTemplate() {
// return new KafkaTemplate<>(producerFactory());
// }
}

View File

@@ -0,0 +1,13 @@
package com.realtime.protection.configuration.kafka;
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Declares the Kafka topic so it is auto-created by Spring's KafkaAdmin
 * with an explicit partition count and replication factor.
 */
@Configuration
public class KafkaTopicConfig {

    // NOTE(review): this creates "testTopic", but KafkaProducerController sends to
    // "topic-test" — the two names do not match; confirm which is intended.
    private static final String TOPIC_NAME = "testTopic";

    // 4 partitions per the commit's stated goal of configuring partition count.
    private static final int PARTITION_COUNT = 4;

    // Replication factor 1 — suitable for a single-broker (dev) cluster only.
    private static final short REPLICATION_FACTOR = 1;

    /**
     * Topic definition bean picked up by KafkaAdmin at startup.
     *
     * @return a {@link NewTopic} describing name, partitions and replication
     */
    @Bean
    public NewTopic batchTopic() {
        return new NewTopic(TOPIC_NAME, PARTITION_COUNT, REPLICATION_FACTOR);
    }
}

View File

@@ -0,0 +1,25 @@
package com.realtime.protection.server.alertmessage.kafkaConsumer;
import com.realtime.protection.configuration.entity.alert.AlertMessage;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoint that publishes posted alert messages to Kafka.
 *
 * NOTE(review): this producer class lives in the {@code ...kafkaConsumer}
 * package while the consumer service lives in {@code ...kafkaProducer} —
 * the package names appear swapped; confirm and relocate.
 */
@Slf4j
@RestController
public class KafkaProducerController {

    // NOTE(review): KafkaTopicConfig auto-creates "testTopic", not "topic-test" —
    // confirm which topic name is intended.
    private static final String TOPIC = "topic-test";

    // private final: injected once via constructor, never reassigned.
    private final KafkaTemplate<String, AlertMessage> kafkaTemplate;

    public KafkaProducerController(KafkaTemplate<String, AlertMessage> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Publishes the request body to the Kafka topic (fire-and-forget; send
     * result is not awaited or checked).
     *
     * @param alertMessage JSON request body deserialized into an AlertMessage
     */
    @PostMapping("/kafkasend")
    public void sendMessage(@RequestBody AlertMessage alertMessage) {
        kafkaTemplate.send(TOPIC, alertMessage);
    }
}

View File

@@ -0,0 +1,24 @@
package com.realtime.protection.server.alertmessage.kafkaProducer;
import com.realtime.protection.configuration.entity.alert.AlertMessage;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;
@Slf4j
@Service
public class KafkaConsumerService {
@KafkaListener(topics = "${spring.kafka.consumer.topic-name}")
public void consume(AlertMessage alerm, Acknowledgment ack) {
try {
log.info("消费者监听到数据:{}", alerm);
// 手动提交
ack.acknowledge();
} catch (Exception e) {
throw new RuntimeException("消费失败,数据: " + alerm, e);
}
}
}