Spring Boot Kafka Integration
Environment
Java 17 + Spring Boot 3 + Kafka 3
Dependencies
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
<version>3.1.4</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure</artifactId>
<version>3.1.4</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure-processor</artifactId>
<version>3.1.4</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-configuration-processor</artifactId>
<version>3.1.4</version>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>3.0.11</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.30</version>
</dependency>
<!-- Required on Spring Boot 3.x, otherwise the class is not found -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.9</version>
</dependency>
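The demo classes in the following sections are plain Spring components, so you also need a standard Spring Boot entry point to run them. A minimal sketch (the demo package name is an assumption):
package demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
// Minimal entry point; component scanning picks up the demo config and example beans below
@SpringBootApplication
public class KafkaDemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(KafkaDemoApplication.class, args);
    }
}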
Spring Boot Kafka Integration
Producer
Configuration class
package demo.config;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class KafkaProducerConfig {
// KafkaTemplate bean: simplifies sending messages
@Bean
public KafkaTemplate<?, ?> kafkaTemplate(ProducerFactory<Object, Object> producerFactory) {
return new KafkaTemplate<>(producerFactory);
}
// Kafka producer factory
@Bean
public ProducerFactory<?, ?> producerFactory() {
Map<String, Object> properties = buildProducerProperties();
return new DefaultKafkaProducerFactory<>(properties);
}
/**
 * Build the producer configuration
 *
 * @return producer properties
 */
public static Map<String, Object> buildProducerProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("bootstrap.servers", "127.0.0.1:9092"); // Kafka broker address
properties.put("acks", "all"); // wait for all in-sync replicas to acknowledge
properties.put("retries", 0); // do not retry failed sends
properties.put("batch.size", 16384); // batch size in bytes
properties.put("linger.ms", 1); // wait up to 1 ms to fill a batch
properties.put("buffer.memory", 33554432); // 32 MB send buffer
properties.put("key.serializer", StringSerializer.class.getName());
properties.put("value.serializer", StringSerializer.class.getName());
return properties;
}
}
Producing messages
package demo.example;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import java.util.Scanner;
@Component
public class KafkaProducerDemo {
@Autowired
private KafkaTemplate<String, Object> kafkaTemplate;
@PostConstruct
public void send() throws InterruptedException {
    // Demo only: @PostConstruct blocks application startup while reading from stdin
    Scanner sc = new Scanner(System.in);
    while (true) {
        System.out.print("Enter a message (type exit to quit): ");
        String mess = sc.nextLine();
        if ("exit".equals(mess)) {
            break;
        }
        kafkaTemplate.send("test", mess);
        // Crude pause: send() is asynchronous (see the callback sketch below)
        Thread.sleep(1000);
        System.out.println("send message success....");
    }
}
}
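The fixed one-second sleep above only approximates waiting for the broker. In spring-kafka 3.x, KafkaTemplate.send() returns a CompletableFuture<SendResult>, so you can react to the actual outcome instead. A hedged sketch (class and method names are illustrative):
package demo.example;
import org.springframework.kafka.core.KafkaTemplate;
public class AsyncSendExample {
    // send() is asynchronous; whenComplete fires once the broker acknowledges (or the send fails)
    public static void sendWithCallback(KafkaTemplate<String, Object> kafkaTemplate, String message) {
        kafkaTemplate.send("test", message).whenComplete((result, ex) -> {
            if (ex == null) {
                System.out.println("sent, offset = " + result.getRecordMetadata().offset());
            } else {
                System.err.println("send failed: " + ex.getMessage());
            }
        });
    }
}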
Consumer
Configuration
package demo.config;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.*;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class KafkaConsumerConfig {
/**
 * Create the consumer instance
 *
 * @param consumerFactory consumer factory
 * @return Kafka consumer
 */
@Bean
public Consumer<?, ?> consumer(ConsumerFactory<Object, Object> consumerFactory) {
return consumerFactory.createConsumer();
}
@Bean
public ConsumerFactory<?,?> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(buildConsumerProperties());
}
/**
 * Build the consumer configuration
 *
 * @return consumer properties
 */
public static Map<String, Object> buildConsumerProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("bootstrap.servers", "127.0.0.1:9092"); // Kafka broker address
properties.put("group.id", "test"); // consumer group id
properties.put("enable.auto.commit", false); // commit offsets manually
properties.put("auto.commit.interval.ms", "1000"); // only used when auto-commit is enabled
properties.put("session.timeout.ms", "30000");
properties.put("key.deserializer", StringDeserializer.class.getName());
properties.put("value.deserializer", StringDeserializer.class.getName());
return properties;
}
}
Consuming messages
package demo.example;
import jakarta.annotation.PostConstruct;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.time.Duration;
import java.util.Arrays;
@Component
public class KafkaConsumerDemo {
@Autowired
private Consumer<String, Object> consumer;
@PostConstruct
public void consumerTest() {
    consumer.subscribe(Arrays.asList("test"));
    new Thread(() -> {
        while (true) {
            // poll(long) is deprecated; use the Duration overload
            ConsumerRecords<String, Object> records = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, Object> record : records) {
                System.out.println("consumed: " + record.value());
            }
            // enable.auto.commit is false, so commit explicitly
            consumer.commitSync();
        }
    }).start();
}
}
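The poll loop above runs forever and never releases the consumer. The standard way to stop such a loop is Consumer.wakeup(), which makes a blocked poll() throw WakeupException. A minimal sketch of that pattern (class and method names are illustrative):
package demo.example;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.errors.WakeupException;
import java.time.Duration;
public class StoppablePollLoop {
    // Polls until another thread calls consumer.wakeup(), then closes the consumer cleanly
    public static void run(Consumer<String, Object> consumer) {
        try {
            while (true) {
                ConsumerRecords<String, Object> records = consumer.poll(Duration.ofMillis(100));
                records.forEach(record -> System.out.println("consumed: " + record.value()));
                consumer.commitSync();
            }
        } catch (WakeupException e) {
            // expected on shutdown
        } finally {
            consumer.close();
        }
    }
}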
Spring Boot Kafka Integration (Listener Mode)
Consumer (listener)
Configuration
package demo.config;
import demo.listener.BatchMessageListener;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class KafkaConsumerConfig {
/**
 * Create the consumer instance (the listener container below manages its own consumers)
 *
 * @param consumerFactory consumer factory
 * @return Kafka consumer
 */
@Bean
public Consumer<?, ?> consumer(ConsumerFactory<Object, Object> consumerFactory) {
return consumerFactory.createConsumer();
}
/**
 * Message listener container
 */
@Bean
public ConcurrentMessageListenerContainer<?, ?> messageListenerContainer(ConsumerFactory<Object, Object> consumerFactory) {
ContainerProperties containerProperties = new ContainerProperties("test");
containerProperties.setMessageListener(new BatchMessageListener());
return new ConcurrentMessageListenerContainer<>(consumerFactory, containerProperties);
}
@Bean
public ConsumerFactory<?, ?> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(buildConsumerProperties());
}
/**
 * Build the consumer configuration
 *
 * @return consumer properties
 */
public static Map<String, Object> buildConsumerProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("bootstrap.servers", "127.0.0.1:9092");
properties.put("group.id", "test");
properties.put("enable.auto.commit", false);
properties.put("auto.commit.interval.ms", "1000");
properties.put("session.timeout.ms", "30000");
properties.put("key.deserializer", StringDeserializer.class.getName());
properties.put("value.deserializer", StringDeserializer.class.getName());
return properties;
}
}
使用监听器消费
package demo.listener;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.BatchConsumerAwareMessageListener;
import java.util.List;
public class BatchMessageListener implements BatchConsumerAwareMessageListener<Object, Object> {
    @Override
    public void onMessage(List<ConsumerRecord<Object, Object>> data, Consumer<?, ?> consumer) {
        for (ConsumerRecord<Object, Object> record : data) {
            System.out.println("consumed: " + record.value());
        }
        // Commit once per batch instead of once per record
        consumer.commitAsync();
    }
}
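If you do not need batches, spring-kafka also ships a per-record variant. A hedged sketch using ConsumerAwareMessageListener (the class name here is illustrative):
package demo.listener;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.ConsumerAwareMessageListener;
// Per-record alternative to the batch listener above
public class SingleMessageListener implements ConsumerAwareMessageListener<Object, Object> {
    @Override
    public void onMessage(ConsumerRecord<Object, Object> record, Consumer<?, ?> consumer) {
        System.out.println("consumed: " + record.value());
        consumer.commitAsync();
    }
}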
The producer needs no changes.
Spring Boot Kafka Integration (Custom Annotation-Driven Consumption)
This builds on the message listener: custom annotations take care of topic subscription and message dispatch, so you no longer create a separate listener per topic.
Custom annotations
Subscription annotation
package cn.xiaobai.spring.boot.annotaion;
import org.springframework.stereotype.Component;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @Author yangdaji
 * @Date 2023/11/2
 * @PackageName cn.xiaobai.spring.boot.annotaion
 * @ClassName KafkaTopicsSubscriber
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Component
public @interface KafkaTopicsSubscriber {
String[] topics() default {};
int partition() default 0;
}
Message-handler marker annotation
package cn.xiaobai.spring.boot.annotaion;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * @Author yangdaji
 * @Date 2023/11/2
 * @PackageName cn.xiaobai.spring.boot.annotaion
 * @ClassName OnMessage
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface OnMessage {
}
Auto-configuration class
package cn.xiaobai.spring.boot.config;
import cn.xiaobai.spring.boot.domain.KafkaPlusProperties;
import cn.xiaobai.spring.boot.listener.BatchKafkaListener;
import cn.xiaobai.spring.boot.listener.KafkaPlusListenerAdapter;
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import java.util.Map;
/**
 * Kafka auto-configuration
 *
 * @Author yangdaji
 * @Date 2023/11/4
 * @PackageName cn.xiaobai.spring.boot.config
 * @ClassName KafkaAutoConfiguration
 */
@AutoConfiguration
@EnableConfigurationProperties({KafkaPlusProperties.class})
@Import({BatchKafkaListener.class})
public class KafkaAutoConfiguration {
private final KafkaPlusProperties kafkaPlusProperties;
public KafkaAutoConfiguration(KafkaPlusProperties kafkaPlusProperties) {
this.kafkaPlusProperties = kafkaPlusProperties;
}
@Bean
public KafkaTemplate<?, ?> kafkaTemplate(ProducerFactory<Object, Object> defaultKafkaProducerFactory) {
return new KafkaTemplate<>(defaultKafkaProducerFactory);
}
/**
 * Message listener container
 *
 * @return container wired to the annotation-driven batch listener
 */
@Bean
public ConcurrentMessageListenerContainer<?, ?> messageListenerContainer(ConcurrentKafkaListenerContainerFactory<Object, Object> concurrentKafkaListenerContainerFactory
, BatchKafkaListener batchKafkaListener) {
ConcurrentMessageListenerContainer<Object, Object> listenerContainer =
concurrentKafkaListenerContainerFactory.createListenerContainer(new KafkaPlusListenerAdapter(batchKafkaListener));
listenerContainer.setupMessageListener(batchKafkaListener);
return listenerContainer;
}
/**
 * Consumer factory
 *
 * @return consumer factory
 */
@Bean
public ConsumerFactory<?, ?> kafkaConsumerFactory() {
Map<String, Object> consumerProperties = kafkaPlusProperties.buildConsumerProperties();
return new DefaultKafkaConsumerFactory<>(consumerProperties);
}
/**
 * Producer factory
 *
 * @return producer factory
 */
@Bean
public ProducerFactory<?, ?> kafkaProducerFactory() {
Map<String, Object> producerProperties = kafkaPlusProperties.buildProducerProperties();
return new DefaultKafkaProducerFactory<>(producerProperties);
}
/**
 * Message listener container factory
 *
 * @param consumerFactory consumer factory
 * @return container factory in batch mode
 */
@Bean
public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(ConsumerFactory<Object, Object> consumerFactory) {
ConcurrentKafkaListenerContainerFactory<Object, Object> concurrentKafkaListenerContainerFactory =
new ConcurrentKafkaListenerContainerFactory<>();
concurrentKafkaListenerContainerFactory.setBatchListener(true);
concurrentKafkaListenerContainerFactory.setConsumerFactory(consumerFactory);
return concurrentKafkaListenerContainerFactory;
}
@Bean
public NewTopic topic() {
return TopicBuilder.name("newTopic").build();
}
@Bean
public KafkaAdmin kafkaAdmin() {
    // Reuse the consumer properties; the admin client only needs bootstrap.servers
    Map<String, Object> properties = kafkaPlusProperties.buildConsumerProperties();
    return new KafkaAdmin(properties);
}
}
Create the META-INF/spring.factories file (Spring Boot 2.x):
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
cn.xiaobai.spring.boot.config.KafkaAutoConfiguration
Create the META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports file (Spring Boot 3):
cn.xiaobai.spring.boot.config.KafkaAutoConfiguration
Create the configuration properties class
package cn.xiaobai.spring.boot.domain;
import lombok.Data;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.util.CollectionUtils;
import java.time.Duration;
import java.util.*;
/**
 * Simplified Kafka configuration properties
 *
 * @Author yangdaji
 * @Date 2023/11/4
 * @PackageName cn.xiaobai.spring.boot.domain
 * @ClassName KafkaPlusProperties
 */
@Data
@ConfigurationProperties(prefix = "kafka-plus")
public class KafkaPlusProperties {
private List<String> bootstrapServers = new ArrayList<>(Collections.singletonList("localhost:9092"));
private final Map<String, String> properties = new HashMap<>();
private final Consumer consumer = new Consumer();
private final Producer producer = new Producer();
private final Listener listener = new Listener();
public Map<String, Object> buildCommonProperties() {
Map<String, Object> properties = new HashMap<>();
if (this.bootstrapServers != null) {
properties.put("bootstrap.servers", this.bootstrapServers);
}
if (!CollectionUtils.isEmpty(this.properties)) {
properties.putAll(this.properties);
}
return properties;
}
public Map<String, Object> buildConsumerProperties() {
    Map<String, Object> properties = buildCommonProperties();
    Map<String, Object> consumerMap = consumer.buildProperties();
    mergeSameValueToMap(properties, consumerMap);
    return properties;
}
public Map<String, Object> buildProducerProperties() {
    Map<String, Object> properties = buildCommonProperties();
    Map<String, Object> producerMap = producer.buildProperties();
    mergeSameValueToMap(properties, producerMap);
    return properties;
}
// Copy non-null entries over the common properties
private void mergeSameValueToMap(Map<String, Object> commonMap, Map<String, Object> map) {
    map.forEach((key, val) -> {
        if (val != null) {
            commonMap.put(key, val);
        }
    });
}
@Data
public static class Consumer {
private Integer autoCommitInterval = 5000;
private String autoOffsetReset = RulesType.LATEST.getName();
private List<String> bootstrapServers;
private String clientId;
private Boolean enableAutoCommit = true;
private Duration fetchMaxWait = Duration.ofMillis(500);
private Integer fetchMaxSize = 50 * 1024 * 1024;
private Integer fetchMinSize = 10 * 1024 * 1024;
private String groupId;
private Class<?> keyDeserializer = StringDeserializer.class;
private Class<?> valueDeserializer = StringDeserializer.class;
private Integer maxPollRecords = 500;
private Duration maxPollInterval = Duration.ofMillis(30 * 1000);
private final Map<String, String> properties = new HashMap<>();
public Map<String, Object> buildProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("bootstrap.servers", this.getBootstrapServers());
properties.put("group.id", this.getGroupId());
properties.put("enable.auto.commit", this.getEnableAutoCommit());
properties.put("auto.offset.reset", this.getAutoOffsetReset());
properties.put("max.poll.records", this.getMaxPollRecords());
properties.put("auto.commit.interval.ms", this.getAutoCommitInterval());
properties.put("max.poll.interval.ms", (int) this.getMaxPollInterval().toMillis());
properties.put("fetch.max.wait.ms", (int) this.getFetchMaxWait().toMillis());
properties.put("fetch.max.bytes", this.getFetchMaxSize());
properties.put("fetch.min.bytes", this.getFetchMinSize());
properties.put("key.deserializer", this.getKeyDeserializer());
properties.put("value.deserializer", this.getValueDeserializer());
properties.putAll(this.getProperties());
return properties;
}
public enum RulesType {
/**
 * Reset to the earliest available offset
 */
EARLIEST("earliest"),
/**
 * Default: reset to the latest offset
 */
LATEST("latest"),
/**
 * Throw an exception to the consumer if no previous offset exists for the group
 */
NONE("none");
private final String name;
RulesType(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
}
@Data
public static class Producer {
private List<String> bootstrapServers;
private Integer batchSize = 16384; // batch.size in bytes (Kafka default)
private Integer bufferMemory = 32 * 1024 * 1024;
private String clientId;
private Class<?> keySerializer = StringSerializer.class;
private Class<?> valueSerializer = StringSerializer.class;
private final Map<String, String> properties = new HashMap<>();
public Map<String, Object> buildProperties() {
Map<String, Object> properties = new HashMap<>();
properties.put("bootstrap.servers", this.getBootstrapServers());
properties.put("client.id", this.getClientId());
properties.put("batch.size", this.getBatchSize());
properties.put("buffer.memory", this.getBufferMemory());
properties.put("key.serializer", this.getKeySerializer());
properties.put("value.serializer", this.getValueSerializer());
properties.putAll(this.getProperties());
return properties;
}
}
@Data
public static class Listener {
private Type type = Type.SINGLE;
private String clientId;
private Duration pollTimeout;
public enum Type {
SINGLE,
BATCH;
}
}
}
Create the message listener
It scans beans for the subscription annotation and binds each one to its topics.
package cn.xiaobai.spring.boot.listener;
import cn.xiaobai.spring.boot.annotaion.KafkaTopicsSubscriber;
import cn.xiaobai.spring.boot.utils.ProxyUtils;
import lombok.Getter;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.env.Environment;
import org.springframework.kafka.listener.BatchConsumerAwareMessageListener;
import org.springframework.kafka.support.TopicPartitionOffset;
import org.springframework.util.CollectionUtils;
import org.springframework.util.ReflectionUtils;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Kafka batch message listener
 * Dispatches records to the methods registered via the custom annotations
 *
 * @Author yangdaji
 * @Date 2023/11/3
 * @PackageName cn.xiaobai.spring.boot.listener
 * @ClassName BatchKafkaListener
 */
public class BatchKafkaListener implements BatchConsumerAwareMessageListener<Object, Object>, ApplicationContextAware {
private final Map<String, Map<Object, Method>> topicMessagesMap = new ConcurrentHashMap<>();
@Getter
private TopicPartitionOffset[] topicPartitionOffsets;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
Environment environment = applicationContext.getEnvironment();
subscriber(applicationContext, environment);
}
private void subscriber(ApplicationContext applicationContext, Environment environment) {
Map<String, Object> beansWithAnnotation = applicationContext.getBeansWithAnnotation(KafkaTopicsSubscriber.class);
List<TopicPartitionOffset> topicList = new LinkedList<>();
beansWithAnnotation.values().forEach(sub -> {
KafkaTopicsSubscriber annotation = AnnotationUtils.findAnnotation(sub.getClass(), KafkaTopicsSubscriber.class);
String[] topics = annotation.topics();
int partition = annotation.partition();
for (String topic : topics) {
String top = environment.resolvePlaceholders(topic);
topicList.add(new TopicPartitionOffset(top, partition));
// Register the subscriber's handler methods for this topic
ProxyUtils.bindProxy(sub, top, topicMessagesMap);
}
});
topicPartitionOffsets = topicList.toArray(new TopicPartitionOffset[0]);
}
@Override
public void onMessage(List<ConsumerRecord<Object, Object>> data, Consumer<?, ?> consumer) {
    for (ConsumerRecord<Object, Object> datum : data) {
        String topic = datum.topic();
        Object message = datum.value();
        Map<Object, Method> objectMethodMap = topicMessagesMap.get(topic);
        if (!CollectionUtils.isEmpty(objectMethodMap)) {
            // Invoke every handler method registered for this topic
            objectMethodMap.forEach((bean, method) -> ReflectionUtils.invokeMethod(method, bean, message));
        }
    }
    // Commit once per batch instead of once per record
    consumer.commitAsync();
}
}
Create the listener endpoint adapter
package cn.xiaobai.spring.boot.listener;
import org.springframework.kafka.config.KafkaListenerEndpoint;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.springframework.kafka.support.TopicPartitionOffset;
import org.springframework.kafka.support.converter.MessageConverter;
import java.util.Collection;
import java.util.regex.Pattern;
/**
 * Listener endpoint adapter
 *
 * @Author yangdaji
 * @Date 2023/11/5
 * @PackageName cn.xiaobai.spring.boot.listener
 * @ClassName KafkaPlusListenerAdapter
 */
public class KafkaPlusListenerAdapter implements KafkaListenerEndpoint {
private final BatchKafkaListener messageListener;
public KafkaPlusListenerAdapter(BatchKafkaListener messageListener) {
this.messageListener = messageListener;
}
@Override
public String getId() {
return null;
}
@Override
public String getGroupId() {
return null;
}
@Override
public String getGroup() {
return null;
}
@Override
public Collection<String> getTopics() {
return null;
}
@Override
public TopicPartitionOffset[] getTopicPartitionsToAssign() {
return messageListener.getTopicPartitionOffsets();
}
@Override
public Pattern getTopicPattern() {
return null;
}
@Override
public String getClientIdPrefix() {
return null;
}
@Override
public Integer getConcurrency() {
return null;
}
@Override
public Boolean getAutoStartup() {
return null;
}
@Override
public void setupListenerContainer(MessageListenerContainer messageListenerContainer, MessageConverter messageConverter) {
}
@Override
public boolean isSplitIterables() {
return false;
}
}
Custom binding utility
package cn.xiaobai.spring.boot.utils;
import cn.xiaobai.spring.boot.annotaion.OnMessage;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
/**
 * @Author yangdaji
 * @Date 2023/11/6
 * @PackageName cn.xiaobai.spring.boot.utils
 * @ClassName ProxyUtils
 */
public class ProxyUtils {
/**
 * Bind a subscriber bean's @OnMessage methods to a topic
 *
 * @param proxy            subscriber bean
 * @param topic            topic name
 * @param topicMessagesMap topic-to-handler registry
 */
public static void bindProxy(Object proxy, String topic, Map<String, Map<Object, Method>> topicMessagesMap) {
    // Reflectively collect the @OnMessage methods on the subscriber bean
    Method[] declaredMethods = proxy.getClass().getDeclaredMethods();
    for (Method declaredMethod : declaredMethods) {
        OnMessage onMessage = declaredMethod.getAnnotation(OnMessage.class);
        if (onMessage != null) {
            // Merge into the handlers already registered for this topic instead of overwriting them
            topicMessagesMap.computeIfAbsent(topic, t -> new HashMap<>()).put(proxy, declaredMethod);
        }
    }
}
}
That completes the integration. Run mvn install to publish the project to your local repository, then add it as a dependency in a new project and drive it with the annotations.
Configuration
kafka-plus:
  bootstrap-servers: localhost:9092 # Kafka address; when set alone, producer and consumer share it
  producer:
    bootstrap-servers: localhost:9092 # producer address
  consumer:
    bootstrap-servers: localhost:9092 # consumer address
    enable-auto-commit: false # disable auto-commit
    group-id: 1213 # consumer group
See the official documentation for the remaining parameters.
Usage example
package cn.xiaobai.spring.boot.demo;
import cn.xiaobai.spring.boot.annotaion.KafkaTopicsSubscriber;
import cn.xiaobai.spring.boot.annotaion.OnMessage;
// Meta-annotated with @Component, so component scanning registers it as a bean
@KafkaTopicsSubscriber(topics = "test")
public class Message {
    @OnMessage
    public void onMessage(String message) {
        System.out.println("received: " + message);
    }
}
}
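To see the subscriber fire, publish to the same topic with the KafkaTemplate that the starter auto-configures. A hypothetical runner (class name and message are illustrative):
package cn.xiaobai.spring.boot.demo;
import org.springframework.boot.CommandLineRunner;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
// Publishes one message on startup so the @OnMessage method above fires
@Component
public class MessagePublisher implements CommandLineRunner {
    private final KafkaTemplate<String, Object> kafkaTemplate;
    public MessagePublisher(KafkaTemplate<String, Object> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }
    @Override
    public void run(String... args) {
        kafkaTemplate.send("test", "hello kafka-plus");
    }
}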
The full integration has been published on Gitee; see xiaobai-spring-boot.
Official Spring Boot Integration
Configuration
spring:
  kafka:
    bootstrap-servers: localhost:9092 # broker address, defaults to localhost
    producer:
      bootstrap-servers: localhost:9092 # producer address
      key-serializer: org.apache.kafka.common.serialization.StringSerializer # key/value serializers, StringSerializer by default
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      bootstrap-servers: localhost:9092 # consumer address
      group-id: 1213
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer # key/value deserializers, StringDeserializer by default
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
Create a test class
package demo.boot;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
@Component
public class KafkaClientDemo {
@KafkaListener(topics = "test", groupId = "123")
public void onMessage(ConsumerRecord<String, Object> message) {
    System.out.println("consumed: " + message.value());
}
}
}
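The official starter also auto-configures a KafkaTemplate from the same spring.kafka.* properties, so the producer side needs no extra code beyond injecting it. A minimal sketch (class name is illustrative):
package demo.boot;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
// Producer counterpart: the starter builds this KafkaTemplate from spring.kafka.producer.*
@Component
public class KafkaProducerClient {
    private final KafkaTemplate<String, Object> kafkaTemplate;
    public KafkaProducerClient(KafkaTemplate<String, Object> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }
    public void send(String message) {
        kafkaTemplate.send("test", message);
    }
}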
Downloads
Aliyun mirror: https://mirrors.aliyun.com/apache/kafka/3.6.0/?spm=a2c6h.25603864.0.0.3d83126eRocLJk
Aliyun Drive: https://www.aliyundrive.com/s/zHsrsFsLPV7