Integrating Kafka with Spring Boot 2
1. Importing the dependency
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
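No <version> element is needed here: with the Spring Boot parent (or the spring-boot-dependencies BOM) in place, the spring-kafka version is managed for you.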
2. application.yml configuration
spring:
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    template:
      default-topic: producer
    listener:
      concurrency: 5
    consumer:
      group-id: myGroup
      client-id: 200
      max-poll-records: 200
      auto-offset-reset: earliest
    producer:
      batch-size: 1000
      retries: 3
      client-id: 200
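The template.default-topic entry is consumed by KafkaTemplate.sendDefault(...), which publishes without an explicit topic argument. A minimal sketch, assuming the KafkaTemplate<String, String> bean is wired up as in section 3 (the method name is illustrative):

@Autowired
private KafkaTemplate<String, String> kafkaTemplate;

// Publishes to the topic configured under spring.kafka.template.default-topic ("producer").
public void sendToDefaultTopic(String message) {
    kafkaTemplate.sendDefault(message);
}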
3. Producer
package com.itennishy.ycweb.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;

import java.util.concurrent.ExecutionException;

@Slf4j
@Component
public class Producer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends a message and blocks until the broker acknowledges it.
     */
    public RecordMetadata sendChannelMess(String topic, String message) {
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, message);
        RecordMetadata recordMetadata = null;
        try {
            // Blocking on get() turns the asynchronous send into a synchronous one.
            recordMetadata = future.get().getRecordMetadata();
            log.debug("Kafka send succeeded, partition: {}, offset: {}, topic: {}",
                    recordMetadata.partition(), recordMetadata.offset(), recordMetadata.topic());
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            log.error("Kafka send interrupted", e);
        } catch (ExecutionException e) {
            log.error("Kafka send failed", e);
        }
        return recordMetadata;
    }
}
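Because future.get() blocks until the broker acknowledges, the method above trades throughput for a synchronous result. A non-blocking sketch that registers callbacks on the same ListenableFuture instead (the method name sendChannelMessAsync is illustrative):

// Non-blocking variant: callbacks preserve the producer's asynchronous batching.
public void sendChannelMessAsync(String topic, String message) {
    kafkaTemplate.send(topic, message).addCallback(
            result -> log.debug("Kafka send succeeded, offset: {}",
                    result.getRecordMetadata().offset()),
            ex -> log.error("Kafka send failed", ex));
}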
4. Consumer
package com.itennishy.ycweb.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Slf4j
@Component
public class Consumer {

    // Hands records off to a worker pool so the listener thread returns quickly.
    // Note: offsets are auto-committed, so a record is considered consumed even if its task fails.
    ExecutorService pool = Executors.newFixedThreadPool(100);

    @KafkaListener(topics = {"events-p3", "room-stat"})
    public void receiveMessage(String message) {
        pool.execute(() -> {
            try {
                log.info("Kafka message received: {}, parsing complete!", message);
            } catch (Exception e) {
                log.error("Error: {}, exception class: {}", e.getMessage(), e.getClass());
            }
        });
    }

    // A List parameter requires a batch container factory; see the batchFactory bean in section 6.
    @KafkaListener(topics = "events-p3", containerFactory = "batchFactory")
    public void onMessage(List<String> crs) {
        for (String str : crs) {
            System.out.println("events-p3: " + str);
        }
    }

    @KafkaListener(topics = "test14")
    public void listenT1(ConsumerRecord<?, ?> cr) {
        System.out.println("listenT1 received message, topic:>>>" + cr.topic()
                + " offset:>>" + cr.offset() + " key:>>" + cr.key() + " value:>>" + cr.value());
    }
}
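One caveat: the fixed pool above is never shut down when the application stops. A hedged sketch of a cleanup hook to add to this class (requires import javax.annotation.PreDestroy, which ships with Spring Boot 2):

// Illustrative cleanup: lets queued tasks drain before the application context closes.
@PreDestroy
public void shutdownPool() {
    pool.shutdown();
}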
5. Writing the Controller
package com.itennishy.ycweb.controller;

import com.itennishy.ycweb.kafka.Producer;
import com.itennishy.ycweb.utils.JsonData;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/kafka")
@Slf4j
public class KafkaController {

    @Autowired
    private Producer producer;

    @RequestMapping(value = "/send", method = RequestMethod.GET)
    public JsonData sendMessage(String topic, String msg) {
        try {
            producer.sendChannelMess(topic, msg);
            return JsonData.BuildSuccess();
        } catch (Exception e) {
            log.error("Send error:", e);
            return JsonData.BuildError();
        }
    }
}
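With the application running, the endpoint can be exercised with a plain GET request such as /kafka/send?topic=test14&msg=hello; the listenT1 consumer above should then print the record.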
6. Kafka configuration class
package com.itennishy.ycweb.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaConsumerConfigs {

    @Value("${spring.kafka.bootstrap-servers}")
    private String servers;

    @Value("${spring.kafka.consumer.group-id}")
    private String groupId;

    @Value("${spring.kafka.consumer.max-poll-records}")
    private int maxPollRecords;

    // Container factory that delivers each poll result to the listener as a batch (List).
    @Bean
    KafkaListenerContainerFactory<?> batchFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
        factory.setBatchListener(true);
        return factory;
    }

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 120000);
        props.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 180000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }
}
The batchFactory bean only takes effect when a listener opts into it through the containerFactory attribute of @KafkaListener, as on the consumer's onMessage method in section 4 and in the sketch below.
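A minimal batch listener sketch to pair with this factory (the method and variable names are illustrative); with setBatchListener(true), each poll delivers up to max-poll-records records in a single call:

@KafkaListener(topics = "events-p3", containerFactory = "batchFactory")
public void onBatch(List<ConsumerRecord<String, String>> records) {
    // The whole poll result arrives as one list.
    for (ConsumerRecord<String, String> record : records) {
        log.info("batch record, offset: {}, value: {}", record.offset(), record.value());
    }
}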