使用@KafkaListener定时消费
代码示例
基于SpringBoot2.0.4版本,spring-kafka:2.1.7.RELEASE
- 消费者 KafkaTaskService.java
@Service
public class KafkaTaskService {

    private static final Logger log = LoggerFactory.getLogger(KafkaTaskService.class);

    @Autowired
    private KafkaListenerEndpointRegistry registry;

    /**
     * Batch listener for the "test-task" topic. The container is built by
     * {@code batchFactory} with autoStartup=false, so records are only consumed
     * inside the window opened/closed by the scheduled methods below.
     *
     * NOTE(review): groupId is "test-topic" while the topic is "test-task" —
     * the two look swapped; confirm the intended consumer-group name.
     *
     * @param recordList     one polled batch of records (bounded by max-poll-records)
     * @param acknowledgment manual-ack handle; offsets are committed only after acknowledge()
     */
    @KafkaListener(id = "test-task", topics = {"test-task"}, groupId = "test-topic", containerFactory = "batchFactory")
    public void listenFailEmail(List<ConsumerRecord<String, String>> recordList, Acknowledgment acknowledgment) {
        for (ConsumerRecord<String, String> record : recordList) {
            log.info("fail email-消息:【{}】。", record.toString());
        }
        // Commit the whole batch in one shot; without this the records would be
        // re-delivered after a rebalance or restart (enable-auto-commit=false).
        acknowledgment.acknowledge();
    }

    /**
     * Opens the consumption window (cron: 20:53:00 daily): starts the container
     * on first run, resumes it if a previous shutdownListener() paused it.
     */
    @Scheduled(cron = "0 53 20 * * ?")
    public void startListener() {
        log.info("开启监听");
        MessageListenerContainer container = registry.getListenerContainer("test-task");
        if (container == null) {
            // Listener not registered (e.g. id mismatch) — avoid an NPE in the scheduler thread.
            log.warn("listener container [test-task] not found");
            return;
        }
        if (!container.isRunning()) {
            container.start();
        }
        //恢复 — no-op when the container was never paused.
        container.resume();
    }

    /**
     * Closes the consumption window (cron: 20:54:00 daily). pause() keeps the
     * consumer alive (no rebalance) but stops delivering records until resume().
     */
    @Scheduled(cron = "0 54 20 * * ?")
    public void shutdownListener() {
        log.info("关闭监听");
        //暂停
        MessageListenerContainer container = registry.getListenerContainer("test-task");
        if (container == null) {
            log.warn("listener container [test-task] not found");
            return;
        }
        container.pause();
    }

    /**
     * Listener container factory for batch consumption. Containers built from it
     * do NOT start with the application context — startListener() starts them.
     *
     * @param configurer      Boot configurer that applies application.properties
     *                        settings (ack-mode, concurrency, ...)
     * @param consumerFactory the auto-configured consumer factory
     * @return batch-enabled, non-auto-starting container factory
     */
    @Bean("batchFactory")
    public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        //开启批量消费功能 — listener receives a List per poll instead of single records
        factory.setBatchListener(true);
        //不自动启动 — the @Scheduled methods own the container lifecycle
        factory.setAutoStartup(false);
        configurer.configure(factory, consumerFactory);
        return factory;
    }
}
cron="0 53 20 * * ?" 表示每天20:53:00开始消费
cron="0 54 20 * * ?" 表示每天20:54:00停止消费
- 生产者 JavaTest.java
public class JavaTest {

    /**
     * Sends three test messages to the "test-task" topic, blocking up to
     * 10 seconds per send so broker failures surface in the test.
     */
    @Test
    public void test_send() {
        DefaultKafkaProducerFactory<Object, Object> factory =
                new DefaultKafkaProducerFactory<>(producerProps());
        KafkaTemplate<Object, Object> template = new KafkaTemplate<>(factory);
        try {
            for (int i = 0; i < 3; i++) {
                ProducerRecord<Object, Object> record =
                        new ProducerRecord<>("test-task", "Hello" + i);
                try {
                    Thread.sleep(1000);
                    template.send(record).get(10, TimeUnit.SECONDS);
                    System.out.println("发送成功!");
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop sending — swallowing it
                    // would hide the interruption from the caller.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                    break;
                } catch (TimeoutException | ExecutionException e) {
                    e.printStackTrace();
                }
            }
        } finally {
            // Close the underlying KafkaProducer; otherwise its I/O thread and
            // sockets leak past the test.
            factory.destroy();
        }
    }

    /**
     * Minimal producer configuration for a local broker.
     *
     * @return producer properties (String key/value serialization, no retries)
     */
    private Map<String, Object> producerProps() {
        Map<String, Object> props = new HashMap<>(16);
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        // 16384 is the Kafka default batch.size (16 KiB); the original 16284 was a typo.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }
}
- application.properties
# kafka配置
spring.kafka.bootstrap-servers=127.0.0.1:9092
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.listener.ack-mode=manual
spring.kafka.consumer.max-poll-records=100
spring.kafka.consumer.auto-offset-reset=latest
#每个listener拥有一个处理线程
spring.kafka.listener.concurrency=1
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
- 测试结果
到20:54:00日志打印出"关闭监听",这时再往kafka发送消息,将不会被立即消费;由于pause()只是暂停拉取,消息会保留在topic中,等到下次"开启监听"(resume)时才会被消费。