1. Add the spring-kafka dependency to pom.xml
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
2. Configure Kafka in application.properties
spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.consumer.group-id=test-consume-group
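These two properties are enough for this demo because Spring Boot falls back to String (de)serializers by default. If you prefer to spell the settings out, the sketch below lists commonly tuned properties; the values shown are assumptions for a local demo, not required settings.

# Optional sketch: explicit serializers and offset reset policy (demo values)
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.auto-offset-reset=earliest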
3. Producer code
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;

import java.util.UUID;

/**
 * Scheduled sending is enabled with the @EnableScheduling annotation.
 */
@Component
@EnableScheduling
public class KafkaProducer {

    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    /**
     * Sends a message every second.
     */
    @Scheduled(cron = "0/1 * * * * ?")
    public void send() {
        String message = UUID.randomUUID().toString();
        ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send("test1", message);
        future.addCallback(
                result -> System.out.println("send - message sent successfully: " + message),
                throwable -> System.out.println("message failed to send: " + message));
    }
}
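Beyond send(topic, value), KafkaTemplate also has an overload that takes a message key, so that records with the same key always land on the same partition. A rough sketch, reusing the kafkaTemplate field and the test1 topic from above (the method name and key parameter are illustrative only):

    // Sketch: keyed send; the callback reports which partition the record went to
    public void sendWithKey(String key, String message) {
        kafkaTemplate.send("test1", key, message).addCallback(
                result -> System.out.println("sent to partition " + result.getRecordMetadata().partition()),
                throwable -> System.out.println("send failed: " + throwable.getMessage()));
    }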
4. Consumer code
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.stereotype.Component;

/**
 * @KafkaListener can specify the topics, partitions, consumer group, offsets, etc.
 */
@Component
public class KafkaConsumer {

    /**
     * Listens to two topics via explicit partition assignments
     * (topics and topicPartitions cannot be combined in one @KafkaListener):
     * - test1: only partition 0, starting the consumer at offset 60
     * - test2: partitions 0 and 1
     */
    @KafkaListener(id = "test", topicPartitions = {
            @TopicPartition(topic = "test1", partitionOffsets = @PartitionOffset(partition = "0", initialOffset = "60")),
            @TopicPartition(topic = "test2", partitions = {"0", "1"})
    })
    public void listen(ConsumerRecord<?, String> record) {
        String value = record.value();
        System.out.println(value);
        System.out.println(record);
    }
}
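If per-partition control is not needed, a listener can simply name its topics and consumer group and let Kafka assign the partitions. A minimal sketch (the groupId reuses test-consume-group from application.properties; the method name is illustrative):

    // Sketch: simplest listener form, payload converted straight to String
    @KafkaListener(topics = "test1", groupId = "test-consume-group")
    public void listenSimple(String message) {
        System.out.println("received: " + message);
    }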
5. Start the Spring Boot application and the console will show the producer and consumer output
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class KafkaTest {

    public static void main(String[] args) {
        SpringApplication.run(KafkaTest.class, args);
    }
}
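Note that the test1 and test2 topics must already exist (or the broker must allow auto-creation). One way to keep the demo self-contained is to declare them as NewTopic beans so the auto-configured KafkaAdmin creates them at startup. A minimal sketch, assuming spring-kafka 2.3+ (for TopicBuilder) and a local single-broker cluster; the class name is illustrative:

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

// Sketch: declare the demo topics so they are created on startup
@Configuration
public class KafkaTopicConfig {

    @Bean
    public NewTopic test1() {
        return TopicBuilder.name("test1").partitions(1).replicas(1).build();
    }

    @Bean
    public NewTopic test2() {
        // the listener above reads partitions 0 and 1, so test2 needs at least 2 partitions
        return TopicBuilder.name("test2").partitions(2).replicas(1).build();
    }
}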