1、小试身手-命令行
kafka producer默认使用轮询(round-robin)方式将消息发送到各分区(在既不指定分区也不指定key的情况下)
启动3个消费者(注意:指定 --partition 时为手动分配分区,消费组不参与再均衡,--group 主要用于位移提交):
./kafka-console-consumer.sh --bootstrap-server 192.168.78.169:9092,192.168.78.169:9093,192.168.78.169:9094 --topic test-topic --partition 0 --group group1
./kafka-console-consumer.sh --bootstrap-server 192.168.78.169:9092,192.168.78.169:9093,192.168.78.169:9094 --topic test-topic --partition 1 --group group1
./kafka-console-consumer.sh --bootstrap-server 192.168.78.169:9092,192.168.78.169:9093,192.168.78.169:9094 --topic test-topic --partition 2 --group group1
启动1个生产者:
./kafka-console-producer.sh --broker-list 192.168.78.169:9092,192.168.78.169:9093,192.168.78.169:9094 --topic test-topic
观察3个消费者,消息被轮流均摊:
2、KafkaClient java客户端
父pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.java.ashare</groupId>
<artifactId>JavaAction</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>pom</packaging>
<name>JavaAction</name>
<description>JavaAction</description>
<modules>
<!-- spring boot kafka集成 -->
<module>SpringBootKafka</module>
</modules>
<properties>
<java.version>1.8</java.version>
</properties>
<!-- spring boot启动父依赖 -->
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.0.RELEASE</version>
<relativePath/>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
子pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.java.ashare</groupId>
<artifactId>JavaAction</artifactId>
<version>0.0.1-SNAPSHOT</version>
</parent>
<artifactId>SpringBootKafka</artifactId>
<name>SpringBootKafka</name>
<description>SpringBootKafka</description>
<dependencies>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.6</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
<mainClass>com.java.ashare.config.client.ConfigClientApplication</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>
生产者Producer.java
package com.java.ashare.kafka.client;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
public class Producer {
public void send(String topic, String value) {
Properties properties = new Properties();
// kafka地址,集群使用逗号分隔,不用列出所有broker地址,列出两三个提高可用性
properties.put("bootstrap.servers", "192.168.78.169:9092,192.168.78.169:9093,192.168.78.169:9094");
// 序列化器
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(properties);
// 发送消息,消息会被缓存起来,以轮巡方式发送到各个分区中(均摊)
kafkaProducer.send(new ProducerRecord<String, String>(topic, value));
// 真正的发送消息到broker,如果开启事务就无需调用flush,事务提交会刷新缓存
kafkaProducer.flush();
kafkaProducer.close();
}
public void send1(String topic, String key, String value) {
Properties properties = new Properties();
properties.put("bootstrap.servers", "192.168.78.169:9092,192.168.78.169:9093,192.168.78.169:9094");
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializ