创建maven工程:maven-kafka
添加包依赖:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.10.1.0</version>
</dependency>
1. 生产者
package com.gary.kafka;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import joptsimple.internal.Strings;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
@SuppressWarnings("deprecation")
public class KafkaProducer extends Thread{
private String topic;
private Producer producer;
private String message;
public KafkaProducer(String topic){
this.topic = topic;
producer = createProducer();
}
public void setMessage(String message) {
this.message = message;
}
private Producer createProducer(){
Properties properties = new Properties();
// 声明zk
properties.put("zookeeper.connect", "192.168.75.101:2181");
// 声明序列化类
properties.put("serializer.class", StringEncoder.class.getName());
// 声明kafka broker
properties.put("metadata.broker.list", "192.168.75.101:9093");
return new Producer<Integer, String>(new ProducerConfig(properties));
}
@Override
public void run() {
if (!Strings.isNullOrEmpty(message)){
producer.send(new KeyedMessage<Integer, String>(topic, message));
}
}
public static void main(String[] args){
String[] str = {"zhangsan","lisi","wangwu","zhaoliu","sunqi"};
for (int i = 0; i < str.length; i++) {
KafkaProducer producer = new KafkaProducer("kafkademo");
producer.setMessage(str[i]);
producer.start();
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}
2. 消费者
package com.gary.kafka;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
public class KafkaConsumer extends Thread {

    /** Index of this consumer thread, used only for log output. */
    private final int threadNumber;
    /** The partition stream this thread drains. */
    private final KafkaStream<byte[], byte[]> stream;

    public KafkaConsumer(KafkaStream<byte[], byte[]> stream, int threadNumber) {
        this.stream = stream;
        this.threadNumber = threadNumber;
    }

    @Override
    public void run() {
        System.out.println("-------thread:" + threadNumber + "----run-----");
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
        // Blocks in hasNext() until the next message arrives.
        while (iterator.hasNext()) {
            // Fix: decode with an explicit charset; new String(bytes) uses the
            // platform default and can garble non-ASCII payloads.
            String message = new String(iterator.next().message(), StandardCharsets.UTF_8);
            System.out.println("threadNumber:" + threadNumber + " 接收到:" + message);
        }
    }

    /**
     * Creates the high-level consumer connector against ZooKeeper.
     *
     * @return a connector from which per-topic message streams can be obtained
     */
    static public ConsumerConnector createConsumer() {
        Properties properties = new Properties();
        properties.put("zookeeper.connect", "192.168.75.101:2181");
        properties.put("group.id", "group");
        // If the session times out, the consumer may be considered dead and trigger a rebalance.
        properties.put("zookeeper.session.timeout.ms", "40000");
        // How far a ZK follower may lag behind the leader.
        properties.put("zookeeper.sync.time.ms", "200");
        // Interval for auto-committing offsets to ZooKeeper.
        properties.put("auto.commit.interval.ms", "1000");
        // Fix: removed redundant cast — createJavaConsumerConnector already
        // returns ConsumerConnector.
        return Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));
    }

    public static void main(String[] args) {
        String topic = "kafkademo";
        ConsumerConnector consumer = KafkaConsumer.createConsumer();
        Map<String, Integer> topicMap = new HashMap<String, Integer>();
        // Request 2 streams so several threads can consume the same topic in parallel.
        topicMap.put(topic, 2);
        Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumer.createMessageStreams(topicMap);
        List<KafkaStream<byte[], byte[]>> streams = messageStreams.get(topic);
        int threadNumber = 0;
        // Fix: iterate with the full generic type instead of the raw KafkaStream.
        for (final KafkaStream<byte[], byte[]> stream : streams) {
            KafkaConsumer kafkaConsumer = new KafkaConsumer(stream, threadNumber);
            kafkaConsumer.start();
            threadNumber++;
        }
    }
}