// Implementation outline: Step 1: build the connection Properties, create the consumer context object ConsumerConfig, and obtain a ConsumerConnector through Consumer.createJavaConsumerConnector();
// Step 2: create the message streams via connector.createMessageStreams(), then fetch the list of streams with get(); the number of streams in the list equals the thread count. Iterate over the streams to pull out the data.
// Step 3: shut down the connector.
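// Note: this demo uses the legacy ZooKeeper-based high-level consumer API
// (kafka.javaapi.consumer), which predates the org.apache.kafka.clients
// consumer introduced in Kafka 0.9.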
package com.ibeifeng.spark.kafka;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.Decoder;
import kafka.serializer.StringDecoder;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
* Created by haojin on 4/27/2018.
*/
public class ConsumerDemo2 {
// The ConsumerConnector is the entry point for consuming data
private ConsumerConnector connector = null;
private int numThread = 0;
private String topicName = null;
public ConsumerDemo2(String groupid, String zkUrl, boolean largest, String topicName, int threadNum){
this.numThread = threadNum;
this.topicName = topicName;
// Build the consumer connection properties
Properties prop = new Properties();
prop.put("group.id",groupid);
prop.put("zookeeper.connect",zkUrl);
if(largest){
prop.put("auto.offset.reset","largest");
}else{
prop.put("auto.offset.reset","smallest");
}
prop.put("auto.commit.interval.ms","2000");
// Create the consumer context object
ConsumerConfig consumerConfig = new ConsumerConfig(prop);
this.connector = Consumer.createJavaConsumerConnector(consumerConfig);
}
// Consume the data streams
public void run(){
Map<String, Integer> topicCountMap = new HashMap<>();
topicCountMap.put(topicName,numThread);
StringDecoder keyDecoder = new StringDecoder(null);
StringDecoder valueDecoder = new StringDecoder(null);
// Create the message streams through the connector. It takes three arguments: topicCountMap maps the topic name to a thread count (the returned List holds one stream per thread), and the last two arguments specify how to decode each message's key and value.
Map<String, List<KafkaStream<String,String>>> messageStreams = connector.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
// Get the list of streams for this topic
List<KafkaStream<String, String>> kafkaStreams = messageStreams.get(topicName);
// Iterate over the streams, starting one consumer thread per stream
int k = 0;
for(final KafkaStream<String, String> stream : kafkaStreams){
new Thread(new Runnable() {
@Override
public void run() {
int count = 0;
String threadName = Thread.currentThread().getName();
ConsumerIterator<String, String> iter = stream.iterator(); // each thread owns one stream, and one stream yields many messages
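// hasNext() blocks waiting for the next message (consumer.timeout.ms
// defaults to -1, i.e. wait indefinitely); it returns false after shutdown().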
while(iter.hasNext()){
MessageAndMetadata<String, String> next = iter.next();
StringBuilder sb = new StringBuilder();
sb.append("topic=").append(next.topic());
sb.append(", key=").append(next.key());
sb.append(", message=").append(next.message());
sb.append(", partition=").append(next.partition());
sb.append(", offset=").append(next.offset());
System.out.println("message #" + count + ": " + sb);
count++;
}
System.out.println("线程["+threadName+"] 已经运行了"+count+"条数据");
}
},"Thread" + k++).start();
}
}
// Shut down the connector
public void shutdown(){
if(this.connector != null){
System.out.println("关闭consumer连接");
this.connector.shutdown(); //connector自带的shutdown
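// shutdown() releases the ZooKeeper connection and unblocks the stream
// iterators, so the consumer threads' hasNext() loops terminate.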
}
}
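/*
 * A minimal smoke test, assuming a local Kafka 0.8.x installation, the
 * ZooKeeper chroot used below ("spark.ibeifeng.com:2181/kafka"), and a broker
 * listening on the default port 9092 (all of these are this demo's example
 * values, not requirements):
 *
 *   bin/kafka-topics.sh --create --zookeeper spark.ibeifeng.com:2181/kafka \
 *       --replication-factor 1 --partitions 2 --topic beifeng0
 *   bin/kafka-console-producer.sh --broker-list spark.ibeifeng.com:9092 \
 *       --topic beifeng0
 *
 * Messages typed into the console producer should then appear in this
 * consumer's output.
 */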
public static void main(String[] args) throws InterruptedException {
ConsumerDemo2 consumer = new ConsumerDemo2("kafka_2","spark.ibeifeng.com:2181/kafka",true,"beifeng0",2);
consumer.run();
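// let the consumer threads run for 50 seconds before shutting down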
Thread.sleep(50000);
consumer.shutdown();
}
}