消费端（Consumer — reads messages from the topic）
public class KafkaConsumerDemo extends Thread{
private String topic;
/**
 * Creates a consumer thread bound to one topic.
 *
 * @param topic name of the Kafka topic this thread will subscribe to
 */
public KafkaConsumerDemo(String topic){
    // The implicit Thread() super-constructor runs automatically; we only
    // need to remember the subscription target.
    this.topic = topic;
}
@Override
public void run() {
    // Consume messages from the configured topic forever and print each one.
    ConsumerConnector consumer = createConsumer();
    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
    topicCountMap.put(topic, 1); // request exactly one stream (one consumer thread) for this topic
    Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumer.createMessageStreams(topicCountMap);
    // The single stream requested above.
    KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0);
    ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
    System.out.println(topic+"队列接收开始");
    // FIX: the original called iterator.next().message() here before the loop,
    // which (a) silently consumed and discarded the first message and
    // (b) printed the byte[] reference (array toString) instead of its content.
    // All messages are now decoded uniformly inside the loop.
    while(iterator.hasNext()){
        // Decode with an explicit charset; new String(bytes) would depend on
        // the platform default encoding.
        String message = new String(iterator.next().message(), java.nio.charset.StandardCharsets.UTF_8);
        System.out.println(topic+"接收到: " + message);
    }
}
/**
 * Builds a legacy (ZooKeeper-based) high-level consumer connector.
 *
 * @return a connected {@code ConsumerConnector} joined to group "xxx"
 */
private ConsumerConnector createConsumer() {
    Properties prop = new Properties();
    // NOTE(review): addresses and group id are hard-coded; externalize them
    // before using this outside of local testing.
    prop.put("zookeeper.connect", "127.0.0.1:2181");
    // FIX: removed "serializer.class" — that is a producer-side setting and
    // has no effect on a consumer configuration; keeping it was misleading.
    prop.put("group.id", "xxx");
    prop.put("zookeeper.session.timeout.ms", "20000");
    prop.put("zookeeper.sync.time.ms", "10");
    // Offsets are committed automatically on the default interval.
    prop.put("auto.commit.enable", "true");
    return Consumer.createJavaConsumerConnector(new ConsumerConfig(prop));
}
生产端（Producer — writes messages to the topic）
public class KafkaProducer extends Thread{
private String topic;
/**
 * Creates a producer thread bound to one topic.
 *
 * @param topic name of the Kafka topic this thread will publish to
 */
public KafkaProducer(String topic){
    // Thread's no-arg super-constructor is invoked implicitly; just keep the
    // destination topic.
    this.topic = topic;
}
@Override
public void run() {
    // Build one sample Response, serialize it once, then publish it to the
    // topic in an endless loop (load-test style sender).
    // FIX: the original declared the raw type `Producer`; use the
    // parameterized type returned by initProducer().
    Producer<String, byte[]> producer = initProducer();
    SysHeader sysHeader = new SysHeader();
    sysHeader.setCertficateNo("1235");
    sysHeader.setChannelNo(topic);
    // Sample payload standing in for a real invocation result.
    Object returnObj = "dopay返回";
    try {
        byte[] resBody = com.tansun.util.HessianUtil.serialize(returnObj);
        Response response = new Response(sysHeader, resBody);
        // The payload never changes inside the loop, so serialize it once
        // instead of on every iteration.
        byte[] bytes = SerializationUtils.serialize(response);
        while (true) {
            // Key is null: the partitioner picks the partition.
            KeyedMessage<String, byte[]> message = new KeyedMessage<String, byte[]>(topic, null, bytes);
            producer.send(message);
        }
    } catch (Exception e1) {
        e1.printStackTrace();
    } finally {
        // FIX: the original never closed the producer, leaking its network
        // resources when the loop terminates via an exception.
        producer.close();
    }
}
/**
 * Configures and creates a Kafka producer with String keys and raw byte[]
 * payloads.
 *
 * @return a producer connected to the local broker at 127.0.0.1:9092
 */
private Producer<String, byte[]> initProducer(){
    Properties props = new Properties();
    // Payload bytes are sent as-is.
    props.put("serializer.class", "kafka.serializer.DefaultEncoder");
    // Message keys are plain strings.
    props.put("key.serializer.class", "kafka.serializer.StringEncoder");
    // Broker used to bootstrap cluster metadata.
    props.put("metadata.broker.list", "127.0.0.1:9092");
    return new Producer<String, byte[]>(new ProducerConfig(props));
}
用于测试 Kafka 生产、消费队列是否正常（used to verify that Kafka producing and consuming work correctly）