Kafka test classes

Consumer

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

public class KafkaConsumerDemo extends Thread {

    private String topic;

    public KafkaConsumerDemo(String topic) {
        super();
        this.topic = topic;
    }

    @Override
    public void run() {
        ConsumerConnector consumer = createConsumer();
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, 1); // use a single stream (thread) for this topic
        Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams = consumer.createMessageStreams(topicCountMap);
        // Alternatively, pass a StringDecoder for key and value:
        // consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);

        KafkaStream<byte[], byte[]> stream = messageStreams.get(topic).get(0); // the single stream created above
        ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
        System.out.println(topic + " queue: start receiving");
        while (iterator.hasNext()) {
            String message = new String(iterator.next().message());
            System.out.println(topic + " received: " + message);
        }
    }

    private ConsumerConnector createConsumer() {
        Properties prop = new Properties();
        prop.put("zookeeper.connect", "127.0.0.1:2181");
        prop.put("group.id", "xxx");
        //prop.put("auto.offset.reset", "smallest");
        prop.put("zookeeper.session.timeout.ms", "20000");
        prop.put("zookeeper.sync.time.ms", "10");
        prop.put("auto.commit.enable", "true");
        return Consumer.createJavaConsumerConnector(new ConsumerConfig(prop));
    }
}
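Note that the producer below sends Java-serialized Response objects rather than plain strings, so the new String(...) conversion above will print unreadable bytes. A minimal sketch of a receive loop that deserializes instead, assuming the same commons-lang SerializationUtils used by the producer and the project's Response class:

    while (iterator.hasNext()) {
        byte[] payload = iterator.next().message();
        // assumes the payload was produced with SerializationUtils.serialize(response)
        Response response = (Response) SerializationUtils.deserialize(payload);
        System.out.println(topic + " received: " + response);
    }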

Producer

import java.util.Properties;

import org.apache.commons.lang3.SerializationUtils; // or org.apache.commons.lang.SerializationUtils

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

// SysHeader, Response and com.tansun.util.HessianUtil are project-specific classes.
public class KafkaProducer extends Thread {

    private String topic;

    public KafkaProducer(String topic) {
        super();
        this.topic = topic;
    }

    @Override
    public void run() {
        Producer<String, byte[]> producer = initProducer();
        int i = 0;
        SysHeader sysHeader = new SysHeader();
        sysHeader.setCertficateNo("1235");
        sysHeader.setChannelNo(topic);
        Object returnObj = "dopay response"; // method.invoke(object, args[0]);
        byte[] resBody;
        try {
            resBody = com.tansun.util.HessianUtil.serialize(returnObj);
            Response response = new Response(sysHeader, resBody);
            while (true) {
                byte[] bytes = SerializationUtils.serialize(response);
                // no key, so the producer picks the partition
                KeyedMessage<String, byte[]> message = new KeyedMessage<String, byte[]>(topic, null, bytes);
                producer.send(message);
                //TimeUnit.SECONDS.sleep(1);
                i++;
            }
        } catch (Exception e1) {
            e1.printStackTrace();
        }
    }

    private Producer<String, byte[]> initProducer() {
        Properties properties = new Properties();
        // value serializer: send raw byte[]
        properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
        // key serializer
        properties.put("key.serializer.class", "kafka.serializer.StringEncoder");
        // Kafka broker list
        properties.put("metadata.broker.list", "127.0.0.1:9092");
        ProducerConfig config = new ProducerConfig(properties);
        return new Producer<String, byte[]>(config);
    }
}

These classes are used to test whether producing to and consuming from a Kafka queue works normally.
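A minimal way to run the test is a small main method that starts one producer thread and one consumer thread against the same topic. The class name KafkaTest and the topic name "test" below are placeholders, and the topic must already exist on the broker (or topic auto-creation must be enabled):

    public class KafkaTest {

        public static void main(String[] args) {
            String topic = "test"; // placeholder topic name
            new KafkaProducer(topic).start();     // keeps sending serialized Response objects
            new KafkaConsumerDemo(topic).start(); // prints whatever arrives on the topic
        }
    }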
