1、以下是 Kafka 的安装与使用教程,亲测可用:
下载
http://kafka.apache.org/downloads.html
解压
tar -zxvf kafka_2.10-0.8.1.1.tgz
启动服务
首先启动zookeeper服务
bin/zookeeper-server-start.sh config/zookeeper.properties
启动Kafka
bin/kafka-server-start.sh config/server.properties >/dev/null 2>&1 &
创建topic
创建一个"test"的topic,一个分区一个副本
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
查看主题
bin/kafka-topics.sh --list --zookeeper localhost:2181
查看主题详情
bin/kafka-topics.sh --describe --zookeeper localhost:2181 --topic test
删除主题
bin/kafka-run-class.sh kafka.admin.TopicCommand --delete --topic test1 --zookeeper 192.168.1.161:2181
创建生产者 producer
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
创建消费者 consumer
bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
参数使用帮助信息查看:
生产者参数查看:bin/kafka-console-producer.sh
消费者参数查看:bin/kafka-console-consumer.sh
2,producer的java api接口
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
public class ProducerDemo {
public static void main(String[] args) throws Exception {
Properties props = new Properties();
props.put("zookeeper.connect", "192.168.2.129:2181");
props.put("metadata.broker.list","192.168.2.129:9092");
props.put("serializer.class", "kafka.serializer.StringEncoder");
ProducerConfig config = new ProducerConfig(props);
Producer producer = new Producer(config);
// for (int i = 1; i <= 100; i++) {
// Thread.sleep(500);
// KeyedMessage data = new KeyedMessage(
// "test", "", "this is message");
// producer.send(data);
//// producer.send(new KeyedMessage("test",
//// "dfdfd111 " + i + " dfdf"));
// }
KeyedMessage data = new KeyedMessage(
"test", "", "this is message1");
producer.send(data);
}
}
producer = new Producer(config);
// for (int i = 1; i <= 100; i++) {
// Thread.sleep(500);
// KeyedMessage data = new KeyedMessage(
// "test", "", "this is message");
// producer.send(data);
//// producer.send(new KeyedMessage("test",
//// "dfdfd111 " + i + " dfdf"));
// }
KeyedMessage data = new KeyedMessage(
"test", "", "this is message1");
producer.send(data);
}
}
3,consumer的java api接口
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
public class ConsumerDemo {
// private static final String topic = "mysons";
// private static final Integer threads = 1;
public static void main(String[] args) {
Properties props = new Properties();
props.put("zookeeper.connect", "192.168.2.129:2181");
props.put("group.id", "1");
props.put("auto.offset.reset", "smallest");
ConsumerConfig config = new ConsumerConfig(props);
ConsumerConnector consumer =Consumer.createJavaConsumerConnector(config);
Map topicCountMap = new HashMap();
// topicCountMap.put(topic, 1);
topicCountMap.put("test123", 1);
// topicCountMap.put("myboys", 1);
Map>> consumerMap = consumer.createMessageStreams(topicCountMap);
List> streams = consumerMap.get("test123");
for(final KafkaStream kafkaStream : streams){
new Thread(new Runnable() {
@Override
public void run() {
for(MessageAndMetadata mm : kafkaStream){
String msg = new String(mm.message());
System.out.println(msg);
}
}
}).start();
}
}
}
topicCountMap = new HashMap();
// topicCountMap.put(topic, 1);
topicCountMap.put("test123", 1);
// topicCountMap.put("myboys", 1);
Map>> consumerMap = consumer.createMessageStreams(topicCountMap);
List> streams = consumerMap.get("test123");
for(final KafkaStream kafkaStream : streams){
new Thread(new Runnable() {
@Override
public void run() {
for(MessageAndMetadata mm : kafkaStream){
String msg = new String(mm.message());
System.out.println(msg);
}
}
}).start();
}
}
}