- 1. Create the project; the pom.xml dependencies are as follows
<dependencies>
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <version>2.12.6</version>
    </dependency>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.12</artifactId>
        <version>1.1.0</version>
    </dependency>
</dependencies>
- 2. IDEA may also need the matching Scala SDK configured; the SDK can be downloaded from the address below
https://downloads.lightbend.com/scala/2.12.6/scala-2.12.6.zip
- 3. Create a Scala object. This code creates a Kafka topic; the code is as follows
import java.util.Properties

import kafka.admin.{AdminUtils, RackAwareMode}
import kafka.utils.ZkUtils
import org.apache.kafka.common.security.JaasUtils

object TestKafka {
  def main(args: Array[String]): Unit = {
    // Connect to ZooKeeper (30s session timeout, 30s connection timeout)
    val zkUtils = ZkUtils.apply("t0:2181", 30000, 30000, JaasUtils.isZkSecurityEnabled)
    // Create the topic "newTopic" with 1 partition and a replication factor of 1
    AdminUtils.createTopic(zkUtils, "newTopic", 1, 1, new Properties(), RackAwareMode.Enforced)
    zkUtils.close()
  }
}
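Note that if this program is run a second time, createTopic fails because the topic already exists. A small guard can be added around the call; this is only a sketch using AdminUtils.topicExists from the same kafka.admin package, and the placement inside main is illustrative:

    // Sketch: skip creation when the topic is already there (assumes the zkUtils from above)
    if (!AdminUtils.topicExists(zkUtils, "newTopic")) {
      AdminUtils.createTopic(zkUtils, "newTopic", 1, 1, new Properties(), RackAwareMode.Enforced)
    }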
After running it, list all topics on the server:
/opt/cloudera/parcels/KAFKA-3.0.0-1.3.0.0.p0.40/lib/kafka/bin/kafka-topics.sh --list --zookeeper t0:2181
and you will see the newly created topic.
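The same check can also be done from Scala instead of the shell. A minimal sketch using zkUtils.getAllTopics(), reusing the connection parameters from step 3:

    // Sketch: list all topics registered in ZooKeeper and print them
    val zkUtils = ZkUtils.apply("t0:2181", 30000, 30000, JaasUtils.isZkSecurityEnabled)
    zkUtils.getAllTopics().foreach(println)   // "newTopic" should appear in the output
    zkUtils.close()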
- 4. Producer code
First start a console consumer on Linux to observe the effect of the producer code:
/opt/cloudera/parcels/KAFKA-3.0.0-1.3.0.0.p0.40/lib/kafka/bin/kafka-console-consumer.sh --zookeeper t0:2181 --from-beginning --topic newTopic
Once it is up, the console waits; then run the following producer code
import java.util.Properties

import kafka.javaapi.producer.Producer
import kafka.producer.{KeyedMessage, ProducerConfig}

object TestKafka {
  def main(args: Array[String]): Unit = {
    testProducer()
  }

  def testProducer(): Unit = {
    val props = new Properties()
    // Serialize message values as strings
    props.put("serializer.class", "kafka.serializer.StringEncoder")
    // Broker list used to fetch metadata
    props.put("metadata.broker.list", "t0:9092,t1:9092,t2:9092")
    // Wait for the leader to acknowledge each write
    props.put("request.required.acks", "1")
    val config = new ProducerConfig(props)
    val producer = new Producer[String, String](config)
    // Send 100 messages, one per second
    (1 to 100).foreach { i =>
      print(".")
      val msg = new KeyedMessage[String, String]("newTopic", "key", "msg" + i)
      producer.send(msg)
      Thread.sleep(1000)
    }
    producer.close()
  }
}
You will then see messages continuously arriving for the topic in the Linux console.
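Note that the kafka.producer / kafka.javaapi.producer Scala clients are deprecated in Kafka 1.1.0. Below is a minimal sketch of the same loop with the newer org.apache.kafka.clients.producer API, which is pulled in as a dependency of kafka_2.12; the broker list and topic are reused from above, while the object name and other settings are only illustrative:

import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

object TestNewProducer {
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.put("bootstrap.servers", "t0:9092,t1:9092,t2:9092")
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("acks", "1")
    val producer = new KafkaProducer[String, String](props)
    (1 to 100).foreach { i =>
      // ProducerRecord(topic, key, value)
      producer.send(new ProducerRecord[String, String]("newTopic", "key", "msg" + i))
      Thread.sleep(1000)
    }
    producer.close()
  }
}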
- 5. Consumer code
Run the following code
import java.util.Properties

import kafka.consumer.{Consumer, ConsumerConfig}

import scala.collection.JavaConversions._

object TestKafka {
  def main(args: Array[String]): Unit = {
    testConsumer()
  }

  def testConsumer(): Unit = {
    val topic = "newTopic"
    val props = new Properties()
    // ZooKeeper ensemble used by the old (high-level) consumer
    props.put("zookeeper.connect", "t0:2181,t1:2181,t2:2181")
    props.put("group.id", "testGroup")
    val consumerConnector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props))
    // Request one stream (thread) for this topic
    val topicCountMap = Map(topic -> new Integer(1))
    val messageStreams = consumerConnector.createMessageStreams(topicCountMap)
    val stream = messageStreams.get(topic).get(0)
    val iterator = stream.iterator()
    // Block and print every message as it arrives
    while (iterator.hasNext) {
      val msg = new String(iterator.next.message)
      println(msg)
    }
  }
}
Then start a producer on Linux:
/opt/cloudera/parcels/KAFKA-3.0.0-1.3.0.0.p0.40/lib/kafka/bin/kafka-console-producer.sh --broker-list t0:9092 --topic newTopic
Once it is up, type some text into the producer shell,
and the consumer code you just wrote will consume that text.
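Like the old producer, the ZooKeeper-based high-level consumer is deprecated in Kafka 1.1.0. Below is a minimal sketch of the equivalent loop with the newer org.apache.kafka.clients.consumer API; the bootstrap servers and topic are reused from above, while the object name and the group id "testGroupNew" are only placeholders:

import java.util.{Collections, Properties}
import org.apache.kafka.clients.consumer.KafkaConsumer
import scala.collection.JavaConversions._

object TestNewConsumer {
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.put("bootstrap.servers", "t0:9092,t1:9092,t2:9092")
    props.put("group.id", "testGroupNew")
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("auto.offset.reset", "earliest")
    val consumer = new KafkaConsumer[String, String](props)
    consumer.subscribe(Collections.singletonList("newTopic"))
    while (true) {
      // Poll with a 1-second timeout and print whatever arrived
      val records = consumer.poll(1000)
      records.foreach(r => println(r.value()))
    }
  }
}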