Dependencies:
<dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_2.11</artifactId> <version>2.4.4</version> </dependency>
<!-- The code below calls the Kafka client API (org.apache.kafka.clients.*) directly, which also requires kafka-clients: -->
<dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka-clients</artifactId> <version>2.0.0</version> </dependency>
Producer:
package com.cmb

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

/**
 * Minimal Kafka producer demo: sends the strings "1" .. "100" as values
 * (no key) to the "test" topic on a single hard-coded broker.
 */
object Producer {
  def main(args: Array[String]): Unit = {
    val brokerList = "192.168.0.123:9092"
    val topic      = "test"

    val props = new Properties()
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

    val producer = new KafkaProducer[String, String](props)
    try {
      // Explicit type parameters: the keyless ProducerRecord constructor leaves K
      // unconstrained, so we pin [String, String] rather than rely on inference.
      for (i <- 1 to 100) {
        producer.send(new ProducerRecord[String, String](topic, i.toString))
      }
    } finally {
      // close() flushes any buffered records before releasing resources; the
      // original skipped cleanup if send() threw. (Also removed an unused `var num`.)
      producer.close()
    }
  }
}
Consumer:
package com.cmb

import java.util.{Collections, Properties}

import org.apache.kafka.clients.consumer.KafkaConsumer

import scala.collection.JavaConverters._

/**
 * Minimal Kafka consumer demo: subscribes to the "test" topic with group id
 * "test" and prints offset/key/value for every record, polling forever.
 */
object Customer {
  def main(args: Array[String]): Unit = {
    val brokerList = "192.168.0.123:9092"
    val topic      = "test"

    val props = new Properties()
    props.put("bootstrap.servers", brokerList)
    // Deserializers are configured by class name, so no serialization import is needed
    // (the original imported StringSerializer but never used it).
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("group.id", "test")
    // Start from the earliest offset when the group has no committed position yet.
    props.put("auto.offset.reset", "earliest")
    props.put("enable.auto.commit", "true")
    props.put("auto.commit.interval.ms", "1000")

    val consumer = new KafkaConsumer[String, String](props)
    try {
      consumer.subscribe(Collections.singletonList(topic))
      while (true) {
        // NOTE(review): poll(long) is deprecated in kafka-clients >= 2.0; switch to
        // poll(java.time.Duration.ofMillis(100)) once the client version is confirmed.
        val records = consumer.poll(100)
        // JavaConverters (.asScala) replaces the deprecated implicit JavaConversions.
        for (record <- records.asScala) {
          println(record.offset() + "--" + record.key() + "--" + record.value())
        }
      }
    } finally {
      // The original called close() after the infinite loop, where it could never run;
      // in a finally block it at least executes if poll() or subscribe() throws.
      consumer.close()
    }
  }
}