package test01
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{InputDStream, ReceiverInputDStream}
import org.apache.spark.streaming.flume.{FlumeUtils, SparkFlumeEvent}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
/**
 * Demo: consume a Kafka topic ("kb07") with the Spark Streaming direct (receiver-less)
 * API and run a per-batch word count.
 *
 * Explicit `main` replaces `extends App`: the App trait's delayed initialization is a
 * known pitfall for Spark drivers (fields may not be initialized when executors
 * deserialize the closure).
 */
object SparkkafkaDirectDemo {

  def main(args: Array[String]): Unit = {
    // local[2]: at least two threads so batch processing is not starved.
    val conf: SparkConf = new SparkConf().setAppName("flume01").setMaster("local[2]")
    // 5-second micro-batches.
    val ssc = new StreamingContext(conf, Seconds(5))

    // Kafka consumer config. ConsumerConfig constants used throughout for
    // consistency (the original mixed constants with raw string keys).
    val kafkaParams: Map[String, Object] = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "192.168.106.107:9092",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.GROUP_ID_CONFIG -> "testGroup"
    )

    // Direct stream: one RDD partition per Kafka partition, offsets managed by Spark.
    val message: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set("kb07"), kafkaParams)
    )

    // Word count per batch. FIX: original used split(""), which splits a record into
    // single characters — split(" ") tokenizes into words as a word-count intends.
    val wordCounts = message
      .map(_.value())
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // FIX: the original registered no output operation, so ssc.start() would fail with
    // "No output operations registered, so nothing to execute". print() forces each
    // batch to be computed and shows the first results on the driver.
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination() // block the driver until the streaming job is stopped
  }
}