Create a topic
bin/kafka-topics.sh --create --zookeeper hdp01:2181,hdp02:2181,hdp03:2181 --replication-factor 3 --partitions 3 --topic my
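A quick way to verify the result is the same tool's --describe action, which lists the topic's partitions and replica assignments:
bin/kafka-topics.sh --describe --zookeeper hdp01:2181,hdp02:2181,hdp03:2181 --topic my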
================================================================+
Console producer
bin/kafka-console-producer.sh --broker-list hdp01:9092 --topic my
Console consumer
bin/kafka-console-consumer.sh --bootstrap-server hdp02:9092 --topic my
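By default the console consumer only prints messages that arrive after it starts; the standard --from-beginning flag replays everything already in the topic:
bin/kafka-console-consumer.sh --bootstrap-server hdp02:9092 --topic my --from-beginning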
================================================================+
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

object Hive {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("yoy").setMaster("local[2]")
    val context = new StreamingContext(conf, Seconds(5)) // 5-second batch interval
    context.sparkContext.setLogLevel("ERROR")
    context.checkpoint("out") // required by stateful operations such as updateStateByKey

    // Kafka consumer settings for the direct stream
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "hdp02:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "group1"
    )
    val topics = Array("my")

    val stream = KafkaUtils.createDirectStream[String, String](
      context,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams)
    )

    // Split each record's value into words
    val words = stream.map(record => record.value()).flatMap(_.split(" "))

    // Word count over a sliding window: 20-second window recomputed every 5 seconds
    // (window and slide durations must both be multiples of the batch interval)
    words.map(word => (word, 1))
      .reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(20), Seconds(5))
      .print()

    /* Alternative: a running total over the whole stream instead of a window.
    words.map(word => (word, 1))
      .updateStateByKey((seq: Seq[Int], state: Option[Int]) => {
        var value = state.getOrElse(0)
        for (k <- seq) {
          value += k
        }
        Option(value)
      })
      .print()
    */

    context.start()
    context.awaitTermination()
  }
}
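For longer windows, reduceByKeyAndWindow also has an overload that takes an inverse reduce function: each slide then only adds the batch entering the window and subtracts the batch leaving it, instead of recomputing the whole window. That form requires a checkpoint directory, which the program above already sets. A minimal sketch under those assumptions (the object name WindowedCount is made up; the topic, group, and broker are reused from above):

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe

object WindowedCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("windowed-count").setMaster("local[2]")
    val context = new StreamingContext(conf, Seconds(5))
    context.checkpoint("out") // the inverse-function form requires checkpointing

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "hdp02:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "group1"
    )
    val stream = KafkaUtils.createDirectStream[String, String](
      context, PreferConsistent, Subscribe[String, String](Array("my"), kafkaParams))

    stream.map(_.value()).flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKeyAndWindow(
        (a: Int, b: Int) => a + b, // add counts from the batch entering the window
        (a: Int, b: Int) => a - b, // subtract counts from the batch leaving the window
        Seconds(20), Seconds(5))
      .print()

    context.start()
    context.awaitTermination()
  }
}

A further overload also accepts a filter function, useful for dropping keys whose count has fallen back to zero so the window state does not accumulate dead keys.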
================================================================+