Spark Streaming: Window Functions

This post shows how to process data with window functions in Spark Streaming, covering the plain window, the sliding window, countByValueAndWindow, reduceByWindow, and reduceByKeyAndWindow. It also demonstrates how the transform method can reshape the data structure of each batch to suit different computations. The examples consume a Kafka data source and are written in Scala.

1. Window functions

package window

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/*
 * The window duration and the slide duration must both be integer multiples of the batch interval.
 * Here the batch interval is 2 seconds, so for example a window of 8s with a slide of 4s is valid,
 * while a window of 5s would be rejected at runtime.
 * */

object WindowDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("Windows")
    val streamingContext = new StreamingContext(conf,Seconds(2))

    streamingContext.checkpoint("checkpoint")

    val kafkaParams = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "192.168.XXX.100:9092",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
//      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest",
      ConsumerConfig.GROUP_ID_CONFIG -> "kafkaGroup4"
    )

    val kafkaStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      streamingContext,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe(Set("sparkKafka"), kafkaParams)
    )

    // window operations
//    val wind1: DStream[(String, Int)] = kafkaStream.flatMap(_.value().toString.split("\\s+"))
//      .map(x => (x, 1))
//      .window(Seconds(6))               // plain window (slide defaults to the batch interval)
//      .window(Seconds(8), Seconds(4))   // sliding window: 8s window, 4s slide (use only one of the two calls)

    // countByValueAndWindow
//    val wind1: DStream[(String, Long)] = kafkaStream.flatMap(_.value().toString.split("\\s+"))
//       .map(x => (x, 1))
//       .countByValueAndWindow(Seconds(8), Seconds(4))  // counts the occurrences of each value within the window

    // reduceByWindow - 1: concatenate all the words in the window
//    val wind1 = kafkaStream.flatMap(_.value().toString.split("\\s+"))
//    wind1.reduceByWindow((x, y) => x + "%" + y, Seconds(8), Seconds(4))

    // reduceByWindow - 2: total number of words in the window
//    val wind1 = kafkaStream.flatMap(_.value().toString.split("\\s+")).map(x => (x, 1))
//        .reduceByWindow((x, y) => ("count:", x._2 + y._2), Seconds(8), Seconds(4))

    // reduceByKeyAndWindow: per-word count within the window
//    val wind1 = kafkaStream.flatMap(_.value().toString.split("\\s+")).map(x => (x, 1))
//        .reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(8), Seconds(4))

    // reduceByKeyAndWindow with an inverse function: counts are updated incrementally,
    // adding data that enters the window and subtracting data that leaves it (checkpointing required)
    val wind1 = kafkaStream.flatMap(_.value().toString.split("\\s+")).map(x => (x, 1))
      .reduceByKeyAndWindow((x: Int, y: Int) => { println("one"); x + y },
        (x: Int, y: Int) => { println("two"); x - y }, Seconds(8), Seconds(4))

    wind1.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
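
If no Kafka broker is at hand, the same windowed counting can be tried out locally with a queueStream source. The sketch below is illustrative only: the object name WindowLocalDemo, the test sentences, and the queue-feeding loop are assumptions and not part of the original example; the reduceByKeyAndWindow call mirrors the one above, with the inverse function subtracting data that leaves the window.

package window

import scala.collection.mutable

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}

object WindowLocalDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("WindowLocal")
    val ssc = new StreamingContext(conf, Seconds(2))
    ssc.checkpoint("checkpoint")   // required by the inverse-function variant of reduceByKeyAndWindow

    // A queue of RDDs stands in for the Kafka source; one RDD is consumed per 2-second batch
    val queue = new mutable.Queue[RDD[String]]()
    val lines = ssc.queueStream(queue)

    val counts = lines.flatMap(_.split("\\s+"))
      .map(word => (word, 1))
      .reduceByKeyAndWindow(
        (x: Int, y: Int) => x + y,   // add counts entering the window
        (x: Int, y: Int) => x - y,   // subtract counts leaving the window
        Seconds(8), Seconds(4))

    counts.print()

    ssc.start()
    // push one small batch of test data every 2 seconds
    for (_ <- 1 to 10) {
      queue += ssc.sparkContext.parallelize(Seq("hello spark", "hello kafka"))
      Thread.sleep(2000)
    }
    ssc.awaitTerminationOrTimeout(30000)
    ssc.stop()
  }
}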

2. transform

package window

import java.text.SimpleDateFormat

import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

object WindowDemo2 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("Windows")
    val streamingContext = new StreamingContext(conf,Seconds(2))

    streamingContext.checkpoint("checkpoint")

    val kafkaParams = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "192.168.XXX.100:9092",
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.StringDeserializer",
//      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest",
      ConsumerConfig.GROUP_ID_CONFIG -> "kafkaGroup4"
    )

    val kafkaStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream(
      streamingContext,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe(Set("sparkKafka"), kafkaParams)
    )

    // transform: reshape the data structure of each batch RDD,
    // here tagging every word with the batch timestamp before counting
    val trans: DStream[((String, String), Int)] = kafkaStream.transform((rdd, timestamp) => {
      val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
      val time: String = sdf.format(timestamp.milliseconds)
      rdd.flatMap(record => record.value().split("\\s+").map(word => ((word, time), 1))).reduceByKey(_ + _)
    })

    // The same reshaping without using the batch time; the returned DStream is not used here
    // and is kept only as a second example of transform
    kafkaStream.transform((rdd, time) => {
      rdd.flatMap(_.value().split("\\s+").map(word => (word, 1)))
    })
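
    // A further sketch (not part of the original post): because transform exposes the batch RDD,
    // RDD-only operations such as sortBy can be used and the result still comes back as a DStream.
    val sorted: DStream[(String, Int)] = kafkaStream.transform(rdd =>
      rdd.flatMap(_.value().split("\\s+"))
        .map(word => (word, 1))
        .reduceByKey(_ + _)
        .sortBy(_._2, ascending = false)   // sort each batch's word counts in descending order
    )
    sorted.print()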

    trans.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}