import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import scala.utils.LoggerLevels // project-local logging helper used in the original post

object StreamingAssignOffset {
  def main(args: Array[String]): Unit = {
    // Disable dynamic allocation and backpressure and cap the per-partition ingest rate,
    // so the rate at which records are pulled from Kafka stays predictable for this example.
    val conf: SparkConf = new SparkConf().setMaster("local[2]").setAppName("test01")
      .set("spark.dynamicAllocation.enabled", "false")
      .set("spark.streaming.backpressure.enabled", "false")
      .set("spark.streaming.kafka.maxRatePerPartition", "100") // value truncated in the original; 100 records/sec/partition is a placeholder
1. Three strategies for reading from Kafka with the Spark Streaming Kafka 0.10 integration