import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.kafka010.OffsetRange
import redis.Jpools
object DataToRedis {
  /**
   * Atomically writes the aggregated counts and the Kafka offsets of the current
   * batch into Redis inside one transaction, so that the data and the offsets
   * either both commit or both roll back.
   */
  def saveDataOffset(result1: RDD[((String, String, String), Int)], ranges: Array[OffsetRange], groupid: String): Unit = {
    result1.foreachPartition(partition => {
      val jedis = Jpools.getJedis
      val transaction = jedis.multi()
      try {
        // Accumulate the count of every (field1, field2, field3) key in the "operator" hash.
        partition.foreach(tp => {
          transaction.hincrBy("operator", tp._1._1 + ":" + tp._1._2 + ":" + tp._1._3, tp._2)
        })
        // Record the end offset of every Kafka partition once per batch, keyed by the consumer group.
        for (o <- ranges) {
          transaction.hset(groupid, o.topic + ":" + o.partition, o.untilOffset.toString)
        }
        transaction.exec()
      } catch {
        case e: Throwable =>
          println("Error while writing to Redis, rolling back the transaction: " + e.getMessage)
          transaction.discard()
      } finally {
        jedis.close()
      }
    })
  }
}
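
// ---------------------------------------------------------------------------
// Usage sketch (not part of the original job): one way saveDataOffset might be
// wired into a Spark Streaming job. The Kafka settings, topic name, consumer
// group, and the comma-separated record layout are assumptions for illustration.
// ---------------------------------------------------------------------------
import scala.collection.JavaConverters._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}

object DataToRedisUsageSketch {
  def main(args: Array[String]): Unit = {
    val groupid = "operator-group"      // hypothetical consumer group
    val topics = Seq("operator-topic")  // hypothetical topic

    val conf = new SparkConf().setAppName("DataToRedisUsageSketch").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "localhost:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> groupid,
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean) // offsets are managed in Redis instead
    )

    // Restore the last committed offsets from the same Redis hash that
    // saveDataOffset writes to ("topic:partition" -> untilOffset).
    val jedis = Jpools.getJedis
    val fromOffsets: Map[TopicPartition, Long] = jedis.hgetAll(groupid).asScala.map {
      case (tp, offset) =>
        val Array(topic, partition) = tp.split(":")
        new TopicPartition(topic, partition.toInt) -> offset.toLong
    }.toMap
    jedis.close()

    val stream = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParams, fromOffsets)
    )

    stream.foreachRDD { rdd =>
      val ranges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      // Hypothetical record layout: three comma-separated fields per message value.
      val result = rdd.map { record =>
        val fields = record.value().split(",")
        ((fields(0), fields(1), fields(2)), 1)
      }.reduceByKey(_ + _)
      DataToRedis.saveDataOffset(result, ranges, groupid)
    }

    ssc.start()
    ssc.awaitTermination()
  }
}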