mongo-spark: reading data from different MongoDB databases and writing to different ones

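The trick is to build a separate ReadConfig/WriteConfig for each MongoDB URI and pass them explicitly to the reader and writer, so a single Spark job can copy data between two MongoDB deployments. Full example (Scala, mongo-spark connector):
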
package com.example.app

import com.mongodb.spark.config.{ReadConfig, WriteConfig}
import com.mongodb.spark.sql._
import org.apache.spark.sql.SparkSession

object App {

  def main(args: Array[String]): Unit = {

    // args: host of the first MongoDB deployment, host of the second,
    // and the Spark master URI (not used below; the master is normally
    // supplied via spark-submit)
    val MongoUri1 = args(0)
    val MongoUri2 = args(1)
    val SparkMasterUri = args(2)

    // Build a full connection string: mongodb://host:port/database.collection
    def makeMongoURI(uri: String, database: String, collection: String) =
      s"$uri/$database.$collection"

    val mongoURI1 = s"mongodb://$MongoUri1:27017"
    val mongoURI2 = s"mongodb://$MongoUri2:27017"

    // One connection string per database
    val CONFdb1 = makeMongoURI(mongoURI1, "MyCollection1", "df")
    val CONFdb2 = makeMongoURI(mongoURI2, "MyCollection2", "df")

    // A separate ReadConfig/WriteConfig for each MongoDB deployment
    val WRITEdb1: WriteConfig = WriteConfig(Map("uri" -> CONFdb1))
    val READdb1: ReadConfig = ReadConfig(Map("uri" -> CONFdb1))

    val WRITEdb2: WriteConfig = WriteConfig(Map("uri" -> CONFdb2))
    val READdb2: ReadConfig = ReadConfig(Map("uri" -> CONFdb2))

    val spark = SparkSession
      .builder
      .appName("AppMongo")
      .config("spark.worker.cleanup.enabled", "true")
      .config("spark.scheduler.mode", "FAIR")
      .getOrCreate()

    // Read each DataFrame with its own ReadConfig, then write it back
    // through the matching WriteConfig
    val df1 = spark.read.mongo(READdb1)
    val df2 = spark.read.mongo(READdb2)
    df1.write.mode("overwrite").mongo(WRITEdb1)
    df2.write.mode("overwrite").mongo(WRITEdb2)
  }
}
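
To run the job, the three arguments are the two MongoDB hosts and the Spark master. A hypothetical spark-submit invocation (the jar name, hosts, and master URL below are placeholders):

 spark-submit \
   --class com.example.app.App \
   --master spark://spark-master:7077 \
   app-mongo.jar mongo-host-1 mongo-host-2 spark://spark-master:7077

The implicit read.mongo / write.mongo syntax comes from importing com.mongodb.spark.sql._; the same configs should also work with the connector's explicit MongoSpark helper. A minimal sketch, assuming the spark, READdb1 and WRITEdb1 values defined above:

 import com.mongodb.spark.MongoSpark

 // Load with an explicit ReadConfig (equivalent to spark.read.mongo(READdb1))
 val df1 = MongoSpark.load(spark, READdb1)

 // Save through a DataFrameWriter so the "overwrite" save mode is kept
 MongoSpark.save(df1.write.mode("overwrite"), WRITEdb1)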

Reposted from: https://www.cnblogs.com/wenbronk/p/9887466.html
