import java.sql.{Connection, DriverManager}
import java.util.Properties

import org.apache.spark.sql.{DataFrame, Row, SparkSession}

object Test01 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Test01")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()
    import spark.implicits._

    // MySQL connection settings
    val url = "jdbc:mysql://192.168.245.131:3306/database"
    val user = "root"
    val password = "hadoop"
    val props = new Properties()
    props.setProperty("user", user)
    props.setProperty("password", password)
    props.setProperty("useSSL", "false")
    props.setProperty("useUnicode", "true")
    props.setProperty("characterEncoding", "utf8")

    val seq: Seq[(Int, Int, Double)] = Seq(
      (66666896, 252666555, 1001348.64),
      (66666669, 3356533, 999.0),
      (66666670, 4444, 8888.0)
    )
    val df01: DataFrame = seq.toDF("userid", "amount", "changeamount")
    df01.show(false)

    // Open one JDBC connection per partition; this closure runs on the executors,
    // so the Connection must be created inside it rather than on the driver.
    df01.foreachPartition((rows: Iterator[Row]) => {
      var connection: Connection = null
      try {
        // Ensure the MySQL Connector/J driver class is loaded.
        classOf[com.mysql.jdbc.Driver]
        connection = DriverManager.getConnection(url, props)
        val statement = connection.createStatement()
        while (rows.hasNext) {
          val row = rows.next()
          val userid = row.getAs[Int]("userid")
          val amount = row.getAs[Int]("amount")
          val changeamount = row.getAs[Double]("changeamount")
          val sql = s"update superset.test1 set amount=$amount where userid=$userid and changeamount=$changeamount"
          println(sql)
          statement.executeUpdate(sql)
        }
        statement.close()
      } catch {
        case e: Exception => e.printStackTrace()
      } finally {
        if (connection != null) connection.close()
      }
    })

    spark.stop()
  }
}
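Building the UPDATE by string interpolation works for a demo, but a PreparedStatement with JDBC batching is safer (no SQL injection) and cheaper (one round-trip per partition instead of one per row). Below is a minimal sketch of that variant, meant as a helper method inside Test01; the name writePartition is illustrative, and it assumes the same url, props, and superset.test1 table as above:

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.util.Properties
import org.apache.spark.sql.Row

// Hypothetical helper: write one partition via a batched PreparedStatement.
def writePartition(rows: Iterator[Row], url: String, props: Properties): Unit = {
  var connection: Connection = null
  var statement: PreparedStatement = null
  try {
    connection = DriverManager.getConnection(url, props)
    statement = connection.prepareStatement(
      "update superset.test1 set amount=? where userid=? and changeamount=?")
    rows.foreach { row =>
      statement.setInt(1, row.getAs[Int]("amount"))
      statement.setInt(2, row.getAs[Int]("userid"))
      statement.setDouble(3, row.getAs[Double]("changeamount"))
      statement.addBatch() // queue locally, no round-trip yet
    }
    statement.executeBatch() // flush the whole partition in one go
  } finally {
    if (statement != null) statement.close()
    if (connection != null) connection.close()
  }
}

// Usage inside main:
// df01.foreachPartition((rows: Iterator[Row]) => writePartition(rows, url, props))

With MySQL Connector/J, setting rewriteBatchedStatements=true in the JDBC properties can further speed up batched writes.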
If you run the Spark jar with spark-submit, you also need to ship the MySQL connector jar (mysql-connector-java-5.1.41-bin) so that com.mysql.jdbc.Driver is on the classpath of the driver and executors.
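For example, assuming the application jar is named test01.jar and the connector jar sits in the current directory (both paths are illustrative), the --jars flag distributes the driver with the job:

spark-submit \
  --class Test01 \
  --master local[*] \
  --jars mysql-connector-java-5.1.41-bin.jar \
  test01.jar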