package com.chao.huang
import java.util.Properties
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demo: read a MySQL table through the Spark JDBC data source.
 *
 * @author chaohuang
 * @date 2020/6/23
 */
object sparkJDBC {

  /**
   * Entry point: reads the `student` table from MySQL via the Spark JDBC
   * data source, converts each row to a (id, name, age) tuple, and prints
   * the DataFrame contents.
   *
   * Fixes vs. the original: curly quotes replaced with ASCII quotes,
   * fused statements split onto their own lines, the nonexistent
   * Row.getAsString/getAsLong calls replaced with real Row accessors,
   * and the unused Properties object (superseded by .option(...)) removed.
   */
  def main(args: Array[String]): Unit = {
    // A single SparkSession gives us both the SQL API and (via
    // spark.sparkContext) the RDD API; no separate SparkContext needed.
    val spark: SparkSession = SparkSession.builder()
      .master("local")
      .appName("sparkJDBC")
      .getOrCreate()

    // JDBC connection settings supplied as options; a Properties object
    // would only be needed for the spark.read.jdbc(url, table, props) overload.
    val frame: DataFrame = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://slave2:3306/travel")
      .option("dbtable", "student")
      .option("user", "root")
      .option("password", "huangchao")
      .load()

    // Convert to an RDD of tuples. Columns are accessed positionally because
    // the original accessor calls were garbled (getAsString/getAsLong);
    // NOTE(review): assumes column order is (id: String, name: String, age: Long)
    // — confirm against the student table schema.
    val rdd: RDD[Row] = frame.rdd
    val rdd_mysql: RDD[(String, String, Long)] = rdd.map { row =>
      val id: String = row.getAs[String](0)
      val name: String = row.getAs[String](1)
      val age: Long = row.getAs[Long](2)
      (id, name, age)
    }

    frame.show()

    // Release cluster resources before the JVM exits.
    spark.stop()
  }
}