package cn.bw.spark.day03
import java.sql.DriverManager
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}
/*
 * Reads rows from MySQL into an RDD concurrently, one JDBC query per partition.
 * */
object JdbcRdd2 extends App {
// Connection factory; JdbcRDD calls it on the executor side, once per partition.
def getConnection() = {
  // Load the MySQL JDBC driver
  Class.forName("com.mysql.jdbc.Driver")
  // Open a connection to the bigdata database
  DriverManager.getConnection("jdbc:mysql://localhost:3306/bigdata?user=root&password=123456&characterEncoding=utf-8")
}
val conf = new SparkConf().setAppName("JdbcRdd2").setMaster("local[2]")
val sc = new SparkContext(conf)
val sql: String = "SELECT * FROM localtion_info WHERE id > ? AND id > ?" //sql语句必须大写
val jdbcRDD = new JdbcRDD(
sc,
getConnection,
sql,
10, // lowerBound: smallest id to read (inclusive)
14, // upperBound: largest id to read (inclusive)
2, // numPartitions: the id range is split into this many sub-ranges
rs => {
  val id = rs.getInt(1) // columns can be read by index or by name, e.g. rs.getInt("id")
  val province = rs.getString(2)
  val num = rs.getInt(3)
  val date = rs.getObject(4)
  (id, province, num, date)
}
)
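// Sketch of the resulting partitioning, based on JdbcRDD's range-splitting
// logic: the inclusive range [10, 14] holds 5 ids and is split into 2
// sub-ranges, roughly [10, 11] and [12, 14]; each partition runs the query
// with its own bounds substituted into the two ? placeholders.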
// Writes one part-file per partition; tuples are serialized via toString
jdbcRDD.saveAsTextFile("e:/outJDBDRDD2")
sc.stop()
}
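
/*
 * For comparison, a minimal sketch of the same partitioned read using the
 * DataFrame JDBC source (the newer API). The SparkSession setup below is an
 * assumption for illustration, not part of the original code.
 * */
object JdbcDataFrameSketch extends App {
  import java.util.Properties
  import org.apache.spark.sql.SparkSession

  val spark = SparkSession.builder()
    .appName("JdbcDataFrameSketch")
    .master("local[2]")
    .getOrCreate()

  val props = new Properties()
  props.put("user", "root")
  props.put("password", "123456")
  props.put("driver", "com.mysql.jdbc.Driver")

  // Spark issues one query per partition by splitting [lowerBound, upperBound]
  // on the "id" column, the same idea as JdbcRDD above. Note: here the bounds
  // only control partition strides; they do not filter rows.
  val df = spark.read.jdbc(
    "jdbc:mysql://localhost:3306/bigdata?characterEncoding=utf-8",
    "localtion_info",
    "id", // partition column
    10L,  // lowerBound
    14L,  // upperBound
    2,    // numPartitions
    props
  )
  df.show()
  spark.stop()
}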