Scala CRUD Operations on a Relational Database

This post walks through basic JDBC select, insert, delete, and update statements against MySQL from small Scala/Spark programs.
1. Add the MySQL connector dependency
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.38</version>
</dependency>
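
If the project is built with sbt rather than Maven, the equivalent dependency (same connector version) would be:

libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.38"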

2. Querying data
import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

object IpJdbc01 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("IpJdbc").setMaster("local[2]")
    val sc = new SparkContext(conf)
    var conn: Connection = null
    var ps: PreparedStatement = null
    val sql = "select * from location_info"
    try {
      conn = DriverManager.getConnection("jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8", "root", "Zhm@818919")
      ps = conn.prepareStatement(sql)
      val rs = ps.executeQuery()
      // Option 1: collect the rows into an ArrayBuffer of column-name -> value maps
      /* val rsmd = rs.getMetaData
      val size = rsmd.getColumnCount
      val buffer = new ArrayBuffer[mutable.HashMap[String, Any]]()
      while (rs.next()) {
        val map = mutable.HashMap[String, Any]()
        for (i <- 1 to size) {
          map += (rsmd.getColumnLabel(i) -> rs.getString(i))
        }
        buffer += map
      }
      println(buffer) */
      // Option 2: read the columns directly from the ResultSet
      while (rs.next()) {
        println(rs.getString("location") + "-------" + rs.getInt("counts"))
      }
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // Close the statement before the connection
      if (ps != null) {
        ps.close()
      }
      if (conn != null) {
        conn.close()
      }
    }
    sc.stop()
  }
}
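
The SparkContext above is created but not actually used for the query. If you want Spark itself to read the table in parallel, a minimal sketch with org.apache.spark.rdd.JdbcRDD could look like the following; the numeric id column and the bounds 1 and 1000 are assumptions for illustration:

import java.sql.{DriverManager, ResultSet}

import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}

object IpJdbcRdd {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("IpJdbcRdd").setMaster("local[2]"))
    val url = "jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8"

    // JdbcRDD needs two ? placeholders that it fills with partition bounds on the id column
    val rows = new JdbcRDD(
      sc,
      () => DriverManager.getConnection(url, "root", "Zhm@818919"),
      "select location, counts from location_info where id >= ? and id <= ?",
      1, 1000, 2,
      (rs: ResultSet) => (rs.getString("location"), rs.getInt("counts"))
    )
    rows.collect().foreach(println)
    sc.stop()
  }
}
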
3. Inserting data
import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.{SparkConf, SparkContext}

object InsertLocation {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("InsertLocation").setMaster("local[2]")
    val sc = new SparkContext(conf)
    var conn: Connection = null
    var ps: PreparedStatement = null
    try {
      val sql = "INSERT INTO location_info(location, accesse_date, counts) VALUES (?, ?, ?)"
      conn = DriverManager.getConnection("jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8", "root", "Zhm@818919")
      ps = conn.prepareStatement(sql)
      ps.setString(1, "深圳")
      ps.setString(2, "2018-7-2")
      ps.setInt(3, 122)
      ps.execute()
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // Close the statement before the connection
      if (ps != null) {
        ps.close()
      }
      if (conn != null) {
        conn.close()
      }
    }
    sc.stop()
  }
}
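
When the rows to insert come out of a Spark job (for example (location, date, counts) aggregation results), inserting them one by one from the driver does not scale. A sketch of the usual pattern, writing each partition through one connection with JDBC batching, is shown below; the method name, RDD element type, and helper object are assumptions for illustration:

import java.sql.DriverManager

import org.apache.spark.rdd.RDD

object LocationWriter {
  val url = "jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8"

  // Writes (location, accesse_date, counts) tuples, one connection and one batch per partition
  def saveToMysql(results: RDD[(String, String, Int)]): Unit = {
    results.foreachPartition { iter =>
      val conn = DriverManager.getConnection(url, "root", "Zhm@818919")
      val ps = conn.prepareStatement(
        "INSERT INTO location_info(location, accesse_date, counts) VALUES (?, ?, ?)")
      try {
        iter.foreach { case (location, date, counts) =>
          ps.setString(1, location)
          ps.setString(2, date)
          ps.setInt(3, counts)
          ps.addBatch()
        }
        ps.executeBatch()
      } finally {
        ps.close()
        conn.close()
      }
    }
  }
}
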
4. Deleting data
import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.{SparkConf, SparkContext}

object DeleteLocation {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("DeleteLocation").setMaster("local[2]")
    val sc = new SparkContext(conf)
    var conn: Connection = null
    var ps: PreparedStatement = null
    try {
      val sql = "delete from location_info where location = ?"
      conn = DriverManager.getConnection("jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8", "root", "Zhm@818919")
      ps = conn.prepareStatement(sql)
      ps.setString(1, "深圳")
      ps.execute()
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // Close the statement before the connection
      if (ps != null) {
        ps.close()
      }
      if (conn != null) {
        conn.close()
      }
    }
    sc.stop()
  }
}
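
execute() only reports whether the statement produced a ResultSet. For DELETE (and INSERT/UPDATE) it is usually more informative to call executeUpdate(), which returns the number of affected rows; replacing the ps.execute() call above, for example:

val deleted = ps.executeUpdate() // number of rows removed
println(s"deleted $deleted row(s) from location_info")
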
5. Updating data
import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.spark.{SparkConf, SparkContext}

object UpdateLocation {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("UpdateLocation").setMaster("local[2]")
    val sc = new SparkContext(conf)
    var conn: Connection = null
    var ps: PreparedStatement = null
    try {
      val sql = "update location_info set location = ? where id = ?"
      conn = DriverManager.getConnection("jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8", "root", "Zhm@818919")
      ps = conn.prepareStatement(sql)
      ps.setString(1, "深圳")
      ps.setInt(2, 26)
      ps.execute()
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // Close the statement before the connection
      if (ps != null) {
        ps.close()
      }
      if (conn != null) {
        conn.close()
      }
    }
    sc.stop()
  }
}
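
By default Connector/J runs every statement in auto-commit mode. If several updates must succeed or fail together, a minimal sketch of wrapping them in one manual transaction looks like this; the column values and ids are illustrative assumptions:

import java.sql.{Connection, DriverManager}

object UpdateLocationTx {
  def main(args: Array[String]): Unit = {
    val conn: Connection = DriverManager.getConnection(
      "jdbc:mysql://192.168.126.31:3306/sparkdatabase?useUnicode=true&characterEncoding=utf-8",
      "root", "Zhm@818919")
    conn.setAutoCommit(false) // run the statements in one manual transaction
    val ps = conn.prepareStatement("update location_info set counts = ? where id = ?")
    try {
      ps.setInt(1, 122)
      ps.setInt(2, 26)
      ps.executeUpdate()

      ps.setInt(1, 99)
      ps.setInt(2, 27)
      ps.executeUpdate()

      conn.commit() // both updates become visible together
    } catch {
      case e: Exception =>
        conn.rollback() // neither update is applied if anything fails
        e.printStackTrace()
    } finally {
      ps.close()
      conn.close()
    }
  }
}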