/**
 * Loads a Phoenix table into a DataFrame via the official phoenix-spark connector
 * (http://phoenix.apache.org/phoenix_spark.html). Note the connector loads whole
 * tables by name; it does not accept arbitrary SQL the way a MySQL-style JDBC
 * query would — use queryPhoenix for that.
 *
 * @param sqlContext Hive-enabled SQLContext used to build the DataFrame
 * @param table      Phoenix table name to load
 * @return DataFrame backed by the Phoenix table
 */
def loadPhoenixTable(sqlContext: HiveContext, table: String): DataFrame = {
  // sqlContext.load(source, options) is deprecated since Spark 1.4;
  // the DataFrameReader API is the supported equivalent.
  sqlContext.read
    .format("org.apache.phoenix.spark")
    .options(Map("table" -> table, "zkUrl" -> CluserProperties.PHOENIX_ZKURL))
    .load()
}
/**
 * Runs a SQL query against Phoenix over JDBC and prints the "info" column of
 * each result row.
 *
 * NOTE(review): unlike MySQL, Phoenix distinguishes single and double quotes —
 * string literals in the WHERE clause must use single quotes.
 *
 * @param sql Phoenix-compatible SQL; the result set must contain an "info" column
 */
def queryPhoenix(sql: String): Unit = {
  val driver: String = "org.apache.phoenix.jdbc.PhoenixDriver"
  // TODO(review): hard-coded ZK quorum — consider CluserProperties.PHOENIX_ZKURL
  // as the other methods in this file do.
  val url: String = "jdbc:phoenix:slave7:2181"
  Class.forName(driver)
  // The original opened two connections (one was leaked immediately) and never
  // closed the connection, statement, or result set. Open exactly one connection
  // and release every JDBC resource in reverse order of acquisition.
  val conn: Connection = DriverManager.getConnection(url)
  try {
    val statement: Statement = conn.createStatement
    try {
      val rs: ResultSet = statement.executeQuery(sql)
      try {
        while (rs.next) {
          val info = rs.getString("info")
          println(info)
        }
      } finally {
        rs.close()
      }
    } finally {
      statement.close()
    }
  } finally {
    conn.close()
  }
}
/**
 * Persists a DataFrame into the given Phoenix table through the phoenix-spark
 * connector, overwriting any existing data in the table.
 *
 * @param dataFrame the DataFrame to write
 * @param tableName target Phoenix table name
 */
def saveDataFrameToPhoenixNew(dataFrame: DataFrame, tableName: String) = {
  // Collect all connector settings in one options map rather than
  // chaining individual .option(...) calls.
  val phoenixWriter = dataFrame.write
    .format("org.apache.phoenix.spark")
    .mode(SaveMode.Overwrite)
    .options(Map(
      "driver" -> "org.apache.phoenix.jdbc.PhoenixDriver",
      "zkUrl"  -> CluserProperties.PHOENIX_ZKURL,
      "table"  -> tableName
    ))
  phoenixWriter.save()
}
// Appendix (reference for reading/writing HBase directly):
// https://blog.youkuaiyun.com/qq_41851454/article/details/80784899