package com.jojo.spark.utils
import org.apache.hadoop.hbase.{
HBaseConfiguration, TableName}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{
DataFrame, SparkSession}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.client.{
Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.mapreduce.Job
import scala.collection.JavaConverters._
/**
 * Writes Hive data into Lindorm (HBase-compatible) storage.
 *
 * @author wenXin
 * @since 2024/04/01
 * @note original tag was the non-standard `@describe`: "Hive写入Lindorm" (write Hive to Lindorm)
 */
object XGBUserStratificationToLindorm {
def main(args: Array[String]): Unit = {
/** TODO: create the SparkSession context / execution environment */
val sparkConf = new SparkConf().setAppName("SparkHiveToHbase")
val sparkSession = SparkSession
    // NOTE(review): the lines below are web-scrape residue (blog title and
    // publication timestamp), not code; preserved as comments so the file is
    // not broken by bare prose. The remainder of main() is missing — this
    // source is truncated and `val sparkSession = SparkSession` above is an
    // incomplete statement. TODO: recover the full original source.
    // Residue: "Hbase Spark将Hive数据写入Hbase/Lindorm"
    //          (HBase/Spark: writing Hive data into HBase/Lindorm)
    // Residue: "于 2024-04-09 16:35:56 首次发布" (first published 2024-04-09 16:35:56)