Hive 导入数据到 HBase
1、建立hive hbase 关联表(暂时线上环境不支持,待测试)
-- Hive table backed by HBase via HBaseStorageHandler: rows written through Hive
-- land in the HBase table "user_info" (per TBLPROPERTIES below).
CREATE TABLE hive_user_info (
    a string, b string, c string,
    d string, e string,
    f string, g string)
PARTITIONED BY (dt string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
    -- One mapping entry per Hive column, in declaration order:
    -- a -> HBase row key, b..g -> qualifiers in column family "info".
    -- Fixed: the original mapping listed info:e twice, omitted info:g entirely,
    -- and carried a trailing space in "info:f " (HBase takes the name verbatim,
    -- so "f " and "f" would be different qualifiers).
    "hbase.columns.mapping" = ":key,info:b,info:c,info:d,info:e,info:f,info:g")
TBLPROPERTIES ("hbase.table.name" = "user_info");
2、importtsv 方式(不支持)
# Bulk-import TSV files into HBase with the ImportTsv MapReduce job.
# Fixed: importtsv.columns MUST contain HBASE_ROW_KEY to mark the row-key field,
# and every other entry needs a family:qualifier form — the original "a,b,c"
# (no row key, no column family) is rejected by ImportTsv at startup.
hbase org.apache.hadoop.hbase.mapreduce.ImportTsv \
    -Dimporttsv.columns=HBASE_ROW_KEY,info:b,info:c <tablename> <hdfs-inputdir>
3、bulkload 方式(不支持)
// Bulk-load pre-generated HFiles directly into an HBase table.
// NOTE(review): fragment — `configuration`, `tableName`, `pathToHFile` and
// `loadFfiles` (presumably a LoadIncrementalHFiles instance — confirm) are
// defined elsewhere and not visible here.
HTable hTable = new HTable(configuration, tableName);// target table
loadFfiles.doBulkLoad(new Path(pathToHFile), hTable);// load the data; pathToHFile is the HFile path
4、table.put()
Hive导入数据到mysql
1、sqoop
2、udf
-- Export Hive query results to MySQL with the contrib GenericUDFDBOutput UDF:
-- each selected row issues one JDBC INSERT against the target database.
CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput';
-- Arguments: JDBC URL, user, password, parameterized INSERT, then one Hive
-- column per '?' placeholder (remain_count -> ytId, remain_count_tag -> room).
-- NOTE(review): credentials are embedded in plain text in the query, so they
-- end up in job logs and query history — move them out before wider use.
select dboutput('jdbc:mysql://10.100.56.22:3306/ddshow_stat','ucst-sys','E2xNDFT6rSecx','insert into t_temp_access_pv_log(ytId,room) values(?,?)',remain_count,remain_count_tag) from t_result_user_remain_day_stat limit 100;
ytId,room 为t_temp_access_pv_log 的字段
remain_count,remain_count_tag 为 t_result_user_remain_day_stat 的字段
3、Hive server2 (不支持)
Class.forName( "org.apache.hive.jdbc.HiveDriver");
Connection con = DriverManager.getConnection(
"jdbc:hive2://192.168.17.15:10000/hivedb", "hiveuser", "hiveuser");
Statement stmt = con.createStatement();
ResultSet res = null;
String sql = "select count(*) from test_data";
System.out.println("Running: " + sql);
res = stmt.executeQuery(sql);
System.out.println("ok");
while (res.next()) {
System.out.println(res.getString(1));
}
mysql导入数据到Hive
# Import the MySQL table `tags` into Hive with Sqoop (-m 1 = single map task;
# -P prompts for the password instead of exposing it on the command line;
# everything after the bare "--" is passed through to the MySQL JDBC driver).
# Fixed: `--P` -> `-P` and `-target-dir` -> `--target-dir` — Sqoop long
# options take two dashes, and the original forms are misparsed.
bin/sqoop import \
    --connect jdbc:mysql://10.95.3.49:3306/workflow \
    --username shirdrn \
    -P \
    --table tags \
    --columns 'id,tag' \
    --create-hive-table \
    --target-dir /hive/tag_db/tags \
    -m 1 \
    --hive-table tags \
    --hive-import \
    -- --default-character-set=utf-8
hive mysql Hbase间导表方式
最新推荐文章于 2025-08-13 17:42:51 发布