hive系列-hive整合hbase

hive整合hbase
1、在hive控制台建库,建表(注意:int、bigint、boolean等以二进制存储的数值类型,映射时要在列名后加上#b后缀,否则Hive按字符串解码,查询到的数据会是乱码或者null)
-- Database that holds the HBase-backed external tables.
-- IF NOT EXISTS makes the script safe to re-run (idempotent migration).
CREATE DATABASE IF NOT EXISTS credit_mate_data;
USE credit_mate_data;

-- External table mapped onto the pre-existing HBase table "SHOP".
-- EXTERNAL: dropping this Hive table leaves the HBase data untouched.
-- Numeric columns (bigint/int) carry the #b (binary) suffix in the column
-- mapping below: HBase stores them as raw binary, and without #b Hive
-- decodes the bytes as UTF-8 strings, yielding garbage or NULL.
CREATE EXTERNAL TABLE shop_order_info (
    key               string,   -- HBase row key (order number, e.g. 'SO...')
    ticket_head       string,
    token             string,
    trans_status      string,
    actual_fee        bigint,   -- amount; presumably in cents — TODO confirm
    create_time       string,
    flow_no           string,
    goods_num         int,
    order_address     string,
    order_create_time string,
    order_finish_time string,
    order_full_name   string,
    order_mobile      string,
    order_pay_time    string,
    order_post        string,
    order_send_time   string,
    other_order_num   string,
    post_fee          int,
    shop_name         string
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
-- NOTE: hbase.columns.mapping must contain NO whitespace between entries.
-- Spaces or newlines inside the mapping string become part of the HBase
-- family:qualifier names, and every affected column then silently reads
-- back NULL (see the Hive HBase Integration wiki). Keep it on one line.
WITH SERDEPROPERTIES (
    "hbase.columns.mapping" = ":key,SHOP_INFO:ticket_head,SHOP_INFO:token,SHOP_INFO:trans_status,SHOP_INFO:actual_fee#b,SHOP_INFO:create_time,SHOP_INFO:flow_no,SHOP_INFO:goods_num#b,SHOP_INFO:order_address,SHOP_INFO:order_create_time,SHOP_INFO:order_finish_time,SHOP_INFO:order_full_name,SHOP_INFO:order_mobile,SHOP_INFO:order_pay_time,SHOP_INFO:order_post,SHOP_INFO:order_send_time,SHOP_INFO:other_order_num,SHOP_INFO:post_fee#b,SHOP_INFO:shop_name"
)
TBLPROPERTIES ("hbase.table.name" = "SHOP");

2、建完表后就可以直接在hive控制台(注意:hive控制台必须切换到hdfs用户启动,不然没有启动mapreduce的权限)查询数据了,下面是执行的sql
-- Row-key prefix scan: all rows whose HBase row key starts with 'SO'.
SELECT * FROM shop_order_info WHERE key LIKE 'SO%';

-- Same predicate, projecting only the status and amount columns.
SELECT
    token,
    trans_status,
    actual_fee,
    goods_num,
    post_fee
FROM shop_order_info
WHERE key LIKE 'SO%';

-- Quick sanity check: first five rows of the mapped table.
SELECT * FROM shop_order_info LIMIT 5;

3、java用jdbc连接hive
maven的依赖

<!-- Hadoop/Hive client stack for talking to HiveServer2 over JDBC.      -->
<!-- NOTE(review): versions must match the cluster (here Hadoop 2.6 /    -->
<!-- Hive 1.1, the CDH 5.x era) — mixing client and server versions is a -->
<!-- common source of connection/protocol errors.                        -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.6.0</version>
</dependency>
<!-- Provides org.apache.hive.jdbc.HiveDriver used by the test below. -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-service</artifactId>
<version>1.1.0</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>1.1</version>
</dependency>

<!-- Logging with SLF4J & LogBack -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.13</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.1.3</version>
<scope>runtime</scope>
</dependency>

java代码

package com.boat.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import org.junit.Test;

/**
 * Smoke test for the Hive/HBase integration: connects to HiveServer2 over
 * JDBC as user "hdfs" (HiveServer2 needs a user with MapReduce permissions)
 * and prints the first rows of shop_order_info as a tab-separated table.
 *
 * @author boat
 * @date 2017-04-26 08:52:16
 * @version V1.0
 */
public class TestHiveConnect {

    @Test
    public void testHiveConnect() {
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        String sql = "select * from shop_order_info where key like 'SO%' limit 5";
        try {
            // Explicit driver registration; harmless on JDBC 4+ classpaths,
            // required on older ones where auto-discovery is unavailable.
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            // Preserve the original fail-fast behavior when the driver jar
            // (hive-jdbc) is missing from the classpath.
            System.exit(1);
        }
        // try-with-resources closes ResultSet, Statement and Connection in
        // reverse declaration order even when a SQLException is thrown —
        // the original leaked all three on any failure after getConnection().
        try (Connection connection = DriverManager.getConnection(
                        "jdbc:hive2://master1:10000/credit_mate_data", "hdfs", "");
                Statement stmt = connection.createStatement();
                ResultSet rs = stmt.executeQuery(sql)) {

            ResultSetMetaData meta = rs.getMetaData();
            int columns = meta.getColumnCount();

            // Header row: column names separated by double tabs.
            for (int i = 1; i <= columns; i++) {
                System.out.print(meta.getColumnName(i));
                System.out.print("\t\t");
            }
            System.out.println();

            // Body: each value rendered via getString (SQL NULL prints "null").
            while (rs.next()) {
                for (int i = 1; i <= columns; i++) {
                    System.out.print(rs.getString(i));
                    System.out.print("\t\t");
                }
                System.out.println();
            }
        } catch (SQLException e) {
            // Best-effort diagnostic output only, matching the original:
            // the test does not fail on SQL errors, it just logs them.
            e.printStackTrace();
        }
    }
}


评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值