pom.xml
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>0.11.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>2.2.0</version>
</dependency>
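A caveat before the code: the DDL below tags the table with TBLPROPERTIES ('transactional'='true'), but Hive's ACID/transactional tables only appeared around Hive 0.13/0.14, and they also require ORC storage (note the "stored as orc" clause is commented out below). With the 0.11.0 client above the property is at best stored as inert metadata; if you need real transactional tables, raise the dependency to match your cluster, e.g. (the version here is an assumption, use whatever your cluster runs):
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.2.1</version>
</dependency>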
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
public class TeminalHiveCoreTest {
public static void main(String[] args) throws SQLException {
String tableName = "terminal"; // no stray spaces in the table name
String params = "appId,deviceMac,deviceId,deviceToken,deviceImsi,deviceModel,deviceManufacture,channel,appKey,userId,appVersion,versionCode,sdkType,sdkVersion,os,country,language,timezone,resolution,access,accessSubtype,carrier,cpu";
String partitionId ="appId";
String clusterId ="deviceToken";
String baseHdfsPath="hdfs://dev10.aoiplus.openpf:9000";
String hdfsPath=baseHdfsPath +"/user/hadoop/storm/terminalInfo_terminalInfo-3-0-1483076684221.log";
Connection connection = HiveJdbcConnection.getInstance().getConnection(HiveJdbcConnection.url, HiveJdbcConnection.user, HiveJdbcConnection.password);
Statement stmt = connection.createStatement();
ResultSet res = null;
HiveSqlDateDefineLanguage hqddl = new HiveSqlDateDefineLanguage().setCon(connection).setStmt(stmt);
hqddl.dropTableByName(tableName);
hqddl.createTableByParams(tableName, params,partitionId,clusterId);
hqddl.loadSQL(tableName, hdfsPath);
HiveSqlDateManageLanguage hqdml = new TermialHiveSqlDMLOperator().setCon(connection).setStmt(stmt).setRes(res);
List<TerminalInfo> list = hqdml.findAll(tableName);
System.out.println(list);
HiveJdbcConnection.getInstance().close(res, stmt, connection);
}
}
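Taken end to end, the driver above drops any existing terminal table, recreates it with every field typed as string, loads a Storm log file straight from HDFS into it, then reads all rows back as TerminalInfo beans and closes the JDBC resources in reverse order.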
package com.curiousby.baoyou.cn.hive.base;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
/**
* @Type HiveJdbcConnection.java
* @Desc Singleton that hands out Hive JDBC connections
* @author hadoop
* @date December 29, 2016, 3:04:42 PM
* @version
*/
public class HiveJdbcConnection {
public static String driverName = "org.apache.hive.jdbc.HiveDriver";
public static String url ="jdbc:hive2://172.23.27.120:10000/default";
public static String user ="hive";
public static String password="hive";
static{
try {
Class.forName(driverName);
} catch (ClassNotFoundException e) {
// keep the root cause instead of discarding it
throw new ExceptionInInitializerError(e);
}
}
private static volatile HiveJdbcConnection instance = null; // volatile is required for safe double-checked locking
public static HiveJdbcConnection getInstance(){
if (instance == null ) {
synchronized (HiveJdbcConnection.class) {
if (instance == null ) {
instance = new HiveJdbcConnection();
}
}
}
return instance;
}
public Connection getConnection(String url,String user,String password) throws SQLException {
return DriverManager.getConnection(url, user, password);
}
public void close(ResultSet rs, Statement st, Connection conn) {
try {
if (rs != null)
rs.close();
} catch (SQLException e) {
e.printStackTrace();
} finally {
try {
if (st != null)
st.close();
} catch (SQLException e) {
e.printStackTrace();
} finally {
if (conn != null)
try {
conn.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
}
public static void main(String[] args) throws SQLException {
System.out.println(HiveJdbcConnection.getInstance().getConnection(url, user, password));
}
}
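The nested close() above is the classic pre-Java-7 cleanup dance. Connection, Statement, and ResultSet are all AutoCloseable, so on Java 7+ a try-with-resources block does the same job in less code (a minimal sketch using the class above, inside a method that declares throws SQLException):
try (Connection conn = HiveJdbcConnection.getInstance().getConnection(HiveJdbcConnection.url, HiveJdbcConnection.user, HiveJdbcConnection.password);
Statement st = conn.createStatement();
ResultSet rs = st.executeQuery("show tables")) {
while (rs.next()) {
System.out.println(rs.getString(1));
}
} // resources are closed in reverse order, even when an exception is thrown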
public class HiveSqlDateDefineLanguage implements SqlDateDefineLanguageInterface,SqlLoadData{
private Statement stmt;
private ResultSet res;
private Connection con;
@Override
public boolean createDatabase(String sql) {
return false;
}
@Override
public boolean createTable(String sql) {
return execute(sql); // delegate to the shared execute() instead of duplicating it
}
/* CREATE [EXTERNAL] TABLE [IF NOT EXISTS] table_name
[(col_name data_type [COMMENT col_comment], ...)]
[COMMENT table_comment]
[PARTITIONED BY (col_name data_type
[COMMENT col_comment], ...)]
[CLUSTERED BY (col_name, col_name, ...)
[SORTED BY (col_name [ASC|DESC], ...)]
INTO num_buckets BUCKETS]
[ROW FORMAT row_format]
[STORED AS file_format]
[LOCATION hdfs_path]*/
public boolean createTableByParams(String tableName,String params,String partitionedId,String clusteredId ){
// Build "col1 string,col2 string,..." — every column is typed string.
// partitionedId is only used if the PARTITIONED BY clause below is re-enabled.
StringBuilder properties = new StringBuilder();
for (String column : params.split(",")) {
properties.append(column).append(" string,");
}
properties.setLength(properties.length() - 1); // drop the trailing comma
String sql =" create table " + tableName + " ("+ properties +") "
+" comment '"+tableName+"' "
// +" PARTITIONED BY ("+partitionedId+" string) "
+" clustered by ("+clusteredId+") into 2 buckets "
+ " row format delimited fields terminated by ',' "
// +" stored as orc "
+" TBLPROPERTIES ('transactional'='true') "
;
System.out.println(sql);
return execute(sql);
}
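// For reference, with tableName "demo", params "a,b", and clusteredId "b",
// the method above prints and executes DDL of this shape (whitespace normalized):
//   create table demo (a string,b string) comment 'demo'
//   clustered by (b) into 2 buckets
//   row format delimited fields terminated by ','
//   TBLPROPERTIES ('transactional'='true')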
@Override
public boolean dropTable(String sql) {
return execute(sql); // same delegation as createTable
}
public boolean dropTableByName(String tableName) {
String sql ="drop table if exists " + tableName;
return execute(sql);
}
public boolean execute(String sql) {
try {
stmt.execute(sql);
} catch (SQLException e) {
e.printStackTrace();
return false;
}
return true;
}
public Statement getStmt() {
return stmt;
}
public ResultSet getRes() {
return res;
}
public Connection getCon() {
return con;
}
public HiveSqlDateDefineLanguage setStmt(Statement stmt) {
this.stmt = stmt;
return this;
}
public HiveSqlDateDefineLanguage setRes(ResultSet res) {
this.res = res;
return this;
}
public HiveSqlDateDefineLanguage setCon(Connection con) {
this.con = con;
return this;
}
@Override
public boolean loadSQL(String tableName, String hdfsPath) {
String sql = "LOAD DATA INPATH '"+hdfsPath+"' OVERWRITE INTO TABLE "+tableName;
return execute(sql);
}
@Override
public boolean loadLocalSQL(String tableName, String localPath) {
String sql = "LOAD DATA local INPATH '"+localPath+"' OVERWRITE INTO TABLE "+tableName;
return execute(sql);
}
}
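One semantic worth knowing when reusing loadSQL/loadLocalSQL: LOAD DATA INPATH moves the source file out of its original HDFS location into the table's warehouse directory, whereas LOAD DATA LOCAL INPATH copies it from the client's filesystem, and OVERWRITE replaces whatever the table already held.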
public abstract class HiveSqlDateManageLanguage extends SqlEntiyFormat<TerminalInfo> implements SqlDateManageLanguageInterface<TerminalInfo> {
private Statement stmt;
private ResultSet res;
private Connection con;
@Override
public List<TerminalInfo> findAll(String tableName) {
try {
res = stmt.executeQuery("select * from " + tableName);
return formate(res);
} catch (SQLException e) {
e.printStackTrace(); // was silently swallowed, which hid query failures
}
return null;
}
@Override
public TerminalInfo findOne(String key, String value) {
return null;
}
@Override
public TerminalInfo findOne(String sql) {
return null;
}
@Override
public List<TerminalInfo> findAllBySql(String sql) {
return null;
}
@Override
public boolean update(String sql) {
return false;
}
@Override
public boolean delete(String sql) {
return false;
}
@Override
public boolean insert(String sql) {
return false;
}
@Override
public boolean insert(TerminalInfo t) {
return false;
}
public Statement getStmt() {
return stmt;
}
public ResultSet getRes() {
return res;
}
public Connection getCon() {
return con;
}
public HiveSqlDateManageLanguage setStmt(Statement stmt) {
this.stmt = stmt;
return this;
}
public HiveSqlDateManageLanguage setRes(ResultSet res) {
this.res = res;
return this;
}
public HiveSqlDateManageLanguage setCon(Connection con) {
this.con = con;
return this;
}
}
public class TermialHiveSqlDMLOperator extends HiveSqlDateManageLanguage{
@Override
public void sqlMapper(ResultSet res, List<TerminalInfo> list) throws SQLException {
TerminalInfo entity = new TerminalInfo();
entity
.setAppId(res.getString(1))
.setDeviceMac(res.getString(2))
.setDeviceId(res.getString(3))
.setDeviceToken(res.getString(4))
.setDeviceImsi(res.getString(5))
.setDeviceModel(res.getString(6))
.setDeviceManufacture(res.getString(7))
.setChannel(res.getString(8))
.setAppKey(res.getString(9))
.setUserId(res.getString(10))
.setAppVersion(res.getString(11)) // was getString(1), a copy-paste bug
.setVersionCode(res.getString(12))
.setSdkType(res.getString(13))
.setSdkVersion(res.getString(14))
.setOs(res.getString(15))
.setCountry(res.getString(16))
.setLanguage(res.getString(17))
.setTimezone(res.getString(18))
.setResolution(res.getString(19))
.setAccess(res.getString(20))
.setAccessSubtype(res.getString(21))
.setCarrier(res.getString(22))
.setCpu(res.getString(23))
;
list.add(entity);
}
}
public interface SqlLoadData {
public boolean loadSQL(String tableName,String hdfsPath);
public boolean loadLocalSQL(String tableName,String localPath);
}
public interface SqlMapper<T> {
public void sqlMapper(ResultSet res, List<T> list) throws SQLException;
}
public abstract class SqlEntiyFormat<T> implements SqlMapper<T> {
public List<T> formate(ResultSet res) throws SQLException {
List<T> list = new ArrayList<T>();
while (res.next()) {
sqlMapper(res, list);
}
return list;
}
}
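SqlEntiyFormat is a small template method: formate walks the ResultSet and hands each row to the subclass's sqlMapper, which is how TermialHiveSqlDMLOperator above turns rows into TerminalInfo beans.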
public interface SqlDateManageLanguageInterface<T> {
public List<T> findAll(String tableName);
public T findOne(String key, String value);
public T findOne(String sql);
public List<T> findAllBySql(String sql);
public boolean update(String sql);
public boolean delete(String sql);
public boolean insert(String sql);
public boolean insert(T t);
}
/**
 * @Type SqlDateDefineLanguageInterface.java
 * @Desc CREATE DATABASE - create a new database
 *       ALTER DATABASE - modify a database
 *       CREATE TABLE - create a new table
 *       ALTER TABLE - alter an existing table
 *       DROP TABLE - drop a table
 *       CREATE INDEX - create an index (search key)
 *       DROP INDEX - drop an index
 * @author hadoop
 * @date December 29, 2016, 2:54:34 PM
 * @version
 */
public interface SqlDateDefineLanguageInterface {
public boolean createDatabase(String sql);
public boolean createTable(String sql);
public boolean dropTable(String sql);
public boolean dropTableByName(String tableName);
}