import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public static void loadData2Hive(String dst, String dateString) {
    String JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
    String CONNECTION_URL = "jdbc:hive2://xx.xx.xx.xx:10000";
    String username = "";
    String password = "";
    Connection con = null;
    Statement stmt = null;
    try {
        // Register the HiveServer2 JDBC driver and open the connection
        Class.forName(JDBC_DRIVER);
        con = DriverManager.getConnection(CONNECTION_URL, username, password);
        stmt = con.createStatement();
        // Load the local file into the given dt partition of the target table
        String sql = "load data local inpath '" + dst + "' into table test_database.test_table PARTITION (dt = '" + dateString + "')";
        stmt.execute(sql);
        System.out.println("loadData into the Hive table succeeded!");
    } catch (SQLException e) {
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    } finally {
        // Close stmt and con
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}
Just replace the host IP in the connection URL, and the database and table names in the SQL, with your own.
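As a usage illustration, here is a minimal sketch of calling the method, assuming it is defined in the same class; the local file path and the dt value are hypothetical examples, not taken from the original code.

public class HiveLoadDemo {
    public static void main(String[] args) {
        // Hypothetical local file and partition value -- adjust to your environment.
        String localFile = "/tmp/test_table_2024-06-01.txt";
        String dateString = "2024-06-01";
        loadData2Hive(localFile, dateString);
    }

    // ... loadData2Hive(...) as defined above ...
}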
The Maven dependencies you will need are listed below. Note that the Hive version and the Hadoop version must match each other (and your cluster), otherwise you will get errors.
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
        <scope>compile</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>3.1.0</version>
        <exclusions>
            <exclusion>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.1.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-metastore</artifactId>
        <version>2.1.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
        <version>2.1.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>2.1.1</version>
    </dependency>
</dependencies>
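The load statement above assumes that test_database.test_table already exists and is partitioned by dt. For reference only, here is a minimal sketch of creating such a table over the same HiveServer2 JDBC connection; the column names, types, and field delimiter are assumptions for illustration, not part of the original post.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateHiveTableSketch {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection con = DriverManager.getConnection("jdbc:hive2://xx.xx.xx.xx:10000", "", "");
             Statement stmt = con.createStatement()) {
            // Create the database and a dt-partitioned table if they do not exist yet.
            // Columns (id, name) and the tab delimiter are illustrative assumptions.
            stmt.execute("CREATE DATABASE IF NOT EXISTS test_database");
            stmt.execute("CREATE TABLE IF NOT EXISTS test_database.test_table ("
                    + " id BIGINT,"
                    + " name STRING"
                    + ") PARTITIONED BY (dt STRING)"
                    + " ROW FORMAT DELIMITED FIELDS TERMINATED BY '\\t'");
        }
    }
}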