1. Create a Java + Maven project in IDEA. The POM is shown below; dependencies declared without a version (slf4j-api, hutool-core) inherit it from the parent java-starter POM.
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.github.xlongshu.maven</groupId>
        <artifactId>java-starter</artifactId>
        <version>1.0.1</version>
    </parent>
    <groupId>com.spark.hbase</groupId>
    <artifactId>hbase_learning</artifactId>
    <version>1.0</version>
    <packaging>jar</packaging>
    <name>hbase_learning</name>
    <properties>
        <!-- Java 8 is required: the HBaseUtil class below uses lambdas and streams -->
        <java.version>1.8</java.version>
        <skipTests>true</skipTests>
        <log.level>DEBUG</log.level>
        <log.path>/data/logs</log.path>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.2.4</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>
        <!-- Version managed by the parent java-starter POM -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <!-- Logger Impl -->
        <!-- <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
        </dependency> -->
        <!-- Version managed by the parent java-starter POM -->
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-core</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
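After building with mvn -DskipTests package, a quick classpath check confirms that hbase-client 1.2.4 was resolved. This is a minimal sketch; the class name CheckClasspath is hypothetical and not part of the project above:

package com.spark.hbase.hbase_learning;

import org.apache.hadoop.hbase.util.VersionInfo;

// Hypothetical sanity check: prints the HBase client version pulled in by the POM.
public class CheckClasspath {
    public static void main(String[] args) {
        // Should print "1.2.4" if the dependency resolved correctly
        System.out.println("HBase client version: " + VersionInfo.getVersion());
    }
}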
2. Create the HBaseConn class
package com.spark.hbase.hbase_learning;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

import java.io.IOException;

public class HBaseConn {

    private static final HBaseConn INSTANCE = new HBaseConn();
    private static Configuration configuration;
    private static Connection connection;

    private HBaseConn() {
        try {
            if (configuration == null) {
                configuration = HBaseConfiguration.create();
                configuration.set("hbase.zookeeper.quorum", "localhost:2181");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private Connection getConnection() {
        // Check the connection, not the configuration, and guard against
        // calling isClosed() on a null reference.
        if (connection == null || connection.isClosed()) {
            try {
                connection = ConnectionFactory.createConnection(configuration);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return connection;
    }

    public static Connection getHBaseConn() {
        return INSTANCE.getConnection();
    }

    public static Table getTable(String tableName) throws IOException {
        return INSTANCE.getConnection().getTable(TableName.valueOf(tableName));
    }

    public static void closeConn() {
        if (connection != null) {
            try {
                connection.close();
            } catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }
}
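A quick usage sketch of the connection class, assuming a local HBase with ZooKeeper on localhost:2181; the class name HBaseConnDemo and the table name "test" are made up for illustration:

package com.spark.hbase.hbase_learning;

import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;

// Hypothetical smoke test for HBaseConn; the "test" table is an assumption,
// not something the class above creates.
public class HBaseConnDemo {
    public static void main(String[] args) throws Exception {
        Connection conn = HBaseConn.getHBaseConn();
        System.out.println("connection closed? " + conn.isClosed()); // expect false

        try (Table table = HBaseConn.getTable("test")) {
            System.out.println("table name: " + table.getName().getNameAsString());
        }
        HBaseConn.closeConn();
    }
}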
3. Operate HBase through the Java API with an HBaseUtil class
package com.spark.hbase.hbase_learning; // same package as HBaseConn so it resolves without an import

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseUtil {

    /**
     * Create an HBase table.
     *
     * @param tableName table name
     * @param cfs       column family names
     * @return whether the table was created
     */
    public static boolean createTable(String tableName, String[] cfs) {
        try (HBaseAdmin admin = (HBaseAdmin) HBaseConn.getHBaseConn().getAdmin()) {
            if (admin.tableExists(tableName)) {
                return false;
            }
            HTableDescriptor tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
            Arrays.stream(cfs).forEach(cf -> {
                HColumnDescriptor columnDescriptor = new HColumnDescriptor(cf);
                columnDescriptor.setMaxVersions(1);
                tableDescriptor.addFamily(columnDescriptor);
            });
            admin.createTable(tableDescriptor);
        } catch (Exception e) {
            e.printStackTrace();
            return false; // report failure instead of always returning true
        }
        return true;
    }

    /**
     * Delete an HBase table.
     *
     * @param tableName table name
     * @return whether the table was deleted
     */
    public static boolean deleteTable(String tableName) {
        try (HBaseAdmin admin = (HBaseAdmin) HBaseConn.getHBaseConn().getAdmin()) {
            admin.disableTable(tableName); // a table must be disabled before deletion
            admin.deleteTable(tableName);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Insert a single cell.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @param cfName    column family name
     * @param qualifier column qualifier
     * @param data      cell value
     * @return whether the put succeeded
     */
    public static boolean putRow(String tableName, String rowKey, String cfName,
                                 String qualifier, String data) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(cfName), Bytes.toBytes(qualifier), Bytes.toBytes(data));
            table.put(put);
        } catch (IOException ioe) {
            ioe.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Insert a batch of rows.
     */
    public static boolean putRows(String tableName, List<Put> puts) {
        try (Table table = HBaseConn.getTable(tableName)) {
            table.put(puts);
        } catch (IOException ioe) {
            ioe.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Fetch a single row.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @return the query result, or null on error
     */
    public static Result getRow(String tableName, String rowKey) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Get get = new Get(Bytes.toBytes(rowKey));
            return table.get(get);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Fetch a single row with a filter list applied.
     */
    public static Result getRow(String tableName, String rowKey, FilterList filterList) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Get get = new Get(Bytes.toBytes(rowKey));
            get.setFilter(filterList);
            return table.get(get);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Full-table scan. The caller is responsible for closing the returned scanner.
     */
    public static ResultScanner getScanner(String tableName) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Scan scan = new Scan();
            scan.setCaching(1000);
            return table.getScanner(scan);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Range scan over a batch of rows.
     *
     * @param tableName   table name
     * @param startRowKey start row key (inclusive)
     * @param endRowKey   stop row key (exclusive)
     * @return a ResultScanner, or null on error; the caller must close it
     */
    public static ResultScanner getScanner(String tableName, String startRowKey,
                                           String endRowKey) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes(startRowKey));
            scan.setStopRow(Bytes.toBytes(endRowKey));
            scan.setCaching(1000);
            return table.getScanner(scan);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Range scan with a filter list applied.
     */
    public static ResultScanner getScanner(String tableName, String startRowKey,
                                           String endRowKey, FilterList filterList) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Scan scan = new Scan();
            scan.setStartRow(Bytes.toBytes(startRowKey));
            scan.setStopRow(Bytes.toBytes(endRowKey));
            scan.setFilter(filterList);
            scan.setCaching(1000);
            return table.getScanner(scan);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        return null;
    }

    /**
     * Delete a whole row.
     *
     * @param tableName table name
     * @param rowKey    row key
     * @return whether the delete succeeded
     */
    public static boolean deleteRow(String tableName, String rowKey) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            table.delete(delete);
        } catch (IOException ioe) {
            ioe.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Delete an entire column family from a table.
     */
    public static boolean deleteColumnFamily(String tableName, String cfName) {
        try (HBaseAdmin admin = (HBaseAdmin) HBaseConn.getHBaseConn().getAdmin()) {
            admin.deleteColumn(tableName, cfName);
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Delete a single cell (one qualifier of one row).
     */
    public static boolean deleteQualifier(String tableName, String rowKey, String cfName,
                                          String qualifier) {
        try (Table table = HBaseConn.getTable(tableName)) {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            delete.addColumn(Bytes.toBytes(cfName), Bytes.toBytes(qualifier));
            table.delete(delete);
        } catch (IOException ioe) {
            ioe.printStackTrace();
            return false;
        }
        return true;
    }
}
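The JUnit dependency declared in the POM can drive a quick end-to-end check of these utilities. A minimal sketch, assuming a running local HBase; the test class and the names "FileTable", "fileInfo", and "rowkey1" are made up for illustration. Because the POM sets skipTests=true, run it explicitly with mvn test -DskipTests=false.

package com.spark.hbase.hbase_learning;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;

// Hypothetical smoke test: create a table, write a cell, read it back,
// scan the table, then clean up.
public class HBaseUtilTest {

    @Test
    public void createPutGetScanDelete() throws Exception {
        HBaseUtil.createTable("FileTable", new String[]{"fileInfo"});
        HBaseUtil.putRow("FileTable", "rowkey1", "fileInfo", "name", "file1.txt");

        Result result = HBaseUtil.getRow("FileTable", "rowkey1");
        Assert.assertEquals("file1.txt",
                Bytes.toString(result.getValue(Bytes.toBytes("fileInfo"), Bytes.toBytes("name"))));

        // The scanner must be closed by the caller, so use try-with-resources.
        try (ResultScanner scanner = HBaseUtil.getScanner("FileTable")) {
            Assert.assertNotNull(scanner.next());
        }

        HBaseUtil.deleteRow("FileTable", "rowkey1");
        HBaseUtil.deleteTable("FileTable");
        HBaseConn.closeConn();
    }
}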
This article has walked through operating HBase from the Java API in detail: project setup, connection configuration, table creation and deletion, and the core insert, query, and delete operations on data.