// Detailed code and links (详细代码及链接)
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * @author 王海
 * @version V1.0
 * @package per.wanghai
 * @Description Demonstrates basic HBase client operations: listing tables,
 * creating a table with column families, and inserting rows.
 * @Date 2017/10/29 23:23
 */
public class SomeHbaseAPI {
private final Logger logger = LoggerFactory.getLogger(SomeHbaseAPI.class);
/**
 * Logs the names of all tables visible to the given Admin.
 *
 * @param admin HBase Admin handle; the caller owns its lifecycle (not closed here)
 * @throws IOException if the RPC to the HBase master fails
 */
protected void listTable(Admin admin) throws IOException {
    HTableDescriptor[] tableDescriptors = admin.listTables();
    // Use the class's SLF4J logger instead of System.out so output goes
    // through the same log routing as the rest of this class.
    logger.info("您的HBase有以下表:");
    for (int i = 0; i < tableDescriptors.length; i++) {
        logger.info("表{}:{}", i, tableDescriptors[i].getNameAsString());
    }
}
/**
 * Creates a table with the given column families, unless it already exists.
 *
 * <p>{@code columnFamilies} is a varargs parameter: Java allows only one
 * varargs parameter per method and it must be declared last.
 *
 * @param admin          HBase Admin handle; the caller owns its lifecycle (not closed here)
 * @param tableName      name of the table to create
 * @param columnFamilies one or more column-family names for the new table
 * @throws IOException if the existence check or create RPC fails
 */
protected void createTable(Admin admin, TableName tableName, String... columnFamilies) throws IOException {
    // Guard clause: creating an existing table would throw, so warn and bail out.
    if (admin.tableExists(tableName)) {
        logger.warn("表:{}已经存在!", tableName.getNameAsString());
        return;
    }
    HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
    for (String columnFamily : columnFamilies) {
        tableDescriptor.addFamily(new HColumnDescriptor(columnFamily));
    }
    admin.createTable(tableDescriptor);
    // BUG FIX: the original closed the caller-supplied Admin in a finally block.
    // Closing a resource the caller owns breaks any later use of the same Admin
    // (e.g. a subsequent listTable call); lifecycle management belongs to the caller.
    logger.info("表:{}创建成功!", tableName.getNameAsString());
}
/**
 * Inserts data one row at a time.
 *
 * @throws IOException if the put operation fails
 * TODO: batch inserts can use Table.put(List&lt;Put&gt; list)
 */
protected void putOneByOne(Connection connection, TableName tableName,
byte[] rowKey, String columnFamily, String column, String data) throws IOException {
Table table = null;
try {
table = connection.getTable(tableName);
Put p = new Put(rowKey);
p.addColumn(Bytes.toBytes(columnFamily), Bytes.toBytes(column), Bytes.toBytes(data));
table.put(p);
logger.info("表:{}已更新!!!", tableName.