转载自:http://blog.sina.com.cn/s/blog_8c6d7ff60100zexn.html
一、HBase JavaAPI:
1.HbaseConfiguration
关系:org.apache.hadoop.hbase.HBaseConfiguration
作用:通过此类可以对HBase进行配置
2.HBaseAdmin
关系:org.apache.hadoop.hbase.client.HBaseAdmin
作用:提供一个接口来管理HBase数据库中的表信息。它提供创建表、删除表等方法。
3.HTableDescriptor
关系:org.apache.hadoop.hbase.HTableDescriptor
作用:包含了表的名字及其对应列族。 提供的方法有
void addFamily(HColumnDescriptor) 添加一个列族
HColumnDescriptor removeFamily(byte[]column) 移除一个列族
byte[] getName() 获取表的名字
byte[] getValue(byte[]key) 获取属性的值
void setValue(String key,Stringvalue) 设置属性的值
4.HColumnDescriptor
关系:org.apache.hadoop.hbase.HColumnDescriptor
作用:维护关于列的信息。提供的方法有
byte[] getName() 获取列族的名字
byte[] getValue() 获取对应的属性的值
void setValue(String key,Stringvalue) 设置对应属性的值
5.HTable
关系:org.apache.hadoop.hbase.client.HTable
作用:用户与HBase表进行通信。此方法对于更新操作来说是非线程安全的,如果启动多个线程尝试与单个HTable实例进行通信,那么写缓冲器可能会崩溃。
6.Put
关系:org.apache.hadoop.hbase.client.Put
作用:用于对单个行执行添加操作
7.Get
关系:org.apache.hadoop.hbase.client.Get
作用:用于获取单个行的相关信息
8.Result
关系:org.apache.hadoop.hbase.client.Result
作用:存储Get或Scan操作后获取的单行值。
9.ResultScanner
关系:org.apache.hadoop.hbase.client.ResultScanner(接口)
作用:客户端获取值的接口。
二、示例:
以下是一个完整的代码示例,基于hbase-0.90.3编写
类:
HBaseOperation 功能: 创建、删除表,增加、删除、查询记录。
类:HBaseTest 功能: 实例化HBaseOperation,使用其相应方法。
2.1HBaseOperation源码如下:
package
model;
import
java.io.IOException;
import
java.util.ArrayList;
import
java.util.List;
import
org.apache.hadoop.conf.Configuration;
import
org.apache.hadoop.hbase.HBaseConfiguration;
import
org.apache.hadoop.hbase.HColumnDescriptor;
import
org.apache.hadoop.hbase.HTableDescriptor;
import
org.apache.hadoop.hbase.client.Delete;
import
org.apache.hadoop.hbase.client.Get;
import
org.apache.hadoop.hbase.client.HBaseAdmin;
import
org.apache.hadoop.hbase.client.HTable;
import
org.apache.hadoop.hbase.client.Put;
import
org.apache.hadoop.hbase.client.Result;
import
org.apache.hadoop.hbase.client.ResultScanner;
import
org.apache.hadoop.hbase.client.Scan;
public
class HBaseOperation {
//.相关属性
private Configurationconf
;
private HBaseAdminadmin;
public HBaseOperation(Configuration conf)
throws IOException{
this.conf=HBaseConfiguration.create(conf);
this.admin
=new
HBaseAdmin(this.conf);
}
public
HBaseOperation()throws
IOException{
Configuration cnf =
new
Configuration();
this.conf=HBaseConfiguration.create(cnf);
this.admin=new
HBaseAdmin(this.conf);
}
//1.创建表
public
void createTable(StringtableName,String colFamilies[])
throws
IOException{
if(this.admin.tableExists(tableName)){
System.out.println("Table: "+tableName+"
already exists!");
}else{
HTableDescriptor dsc =
new
HTableDescriptor(tableName);
int
len = colFamilies.length;
for(int
i=0;i<len;i++){
HColumnDescriptor family =
new
HColumnDescriptor(colFamilies[i]);
dsc.addFamily(family);
}
admin.createTable(dsc);
System.out.println("创建表成功");
}
}
//2.删除表
public
void deleteTable(String tableName)throws
IOException{
if(this.admin.tableExists(tableName)){
admin.deleteTable(tableName);
System.out.println("删除表成功");
}else{
System.out.println("Table Not Exists!");
}
}
//3.插入一行记录
public
void insertRecord(String tableName,Stringrowkey,String family,String qualifier,String value)throws
IOException {
HTable table =
new
HTable(this.conf,tableName);
Put put=
new
Put(rowkey.getBytes());
put.add(family.getBytes(),qualifier.getBytes(),value.getBytes());
table.put(put);
System.out.println("插入行成功");
}
//4.删除一行记录
public
void deleteRecord(String tableName,Stringrowkey)
throws
IOException{
HTable table =
new
HTable(this.conf,tableName);
Delete del =new
Delete(rowkey.getBytes());
table.delete(del);
System.out.println("删除行成功");
}
//5.获取一行记录
public
Result getOneRecord(StringtableName,String rowkey)
throws
IOException{
HTable table =new
HTable(this.conf,tableName);
Get get =new
Get(rowkey.getBytes());
Result rs = table.get(get);
return
rs;
}
//6.获取所有记录
public
List<Result>getAllRecord(String tableName)
throws IOException{
HTable table =
new
HTable(this.conf,tableName);
Scanscan =
new
Scan();
ResultScanner
scanner =table.getScanner(scan);
List<Result> list=new
ArrayList<Result>();
for(Result r:scanner){
list.add(r);
}
scanner.close();
return
list;
}
}
2.2HBaseTest源码如下:
package
model;
import
java.io.IOException;
import
java.util.Iterator;
import
java.util.List;
import
org.apache.hadoop.conf.Configuration;
import
org.apache.hadoop.hbase.KeyValue;
import
org.apache.hadoop.hbase.client.Result;
public
class HBaseTest {
public
static void
main(String[] args)throws
IOException {
//
TODO
Auto-generated methodstub
System.out.println("hello veagle and serapy ");
//1.初始化HBaseOperation
Configuration conf =
new
Configuration();
//与hbase/conf/hbase-site.xml中hbase.zookeeper.quorum配置的值相同
conf.set("hbase.zookeeper.quorum",
"172.21.7.124");
//与hbase/conf/hbase-site.xml中hbase.zookeeper.property.clientPort配置的值相同
conf.set("hbase.zookeeper.property.clientPort",
"2181");
HBaseOperation hbase =
new
HBaseOperation(conf);
//2.测试相应操作
//2.1创建表
String tableName =
"blog";
String colFamilies[]={"article","author"};
hbase.createTable(tableName, colFamilies);
//2.2插入一条记录
hbase.insertRecord(tableName,
"row1",
"article", "title",
"Hadoop");
hbase.insertRecord(tableName,
"row1",
"author", "name",
"veagle");
hbase.insertRecord(tableName,
"row1",
"author", "nickname",
"serapy");
//2.3查询一条记录
Result rs1 =hbase.getOneRecord(tableName,
"row1");
for(KeyValue kv:rs1.raw()){
System.out.println(new
String(kv.getRow()));
System.out.println(new
String(kv.getFamily()));
System.out.println(new
String(kv.getQualifier()));
System.out.println(new
String(kv.getValue()));
}
//2.4查询整个Table
List<Result>list =null;
list= hbase.getAllRecord("blog");
Iterator<Result>it = list.iterator();
while(it.hasNext()){
Resultrs2=it.next();
for(KeyValue kv :rs2.raw()){
System.out.print("row key
is : " + new
String(kv.getRow()));
System.out.print("family
is : "+ new
String(kv.getFamily()));
System.out.print("qualifier
is:" + new
String(kv.getQualifier()));
System.out.print("timestamp
is:" +kv.getTimestamp());
System.out.println("Value
is : "+ new
String(kv.getValue()));
}
}
}
}
2.3执行结果
插入行成功
插入行成功
插入行成功
row1
article
title
Hadoop
row1
author
name
veagle
row1
author
nickname
serapy
row key is : row1familyis :articlequalifier is:titletimestampis:1322728761046Value is :Hadoop
row key is : row1familyis :authorqualifier is:nametimestamp is:1322728761056Value is :veagle
row key is : row1familyis :authorqualifier is:nicknametimestampis:1322728761060Value is :serapy