1. Reading a row with the Get method
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
public class GetRow {
    public static void main(String[] args) throws IOException {
        // Pick up hbase-site.xml from the classpath
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "ub");

        // Fetch row "row-1", asking for up to 3 versions of each cell
        Get get = new Get(Bytes.toBytes("row-1"));
        get.setMaxVersions(3);
        Result r = table.get(get);

        System.out.println(r.size());                  // number of KeyValues returned
        System.out.println(Bytes.toString(r.value())); // value of the first cell in the result

        // Walk every KeyValue in the result
        for (KeyValue kv : r.raw()) {
            System.out.println(Bytes.toString(kv.getRow()));
            System.out.println(Bytes.toString(kv.getQualifier()) + ":" + Bytes.toString(kv.getValue()));
        }
        System.out.println(r);

        table.close();
    }
}
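If only a single cell is needed, the Get can be narrowed to one column and the value read back directly. A minimal sketch, reusing the table handle from the example above and assuming the family "cf" / qualifier "tmp" used elsewhere in these examples:

// Sketch: read one specific cell instead of the whole row.
// Family "cf" and qualifier "tmp" are assumptions carried over from the other examples.
Get singleGet = new Get(Bytes.toBytes("row-1"));
singleGet.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("tmp"));   // restrict the read to one column
Result res = table.get(singleGet);
byte[] value = res.getValue(Bytes.toBytes("cf"), Bytes.toBytes("tmp"));
if (value != null) {
    System.out.println(Bytes.toString(value));
}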
2. Putting a single record into HBase
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class PutTable {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "ub");

        // Write one cell: row "row-145", column cf:tmp,
        // with an explicit timestamp of 1 and value "value-2"
        Put put = new Put(Bytes.toBytes("row-145"));
        put.add(Bytes.toBytes("cf"), Bytes.toBytes("tmp"), 1, Bytes.toBytes("value-2"));
        table.put(put);

        table.close();
    }
}
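The explicit timestamp argument is optional; with the shorter Put.add(family, qualifier, value) form the region server stamps the cell with its current time when the write is applied. A sketch of the same write without a fixed version, again reusing the table handle above:

Put serverTimestamped = new Put(Bytes.toBytes("row-145"));
// No timestamp supplied, so the server assigns the current time to the cell
serverTimestamped.add(Bytes.toBytes("cf"), Bytes.toBytes("tmp"), Bytes.toBytes("value-2"));
table.put(serverTimestamped);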
3. Putting multiple records in one call
package hTableManagement;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class PutList {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "ub");

        List<Put> puts = new ArrayList<Put>();

        // Both Puts hit the same cell (row-1, cf:tmp2) with the same timestamp 2,
        // so only one of the two values survives after the batch is applied
        Put put = new Put(Bytes.toBytes("row-1"));
        put.add(Bytes.toBytes("cf"), Bytes.toBytes("tmp2"), 2, Bytes.toBytes("value-tmp2"));
        Put put2 = new Put(Bytes.toBytes("row-1"));
        put2.add(Bytes.toBytes("cf"), Bytes.toBytes("tmp2"), 2, Bytes.toBytes("value-tmp3"));
        puts.add(put);
        puts.add(put2);

        // Submit the whole list in a single client call
        table.put(puts);

        table.close();
    }
}
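For larger batches it usually pays to disable auto-flush so the client buffers Puts and ships them in bulk instead of issuing one RPC per put(). A sketch against the same HTable instance; the 2 MB buffer size is an arbitrary illustrative value:

table.setAutoFlush(false);                  // buffer edits on the client side
table.setWriteBufferSize(2 * 1024 * 1024);  // flush automatically once roughly 2 MB have accumulated
for (Put p : puts) {
    table.put(p);                           // goes into the local write buffer, not straight to the server
}
table.flushCommits();                       // push whatever is still buffered to the region servers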
4. Partial Key Scan: scanning a range of row keys
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
public class ScanParticalKey {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "p9");

        // Scan the half-open range [startRow, stopRow): the start key is
        // inclusive, the stop key is exclusive
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("5e172404-1345719473421-224269"));
        scan.setStopRow(Bytes.toBytes("5e172404-1345719473421-2242698"));
        scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("p1"));

        ResultScanner rs = table.getScanner(scan);
        try {
            for (Result rr = rs.next(); rr != null; rr = rs.next()) {
                System.out.println("Found Row:" + rr);
            }
        } finally {
            // Release the scanner whether or not the loop completes normally
            rs.close();
        }

        table.close();
    }
}
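By default the scanner fetches one row per RPC; raising the caching value lets each round trip to the region server return a batch of rows. A sketch on the same scan (the value 100 is just an illustrative choice), iterating with ResultScanner's Iterable interface:

scan.setCaching(100);                       // up to 100 rows per RPC instead of one
ResultScanner cachedScanner = table.getScanner(scan);
try {
    for (Result rr : cachedScanner) {       // ResultScanner is Iterable<Result>
        System.out.println("Found Row:" + rr);
    }
} finally {
    cachedScanner.close();
}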