1,RowFilter的使用
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;
public class MyRowFilter {
    /**
     * Demonstrates {@link RowFilter}: scans table "p9" and keeps only rows whose
     * row key contains the substring "6jnvv2k" (EQUAL + SubstringComparator means
     * "the comparator matches the row key").
     *
     * @param args unused
     * @throws IOException if the HBase connection or scan fails
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "p9");
        try {
            Scan scan = new Scan();
            // Substring match against the row key; EQUAL here means "comparator matched".
            RowFilter filter1 = new RowFilter(CompareFilter.CompareOp.EQUAL,
                    new SubstringComparator("6jnvv2k"));
            scan.setFilter(filter1);
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result rr = rs.next(); rr != null; rr = rs.next()) {
                    for (KeyValue kv : rr.raw()) {
                        System.out.println("KV:" + kv + "value: " + Bytes.toString(kv.getValue()));
                    }
                }
            } finally {
                // Release the scanner lease on ALL paths, not only on exception
                // (the original closed it only in the catch block, leaking on success).
                rs.close();
            }
        } finally {
            table.close();
        }
    }
}
2,PageFilter返回指定数目的结果
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
public class PageFilterTest {
    /**
     * Demonstrates {@link PageFilter}: scans column cf:p1 of table "p9" and limits
     * the number of rows returned to 2.
     *
     * <p>NOTE(review): PageFilter applies the limit per region server, so a scan
     * spanning multiple regions may return more than the page size in total; the
     * client must track the count itself for an exact page.
     *
     * @param args unused
     * @throws IOException if the HBase connection or scan fails
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "p9");
        try {
            Scan scan = new Scan();
            scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("p1"));
            // At most 2 rows per region server (see note above).
            PageFilter pageFilter = new PageFilter(2);
            scan.setFilter(pageFilter);
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result rr = rs.next(); rr != null; rr = rs.next()) {
                    for (KeyValue kv : rr.raw()) {
                        System.out.println("KV:" + kv + "value: " + Bytes.toString(kv.getValue()));
                    }
                }
            } finally {
                // Always close the scanner; the original leaked it on the success path.
                rs.close();
            }
        } finally {
            table.close();
        }
    }
}
3,SingleColumnValueFilter测试
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
public class SingleColumnValueFilterTest {
    /**
     * Demonstrates {@link SingleColumnValueFilter}: returns whole rows of table
     * "p9" whose column cf:p1 equals the value "13826021287".
     *
     * @param args unused
     * @throws IOException if the HBase connection or scan fails
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "p9");
        try {
            Scan scan = new Scan();
            SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
                    Bytes.toBytes("cf"),
                    Bytes.toBytes("p1"),
                    CompareOp.EQUAL,
                    Bytes.toBytes("13826021287"));
            // Drop rows that do not have cf:p1 at all (default would keep them).
            filter1.setFilterIfMissing(true);
            // Test every stored version of the cell, not just the newest one.
            filter1.setLatestVersionOnly(false);
            scan.setFilter(filter1);
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result rr = rs.next(); rr != null; rr = rs.next()) {
                    for (KeyValue kv : rr.raw()) {
                        System.out.println("KV:" + kv + "value: " + Bytes.toString(kv.getValue()));
                    }
                }
            } finally {
                // Close on all paths; the original only closed inside the catch block.
                rs.close();
            }
        } finally {
            table.close();
        }
    }
}
4,SingleColumnValueFilter正则表达式测试
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
public class MyFilterList_Regex {
    /**
     * Demonstrates a {@link SingleColumnValueFilter} with a
     * {@link RegexStringComparator} inside a {@link FilterList}: within a bounded
     * row-key range of table "p9", keeps rows whose cf:p1 value matches the
     * pattern 138\d{8} (an 11-digit value starting with 138).
     *
     * @param args unused
     * @throws IOException if the HBase connection or scan fails
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "p9");
        try {
            Scan scan = new Scan();
            // Restrict the scan to a narrow key range (stop row is exclusive).
            scan.setStartRow(Bytes.toBytes("lgofboem9q6jnvv2kfcpj3saq09h8u5ygy8"));
            scan.setStopRow(Bytes.toBytes("lgofboem9q6jnvv2kfcpj3saq09h8u5ygy85"));
            // MUST_PASS_ALL = logical AND (only one filter here, so the list is
            // a demonstration of the wrapping, not of combination).
            FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            RegexStringComparator com = new RegexStringComparator("138\\d{8}");
            SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
                    Bytes.toBytes("cf"),
                    Bytes.toBytes("p1"),
                    CompareOp.EQUAL,
                    com);
            // Exclude rows missing cf:p1, and check every version of the cell.
            filter1.setFilterIfMissing(true);
            filter1.setLatestVersionOnly(false);
            filters.addFilter(filter1);
            scan.setFilter(filters);
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result rr = rs.next(); rr != null; rr = rs.next()) {
                    for (KeyValue kv : rr.raw()) {
                        System.out.println("KV:" + kv + "value: " + Bytes.toString(kv.getValue()));
                    }
                }
            } finally {
                // Close on all paths; the original leaked the scanner on success.
                rs.close();
            }
        } finally {
            table.close();
        }
    }
}
5,FilterList的MUST_PASS_ALL测试
package hTableManagement;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
public class MyFilterList_NotAll {
    /**
     * Demonstrates {@link FilterList} with MUST_PASS_ALL (logical AND): within a
     * bounded row-key range of table "p9", keeps rows where cf:p1 equals
     * "13826021287" AND cf:p2 equals "13828404306".
     *
     * <p>NOTE(review): the class name suggests "not all", but the operator used
     * is MUST_PASS_ALL; for a logical OR use MUST_PASS_ONE instead.
     *
     * @param args unused
     * @throws IOException if the HBase connection or scan fails
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "p9");
        try {
            Scan scan = new Scan();
            // Only the two columns the filters examine need to be fetched.
            scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("p1"));
            scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("p2"));
            scan.setStartRow(Bytes.toBytes("lgofboem9q6jnvv2kfcpj3saq09h8u5ygy8"));
            scan.setStopRow(Bytes.toBytes("lgofboem9q6jnvv2kfcpj3saq09h8u5ygy89"));
            // MUST_PASS_ALL: a row survives only if every added filter accepts it.
            FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            SingleColumnValueFilter filter1 = new SingleColumnValueFilter(
                    Bytes.toBytes("cf"),
                    Bytes.toBytes("p1"),
                    CompareOp.EQUAL,
                    Bytes.toBytes("13826021287"));
            filter1.setFilterIfMissing(true);
            filter1.setLatestVersionOnly(false);
            filters.addFilter(filter1);
            SingleColumnValueFilter filter2 = new SingleColumnValueFilter(
                    Bytes.toBytes("cf"),
                    Bytes.toBytes("p2"),
                    CompareOp.EQUAL,
                    Bytes.toBytes("13828404306"));
            filter2.setFilterIfMissing(true);
            filter2.setLatestVersionOnly(false);
            filters.addFilter(filter2);
            scan.setFilter(filters);
            ResultScanner rs = table.getScanner(scan);
            try {
                for (Result rr = rs.next(); rr != null; rr = rs.next()) {
                    for (KeyValue kv : rr.raw()) {
                        System.out.println("KV:" + kv + "value: " + Bytes.toString(kv.getValue()));
                    }
                }
            } finally {
                // Close on all paths; the original only closed inside the catch block.
                rs.close();
            }
        } finally {
            table.close();
        }
    }
}
6,总结
本文通过实战演示了HBase中几种常用过滤器的使用方法,包括RowFilter、PageFilter、SingleColumnValueFilter、RegexStringComparator和FilterList等,并展示了如何组合使用这些过滤器来实现更复杂的查询需求。
1466

被折叠的评论
为什么被折叠?



