写在前边:为表添加协处理器失败后,建议先删除表、重新建表,然后再次添加协处理器。我遇到的情况是 HBase 一直读取缓存中的旧协处理器,也可能是我的环境问题。
package com.ws.coprocessor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALEdit;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Optional;
public class myCoprocess implements RegionCoprocessor, RegionObserver {
@Override
public Optional<RegionObserver> getRegionObserver() {
return Optional.of(this);
}
// alter 'star', METHOD => 'table_att', 'Coprocessor'=>'hdfs://dream1:9000/starcop/starcop.jar| com.ws.coprocessor.myCoprocess|100|'
@Override
public void prePut(ObserverContext<RegionCoprocessorEnvironment> c, Put put, WALEdit edit, Durability durability) throws IOException {
Log log = LogFactory.getLog(myCoprocess.class);
Configuration conf = HBaseConfiguration.create();
conf.set("hbase.zookeeper.quorum","dream1:2181,dream2:2181,dream3:2181");
Connection conn = ConnectionFactory.createConnection(conf);
CellScanner cellScanner = put.cellScanner();
if (cellScanner.advance()){
Cell current = cellScanner.current();
byte[] star = CellUtil.cloneRow(current);
log.error("传入行键:"+Bytes.toString(star));
byte[] mine = CellUtil.cloneValue(current);
log.error("传入值:"+Bytes.toString(mine));
Table fans = conn.getTable(TableName.valueOf("fans"));
Put fs = new Put(mine);
fs.addColumn("f".getBytes(),star,star);
fans.put(fs);
conn.close();
}else{
Table fans = conn.getTable(TableName.valueOf("fans"));
Put fs = new Put("erro".getBytes());
fs.addColumn("f".getBytes(),"erro".getBytes(),"erro".getBytes());
fans.put(fs);
}
}
}
添加协处理器
alter 'star', METHOD => 'table_att', 'Coprocessor'=>'hdfs://dream1:9000/starcop/starcop.jar| com.ws.coprocessor.myCoprocess|100|'
alter 'users', METHOD => 'table_att', 'Coprocessor'=>'hdfs://<namenode>:<port>/user/<hadoop-user>/coprocessor.jar| org.myname.hbase.Coprocessor.RegionObserverExample|1073741823|arg1=1,arg2=2'
'users' 拦截的目标表
METHOD =>
'table_att' 表示 alter 命令要对 users 表做属性修改操作
hdfs://<namenode>:<port>/user/<hadoop-user>/coprocessor.jar 协处理器类所在的 jar 包
org.myname.hbase.Coprocessor.RegionObserverExample 自定义的协处理器实现类
1073741823 协处理器的执行顺序号,可以为任意数值,小的会优先执行
arg1=1,arg2=2 自定义协处理器所需要的参数
移出协处理器
alter 'students',METHOD =>'table_att_unset',NAME => 'coprocessor$1'