1.目标:将fruit表中的一部分数据,通过mr迁入到fruit1表中
从hbase读数据,经过mr,最终写入hbase中
//数据样式
1001 apple red
1002 pear yellow
1003 pineapple yellow
/opt/hadoop/bin/yarn jar /opt/hbase/lib/hbase-server-1.4.3.jar importtsv
-Dimporttsv.columns=HBASE_ROW_KEY,info:name,info:color fruit
hdfs://192.168.8.10:9000/fruit.tsv
1.创建fruit.tsv,导入数据,上传到hdfs / 下
2.hbase 中 创建 fruit 表
3.把下面自定义 mr打包 上传到集群中 // 例如: /opt 下
4.运行命令:
/opt/hadoop/bin/yarn jar /opt/mr.jar mr1(主类名)
5.hbase中 scan 'fruit1' ,查看数据是否写入
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * HBase-to-HBase MapReduce job: copies the {@code info:name} column of every
 * row in the source table {@code fruit} into the target table {@code fruit1}.
 *
 * Run with: /opt/hadoop/bin/yarn jar /opt/mr.jar mr1
 */
public class mr1 {

    /**
     * Mapper: scans the source table and, for each row, emits a Put that
     * contains only the cells whose qualifier is "name".
     */
    public static class MyMapper extends TableMapper<ImmutableBytesWritable, Put> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            // key.get() yields the raw rowkey bytes for this row.
            Put put = new Put(key.get());
            // Walk every cell of the row; keep only the "name" qualifier.
            for (Cell cell : value.rawCells()) {
                if ("name".equals(Bytes.toString(CellUtil.cloneQualifier(cell)))) {
                    put.add(cell);
                }
            }
            // Skip rows with no "name" cell — HBase rejects an empty Put
            // ("No columns to insert") and would fail the task.
            if (!put.isEmpty()) {
                context.write(key, put);
            }
        }
    }

    /**
     * Reducer: passes each Put through unchanged to the target table.
     * (Aggregation could be done here if needed.)
     */
    public static class MyReduce extends TableReducer<ImmutableBytesWritable, Put, NullWritable> {
        @Override
        protected void reduce(ImmutableBytesWritable key, Iterable<Put> values, Context context)
                throws IOException, InterruptedException {
            for (Put value : values) {
                context.write(NullWritable.get(), value);
            }
        }
    }

    /**
     * Tool driver that wires the table mapper/reducer into a Job.
     * Extends Configured (not Configuration) so setConf/getConf come from
     * the standard Hadoop base class.
     */
    public static class Driver extends Configured implements Tool {
        @Override
        public int run(String[] args) throws Exception {
            Job job = Job.getInstance(getConf());
            job.setJarByClass(Driver.class);
            // Source: table "fruit", full scan, MyMapper emitting (rowkey, Put).
            TableMapReduceUtil.initTableMapperJob(
                    "fruit",
                    new Scan(),
                    MyMapper.class,
                    ImmutableBytesWritable.class,
                    Put.class,
                    job);
            // Target: table "fruit1" (was "q", which contradicted the stated goal).
            TableMapReduceUtil.initTableReducerJob(
                    "fruit1",
                    MyReduce.class,
                    job);
            return job.waitForCompletion(true) ? 0 : 1;
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration configuration = HBaseConfiguration.create();
        // Propagate the job's exit status to the shell (was silently discarded).
        int status = ToolRunner.run(configuration, new Driver(), args);
        System.exit(status);
    }
}