1.目标:实现将hdfs中的数据写入到hbase表中
//数据样式
1001 apple red
1002 pear yellow
1003 pineapple yellow
1.创建fruit.tsv,导入数据,上传到 HDFS 的 / 根目录下
2.hbase 中 创建 fruit2 表
3.把下面自定义 mr2打包 上传到集群中 // 例如: /opt 下
4.运行命令:
/opt/hadoop/bin/yarn jar /opt/mr2.jar mr2(主类名) /fruit.tsv (因为自定义中写了输入路径)
//如果读取本地目录
/opt/hadoop/bin/yarn jar /opt/mr2.jar mr2 file:///opt/fruit.tsv
5.hbase中 scan 'fruit2' ,查看数据是否写入
import java.io.IOException;

import javax.ws.rs.PUT;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * MapReduce job that loads a TSV file (rowkey \t name \t color) from
 * HDFS (or a local file:// path) into the HBase table "fruit2".
 *
 * Usage: yarn jar mr2.jar mr2 /fruit.tsv
 */
public class mr2 {

    /**
     * Map phase: parses each input line and emits one HBase {@link Put}
     * per record, keyed by {@link NullWritable}.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, NullWritable, Put> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // One input line, e.g. "1001\tapple\tred"
            String line = value.toString();
            String[] split = line.split("\t");
            // Skip malformed lines instead of failing the task with
            // ArrayIndexOutOfBoundsException.
            if (split.length < 3) {
                return;
            }
            // Column 0 is the row key.
            Put put = new Put(Bytes.toBytes(split[0]));
            // BUGFIX: the original passed the literal strings "split[1]" /
            // "split[2]", storing those characters in every row instead of
            // the actual name/color values.
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(split[1]));
            put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("color"), Bytes.toBytes(split[2]));
            context.write(NullWritable.get(), put);
        }
    }

    /**
     * Reduce phase: forwards each Put unchanged to the HBase table that
     * {@code TableMapReduceUtil.initTableReducerJob} wires up.
     */
    public static class MyReduce extends TableReducer<NullWritable, Put, NullWritable> {
        @Override
        protected void reduce(NullWritable key, Iterable<Put> values, Context context)
                throws IOException, InterruptedException {
            for (Put value : values) {
                context.write(NullWritable.get(), value);
            }
        }
    }

    /**
     * Job driver. Implements {@link Tool} so ToolRunner can inject the
     * configuration via {@link #setConf}.
     * (The original also extended {@code Configuration}, which made the
     * driver itself a Configuration for no reason — removed.)
     */
    public static class Driver implements Tool {
        private Configuration configuration = null;

        @Override
        public void setConf(Configuration conf) {
            configuration = conf;
        }

        @Override
        public Configuration getConf() {
            return configuration;
        }

        /**
         * Builds and submits the job.
         *
         * @param args args[0] = input path(s), e.g. /fruit.tsv or file:///opt/fruit.tsv
         * @return 0 on success, 1 on failure
         */
        @Override
        public int run(String[] args) throws Exception {
            Job job = Job.getInstance(configuration);
            job.setJarByClass(Driver.class);

            // Mapper emits <NullWritable, Put>.
            job.setMapperClass(MyMapper.class);
            job.setMapOutputKeyClass(NullWritable.class);
            // BUGFIX: was PUT.class — the javax.ws.rs.PUT annotation — which
            // is not a Writable; must be the HBase Put class.
            job.setMapOutputValueClass(Put.class);

            // Reducer writes into the "fruit2" table (the original passed
            // "q2", contradicting both its own comment and the setup notes).
            TableMapReduceUtil.initTableReducerJob("fruit2", MyReduce.class, job);

            // Input path comes from the command line.
            FileInputFormat.addInputPaths(job, args[0]);

            boolean success = job.waitForCompletion(true);
            return success ? 0 : 1;
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration configuration = HBaseConfiguration.create();
        int exitCode = ToolRunner.run(configuration, new Driver(), args);
        // BUGFIX: the original always exited with 0, hiding job failures
        // from the shell; propagate the real result.
        System.exit(exitCode);
    }
}