package hadoop;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper.Context;
import com.sun.jersey.core.util.StringIgnoreCaseKeyComparator;
import hadoop.HbaseMapreduce.HbaseMapper;
/**
 * MapReduce job that copies cells from one HBase table ("mapreduce", rows
 * row1..row9) into another ("result").
 *
 * Mapper emits (rowKey, "qualifier,value") per cell; reducer parses that
 * pair back apart and writes it as a Put into family "f" of the target table.
 */
public class HbaseToHbase {

    /** Emits one (rowKey, "qualifier,value") record per cell of each scanned row. */
    static class HbaseMap extends TableMapper<Text, Text> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            for (Cell cell : value.rawCells()) {
                String row = Bytes.toString(CellUtil.cloneRow(cell));
                String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
                String cellValue = Bytes.toString(CellUtil.cloneValue(cell));
                // NOTE(review): the reducer splits on the FIRST comma, so commas inside
                // the value are safe, but a qualifier containing ',' would be mis-split
                // — confirm qualifiers are comma-free, or switch to a length-prefixed encoding.
                context.write(new Text(row), new Text(qualifier + "," + cellValue));
            }
        }
    }

    /** Rebuilds a Put per mapper record and writes it to the output table. */
    static class HbaseRed extends TableReducer<Text, Text, ImmutableBytesWritable> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // Bytes.toBytes is UTF-8, unlike String.getBytes() which uses the
            // platform default charset.
            byte[] row = Bytes.toBytes(key.toString());
            for (Text text : values) {
                // Limit 2 keeps commas inside the cell value intact and, unlike
                // split(","), does not drop a trailing empty field — "qual," would
                // otherwise yield a 1-element array and an AIOOBE below.
                String[] parts = text.toString().split(",", 2);
                if (parts.length < 2) {
                    continue; // malformed record; skip defensively
                }
                Put put = new Put(row);
                // addColumn replaces the deprecated Put.add(byte[],byte[],byte[])
                // (removed in HBase 2.x; available since 1.0).
                put.addColumn(Bytes.toBytes("f"), Bytes.toBytes(parts[0]), Bytes.toBytes(parts[1]));
                // TableOutputFormat ignores this key — the Put's row decides where
                // the cell lands — so emit the actual row key rather than a constant.
                context.write(new ImmutableBytesWritable(row), put);
            }
        }
    }

    /**
     * Configures and submits the job: scans rows [row1, row9) of table
     * "mapreduce" and writes the copied cells into table "result".
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.10.250");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        Job job = Job.getInstance(conf, "hbasedemo");
        job.setJarByClass(HbaseToHbase.class);

        Scan scan = new Scan();
        scan.setCaching(200);        // larger RPC batches for a full-table MR scan
        scan.setCacheBlocks(false);  // recommended for MR scans: don't pollute the block cache
        scan.setStartRow(Bytes.toBytes("row1"));
        scan.setStopRow(Bytes.toBytes("row9")); // stop row is exclusive
        scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, Bytes.toBytes("mapreduce"));

        List<Scan> scans = new ArrayList<Scan>();
        scans.add(scan);

        TableMapReduceUtil.initTableMapperJob(scans, HbaseMap.class, Text.class, Text.class, job);
        TableMapReduceUtil.initTableReducerJob("result", HbaseRed.class, job);

        // Propagate job success/failure as the process exit code instead of
        // silently discarding waitForCompletion's result.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}