package com.zsb.test.util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.gson.Gson;
/**
 * HBase utility class: holds one shared {@link Connection} (created once in a
 * static initializer from {@code context/jdbc.properties} on the classpath)
 * and offers helpers for table creation plus single/batch row insertion.
 *
 * <p>All handles ({@link Admin}, {@link Table}) are opened per call and closed
 * via try-with-resources; only the {@link Connection} is long-lived, which is
 * the intended HBase client usage (connections are heavyweight and
 * thread-safe, tables/admins are lightweight and not).
 *
 * @date 2016年8月15日
 * @author zhoushanbin
 */
public class HbaseUtil {

	private static final Logger logger = LoggerFactory.getLogger(HbaseUtil.class);

	// HBase client configuration assembled from context/jdbc.properties.
	private static Configuration config = null;
	// Shared, long-lived connection; null if the static initializer failed.
	private static Connection connection = null;

	static {
		Properties prop = new Properties();
		// try-with-resources: the original leaked this InputStream, and would
		// have thrown NPE inside Properties.load if the resource was missing.
		try (InputStream in = HbaseUtil.class.getClassLoader()
				.getResourceAsStream("context/jdbc.properties")) {
			if (in == null) {
				throw new IOException(
						"classpath resource context/jdbc.properties not found");
			}
			prop.load(in);
			config = HBaseConfiguration.create();
			config.set("hbase.zookeeper.property.clientPort",
					prop.getProperty("hbase.zookeeper.property.clientPort"));
			config.set("hbase.zookeeper.quorum",
					prop.getProperty("hbase.zookeeper.quorum"));
			config.set("hbase.master", prop.getProperty("hbase.master"));
			connection = ConnectionFactory.createConnection(config);
		} catch (IOException e1) {
			logger.error("加载数据库配置信息有误", e1);
		}
		// Windows workaround: the Hadoop client insists on locating
		// %HADOOP_HOME%\bin\winutils.exe, so point hadoop.home.dir at the
		// working directory and create an empty placeholder file there.
		File workaround = new File(".");
		System.getProperties().put("hadoop.home.dir",
				workaround.getAbsolutePath());
		new File("./bin").mkdirs();
		try {
			/***
			 * 读取hbase数据你首先需要一个client,jar包里不集成windows环境client的。故windows需要 winutils.exe
			 */
			new File("./bin/winutils.exe").createNewFile();
		} catch (IOException e) {
			logger.error("加载配置信息有误", e);
		}
	}

	/**
	 * Creates a new table with the given column families.
	 *
	 * @param tableName name of the table to create
	 * @param familys   column family names to add to the table
	 * @throws Exception if the table already exists or the RPC fails
	 */
	public static void createTable(String tableName, String[] familys)
			throws Exception {
		// try-with-resources: the original never closed the Admin handle.
		try (Admin admin = connection.getAdmin()) {
			HTableDescriptor hdes = new HTableDescriptor(
					TableName.valueOf(tableName));
			for (String family : familys) {
				hdes.addFamily(new HColumnDescriptor(family));
			}
			admin.createTable(hdes);
			logger.info("hbase 成功创建表{}", tableName);
		}
	}

	/**
	 * 当表不存在时创建表 — creates the table only if it is not already available.
	 *
	 * @param tableName name of the table to check/create
	 * @param familys   column family names used if creation is needed
	 * @throws Exception if the availability check or creation fails
	 */
	public synchronized static void createTableIfNeed(String tableName, String[] familys)
			throws Exception {
		// try-with-resources: the original leaked this Admin handle.
		try (Admin admin = connection.getAdmin()) {
			if (!admin.isTableAvailable(TableName.valueOf(tableName))) {
				// 创建table
				createTable(tableName, familys);
			}
		}
	}

	/**
	 * 插入一条记录 — writes a single cell (family:qualifier = value) at rowKey.
	 *
	 * @param tableName table to write to
	 * @param rowKey    row key of the record
	 * @param family    column family name
	 * @param qualifier column qualifier name
	 * @param value     cell value
	 * @throws Exception if the write fails
	 */
	public static void addRecord(String tableName, String rowKey,
			String family, String qualifier, String value) throws Exception {
		// try-with-resources: the original closed the table TWICE (close()
		// followed by a null-check close()) and leaked it if put() threw.
		try (Table table = connection.getTable(TableName.valueOf(tableName))) {
			Put put = new Put(Bytes.toBytes(rowKey));
			put.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier),
					Bytes.toBytes(value));
			table.put(put);
		}
	}

	/**
	 * 批量插入数据 — batch-writes the given Puts into one table.
	 *
	 * @param tableName table to write to
	 * @param records   puts to apply as a single batch
	 * @throws Exception if the batch call fails
	 */
	public static void batchAddRecords(String tableName, List<Put> records)
			throws Exception {
		// try-with-resources: the original leaked the table if batch() threw.
		try (Table table = connection.getTable(TableName.valueOf(tableName))) {
			table.batch(records, new Object[records.size()]);
			// Guard: Gson-serializing every Put is expensive, so only pay for
			// it when debug logging is actually enabled.
			if (logger.isDebugEnabled()) {
				logger.debug("批量入库【{}】到【{}】", tableName,
						new Gson().toJson(records));
			}
		}
	}

	/**
	 * 批量插入数据 — batch-writes Puts into several tables; map key is the
	 * table name, map value is the list of Puts for that table.
	 *
	 * @param records table name → puts to apply
	 * @throws Exception if any batch call fails
	 */
	public static void batchAddRecords(Map<String, List<Put>> records)
			throws Exception {
		for (Entry<String, List<Put>> entry : records.entrySet()) {
			List<Put> puts = entry.getValue();
			// try-with-resources: the original leaked the table if batch()
			// threw. It also nulled the 'records' parameter mid-iteration,
			// which had no effect for callers and was removed.
			try (Table table = connection
					.getTable(TableName.valueOf(entry.getKey()))) {
				table.batch(puts, new Object[puts.size()]);
				logger.info("入库到hbase {},记录数为{}", entry.getKey(),
						String.valueOf(puts.size()));
			}
		}
	}

	/**
	 * Builds demo Puts for row keys i+"150"+i with columns dup:ownerId=i and
	 * dup:service_num="150"+i, for each i in [start, end).
	 *
	 * @param start first index (inclusive)
	 * @param end   last index (exclusive)
	 * @return list of populated Puts
	 */
	private static List<Put> buildDemoPuts(int start, int end) {
		List<Put> puts = new ArrayList<Put>();
		for (int i = start; i < end; i++) {
			// Bytes.toBytes encodes as UTF-8 explicitly, unlike
			// String.getBytes(), which depends on the platform charset.
			Put put = new Put(Bytes.toBytes(
					String.valueOf(i) + "150" + String.valueOf(i)));
			put.addColumn(Bytes.toBytes("dup"), Bytes.toBytes("ownerId"),
					Bytes.toBytes(String.valueOf(i)));
			put.addColumn(Bytes.toBytes("dup"), Bytes.toBytes("service_num"),
					Bytes.toBytes("150" + String.valueOf(i)));
			puts.add(put);
		}
		return puts;
	}

	/** Smoke-test entry point: creates two demo tables and batch-loads them. */
	public static void main(String[] args) throws Exception {
		System.out.println("begin....");
		String[] cf = { "dup" };
		createTable("VIV_TEST00", cf);
		createTable("VIV_TEST01", cf);
		Map<String, List<Put>> map = new HashMap<String, List<Put>>();
		// Same data as the original's two hand-rolled loops, deduplicated.
		map.put("VIV_TEST00", buildDemoPuts(0, 100));
		map.put("VIV_TEST01", buildDemoPuts(200, 300));
		batchAddRecords(map);
		System.out.println("end!!!");
	}
}
// Hbase 工具类
// 最新推荐文章于 2024-11-07 19:18:25 发布
// (scraped page-footer text commented out — bare prose after the closing
// brace is a Java compile error)