Importing Kafka Data into HBase
# Create the HBase configuration
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class HBaseConf {
    public static Configuration getConf() {
        Configuration conf = HBaseConfiguration.create();
        // ZooKeeper quorum used to locate the HBase cluster
        conf.set("hbase.zookeeper.quorum", "localhost:2181");
        return conf;
    }
}
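In a real cluster the quorum lists every ZooKeeper host rather than localhost; a hedged variant of getConf() for that case (the host names zk1, zk2, zk3 are assumptions, not from the original):

public static Configuration getConf() {
    Configuration conf = HBaseConfiguration.create();
    // Comma-separated ZooKeeper hosts, client port given separately
    conf.set("hbase.zookeeper.quorum", "zk1,zk2,zk3");
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    return conf;
}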
# Create a thread pool and obtain a Connection object
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class HBaseUtils {
    public static Connection getConnection(Configuration conf) throws IOException {
        // Back the Connection with a 10-thread pool for HBase RPC calls
        ExecutorService es = Executors.newFixedThreadPool(10);
        return ConnectionFactory.createConnection(conf, es);
    }
}
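The caller owns the returned Connection and should close it when the application stops; a minimal sketch of that cleanup (the shutdown hook is an assumption, not part of the original flow):

Connection con = HBaseUtils.getConnection(HBaseConf.getConf());
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
    try {
        con.close();   // releases sockets and ZooKeeper sessions held by the connection
    } catch (IOException e) {
        e.printStackTrace();
    }
}));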
# Create the KafkaConsumer
import java.util.Properties;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class KafkaUtils {
    public static KafkaConsumer<String, String> createConsumer(Properties prop) {
        // prop must supply bootstrap.servers, group.id and the deserializers (see the Factory class below)
        return new KafkaConsumer<String, String>(prop);
    }
}
# Create the Read interface
import java.io.IOException;
import java.util.Properties;

public interface Read {
    // Declared void: the implementation below polls forever and never returns a batch
    void readKafka(Properties prop, String topic) throws IOException;
}
# Create the KafkaReadImpl class implementing the Read interface
import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class KafkaReadImpl implements Read {
    private final Write write;

    public KafkaReadImpl(Write write) {
        this.write = write;
    }

    @Override
    public void readKafka(Properties prop, String topic) throws IOException {
        KafkaConsumer<String, String> consumer = KafkaUtils.createConsumer(prop);
        consumer.subscribe(Collections.singletonList(topic));
        // Poll indefinitely; every batch is handed to the writer
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            write.saveDataToHbase(records);
        }
    }
}
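With the properties set later in the Factory class, enable.auto.commit keeps its default of true, so offsets are committed on a timer regardless of whether the HBase write succeeded. For at-least-once delivery, a hedged variant of the loop body commits offsets manually (this assumes enable.auto.commit is set to false in the consumer properties):

while (true) {
    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
    write.saveDataToHbase(records);
    consumer.commitSync();   // commit only after the batch reached HBase
}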
# Create the Write interface
import java.io.IOException;
import org.apache.kafka.clients.consumer.ConsumerRecords;

public interface Write {
    void saveDataToHbase(ConsumerRecords<String, String> records) throws IOException;
}
# Create a topic-specific class implementing the Write interface
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;

public class EventAttend implements Write {
    private final Connection con;

    public EventAttend(Connection con) {
        this.con = con;
    }

    @Override
    public void saveDataToHbase(ConsumerRecords<String, String> records) {
        // try-with-resources closes the Table after each batch
        try (Table table = con.getTable(TableName.valueOf("nameSpace:tableName"))) {
            List<Put> list = new ArrayList<>();
            for (ConsumerRecord<String, String> record : records) {
                String[] info = record.value().split(",", -1);
                // Row key: eventId + userId + actions concatenated
                Put put = new Put(Bytes.toBytes(info[0] + info[1] + info[2]));
                put.addColumn(Bytes.toBytes("base"), Bytes.toBytes("eventId"), Bytes.toBytes(info[0]));
                put.addColumn(Bytes.toBytes("base"), Bytes.toBytes("userId"), Bytes.toBytes(info[1]));
                put.addColumn(Bytes.toBytes("base"), Bytes.toBytes("actions"), Bytes.toBytes(info[2]));
                list.add(put);
            }
            table.put(list);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
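EventAttend assumes every Kafka record value is a comma-separated line of eventId, userId and actions. A hypothetical message such as

e123,u456,yes

becomes the row key e123u456yes, with the three fields stored under the base column family. Note that concatenating the fields without a delimiter means distinct triples can collide; inserting a separator between the fields of the row key is a common safeguard.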
# Factory class
import java.util.Properties;
import org.apache.hadoop.hbase.client.Connection;

public class Factory {
    public static void run(String topic) throws Exception {
        Connection con = HBaseUtils.getConnection(HBaseConf.getConf());
        Write writer = null;
        // Map each topic to the writer that knows its table layout
        switch (topic) {
            case "topicName": writer = new EventAttend(con); break;
            // ... cases for other topics
            default:
                throw new Exception("Topic not found: " + topic);
        }
        Read read = new KafkaReadImpl(writer);
        Properties pro = new Properties();
        pro.put("bootstrap.servers", "localhost:9092");
        pro.put("group.id", "l001");
        pro.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        pro.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // readKafka polls forever, so run(...) does not return
        read.readKafka(pro, topic);
    }
}
# Test class
public class App {
    public static void main(String[] args) throws Exception {
        // The argument must match one of the cases in Factory.run
        Factory.run("topicName");
    }
}
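The writer assumes the target namespace and table already exist. A minimal sketch of creating them with the Java Admin API (assuming the HBase 2.x client; CreateTable is a hypothetical helper, and the names are the placeholders from EventAttend):

import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CreateTable {
    public static void main(String[] args) throws Exception {
        try (Connection con = HBaseUtils.getConnection(HBaseConf.getConf());
             Admin admin = con.getAdmin()) {
            admin.createNamespace(NamespaceDescriptor.create("nameSpace").build());
            admin.createTable(TableDescriptorBuilder
                    .newBuilder(TableName.valueOf("nameSpace:tableName"))
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("base"))
                    .build());
        }
    }
}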