1. Start Kafka:
]# ./bin/kafka-server-start.sh config/server.properties
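Kafka here relies on ZooKeeper (the commands below point at localhost:2181), so ZooKeeper should already be running. A minimal sketch, assuming the zookeeper.properties shipped with the Kafka distribution:
]# ./bin/zookeeper-server-start.sh config/zookeeper.properties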
2. List the existing topics:
./bin/kafka-topics.sh --list --zookeeper localhost:2181
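If the topic used by the streaming job does not exist yet, it can be created and fed some test messages with the console producer. A minimal sketch, assuming a hypothetical topic name kafka_hbase_topic and the default broker port 9092 (replace both with the values your streaming code actually uses):
./bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic kafka_hbase_topic
./bin/kafka-console-producer.sh --broker-list localhost:9092 --topic kafka_hbase_topic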
3. Spark Streaming code:
package com.badou.streaming
import org.apache.hadoop.hbase._
import org.apache.hadoop.hbase.client._
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka.KafkaUtils
object HbaseHandler {
  def insert(row: String, column: String, value: String) {
    // HBase configuration
    val tableName = "sparkstream_kafkahbase_table" // target table name
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.set("hbase.zookeeper.quorum", "master,slave1,slave2")
    hbaseConf.set("hbase.zookeeper.property.clientPort", "2181")
    hbaseConf.set("hbase.defaults.for.version.skip", "true")
    val hTable = new HTable(hbaseConf, tableName)
    val thePut = new Put(row.getBytes)
    thePut.add("info".getBytes, column.getBytes, value.getBytes)