import java.util.Properties

import org.apache.flink.api.common.serialization.{DeserializationSchema, SimpleStringSchema}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

object MyHbaseSinkTest {
  def main(args: Array[String]): Unit = {
    // Execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    /**
     * Read the base configuration parameters
     */
    val bootstrapserversnew = Contant.BOOTSTRAP_SERVERS_NEW
    import org.apache.flink.api.scala._

    /**
     * Define the Kafka source and obtain a DataStream
     */
    val topics = "vs_merging_middle_topic"
    // Deserialize the Kafka records into plain strings
    val valueDeserializer: DeserializationSchema[String] = new SimpleStringSchema()
    val properties = new Properties()
    properties.put("bootstrap.servers", bootstrapserversnew)
    properties.put("group.id", "flink_hbase_sink_consumer2")
    properties.put("auto.offset.reset", Contant.AUTO_OFFSET_RESET_VALUE)
    println(Contant.BOOTSTRAP_SERVERS_NEW)

    val kafkaSinkDStream = env.addSource(new FlinkKafkaConsumer[String](topics, valueDeserializer, properties))
    // Attach the HBase sink; MyHbaseSink is assumed to be a custom
    // RichSinkFunction (a hedged sketch follows after this object)
    kafkaSinkDStream.addSink(new MyHbaseSink)

    env.execute("MyHbaseSinkTest")
  }
}
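
// The sink used above is not defined in this file. The sketch below is a
// minimal, assumed implementation of MyHbaseSink as a Flink RichSinkFunction
// that writes each Kafka record into HBase. The ZooKeeper quorum, table name
// ("vs_merging_middle"), column family ("info"), and row-key scheme are
// illustrative placeholders rather than values taken from the original code;
// it also assumes Flink 1.12+ (non-generic SinkFunction.Context) and the
// standard HBase 2.x client API.
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.util.Bytes

class MyHbaseSink extends RichSinkFunction[String] {

  private var connection: Connection = _
  private var table: Table = _

  override def open(parameters: Configuration): Unit = {
    // Build the HBase connection once per parallel sink instance
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "zk-host1,zk-host2,zk-host3") // placeholder quorum
    connection = ConnectionFactory.createConnection(conf)
    table = connection.getTable(TableName.valueOf("vs_merging_middle")) // placeholder table
  }

  override def invoke(value: String, context: SinkFunction.Context): Unit = {
    // Write each record as a single cell; row key and column layout are assumptions
    val put = new Put(Bytes.toBytes(value.hashCode.toString))
    put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("value"), Bytes.toBytes(value))
    table.put(put)
  }

  override def close(): Unit = {
    if (table != null) table.close()
    if (connection != null) connection.close()
  }
}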