public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Local environment with the Flink web UI enabled, handy for comparing the two sources.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);

    /*
     * Legacy API: FlinkKafkaConsumer.
     * Takes three arguments:
     *   - the topic name
     *   - a deserialization schema (SimpleStringSchema reads Kafka records as Strings)
     *   - a Properties instance
     * This approach cannot guarantee exactly-once semantics.
     */
    Properties prop = new Properties();
    prop.setProperty("bootstrap.servers", "dw1:9092,dw2:9092,dw3:9092");
    prop.setProperty("group.id", "id1");
    prop.setProperty("enable.auto.commit", "true");
    FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(
            "benge0920",
            new SimpleStringSchema(),
            prop);
    DataStreamSource<String> kafkaStream1 = env.addSource(kafkaConsumer);
    //kafkaStream1.print();

    /*
     * New API: KafkaSource builder.
     */
    KafkaSource<String> kafkaSource = KafkaSource.<String>builder()
            // FIX: broker list had a typo ("dw3:902"); now consistent with the
            // legacy consumer's bootstrap.servers above.
            .setBootstrapServers("dw1:9092,dw2:9092,dw3:9092")
            .setTopics("benge0920")
            .setValueOnlyDeserializer(new SimpleStringSchema())
            .setStartingOffsets(OffsetsInitializer.latest())
            .build();

    // Event-time watermarks for monotonically increasing timestamps.
    // NOTE(review): assumes each record is comma-separated with an epoch-millis
    // timestamp in the second field (e.g. "key,1696000000000,...") — a malformed
    // record will fail the job with NumberFormatException / ArrayIndexOutOfBounds;
    // confirm the producer's record format.
    WatermarkStrategy<String> stringWatermarkStrategy = WatermarkStrategy.<String>forMonotonousTimestamps()
            .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                @Override
                public long extractTimestamp(String record, long recordTimestamp) {
                    String[] fields = record.split(",");
                    return Long.parseLong(fields[1]);
                }
            });

    // FIX: the source name was an empty string; give it a descriptive name so the
    // operator is identifiable in the Flink web UI and logs.
    DataStreamSource<String> result = env.fromSource(kafkaSource, stringWatermarkStrategy, "kafka-source-benge0920");
    result.print();
    env.execute();
}
Flink的新老KafkaSource算子对比
最新推荐文章于 2025-02-01 15:58:40 发布