Flink Output Operators: Sink


1. StreamingFileSink

1.1 The SinkUtils Class

package com.hpsk.flink.sink;

import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import java.util.concurrent.TimeUnit;

public class SinkUtils {
   
    /**
     * Build a StreamingFileSink that writes records to text files.
     * @param path output path
     * @return StreamingFileSink
     */
    public StreamingFileSink<String> getStreamingFileSink(String path) {
        return StreamingFileSink
                .<String>forRowFormat(new Path(path),
                        new SimpleStringEncoder<>("UTF-8"))
                .withRollingPolicy( // rolling policy: start a new part file once any threshold below is reached
                        DefaultRollingPolicy
                                .builder()
                                .withMaxPartSize(1024 * 1024 * 1024) // max part file size (1 GB)
                                .withRolloverInterval(TimeUnit.MINUTES.toMillis(15)) // rollover interval
                                .withInactivityInterval(TimeUnit.MINUTES.toMillis(5)) // inactivity timeout
                                .build()
                )
                .build();
    }
}
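Note that forRowFormat treats the path argument as a base output directory rather than a single file: the row-format builder's default bucket assigner groups part files into date-hour bucket subdirectories, and a new part file is started whenever any of the three thresholds above is hit (1 GB of data, 15 minutes since the file was opened, or 5 minutes without new records).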

1.2 The SinkStream Class

package com.hpsk.flink.stream;

import com.hpsk.flink.beans.Event;
import com.hpsk.flink.sink.SinkUtils;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class SinkStream {
   
    public static void main(String[] args) throws Exception {
   
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);
        DataStreamSource<Event> inputDS = env.fromElements(
                new Event("Mary", "./home", 2000L),
                new Event("Bob", "./home", 3000L),
                new Event("Mary", "./home", 2000L),
                new Event("Alice", "./home", 1000L),
                new Event("Alice", "./home", 2000L),
                new Event("Mary", "./home", 3000L),
                new Event("Bob", "./home", 5000L),
                new Event("Bob", "./home", 6000L)
        );
        // write the stream to text files
        inputDS
                .map(Event::toString)
                .addSink(new SinkUtils().getStreamingFileSink("output/output.txt"));
        env.execute();
    }
}
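The StreamingFileSink only moves part files from the pending to the finished state when a checkpoint completes, so a long-running job should enable checkpointing. Below is a minimal sketch of the same pipeline with checkpointing turned on; the class name CheckpointedSinkStream and the 10-second interval are example choices, not part of the original code.

package com.hpsk.flink.stream;

import com.hpsk.flink.beans.Event;
import com.hpsk.flink.sink.SinkUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class CheckpointedSinkStream {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);
        // Part files are finalized only on checkpoint completion; the interval is an example value.
        env.enableCheckpointing(10 * 1000L);

        env.fromElements(
                        new Event("Mary", "./home", 2000L),
                        new Event("Bob", "./home", 3000L))
                .map(Event::toString)
                .addSink(new SinkUtils().getStreamingFileSink("output/output.txt"));

        env.execute();
    }
}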

2. KafkaSink

2.1 The SinkUtils Class

package com.hpsk.flink.sink;

import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import java.util.concurrent.TimeUnit;

public class SinkUtils {
   
    /**
     * Build a FlinkKafkaProducer that writes each record to Kafka as a string.
     * @return FlinkKafkaProducer
     */
    public FlinkKafkaProducer<String> getFlinkKafkaProducer() {
        return new FlinkKafkaProducer<>(
                "hadoop102:9092",         // broker list
                "event",                  // topic
                new SimpleStringSchema()  // serialization schema
        );
    }
}
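A minimal sketch of a stream class that uses this Kafka producer, following the same pattern as SinkStream in section 1.2; the class name KafkaSinkStream is illustrative, and it assumes a Kafka broker is reachable at hadoop102:9092 with an event topic, as configured above.

package com.hpsk.flink.stream;

import com.hpsk.flink.beans.Event;
import com.hpsk.flink.sink.SinkUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class KafkaSinkStream {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        env.fromElements(
                        new Event("Mary", "./home", 2000L),
                        new Event("Bob", "./home", 3000L))
                .map(Event::toString)
                // write each record as a string to the "event" topic
                .addSink(new SinkUtils().getFlinkKafkaProducer());

        env.execute();
    }
}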
 