Straight to the code:
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

// Demonstrates ReducingState: the state is folded (here, summed) with every incoming element.
public class ReduceStateMain {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<Tuple2<Long, Long>> dataStream = env.fromElements(
                Tuple2.of(1L, 3L),
                Tuple2.of(1L, 7L),
                Tuple2.of(2L, 4L),
                Tuple2.of(1L, 5L),
                Tuple2.of(2L, 2L),
                Tuple2.of(2L, 6L)
        );
        dataStream
                .keyBy(0)                      // key by the first tuple field (f0)
                .flatMap(new UseReduceState())
                .print();
        env.execute();
    }

    public static class UseReduceState extends RichFlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {

        // Keyed state: one running sum per key.
        private ReducingState<Long> reducingState;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            reducingState = getRuntimeContext().getReducingState(
                    new ReducingStateDescriptor<Long>("reduceState", new ReduceFunction<Long>() {
                        @Override
                        public Long reduce(Long value1, Long value2) throws Exception {
                            return value1 + value2;
                        }
                    }, Long.class));
        }

        @Override
        public void flatMap(Tuple2<Long, Long> value, Collector<Tuple2<Long, Long>> out) throws Exception {
            // add() runs the ReduceFunction against the current state, so the state always holds the running sum.
            reducingState.add(value.f1);
            out.collect(Tuple2.of(value.f0, reducingState.get()));
        }
    }
}

This post shows how to use ReducingState in Apache Flink to process a data stream: a RichFlatMapFunction together with a ReducingStateDescriptor accumulates state per key, so the values sharing the same key are aggregated into a running sum.
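For comparison, here is a minimal sketch of the same running-sum logic written with a lambda ReduceFunction and an explicit KeySelector instead of the deprecated keyBy(0) (assuming a Flink version where keyBy(int) is deprecated, roughly 1.11+). The class name ReduceStateLambda, the operator name SumPerKey and the state name "sumState" are illustrative, not taken from the original code; the comments indicate the per-key running sums the job should print.

import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class ReduceStateLambda {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.fromElements(Tuple2.of(1L, 3L), Tuple2.of(1L, 7L), Tuple2.of(2L, 4L),
                         Tuple2.of(1L, 5L), Tuple2.of(2L, 2L), Tuple2.of(2L, 6L))
           .keyBy(value -> value.f0)          // explicit KeySelector instead of the deprecated keyBy(0)
           .flatMap(new SumPerKey())
           .print();
        // Expected per-key running sums (output from the two keys may interleave):
        //   key 1: (1,3)  (1,10)  (1,15)
        //   key 2: (2,4)  (2,6)   (2,12)
        env.execute();
    }

    public static class SumPerKey extends RichFlatMapFunction<Tuple2<Long, Long>, Tuple2<Long, Long>> {

        private transient ReducingState<Long> sumState;

        @Override
        public void open(Configuration parameters) throws Exception {
            // The ReduceFunction is passed as a lambda; the state type is declared explicitly via Long.class.
            sumState = getRuntimeContext().getReducingState(
                    new ReducingStateDescriptor<>("sumState", (a, b) -> a + b, Long.class));
        }

        @Override
        public void flatMap(Tuple2<Long, Long> value, Collector<Tuple2<Long, Long>> out) throws Exception {
            sumState.add(value.f1);            // merge the new value into the running sum for this key
            out.collect(Tuple2.of(value.f0, sumState.get()));
        }
    }
}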