Example: read sensor readings from Kafka, split the stream into high-temperature and normal branches with split/select, then merge the two branches back together with connect and a CoMapFunction.
package kgc.kb11.transform;
import kgc.kb11.beans.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.collector.selector.OutputSelector;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import java.util.Collections;
import java.util.Properties;
public class Transform3 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer properties. The key/value deserializer entries are not strictly
        // required here, because the Flink connector deserializes with SimpleStringSchema.
        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.119.125:9092");
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sensor_group1");
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        // Source: raw CSV lines of the form "id,timestamp,temperature" from the "sensor" topic.
        DataStreamSource<String> inputStream = env.addSource(new FlinkKafkaConsumer011<String>(
                "sensor",
                new SimpleStringSchema(),
                prop
        ));

        // map: parse each line into a SensorReading POJO.
        SingleOutputStreamOperator<SensorReading> mapStream = inputStream.map(new MapFunction<String, SensorReading>() {
            @Override
            public SensorReading map(String s) throws Exception {
                String[] splits = s.split(",");
                return new SensorReading(
                        splits[0],
                        Long.parseLong(splits[1]),
                        Double.parseDouble(splits[2])
                );
            }
        });

        // split: tag each reading as "high" (temperature above 38.0) or "normal".
        // Note: split/select is deprecated and was removed in Flink 1.12; side outputs
        // replace it in newer versions (see the sketch after the listing).
        SplitStream<SensorReading> splitStream = mapStream.split(new OutputSelector<SensorReading>() {
            @Override
            public Iterable<String> select(SensorReading value) {
                if (value.getTemperature() > 38.0) {
                    return Collections.singletonList("high");
                } else {
                    return Collections.singletonList("normal");
                }
            }
        });

        // select: pull the tagged sub-streams back out of the SplitStream.
        DataStream<SensorReading> high = splitStream.select("high");
        DataStream<SensorReading> normal = splitStream.select("normal");

        // Reduce the high-temperature branch to (id, temperature) warning tuples.
        SingleOutputStreamOperator<Tuple2<String, Double>> warningDataStream = high.map(new MapFunction<SensorReading, Tuple2<String, Double>>() {
            @Override
            public Tuple2<String, Double> map(SensorReading sensorReading) throws Exception {
                return new Tuple2<>(sensorReading.getId(), sensorReading.getTemperature());
            }
        });

        // connect: combine two streams of different element types into one ConnectedStreams,
        // then map each side to a common output type with a CoMapFunction.
        ConnectedStreams<Tuple2<String, Double>, SensorReading> connectStream = warningDataStream.connect(normal);
        SingleOutputStreamOperator<Object> result = connectStream.map(new CoMapFunction<Tuple2<String, Double>, SensorReading, Object>() {
            @Override
            public Object map1(Tuple2<String, Double> value) throws Exception {
                // Warning branch: (id, temperature, "sick").
                return new Tuple3<>(value.f0, value.f1, "sick");
            }

            @Override
            public Object map2(SensorReading value) throws Exception {
                // Normal branch: (id, "healthy").
                return new Tuple2<>(value.getId(), "healthy");
            }
        });

        result.print("connect");
        env.execute("Transform3");
    }
}
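
The SensorReading POJO from kgc.kb11.beans is used but not shown above. A minimal sketch of what it could look like, inferred from the constructor and getters called in Transform3 (the field names here are assumptions, not necessarily the course's actual class):

package kgc.kb11.beans;

// Sketch of the SensorReading POJO: the constructor and getters match how the class
// is used in Transform3, but the real field names in the course code may differ.
public class SensorReading {
    private String id;           // sensor id, e.g. "sensor_1"
    private Long timestamp;      // reading time (assumed to be epoch milliseconds)
    private Double temperature;  // temperature value

    // Flink treats this as a POJO only if there is a public no-argument constructor.
    public SensorReading() {
    }

    public SensorReading(String id, Long timestamp, Double temperature) {
        this.id = id;
        this.timestamp = timestamp;
        this.temperature = temperature;
    }

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }

    public Long getTimestamp() { return timestamp; }
    public void setTimestamp(Long timestamp) { this.timestamp = timestamp; }

    public Double getTemperature() { return temperature; }
    public void setTemperature(Double temperature) { this.temperature = temperature; }

    @Override
    public String toString() {
        return "SensorReading{id='" + id + "', timestamp=" + timestamp + ", temperature=" + temperature + '}';
    }
}

With lines such as sensor_1,1612345678,39.5 produced to the sensor topic, readings above 38.0 print as (id, temperature, "sick") triples and the rest as (id, "healthy") pairs, all under the "connect" prefix.

Because split/select was removed in Flink 1.12, the same high/normal separation is written with side outputs on newer versions. A rough sketch under that assumption (OutputTag plus a ProcessFunction; the variable names highTag, normalStream, and highStream are made up for illustration):

// Additional imports needed:
// import org.apache.flink.streaming.api.functions.ProcessFunction;
// import org.apache.flink.util.Collector;
// import org.apache.flink.util.OutputTag;

// Tag for the high-temperature side output; the anonymous subclass keeps the type information.
OutputTag<SensorReading> highTag = new OutputTag<SensorReading>("high") {};

// Replaces mapStream.split(...)/select(...): normal readings go to the main output,
// high-temperature readings to the side output.
SingleOutputStreamOperator<SensorReading> normalStream = mapStream.process(
        new ProcessFunction<SensorReading, SensorReading>() {
            @Override
            public void processElement(SensorReading value, Context ctx, Collector<SensorReading> out) throws Exception {
                if (value.getTemperature() > 38.0) {
                    ctx.output(highTag, value);   // temperature above 38.0 -> "high" branch
                } else {
                    out.collect(value);           // everything else -> "normal" branch
                }
            }
        });

DataStream<SensorReading> highStream = normalStream.getSideOutput(highTag);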