import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
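/**
 * Demonstrates JavaRDD.flatMap: each input element maps to zero or more output
 * elements (returned as an Iterator), and the results are flattened into a
 * single RDD. Shown first with anonymous FlatMapFunction classes, then with
 * the equivalent Java 8 lambdas.
 */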
public class FlatMapDemo {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("FlatMapDemo");
        JavaSparkContext sc = new JavaSparkContext(conf);
        // sample inputs: a list of integers and a list of space-separated sentences
        List<Integer> list = Arrays.asList(1, 2, 3, 4, 5, 6);
        List<String> list1 = Arrays.asList("if we had never met", "where would we be");
        JavaRDD<Integer> javaRDD = sc.parallelize(list);
        JavaRDD<String> javaRDD1 = sc.parallelize(list1);
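        // flatMap with an anonymous FlatMapFunction<Integer, Integer>: each element i
        // yields the sequence 0, 1, ..., i, and all sequences are flattened into one RDD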
        JavaRDD<Integer> expanded = javaRDD.flatMap(new FlatMapFunction<Integer, Integer>() {
            @Override
            public Iterator<Integer> call(Integer integer) throws Exception {
                List<Integer> list2 = new ArrayList<>();
                for (int i = 0; i <= integer; i++) {
                    list2.add(i);
                }
                return list2.iterator();
            }
        });
        // prints [0, 1, 0, 1, 2, 0, 1, 2, 3, 0, 1, 2, 3, 4, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 6]
        System.out.println(expanded.collect());
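        // flatMap with an anonymous FlatMapFunction<String, String>: each sentence is
        // split on spaces, turning an RDD of sentences into an RDD of words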
        JavaRDD<String> words = javaRDD1.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) throws Exception {
                return Arrays.asList(s.split(" ")).iterator();
            }
        });
        // prints [if, we, had, never, met, where, would, we, be]
        System.out.println(words.collect());
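        // the same integer expansion, written as a Java 8 lambda instead of an anonymous class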
        JavaRDD<Integer> expandedLambda = javaRDD.flatMap(integer -> {
            List<Integer> list2 = new ArrayList<>();
            for (int i = 0; i <= integer; i++) {
                list2.add(i);
            }
            return list2.iterator();
        });
        System.out.println(expandedLambda.collect());
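        // the word split as a one-line lambda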
        JavaRDD<String> wordsLambda = javaRDD1.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
        System.out.println(wordsLambda.collect());
        sc.close();   // release the Spark context when the demo is done
    }
}