WordCount in Java with Spark Core
All six variants below use the Spark 1.x Java API, in which FlatMapFunction.call returns an Iterable (Spark 2.x changed it to return an Iterator).
Method One
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class WordCount {
    public static void main(String[] args) {
        // Run locally with a single thread; "wordcount" is the application name.
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
        JavaSparkContext context = new JavaSparkContext(conf);
        // Each element of the input RDD is one line of the file "wc".
        JavaRDD<String> input = context.textFile("wc");
        // Split every line on spaces and flatten the pieces into an RDD of words.
        JavaRDD<String> words = input.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<String> call(String line) throws Exception {
                return Arrays.asList(line.split(" "));
            }
        });
        // Turn each word into a (word, 1) pair.
        JavaPairRDD<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<String, Integer>(word, 1);
            }
        });
        // Sum the 1s per word; reduceByKey pre-aggregates within each partition before shuffling.
        JavaPairRDD<String, Integer> results = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });
        // Print each (word, count) tuple.
        results.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Tuple2<String, Integer> tuple) throws Exception {
                System.out.println(tuple);
            }
        });
        context.close();
    }
}
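With an input file wc containing, say, the two lines "hello spark" and "hello world", the job prints one tuple per distinct word (output order may vary between runs):
(hello,2)
(spark,1)
(world,1)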
Method Two
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class WordCount2 {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
        JavaSparkContext context = new JavaSparkContext(conf);
        JavaRDD<String> rdd1 = context.textFile("wc");
        // Split every line on spaces and flatten into an RDD of words.
        JavaRDD<String> rdd2 = rdd1.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<String> call(String line) throws Exception {
                return Arrays.asList(line.split(" "));
            }
        });
        JavaPairRDD<String, Integer> rdd3 = rdd2.mapToPair(new PairFunction<String, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<String, Integer>(word, 1);
            }
        });
        // groupByKey gathers all the 1s for a word into one Iterable;
        // the sum is then computed in a second step.
        JavaPairRDD<String, Iterable<Integer>> rdd4 = rdd3.groupByKey();
        JavaPairRDD<String, Integer> rdd5 = rdd4.mapToPair(new PairFunction<Tuple2<String, Iterable<Integer>>, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, Integer> call(Tuple2<String, Iterable<Integer>> t) throws Exception {
                int sum = 0;
                for (Integer i : t._2()) {
                    sum = sum + i;
                }
                return new Tuple2<String, Integer>(t._1(), sum);
            }
        });
        rdd5.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Tuple2<String, Integer> t) throws Exception {
                System.out.println(t);
            }
        });
        context.close();
    }
}
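Note the trade-off: groupByKey ships every individual (word, 1) pair across the network before anything is summed, whereas reduceByKey in Method One combines pairs within each partition first. For a plain count, reduceByKey is the cheaper choice.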
Method Three
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class WordCount3 {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
        JavaSparkContext context = new JavaSparkContext(conf);
        JavaRDD<String> rdd1 = context.textFile("wc");
        // Split every line on spaces and flatten into an RDD of words.
        JavaRDD<String> rdd2 = rdd1.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<String> call(String line) throws Exception {
                return Arrays.asList(line.split(" "));
            }
        });
        JavaPairRDD<String, Integer> rdd3 = rdd2.mapToPair(new PairFunction<String, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<String, Integer>(word, 1);
            }
        });
        // aggregateByKey(zeroValue, seqFunc, combFunc): seqFunc folds one value into the
        // per-partition accumulator, combFunc merges accumulators from different partitions.
        // Here both are plain addition, so the result is the same as reduceByKey.
        JavaPairRDD<String, Integer> rdd4 = rdd3.aggregateByKey(0, new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        }, new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });
        rdd4.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Tuple2<String, Integer> t) throws Exception {
                System.out.println(t);
            }
        });
        context.close();
    }
}
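aggregateByKey earns its keep when the accumulator type differs from the value type. A minimal sketch (assuming a Java 8 runtime for the lambdas, and that java.util.List and java.util.ArrayList are imported) that collects each word's counts into a list, which is essentially what groupByKey does:
JavaPairRDD<String, List<Integer>> collected = rdd3.aggregateByKey(
        new ArrayList<Integer>(),                      // zero value: a fresh empty accumulator per key
        (list, v) -> { list.add(v); return list; },    // seqFunc: fold one value into the accumulator
        (l1, l2) -> { l1.addAll(l2); return l1; });    // combFunc: merge two partition accumulators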
Method Four
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class WordCount4 {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
        JavaSparkContext context = new JavaSparkContext(conf);
        JavaRDD<String> rdd1 = context.textFile("wc");
        // flatMapToPair splits each line and emits its (word, 1) pairs in a single pass,
        // merging the separate flatMap and mapToPair steps of the other variants.
        JavaPairRDD<String, Integer> rdd2 = rdd1.flatMapToPair(new PairFlatMapFunction<String, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<Tuple2<String, Integer>> call(String t) throws Exception {
                List<Tuple2<String, Integer>> list = new ArrayList<Tuple2<String, Integer>>();
                String[] arrs = t.split(" ");
                for (String s : arrs) {
                    list.add(new Tuple2<String, Integer>(s, 1));
                }
                return list;
            }
        });
        JavaPairRDD<String, Integer> rdd3 = rdd2.reduceByKey(new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });
        rdd3.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Tuple2<String, Integer> t) throws Exception {
                System.out.println(t);
            }
        });
        context.close();
    }
}
Method Five
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class WordCount5 {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
        JavaSparkContext context = new JavaSparkContext(conf);
        JavaRDD<String> rdd1 = context.textFile("wc");
        // Split every line on spaces and flatten into an RDD of words.
        JavaRDD<String> rdd2 = rdd1.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<String> call(String line) throws Exception {
                return Arrays.asList(line.split(" "));
            }
        });
        JavaPairRDD<String, Integer> rdd3 = rdd2.mapToPair(new PairFunction<String, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<String, Integer>(word, 1);
            }
        });
        // foldByKey is reduceByKey with an explicit initial value; with a zero of 0
        // and addition, the two are interchangeable.
        JavaPairRDD<String, Integer> rdd4 = rdd3.foldByKey(0, new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });
        rdd4.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Tuple2<String, Integer> t) throws Exception {
                System.out.println(t);
            }
        });
        context.close();
    }
}
Method Six
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;
public class WordCount6 {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
        JavaSparkContext context = new JavaSparkContext(conf);
        JavaRDD<String> input = context.textFile("wc");
        // Split every line on spaces and flatten into an RDD of words.
        JavaRDD<String> words = input.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<String> call(String line) throws Exception {
                return Arrays.asList(line.split(" "));
            }
        });
        // mapPartitionsToPair invokes the function once per partition; the iterator
        // yields every word in that partition, so all its pairs are built in one call.
        JavaPairRDD<String, Integer> pairs = words.mapPartitionsToPair(new PairFlatMapFunction<Iterator<String>, String, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Iterable<Tuple2<String, Integer>> call(Iterator<String> t) throws Exception {
                List<Tuple2<String, Integer>> list = new ArrayList<Tuple2<String, Integer>>();
                while (t.hasNext()) {
                    list.add(new Tuple2<String, Integer>(t.next(), 1));
                }
                return list;
            }
        });
        JavaPairRDD<String, Integer> results = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });
        results.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Tuple2<String, Integer> tuple) throws Exception {
                System.out.println(tuple);
            }
        });
        context.close();
    }
}
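For a word count the output is identical to mapToPair; the per-partition form pays off when each call carries setup cost that can be shared across a whole partition, such as opening a database connection or instantiating a parser.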
Summary
WordCount.java: uses the flatMap, mapToPair, and reduceByKey operators
WordCount2.java: uses the flatMap, mapToPair, and groupByKey operators
WordCount3.java: uses the flatMap, mapToPair, and aggregateByKey operators
WordCount4.java: uses the flatMapToPair and reduceByKey operators
WordCount5.java: uses the flatMap, mapToPair, and foldByKey operators
WordCount6.java: uses the flatMap, mapPartitionsToPair, and reduceByKey operators
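All six variants share the same shape: tokenize, pair, aggregate, print. Since Spark's Java function types are single-method interfaces, Java 8 lambdas can stand in for the anonymous classes; a minimal sketch of the first variant, assuming the same "wc" input file and a Java 8 runtime:
SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[1]");
JavaSparkContext context = new JavaSparkContext(conf);
context.textFile("wc")
        .flatMap(line -> Arrays.asList(line.split(" ")))   // Spark 1.x: flatMap expects an Iterable
        .mapToPair(word -> new Tuple2<>(word, 1))
        .reduceByKey((v1, v2) -> v1 + v2)
        .foreach(t -> System.out.println(t));
context.close();
On Spark 2.x the flatMap line becomes line -> Arrays.asList(line.split(" ")).iterator(), since FlatMapFunction was changed to return an Iterator.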