import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;
import java.util.Arrays;
import java.util.Iterator;
/**
 * @author Administrator
 * @date 2020/8/4 21:35
 * @description Word count implemented with the Java RDD API (JavaSparkContext)
 */
public class JavaWordCountTest {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("JavaWordCountTest");
        // Create the JavaSparkContext, the entry point of the Java RDD API
        JavaSparkContext jsc = new JavaSparkContext(conf);
        JavaRDD<String> lines = jsc.textFile(args[0]);
        // Split each line into words and flatten
        //JavaRDD<String> word = lines.flatMap(w -> Arrays.stream(w.split(" ")).iterator());
        JavaRDD<String> word = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) throws Exception {
                return Arrays.stream(s.split(" ")).iterator();
            }
        });
        // Pair each word with the count 1
        //JavaPairRDD<String, Integer> wordAndOne = word.mapToPair(w -> Tuple2.apply(w, 1));
        JavaPairRDD<String, Integer> wordAndOne = word.mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String s) throws Exception {
                return Tuple2.apply(s, 1);
            }
        });
        // Aggregate: sum the counts for each word
        //JavaPairRDD<String, Integer> reduced = wordAndOne.reduceByKey((i, j) -> i + j);
        JavaPairRDD<String, Integer> reduced = wordAndOne.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer integer, Integer integer2) throws Exception {
                return integer + integer2;
            }
        });
        // Swap key and value so the count becomes the key
        //JavaPairRDD<Integer, String> vk = reduced.mapToPair(tp -> tp.swap());
        JavaPairRDD<Integer, String> vk = reduced.mapToPair(new PairFunction<Tuple2<String, Integer>, Integer, String>() {
            @Override
            public Tuple2<Integer, String> call(Tuple2<String, Integer> stringIntegerTuple2) throws Exception {
                return stringIntegerTuple2.swap();
            }
        });
        // Sort by count (now the key) in descending order
        JavaPairRDD<Integer, String> sorted = vk.sortByKey(false);
        // Swap key and value back to get (word, count) results
        // JavaPairRDD<String, Integer> res = sorted.mapToPair(tp -> tp.swap());
        JavaPairRDD<String, Integer> res = sorted.mapToPair(new PairFunction<Tuple2<Integer, String>, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(Tuple2<Integer, String> integerStringTuple2) throws Exception {
                return integerStringTuple2.swap();
            }
        });
        // Save the result to the output path (an action, triggers the job)
        res.saveAsTextFile(args[1]);
        // Release resources
        jsc.stop();
    }
}
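For reference, on Spark 2.x with Java 8 the anonymous inner classes above can be replaced by the lambda expressions shown in the inline comments. Below is a minimal consolidated sketch of the same pipeline; the class name JavaWordCountLambda is hypothetical and not part of the original code.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;
import java.util.Arrays;

// Hypothetical lambda-based variant of JavaWordCountTest (assumes Spark 2.x, Java 8)
public class JavaWordCountLambda {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("JavaWordCountLambda");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        JavaPairRDD<String, Integer> res = jsc.textFile(args[0])
                // split each line into words and flatten
                .flatMap(line -> Arrays.stream(line.split(" ")).iterator())
                // pair each word with the count 1
                .mapToPair(w -> new Tuple2<>(w, 1))
                // sum the counts per word
                .reduceByKey((i, j) -> i + j)
                // swap, sort by count in descending order, swap back
                .mapToPair(tp -> tp.swap())
                .sortByKey(false)
                .mapToPair(tp -> tp.swap());
        res.saveAsTextFile(args[1]);
        jsc.stop();
    }
}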
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * @author Administrator
 * @date 2020/8/4 16:05
 * @description Word count implemented with the Scala RDD API (SparkContext)
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Create the SparkContext
    val conf: SparkConf = new SparkConf().setAppName("wordcount")
    // Local mode
    //.setMaster("local[2]")
    // The SparkContext is used to create the initial RDDs
    val sc: SparkContext = new SparkContext(conf)
    // Create the RDD (lazy)
    val lines = sc.textFile(args(0))
    // Transformations start here
    // Split each line into words and flatten
    val words: RDD[String] = lines.flatMap(_.split(" "))
    // Pair each word with the count 1
    val wordAndOne: RDD[(String, Int)] = words.map((_, 1))
    // Aggregate the counts by key
    val reduced: RDD[(String, Int)] = wordAndOne.reduceByKey(_ + _)
    // Sort by count in descending order
    val res: RDD[(String, Int)] = reduced.sortBy(_._2, false)
    // Action operator: triggers the job execution
    // Save the result to the output path
    res.saveAsTextFile(args(1))
    // Release resources
    sc.stop()
  }
}
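For quick local testing of the Scala job, a variant like the following can be used. This is only a sketch: the local master, the input path, and the object name WordCountLocal are assumptions, not part of the original job.

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical local-mode variant of WordCount for ad-hoc testing
object WordCountLocal {
  def main(args: Array[String]): Unit = {
    // Run with a local master; "data/words.txt" is a hypothetical input path
    val conf = new SparkConf().setAppName("wordcount").setMaster("local[2]")
    val sc = new SparkContext(conf)
    sc.textFile("data/words.txt")
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
      .collect()            // only safe for small result sets
      .foreach(println)     // print instead of saving to a directory
    sc.stop()
  }
}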