Java Spark RDD Operators: WordCount

This article walks through a complete WordCount implementation with Apache Spark: configuring the Spark environment, reading a text file, counting words with the flatMap, mapToPair, and reduceByKey operators, and examining the final output.
```java
package examples;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

/**
 * @Description
 * @Copyright credlink
 * @Author luzhen
 * @Create 2019/6/6 9:08
 */
public class WordCount3 {

    public static void main(String[] args) {
        countTest();
    }

    public static void countTest() {
        // Run locally with a single worker thread
        SparkConf conf = new SparkConf().setAppName("wordCount").setMaster("local[1]");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        // Load the input file as an RDD of lines
        JavaRDD<String> lines = jsc.textFile("D:\\LICENSE");

        // flatMap: split each line on spaces into individual words
        JavaRDD<String> words = lines.flatMap(
                new FlatMapFunction<String, String>() {
                    @Override
                    public Iterator<String> call(String s) throws Exception {
                        return Arrays.asList(s.split(" ")).iterator();
                    }
                }
        );
        // mapToPair: map each word to a (word, 1) pair
        JavaPairRDD<String, Integer> counts = words.mapToPair(
                new PairFunction<String, String, Integer>() {
                    @Override
                    public Tuple2<String, Integer> call(String s) throws Exception {
                        return new Tuple2<String, Integer>(s, 1);
                    }
                }
        );
        // reduceByKey: sum the counts for each word
        JavaPairRDD<String, Integer> results = counts.reduceByKey(
                new Function2<Integer, Integer, Integer>() {
                    @Override
                    public Integer call(Integer x1, Integer x2) throws Exception {
                        return x1 + x2;
                    }
                }
        );
        // foreach: print each (word, count) tuple
        results.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            @Override
            public void call(Tuple2<String, Integer> tuple) throws Exception {
                System.out.println(tuple);
            }
        });
        jsc.close();
    }
}
```
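As an aside, on Java 8 and later the same pipeline can be written much more compactly with lambdas. Below is a sketch of an equivalent `countTest()` (same imports as above, minus the now-unneeded function interfaces); it produces exactly the same output shown next:

```java
public static void countTest() {
    SparkConf conf = new SparkConf().setAppName("wordCount").setMaster("local[1]");
    JavaSparkContext jsc = new JavaSparkContext(conf);
    jsc.textFile("D:\\LICENSE")
       .flatMap(s -> Arrays.asList(s.split(" ")).iterator()) // line -> words
       .mapToPair(s -> new Tuple2<>(s, 1))                   // word -> (word, 1)
       .reduceByKey(Integer::sum)                            // sum counts per word
       .foreach(t -> System.out.println(t));                 // print each tuple
    jsc.close();
}
```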
Output:
```
(additional,4)
(Unless,3)
(For,3)
(NON-INFRINGEMENT,,1)
(agree,1)
(reproduce,,1)
(offer,1)
(executed,1)
(event,1)
((or,3)
("Contributor",1)
(Grant,2)
(work.,1)
(include,3)
(content,1)
(nothing,1)
(MERCHANTABILITY,,1)
(add,2)
(through,1)
(However,,1)
(perform,,1)
(files;,1)
(result,1)
(been,2)
(goodwill,,1)
(herein,1)
(appropriateness,1)
(direct,,1)
(To,1)
(any,28)
(contract,,1)
(ANY,2)
```
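Note that reduceByKey makes no ordering guarantee, which is why the tuples above come out unordered. If sorted output is wanted, a common trick is to swap each pair, sort by key, and swap back. A minimal sketch, assuming the `results` RDD from the code above:

```java
results.mapToPair(t -> new Tuple2<>(t._2(), t._1())) // (word, count) -> (count, word)
       .sortByKey(false)                             // sort descending by count
       .mapToPair(t -> new Tuple2<>(t._2(), t._1())) // swap back to (word, count)
       .collect()
       .forEach(System.out::println);
```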

To implement these Spark tasks in IDEA (both the basic RDD operator exercises and the comprehensive WordCount experiment), first set up the development environment, then write code against the concrete requirements. The details follow.

### Part 1: Mastering the RDD Operators

#### Steps

1. **Prepare the environment**

- Create a Maven project.
- Add the Spark dependency to `pom.xml`:

```xml
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.12</artifactId>
    <version>3.0.1</version>
</dependency>
```

2. **Initialize the SparkContext**

This is the foundation of all Spark operations.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

public class RddOperatorDemo {

    private static final String SPARK_MASTER_URL = "local";

    public static void main(String[] args) {
        // Initialize the Spark context
        SparkConf conf = new SparkConf().setAppName("RddOperator").setMaster(SPARK_MASTER_URL);
        JavaSparkContext sc = new JavaSparkContext(conf);
        rddOperators(sc); // run the operator exercises
        sc.close();       // shut down the context
    }

    /**
     * Exercises the main RDD transformations and actions.
     */
    private static void rddOperators(JavaSparkContext sc) {
        List<String> wordsList = Arrays.asList("hello", "world", "hello spark", "hadoop");
        // Build the initial RDD
        JavaRDD<String> initialRDD = sc.parallelize(wordsList);

        // map: convert every element to uppercase
        System.out.println("--- Map Operator ---");
        JavaRDD<String> mappedRDD = initialRDD.map(s -> s.toUpperCase());
        mappedRDD.collect().forEach(System.out::println);

        // distinct: drop duplicate elements
        System.out.println("\n--- Distinct Operator ---");
        JavaRDD<String> distinctRDD = initialRDD.distinct();
        distinctRDD.collect().forEach(System.out::println);

        // flatMap: split each string on spaces into individual words
        System.out.println("\n--- FlatMap Operator ---");
        JavaRDD<String> flattenWordsRDD = initialRDD.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
        flattenWordsRDD.collect().forEach(System.out::println);

        // filter: keep only words longer than 4 characters
        System.out.println("\n--- Filter Operator ---");
        JavaRDD<String> filteredRDD = flattenWordsRDD.filter(word -> word.length() > 4);
        filteredRDD.collect().forEach(System.out::println);

        // mapToPair + reduceByKey: the preprocessing stage of a word count
        System.out.println("\n--- MapToPair and ReduceByKey Operators ---");
        JavaPairRDD<String, Integer> pairRDD = flattenWordsRDD.mapToPair(word -> new Tuple2<>(word, 1));
        JavaPairRDD<String, Integer> reducedRDD = pairRDD.reduceByKey((v1, v2) -> v1 + v2);
        reducedRDD.collect().forEach(tuple -> System.out.println(tuple._1() + ": " + tuple._2()));

        // groupByKey: group the values for each key into a collection
        System.out.println("\n--- GroupByKey Operator ---");
        JavaPairRDD<String, Iterable<Integer>> groupedRDD = pairRDD.groupByKey();
        groupedRDD.collect().forEach(entry -> {
            List<Integer> values = new ArrayList<>();
            entry._2().forEach(values::add);
            System.out.printf("%s => %s%n", entry._1(), values);
        });

        // sortByKey: sort key-value pairs by key
        System.out.println("\n--- SortByKey Operator ---");
        JavaPairRDD<Integer, String> invertedPairsRDD =
                flattenWordsRDD.mapToPair(word -> new Tuple2<>(word.hashCode(), word));
        invertedPairsRDD.sortByKey(true).collect().forEach(pair -> System.out.println(pair._2()));
    }
}
```

---

### Part 2: Comprehensive Spark Operator Experiment: WordCount (Word Frequency)

#### Steps

1. Building on the class above, add a complete WordCount routine:

```java
public static void runWordCountExample(JavaSparkContext sc) {
    String inputFilePath = "/path/to/input/text/file"; // replace with the actual input path
    JavaRDD<String> linesRDD = sc.textFile(inputFilePath); // load the text file as an RDD of lines

    // Split lines into words and map each word to a (key, value) pair
    JavaRDD<String> allWords = linesRDD.flatMap(line -> Arrays.asList(line.toLowerCase().split("\\W+")).iterator());
    JavaPairRDD<String, Integer> pairs = allWords.mapToPair(word -> new Tuple2<>(word, 1));

    // Sum the occurrence count of each word
    JavaPairRDD<String, Integer> counts = pairs.reduceByKey(Integer::sum);

    // Print the final result set
    counts.foreach(tup -> System.out.println(tup._1() + " : " + tup._2()));
}
```

2. Finally, remember to call the new entry point from the main method:

```java
public static void main(String[] args) { ... runWordCountExample(sc); ... }
```
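For reference, here is a minimal sketch of the fully wired main method, assuming the two methods defined above live in the same class; it simply composes them:

```java
public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("RddOperator").setMaster("local");
    JavaSparkContext sc = new JavaSparkContext(conf);
    rddOperators(sc);        // Part 1: operator exercises
    runWordCountExample(sc); // Part 2: WordCount
    sc.close();
}
```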