# 1. Locate the Spark installation directory
find / -name spark
# 2. Change into the directory
cd /export/servers/spark
# 3. Start spark-shell
bin/spark-shell --master local[6]
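# Note: local[6] runs Spark in local mode with 6 worker threads. Once the shell
# is up, it already provides a SparkSession bound to `spark` and a SparkContext
# bound to `sc`, which the code in the next step relies on.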
# 4. Write and run the word count code
import org.apache.spark.sql.SparkSession
// getOrCreate() returns the shell's existing session, so the master/appName set
// here are effectively ignored in favor of the shell's --master local[6]
val spark = SparkSession.builder.master("local[*]").appName("test").getOrCreate()
// Read the input file as an RDD of lines
val rdd1 = spark.sparkContext.textFile("file:///export/data/wordcount.txt")
// Split each line into words and pair each word with a count of 1
val flattenCountRdd = rdd1.flatMap(_.split(" ")).map((_, 1))
// Sum the counts for each word
val aggCountRdd = flattenCountRdd.reduceByKey(_ + _)
// Bring the results back to the driver and print each (word, count) pair
val result = aggCountRdd.collect()
result.foreach(println)
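# 5. (Optional) The same word count as a standalone application for spark-submit.
# This is a minimal sketch: the object name WordCount, the jar name, and the
# spark-submit flags below are illustrative assumptions, not part of the steps above.
import org.apache.spark.sql.SparkSession

object WordCount {
  def main(args: Array[String]): Unit = {
    // The master is supplied by spark-submit (e.g. --master local[6]) rather than hard-coded
    val spark = SparkSession.builder.appName("WordCount").getOrCreate()
    val counts = spark.sparkContext
      .textFile("file:///export/data/wordcount.txt")
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
    counts.collect().foreach(println)
    spark.stop()
  }
}

# Packaged into a jar, it could be run with something like:
# bin/spark-submit --master local[6] --class WordCount wordcount.jar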