import org.apache.spark.{SparkConf, SparkContext}
/**
 * Local-mode Spark word count: reads a text file, counts word
 * occurrences, sorts by count descending, and writes the result out.
 *
 * Input:  E:\words.txt (whitespace-separated words, one or more per line)
 * Output: E:\outputFile (Spark part files of (word, count) pairs)
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // local[2] = run with two worker threads on this machine.
    val conf = new SparkConf().setAppName("WC").setMaster("local[2]")
    val sc   = new SparkContext(conf)

    val lines  = sc.textFile("E:\\words.txt")
    val words  = lines.flatMap(line => line.split(" "))
    val pairs  = words.map(word => (word, 1))
    // numPartitions = 1 so the sorted output lands in a single part file.
    val counts = pairs.reduceByKey((a, b) => a + b, 1)
    // Sort by the count (second tuple element), highest first.
    val sorted = counts.sortBy(pair => pair._2, ascending = false)

    sorted.saveAsTextFile("E:\\outputFile")
    sc.stop()
  }
}
import org.apache.spark.{SparkContext,SparkConf}
// NOTE(review): this object has the same name as the `WordCount` defined
// earlier in this file — two top-level objects with one name will not
// compile together. These look like two separate example files pasted into
// one; they should live in separate files (or one should be renamed).
/**
 * Cluster-mode Spark word count: reads a text file from the default
 * filesystem, counts word occurrences, and prints (word, count) pairs
 * to stdout on the driver.
 *
 * The master is intentionally NOT set here so it can be supplied by
 * spark-submit (e.g. --master yarn).
 */
object WordCount {
  def main(args: Array[String]): Unit = {
    // Fixed defect: the line below previously began with "/al", a mangled
    // comment that was a syntax error. Restored as a proper // comment.
    // val conf = new SparkConf().setAppName("WordCount").setMaster("local")
    val conf = new SparkConf().setAppName("WordCount")
    val sc = new SparkContext(conf)

    val input = sc.textFile("/rootpark.txt")
    val lines = input.flatMap(line => line.split(" "))
    val count = lines.map(word => (word, 1)).reduceByKey { case (x, y) => x + y }

    // collect() pulls all results to the driver — fine for a small word
    // count, but unbounded for large inputs.
    val r = count.collect()
    r.foreach(println)
  }
}