import org.apache.spark.{SparkConf, SparkContext}

object MySparkUtil {
  // Build a SparkContext that runs locally
  def apply(appName: String): SparkContext = {
    val conf = new SparkConf()
      .setAppName(appName)
      .setMaster("local[*]")
    new SparkContext(conf)
  }
}
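// Note: naming the factory method `apply` lets callers shorten
// `MySparkUtil.apply(name)` to `MySparkUtil(name)`; "local[*]" runs Spark
// in-process with one worker thread per available CPU core.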
object TempDemo {
  def main(args: Array[String]): Unit = {
    // Local sample data: (city, temperature) pairs
    val d1 = Array(("bj", 28.1), ("sh", 28.7), ("gz", 32.0), ("sz", 33.1))
    val d2 = Array(("bj", 27.3), ("sh", 30.1), ("gz", 33.3))
    val d3 = Array(("bj", 28.2), ("sh", 29.1), ("gz", 32.0), ("sz", 30.5))
    // Start the local Spark driver process
    val sc = MySparkUtil.apply(getClass.getSimpleName)
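    // Here getClass.getSimpleName evaluates to "TempDemo$" (a Scala object
    // compiles to a class with a trailing '$'), and that string is used as
    // the application name shown in the Spark UI.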
    // Concatenate the three arrays into one
    val data = d1 ++ d2 ++ d3
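    // `data` is now a single Array[(String, Double)] holding all 11 readings;
    // the repeated city keys ("bj", "sh", ...) are intentional.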
    // Turn the local collection into an RDD
    // (assumption: rdd1 is built with sc.parallelize, as the printed
    // output below implies)
    val rdd1 = sc.parallelize(data)
    /** rdd1.foreach(println)
      *
      * (sh,28.7)
      * (bj,28.1)