Spark 示例：将 List 里的数据通过 DataFrame 以 Parquet 格式写入本地目录（注意：输出的是一个目录而非单个文件；对象名 jsonTest 与实际写出的 Parquet 格式无关）
代码:
package sparkQL
import org.apache.spark.sql.SparkSession
/**
 * Example: write the contents of a local List to disk as Parquet via Spark SQL.
 *
 * An optional first program argument overrides the output directory; when no
 * argument is supplied the original hard-coded path is used, so existing
 * invocations keep working unchanged. Note that Spark writes a *directory*
 * of part-files at the given path, not a single file.
 */
object jsonTest {

  /** Record type written out; toDF() derives the schema (id, name, age) from it. */
  case class Person(id: Int, name: String, age: Int)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local").appName("sql").getOrCreate()
    try {
      // Needed for the implicit List[Person] => DataFrame conversion (toDF).
      import spark.implicits._

      // Case classes don't need `new`; the companion apply() is idiomatic.
      val people = List(
        Person(1, "zhangsan", 20),
        Person(2, "lisi", 20),
        Person(3, "zhangwu", 50),
        Person(4, "limazi", 25),
        Person(5, "wangermazi", 30)
      )

      val df = people.toDF()

      // Output directory: first CLI arg if given, else the original example path.
      val outputPath = args.headOption.getOrElse("D://wc//123")
      df.write.parquet(outputPath)
    } finally {
      // Release the local Spark context even if the write throws
      // (e.g. the output directory already exists).
      spark.stop()
    }
  }
}