```scala
package com.pan.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Created by pan on 2016/11/11.
 */
object Demo {

  // One parsed record: user id, item (goods) id, and the rating value.
  case class SgmRating(uid: Int, gid: Int, rating: Float)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    val sc = new SparkContext(conf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    import sqlContext.implicits._

    // Parse the ratings file, assumed to be "::"-delimited in the
    // uid::gid::rating::timestamp layout, so the rating is field index 2
    // (the original read p(3), which would be the timestamp).
    val ratingDf = sc.textFile("/user/sys_imoment/ratings.dat")
      .map(_.split("::"))
      .map(p => SgmRating(p(0).toInt, p(1).toInt, p(2).toFloat))
      .toDF()

    // Expose the DataFrame to SQL under the name "rating".
    ratingDf.registerTempTable("rating")
    ratingDf.show()

    // Query it back and pull the uid column to the driver.
    val res = sqlContext.sql("SELECT * FROM rating")
    val rr = res.map(t => "uid: " + t(0)).collect()
    rr.foreach(println)

    sc.stop()
  }
}
```
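`SQLContext` and `registerTempTable` are the Spark 1.x API this post was written against. For reference, here is a minimal sketch of the same pipeline on Spark 2.x and later, where `SparkSession` replaces `SQLContext` and `createOrReplaceTempView` replaces `registerTempTable`. The object name `DemoSparkSession` and the app name are placeholders of my choosing; the input path and the assumed `uid::gid::rating::timestamp` line layout are carried over from the code above.

```scala
package com.pan.sparksql

import org.apache.spark.sql.SparkSession

object DemoSparkSession {

  // Same record shape as the Spark 1.x version above.
  case class SgmRating(uid: Int, gid: Int, rating: Float)

  def main(args: Array[String]): Unit = {
    // SparkSession is the single entry point in Spark 2.x+.
    val spark = SparkSession.builder().appName("Demo").getOrCreate()
    import spark.implicits._

    // Same parsing as above: split each line on "::" and keep uid, gid, rating.
    val ratings = spark.sparkContext
      .textFile("/user/sys_imoment/ratings.dat")
      .map(_.split("::"))
      .map(p => SgmRating(p(0).toInt, p(1).toInt, p(2).toFloat))
      .toDF()

    // createOrReplaceTempView is the Spark 2.x replacement for registerTempTable.
    ratings.createOrReplaceTempView("rating")
    spark.sql("SELECT uid FROM rating").show()

    spark.stop()
  }
}
```

Either version is packaged into a jar and launched the same way, e.g. `spark-submit --class com.pan.sparksql.Demo your.jar`.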