from pyspark import SparkContext
from pyspark.sql import HiveContext  # HiveContext lives in pyspark.sql, not pyspark
if __name__ == "__main__":
    sc = SparkContext(appName="test")
    sqlContext = HiveContext(sc)  # requires a Spark build with Hive support
    print("**************")
    table_df = sqlContext.sql("select * from test")  # query the Hive table `test`
    print("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&")
    table_df.cache()  # cache so later actions reuse the scanned data
    print(table_df.count())
    sc.stop()
    print("*******end***************************************")
Plain Spark jobs run fine on their own; it is only when Hive is involved (via HiveContext) that Spark reports the following error:
<
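In case it helps isolate the problem, here is a minimal sketch of the same job written against the Spark 2.x SparkSession API. This assumes Spark 2.x is available (the original uses the Spark 1.x HiveContext); enableHiveSupport() still needs hive-site.xml and the Hive classes on the classpath, just like HiveContext, so it fails in the same situations but usually with a clearer message at startup.

from pyspark.sql import SparkSession

if __name__ == "__main__":
    # enableHiveSupport() raises at getOrCreate() time if the Hive
    # classes or hive-site.xml are missing, which helps narrow down
    # classpath/configuration issues.
    spark = (SparkSession.builder
             .appName("test")
             .enableHiveSupport()
             .getOrCreate())
    table_df = spark.sql("select * from test")
    table_df.cache()
    print(table_df.count())
    spark.stop()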