通过 Shell 脚本调用 spark-sql 执行 SQL。spark-sql 提供了类似 Hive CLI 中的 -e、-f、-i 选项(分别用于执行 SQL 字符串、执行 SQL 文件、执行初始化 SQL 文件)。
1、定时调用脚本
- #!/bin/sh
- # daily job: run spark-sql to refresh st.stock_realtime_analysis (DTYPE='01' partition)
- yesterday=`date --date='1 days ago' +%Y%m%d`
- /opt/modules/spark/bin/spark-sql -i /opt/bin/spark_opt/init.sql --master spark://10.130.2.20:7077 --executor-memory 6g --total-executor-cores 45 --conf spark.ui.port=4075 -e "\
- insert overwrite table st.stock_realtime_analysis PARTITION (DTYPE='01' )
- select t1.stockId as stockId,
- t1.url as url,
- t1.clickcnt as clickcnt,
- 0,
- round((t1.clickcnt / (case