1.下载scala-2.11.8包解压到指定目录
#SCALA VARIABLES START
# Location of the unpacked Scala 2.11.8 distribution; put its bin/ first on PATH.
export SCALA_HOME="/opt/app/tools/scala-2.11.8"
export PATH="${SCALA_HOME}/bin:${PATH}"
#SCALA VARIABLES END
2.下载spark-1.6.1-bin-hadoop2.6包解压到指定目录
#SPARK VARIABLES START
# Location of the unpacked spark-1.6.1-bin-hadoop2.6 distribution; put its bin/ first on PATH.
export SPARK_HOME="/opt/app/spark/spark-1.6.1-bin-hadoop2.6"
export PATH="${SPARK_HOME}/bin:${PATH}"
#SPARK VARIABLES END
3.配置spark-env.sh
# spark-env.sh — Spark 1.6 standalone-mode daemon settings (sourced by the
# start scripts, so plain assignments are sufficient here).
# FIX: in the original notes SPARK_MASTER_IP and SPARK_MASTER_PORT were fused
# onto a single line, which left SPARK_MASTER_PORT unset and gave
# SPARK_MASTER_IP a garbage value — split into two assignments.
SPARK_MASTER_IP=192.168.56.101
SPARK_MASTER_PORT=7077
SPARK_MASTER_WEBUI_PORT=8085
# Scratch directory for shuffle and spill data on this node.
SPARK_LOCAL_DIRS=/opt/app/spark/data
#SPARK_MASTER_OPTS=
# Address this node binds to.
SPARK_LOCAL_IP=192.168.56.101
#SPARK_WORKER_CORES=
SPARK_WORKER_MEMORY=512m
# FIX: the original set SPARK_WORKER_PORT twice (8086, then 8087); the last
# assignment wins, so only the effective value 8087 is kept.
SPARK_WORKER_PORT=8087
SPARK_WORKER_WEBUI_PORT=8088
SPARK_WORKER_INSTANCES=1
# Working directory for worker processes (application logs, jars).
SPARK_WORKER_DIR=/opt/app/spark/worker
#SPARK_WORKER_OPTS=
# Heap size for the master/worker daemons themselves.
SPARK_DAEMON_MEMORY=512m
#SPARK_HISTORY_OPTS=
#SPARK_SHUFFLE_OPTS=
#SPARK_DAEMON_JAVA_OPTS=
#SPARK_PUBLIC_DNS=
4.配置spark-defaults.conf
spark.eventLog.enabled true
5.启动
启动主节点:./sbin/start-master.sh
启动后查看spark目录下的logs文件夹,看日志中默认启动端口
MasterWebUI: Started MasterWebUI at http://192.168.56.101:8085
启动从节点: ./sbin/start-slave.sh spark://192.168.56.101:7077
6.验证
浏览器打开http://192.168.56.101:8085,Workers下面会多一个Worker工作进程
7.MySQL数据库连接
添加MySQL驱动:在spark-env.sh中添加SPARK_CLASSPATH={path}/mysql-connector-java-5.1.38.jar配置