# spark-env.sh: JDK, Hadoop, and Scala installation paths
export JAVA_HOME=/usr/local/src/jdk1.8.0_202
export HADOOP_HOME=/usr/local/hadoop
export SCALA_HOME=/usr/local/src/scala
# Standalone master host (SPARK_MASTER_HOST in newer Spark releases) and port
export SPARK_MASTER_IP=master1-1
export SPARK_MASTER_PORT=7077
# Put the Hadoop client jars on Spark's classpath and point Spark at the Hadoop/YARN configuration
export SPARK_DIST_CLASSPATH=$(/usr/local/hadoop/bin/hadoop classpath)
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
export SPARK_YARN_USER_ENV="CLASSPATH=/usr/local/hadoop/etc/hadoop"
export YARN_CONF_DIR=/usr/local/hadoop/etc/hadoop
# ZooKeeper-based master recovery (HA): quorum addresses and the znode directory for Spark state
export SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=master1-1:2181,slave1-1:2181,slave1-2:2181 -Dspark.deploy.zookeeper.dir=/spark"
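With spark-env.sh in place, the cluster can be brought up with the standard scripts that ship in Spark's sbin directory. The steps below are a minimal sketch: the install path /usr/local/spark and the standby master hostname master1-2 do not appear in the configuration above and are assumptions; substitute your own.

# Copy the configuration to the other nodes (hostnames taken from the ZooKeeper URL above; adjust as needed)
scp /usr/local/spark/conf/spark-env.sh slave1-1:/usr/local/spark/conf/
scp /usr/local/spark/conf/spark-env.sh slave1-2:/usr/local/spark/conf/

# On master1-1: start the master plus the workers listed in conf/slaves (conf/workers in newer releases)
/usr/local/spark/sbin/start-all.sh

# On the standby master (hypothetical host master1-2): start a second master.
# ZooKeeper elects a leader; if master1-1 fails, the standby recovers the registered workers and apps.
/usr/local/spark/sbin/start-master.sh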
This post details how to set the JAVA_HOME, HADOOP_HOME, and SCALA_HOME environment variables and how to configure SPARK_MASTER_IP, SPARK_MASTER_PORT, and the ZooKeeper-related parameters so that a Spark cluster can be deployed on top of Hadoop and ZooKeeper.
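Since recovery is delegated to ZooKeeper, client applications should list every master in the spark:// URL so they can fail over to whichever master is currently the leader. A hedged example follows; master1-2 is again an assumed standby host and the examples jar path is illustrative only.

# Submit against both masters; the driver connects to the active leader automatically
/usr/local/spark/bin/spark-submit \
  --master spark://master1-1:7077,master1-2:7077 \
  --class org.apache.spark.examples.SparkPi \
  /usr/local/spark/examples/jars/spark-examples_*.jar 100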