Download the Spark 3.3.3 binary package (prebuilt for Hadoop 3) from the Aliyun Apache mirror:
http://mirrors.aliyun.com/apache/spark/spark-3.3.3/spark-3.3.3-bin-hadoop3.tgz
Extract the archive under /usr/local (assuming the tarball was downloaded there) and rename the directory to /usr/local/spark:
cd /usr/local/
tar -xvf spark-3.3.3-bin-hadoop3.tgz
mv spark-3.3.3-bin-hadoop3 spark
chown -R root spark
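A quick check that the layout matches what the later steps expect (bin, conf, examples, jars and sbin are standard directories in the 3.3.3 binary distribution):
ls /usr/local/spark
# expect bin  conf  examples  jars  sbin  ... among others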
Configure environment variables
[root@master ~]# vim /root/.bash_profile
export SPARK_HOME=/usr/local/spark
export PATH=$PATH:$SPARK_HOME/sbin:$SPARK_HOME/bin
source /root/.bash_profile
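To confirm the new variables are in effect (a minimal sanity check; spark-submit prints a version banner when PATH is set correctly):
echo $SPARK_HOME        # should print /usr/local/spark
spark-submit --version  # should report Spark version 3.3.3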
cd /usr/local/spark
cp ./conf/spark-env.sh.template ./conf/spark-env.sh
vi ./conf/spark-env.sh
Add the following lines to spark-env.sh:
# JDK and Hadoop/YARN client configuration used by Spark
export JAVA_HOME=/usr/local/jdk1.8.0_144
export HADOOP_CONF_DIR=/home/hadoop/hadoop-3.2.3/etc/hadoop
export YARN_CONF_DIR=/home/hadoop/hadoop-3.2.3/etc/hadoop
# Standalone master: listens at spark://hadoop1:7077, web UI on port 8080
export SPARK_MASTER_HOST=hadoop1
export SPARK_MASTER_PORT=7077
export SPARK_MASTER_WEBUI_PORT=8080
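Because spark-env.sh is just a shell fragment that gets sourced at startup, it can be sanity-checked directly (a small sketch, nothing Spark-specific):
bash -n /usr/local/spark/conf/spark-env.sh                             # syntax check, silent if OK
source /usr/local/spark/conf/spark-env.sh && echo $SPARK_MASTER_HOST   # should print hadoop1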
cd /usr/local/spark/bin
./spark-shell
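Without a --master option spark-shell runs in local mode. One way to be explicit about it (local[2] means two worker threads):
./spark-shell --master local[2]
# `sc` (SparkContext) and `spark` (SparkSession) are pre-defined at the scala> prompt
# the application web UI is on port 4040 while the shell runs; type :quit to exit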
cd /usr/local/spark/sbin/
./start-all.sh
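start-all.sh brings up the standalone Master (on hadoop1:7077, per spark-env.sh) and the Workers. To confirm the cluster is up and accepts jobs, check the JVM processes and submit the bundled SparkPi example (a sketch; the examples jar name may differ slightly across builds):
jps                                   # should now list Master (and Worker on worker nodes)
# master web UI: http://hadoop1:8080
spark-submit --class org.apache.spark.examples.SparkPi \
  --master spark://hadoop1:7077 \
  /usr/local/spark/examples/jars/spark-examples_2.12-3.3.3.jar 10
# look for a line like "Pi is roughly 3.14..." in the output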
This article walks through downloading and configuring Apache Spark 3.3.3 on Linux: extracting the archive, setting environment variables, editing the Spark configuration file, and starting the Spark shell and the standalone cluster services.