编写文件 docker-compose.yml
version: '3'
services:
  # Spark master node: web UI on 8080, cluster port 7077, SSH exposed on 2220.
  spark-master:
    image: bde2020/spark-master:3.1.1-hadoop3.2
    container_name: spark-master
    ports:
      - "8080:8080"
      - "7077:7077"
      - "2220:22"
    volumes:
      # Windows host path — single-quoted so the backslashes and drive
      # colon are taken literally by the YAML parser.
      - 'F:\spark-data\m1:/data'
    environment:
      - INIT_DAEMON_STEP=setup_spark

  # Worker 1: web UI on 8081, SSH exposed on 2221.
  spark-worker-1:
    image: bde2020/spark-worker:3.1.1-hadoop3.2
    container_name: spark-worker-1
    depends_on:
      - spark-master
    ports:
      - "8081:8081"
      - "2221:22"
    volumes:
      - 'F:\spark-data\w1:/data'
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"

  # Worker 2: web UI mapped to host 8082 (container still serves 8081),
  # SSH exposed on 2222.
  spark-worker-2:
    image: bde2020/spark-worker:3.1.1-hadoop3.2
    container_name: spark-worker-2
    depends_on:
      - spark-master
    ports:
      - "8082:8081"
      - "2222:22"
    volumes:
      - 'F:\spark-data\w2:/data'
    environment:
      - "SPARK_MASTER=spark://spark-master:7077"
启动
docker-compose up -d
进入master安装ssh并设置密码和