Deploy the JDK
Upload the JDK package with sftp
Extract the archive and rename the directory
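A minimal sketch of the extraction step, assuming the uploaded archive is jdk-8u202-linux-x64.tar.gz and sits in /data/soft (the archive name is an assumption; adjust it to the actual file):
cd /data/soft
tar -zxvf jdk-8u202-linux-x64.tar.gz   # assumed archive name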
mv jdk1.8.0_202/ jdk1.8
Configure the environment variables
export JAVA_HOME=/data/soft/jdk1.8
export PATH=.:$JAVA_HOME/bin:$PATH
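These exports presumably go into /etc/profile, the same file edited for Hadoop below; after saving, reload it and confirm the JDK is on the PATH:
source /etc/profile
java -version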
Set up passwordless SSH login
ssh-keygen -t rsa
ls -l ~/.ssh/
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
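Since the Hadoop configuration below uses the hostname bigdata, the key setup can be verified by logging in to the local host without a password prompt:
ssh bigdata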
Deploy Hadoop
Extract the archive
tar -zxvf hadoop-3.2.0.tar.gz
Configure the environment variables
vi /etc/profile
export HADOOP_HOME=/data/soft/hadoop-3.2.0
export PATH=.:$JAVA_HOME/bin:$HADOOP_HOME/sbin:$HADOOP_HOME/bin:$PATH
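Reload the profile and confirm the hadoop command resolves (hadoop version is a standard Hadoop CLI check):
source /etc/profile
hadoop version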
Modify the configuration files (under /data/soft/hadoop-3.2.0/etc/hadoop)
- hadoop-env.sh
export JAVA_HOME=/data/soft/jdk1.8
export HADOOP_LOG_DIR=/data/hadoop_repo/logs/hadoop
- core-site.xml
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://bigdata:9000</value>
    </property>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/data/hadoop_repo</value>
    </property>
</configuration>
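fs.defaultFS assumes the hostname bigdata resolves to this machine, typically via an /etc/hosts entry; the IP below is a placeholder for the node's real address:
192.168.1.100   bigdata   # placeholder IP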
- hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
- mapred-site.xml
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>
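If MapReduce jobs later fail to find their classes, the Hadoop 3.x single-node guide also sets the application classpath in mapred-site.xml; a hedged sketch of the extra property (placed inside the <configuration> element):
    <property>
        <name>mapreduce.application.classpath</name>
        <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
    </property>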
- yarn-site.xml
<configuration>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <property>
        <name>yarn.nodemanager.env-whitelist</name>
        <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
    </property>
</configuration>
- workers
bigdata
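The workers file takes one worker hostname per line; this single-node setup lists only bigdata. A multi-node cluster would list every worker instead, e.g. (hypothetical hostnames):
bigdata01
bigdata02
bigdata03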
- Format HDFS
cd /data/soft/hadoop-3.2.0
bin/hdfs namenode -format
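A successful format prints a "successfully formatted" message and creates the namenode metadata directory under hadoop.tmp.dir (so /data/hadoop_repo here); format only once, since reformatting wipes the namenode metadata. A quick check:
ls /data/hadoop_repo/dfs/name/current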
- Add user definitions to start-dfs.sh, stop-dfs.sh and start-yarn.sh, stop-yarn.sh (under sbin/); without them, Hadoop 3.x refuses to start the daemons as root
- start-dfs.sh, stop-dfs.sh
HDFS_DATANODE_USER=root
HDFS_DATANODE_SECURE_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
- start-yarn.sh, stop-yarn.sh
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root
- Start the cluster
start-all.sh
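Once start-all.sh finishes, the daemons can be checked with jps (NameNode, DataNode, SecondaryNameNode, ResourceManager and NodeManager should all appear), and the Hadoop 3.x web UIs are reachable at http://bigdata:9870 (NameNode) and http://bigdata:8088 (ResourceManager):
jps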