1. Remove the pre-installed Java from the system
rpm -qa|grep jdk
rpm -qa|grep gcj
yum -y remove java-1.7.0-openjdk-1.7.0.99-2.6.5.1.el6.x86_64
yum -y remove ldapjdk-4.18-6.el6.x86_64
yum -y remove java-1.8.0-openjdk-1.8.0.91-1.b14.el6.x86_64
yum -y remove java-1.6.0-openjdk-1.6.0.38-1.13.10.4.el6.x86_64
yum -y remove java-1.8.0-openjdk-headless-1.8.0.91-1.b14.el6.x86_64
yum -y remove java-1.5.0-gcj-1.5.0.0-29.1.el6.x86_64
yum -y remove libgcj-4.4.7-17.el6.x86_64
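If your package list differs, a compact alternative (a sketch; review what rpm reports before removing anything) is to pipe the query results straight into yum:
# rpm -qa | grep -E 'jdk|gcj' | xargs -r yum -y remove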
2. Reinstall Java
2.1 Create the /soft and /download directories
# mkdir /soft
# mkdir /download
2.2 Download the Java installer (jdk-8u65-linux-x64.tar.gz) to /download
2.3 Extract the JDK and move it to /soft
# cd /download
# tar -zxvf jdk-8u65-linux-x64.tar.gz
# mv /download/jdk1.8.0_65/ /soft
2.4 Create a symlink for the JDK
# ln -s /soft/jdk1.8.0_65/ /soft/jdk
2.5 Check the Java version
# cd /soft/jdk/bin
# ./java -version
2.6 Set the Java environment variables
# vim /etc/profile
export JAVA_HOME=/soft/jdk
export PATH=$PATH:$JAVA_HOME/bin
# source /etc/profile
# java -version    // verify Java runs from the PATH
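A quick sanity check that the environment variables took effect (in a fresh shell, or after sourcing the profile):
# echo $JAVA_HOME
# which java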
3. Disable the firewall and SELinux
3.1 Disable the firewall
# chkconfig --list iptables
# service iptables stop
# service iptables status
# chkconfig iptables off
3.2 Disable SELinux
# vim /etc/selinux/config
Change the SELINUX line to SELINUX=disabled
Reboot the host after the change
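If an immediate reboot is inconvenient, SELinux can be set to permissive for the current session while the config file change covers future boots (a sketch):
# sed -i 's/^SELINUX=.*/SELINUX=disabled/' /etc/selinux/config
# setenforce 0
# getenforce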
4. Repeat steps 1, 2, and 3 on the other nodes
5. Set the hostname on each node
# vim /etc/sysconfig/network
HOSTNAME=s101
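On CentOS 6, /etc/sysconfig/network only takes effect at boot; to apply the name immediately as well, run this on each node with its own name (s101 shown here):
# hostname s101
# hostname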
6. Edit the hosts file
# vim /etc/hosts
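For reference, a minimal /etc/hosts for this cluster might look like the following (the IP addresses are placeholders; substitute your own):
192.168.1.101 s101
192.168.1.102 s102
192.168.1.103 s103
192.168.1.104 s104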
7. Set up SSH
7.1 On s101, run: # ssh-keygen -t rsa
7.2 # cd /root/.ssh/
# cat id_rsa.pub >> authorized_keys
7.3 Copy authorized_keys to /root/.ssh/ on each node
7.4 On s101, run ssh s101, ssh s102, ... to confirm passwordless login works
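Steps 7.2-7.3 can also be done with ssh-copy-id, which appends the public key to each node's authorized_keys and sets permissions in one step (a sketch; it prompts once per node for the root password):
# for h in s101 s102 s103 s104; do ssh-copy-id root@$h; done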
8. Download and extract Hadoop 2.7.3
8.1 Download hadoop-2.7.3.tar.gz to the /download directory
8.2 # cd /download
# tar -zxvf hadoop-2.7.3.tar.gz
8.3 # mv /download/hadoop-2.7.3 /soft
8.4 # ln -s /soft/hadoop-2.7.3 /soft/hadoop
8.5 # vim /etc/profile
Add:
export HADOOP_HOME=/soft/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
# source /etc/profile
- Repeat 8.1-8.5 on each node
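To confirm the Hadoop binaries are on the PATH before configuring anything (assuming /etc/profile has been sourced):
# hadoop version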
9. Configure Hadoop, using symbolic links so that three configuration modes can coexist.
9.1 Create three configuration directories, each a copy of the hadoop directory
cp -r /soft/hadoop/etc/hadoop /soft/hadoop/etc/local
cp -r /soft/hadoop/etc/hadoop /soft/hadoop/etc/pseudo
cp -r /soft/hadoop/etc/hadoop /soft/hadoop/etc/full
${hadoop_home}/etc/local
${hadoop_home}/etc/pseudo
${hadoop_home}/etc/full
9.2 # rm -rf /soft/hadoop/etc/hadoop
# cd /soft/hadoop/etc
# ln -s /soft/hadoop/etc/full hadoop
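With this layout, switching configuration modes later is just a matter of re-pointing the symlink, e.g. to pseudo-distributed (a sketch):
# cd /soft/hadoop/etc
# rm hadoop
# ln -s pseudo hadoop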
10. Configure fully distributed mode (${hadoop_home}/etc/hadoop/)
[core-site.xml]
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://s101/</value>
</property>
</configuration>
[hdfs-site.xml]
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
</configuration>
[mapred-site.xml]
<?xml version="1.0"?>
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
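Note: the Hadoop 2.7.3 tarball ships only mapred-site.xml.template, so create the file first if it does not already exist:
# cp /soft/hadoop/etc/full/mapred-site.xml.template /soft/hadoop/etc/full/mapred-site.xml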
[yarn-site.xml]
<?xml version="1.0"?>
<configuration>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>s101</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
[slaves]
# vim slaves
s102
s103
s104
[hadoop-env.sh]
...
export JAVA_HOME=/soft/jdk
11. Distribute the full directory to each node
# cd /soft/hadoop/etc
# scp -r full s102:/soft/hadoop/etc
# scp -r full s103:/soft/hadoop/etc
# scp -r full s104:/soft/hadoop/etc
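The three scp commands can be collapsed into a loop; and if step 9 was not repeated on the other nodes, their hadoop symlink must also be pointed at full (a sketch under that assumption):
# for h in s102 s103 s104; do scp -r full $h:/soft/hadoop/etc; ssh $h "cd /soft/hadoop/etc && rm -rf hadoop && ln -s full hadoop"; done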
12. Format the file system (run once, on s101)
# hdfs namenode -format
13. Start the Hadoop daemons
# start-all.sh
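start-all.sh is deprecated in Hadoop 2.x in favor of running start-dfs.sh and then start-yarn.sh, though it still works. To verify the daemons, run jps on each node; with this configuration s101 should show NameNode, SecondaryNameNode, and ResourceManager, and s102-s104 should each show DataNode and NodeManager:
# jps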
14. Verify via the web UI
http://s101:50070/
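The NameNode UI on port 50070 confirms HDFS is up; the YARN ResourceManager UI is served on port 8088 by default:
http://s101:8088/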