煮酒品茶:大晚上的,有气无力的呻吟,让你深深的感觉听我的声音是一种折磨。没录好,笔记本声音也没录好,将就着听吧。哈哈
# Install Hadoop Master
# Download Hadoop and JDK packages
One : stop all hosts Selinux and iptables.
# service iptables stop
# chkconfig iptables off
# sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/selinux/config
# setenforce 0
Two : change hostname and hosts
# cat /etc/sysconfig/network
NETWORKING=yes
HOSTNAME=n1
GATEWAY=192.168.0.1
#HOSTNAME=n1
# cat /etc/hosts
127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
::1 localhost localhost.localdomain localhost6 localhost6.localdomain6
192.168.0.180 n1.hadoop.com n1
192.168.0.190 d1.hadoop.com d1
192.168.0.200 d2.hadoop.com d2
Three : Install Jdk
# tar zxvf jdk-7u25-linux-x64.gz
# mv jdk1.7.0_25 /usr/local/java
# cat /etc/profile
.......
export JAVA_HOME=/usr/local/java
export JRE_HOME=/usr/local/java/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$JRE_HOME/bin:$PATH
# source /etc/profile
Four : Install Hadoop Master
# tar zxvf hadoop-0.20.2.tar.gz
# mv hadoop-0.20.2 /usr/local/hadoop0.20.2
# cd /usr/local/hadoop0.20.2/conf
# cat /etc/profile
.......
export HADOOP_HOME=/usr/local/hadoop0.20.2
export PATH=$HADOOP_HOME/bin:$PATH
Five : Change Config file
# cat hadoop-env.sh |grep JAVA_HOME
export JAVA_HOME=/usr/local/java #change your $JAVA_HOME
# cat core-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>fs.default.name</name>
<value>hdfs://n1:9000</value>
</property>
</configuration>
# cat hdfs-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>dfs.data.dir</name>
<value>/hadoop/hdfs1</value>
</property>
<property>
<name>dfs.replication</name>
<value>3</value>
</property>
</configuration>
# cat masters
n1
# cat slaves
d1
d2
# Six : passwordless SSH login between all hosts
# useradd hadoop
# su - hadoop
# cd ./.ssh
# ssh-keygen -t rsa
<Enter...>
# scp id_rsa.pub d1:/home/hadoop/n1
# scp id_rsa.pub d2:/home/hadoop/n1
To d1
# cat n1 >> .ssh/authorized_keys
# chmod 600 .ssh/authorized_keys
To d2
# cat n1 >> .ssh/authorized_keys
# chmod 600 .ssh/authorized_keys
D1 D2 gogogo!
# Seven : tar this dir and scp to slaves
# cd /usr/local
# tar zcvf hadoop0.20.2.tar.gz hadoop0.20.2
# scp hadoop0.20.2.tar.gz d1:/home/root
# scp hadoop0.20.2.tar.gz d2:/home/root
D1: install JDK and export
# tar zxvf hadoop0.20.2.tar.gz
# mv hadoop0.20.2 /usr/local/hadoop0.20.2
D2: install JDK and export
# tar zxvf hadoop0.20.2.tar.gz
# mv hadoop0.20.2 /usr/local/hadoop0.20.2
# Eight : Run server
N1 :
use hadoop user:
$ hadoop namenode -format
$ start-all.sh
jps test
$ jps
50030 WebUi
http://n1:50030
D1:
$ start-dfs.sh
D2:
$ start-dfs.sh
Config file tree:
[root@n1 conf]# tree
.
├── 1
├── capacity-scheduler.xml
├── configuration.xsl
├── core-site.xml
├── hadoop
│ └── hdfs1
├── hadoop-env.sh
├── hadoop-metrics.properties
├── hadoop-policy.xml
├── hdfs-site.xml
├── log4j.properties
├── mapred-site.xml
├── masters
├── slaves
├── ssl-client.xml.example
└── ssl-server.xml.example
2 directories, 14 files
exp:
hadoop jar hadoop-0.20.2-examples.jar wordcount iis_log cgw_iis_log