Preparation
Install the VMware virtual machine beforehand
Install the JDK and configure its environment variables (see the sketch after this list)
Download the Hadoop package
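For reference, a minimal JDK entry in /etc/profile.d/myenv.sh (the same file the script below appends to) could look like this; the JDK install path is only an example and depends on where your JDK was unpacked:

# jdk 1.8
export JAVA_HOME=/opt/software/jdk1.8.0_171    # example path, adjust to your JDK
export PATH=$PATH:$JAVA_HOME/bin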
Installation Steps
Create a .sh file, paste in the script below, and run it with two arguments: the Hadoop tar.gz/tgz package and the target install directory. The script installs and configures Hadoop and then verifies that the Hadoop services start successfully.
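For example, assuming the script is saved as install_hadoop.sh (the file name is arbitrary) and the package sits in the current directory:

bash install_hadoop.sh hadoop-3.1.3.tar.gz /opt/software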
Source Code
# 1. Validate the package: it must be an existing hadoop tar.gz/tgz archive
#    (e.g. hadoop-3.1.3.tar.gz or hadoop-3.1.3.tgz)
pack=$1
if [ ! -f "$pack" ] || [[ ! "$pack" =~ ^.*hadoop.*\.(tar\.gz|tgz)$ ]]; then
    echo "ERROR : ONLY SUPPORT AN EXISTING tar.gz OR tgz HADOOP PACKAGE"
    exit 1
fi
# 2.1 Check for running Hadoop services and stop them if any are found.
#     jps prints one "<pid> <ClassName>" line per running JVM (e.g. "12345 NameNode"),
#     so the awk program counts the Hadoop daemons among them.
sc=$(jps | awk 'BEGIN {c=0} /DataNode|SecondaryNameNode|NodeManager|ResourceManager|NameNode/{c++} END{print c}')
if [ "$sc" -gt 0 ]; then
    stop-all.sh 1>/dev/null 2>hadoop_err.log || jps | awk '/DataNode|SecondaryNameNode|NodeManager|ResourceManager|NameNode/{print $1}' | xargs kill -9
    if [ $? -ne 0 ]; then
        echo "ERROR : FAIL TO STOP RUNNING HADOOP SERVICES"
        exit 1
    else
        echo "INFO : STOPPED OLD RUNNING HADOOP SERVICES"
    fi
fi
# 2.2 Target directory: create it if missing (e.g. /opt/software), remove any existing hadoop subdirectory
dest=${2%/}    # second argument, with any trailing slash stripped
echo "INFO : DEST DIR IS $dest"
if [ ! -e "$dest" ]; then
    mkdir -p "$dest"
    echo "INFO : DEST DIR DID NOT EXIST, CREATED"
fi
if [ ! -d "$dest" ]; then
    echo "ERROR : DEST FOR ARG 2 MUST BE A DIRECTORY"
    exit 2
fi
old=$(ls "$dest" | grep ^hadoop)
if [ $? -eq 0 ]; then
    rm -rf "$dest/$old"
    echo "INFO : OLD HADOOP EDITION FOUND AND REMOVED"
fi
# 3. Unpack the archive into the target directory
tar -zxf "$pack" -C "$dest"
if [ $? -eq 0 ]; then
    echo "INFO : SUCCESS TO DECOMPRESS $pack TO $dest"
else
    echo "ERROR : FAIL TO DECOMPRESS $pack TO $dest"
    exit 3
fi
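# The archive is now unpacked under $dest, e.g. /opt/software/hadoop-3.1.3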
# 4. Environment variables: remove any existing hadoop block, then append a new one
env=/etc/profile.d/myenv.sh
if [ ! -e "$env" ]; then
    touch "$env"
    echo "INFO : ENV FILE DID NOT EXIST, CREATED"
fi
# Locate an existing hadoop block: it starts at a "# hadoop" header line and
# ends on the line before the next "#" header (or at end of file).
old=$(awk 'BEGIN{b=0; e=0}
    b==0 && /^#.*hadoop/ {b=NR; next}
    b>0 && e==0 && /^#/  {e=NR-1}
    END{if(b>0 && e==0) e=NR; print b","e}' "$env")
if [ "$old" != "0,0" ]; then
    sed -i "${old}d" "$env"    # "$old" is a "start,end" sed line range
    echo "INFO : ENV VARIABLES FOR HADOOP FOUND AND REMOVED (LINES $old) IN $env"
fi
old=$(ls "$dest" | grep ^hadoop)    # name of the directory just unpacked
dest="$dest/$old"                   # e.g. /opt/software/hadoop-3.1.3
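# Append a fresh hadoop block; its leading "# hadoop" header line is what the
# removal logic above searches for on the next run of this script.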
cat >> $env <<EOF
# hadoop 3.1.3
export HADOOP_HOME=$dest
export PATH=\$PATH:\$HADOOP_HOME/bin:\$HADOOP_HOME/sbin:\$HADOOP_HOME/lib
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export HDFS_JOURNALNODE_USER=root
export HDFS_ZKFC_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
EOF
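After the script finishes, the new variables can be loaded into the current shell to confirm the installation; a minimal check, assuming the env file above:

source /etc/profile.d/myenv.sh
hadoop version    # should print the installed release, e.g. Hadoop 3.1.3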