Automated Spark Cluster Deployment
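
This script automates standing up a Spark standalone cluster with ZooKeeper-backed Master high availability on three nodes (master, node1, node2). It assumes an HA Hadoop cluster (hdfs://hacluster) and a ZooKeeper ensemble are already running, and that the custom cluster helpers xsync (distribute a file to every node) and jpsall (run a command on every node) are already on the PATH.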

#!/usr/bin/env bash

software_dir=/root/hadoop/                            # where the Spark tarball lives
spark_path=/opt/spark                                 # install target on every node
namenode_addr=hdfs://hacluster                        # HDFS HA nameservice
zookeeper_nodes=master:2181,node1:2181,node2:2181     # ZooKeeper ensemble for master HA
zookeeper_dir=/spark                                  # znode holding election/recovery state

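# Unpack the $1 tarball from $software_dir into $spark_path, stripping the
# top-level directory; pv shows extraction progress (requires pv and GNU tar)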
decom_install_pack()
{
mkdir -p $spark_path
echo "========= Start extracting the $1 installation package ========="
pv ${software_dir}*$1* |tar zxf - --strip=1 -C $spark_path
}

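# Publish SPARK_HOME via /etc/profile.d/spark.sh and push it to every node.
# Note: $HADOOP_HOME is expanded while the file is written, so it must already
# be exported (e.g. by the Hadoop installation's own profile.d entry)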
conf_env ()
{
cat > /etc/profile.d/spark.sh <<EOF
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native
export SPARK_HOME=$spark_path
export PATH=\$PATH:\$SPARK_HOME/bin:\$SPARK_HOME/sbin
EOF
xsync /etc/profile.d/spark.sh
jpsall &>/dev/null
}

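# Generate the worker list, spark-env.sh and spark-defaults.conf, sync the
# whole tree to all nodes, then start the cluster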
spark_config ()
{
chown -R root:root $spark_path
cp $spark_path/conf/spark-env.sh.template $spark_path/conf/spark-env.sh

# Spark 3.x names the worker list "workers"; Spark 2.x shipped it as "slaves"
fname=workers
if [ -f $spark_path/conf/slaves.template ]
then
   fname=slaves
fi
cp $spark_path/conf/$fname.template $spark_path/conf/$fname
sed -i '/^localhost/d'  $spark_path/conf/$fname
cat >> $spark_path/conf/$fname <<EOF
master
node1
node2
EOF

cp $spark_path/conf/spark-defaults.conf.template $spark_path/conf/spark-defaults.conf

cat >> $spark_path/conf/spark-env.sh <<EOF
export SPARK_MASTER_WEBUI_PORT=8900
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
# retainedApplications caps how many completed applications the history server
# keeps in memory; beyond that, the oldest entries are evicted. It limits the
# in-memory count, not the number of applications shown on the web page.
export SPARK_HISTORY_OPTS="
-Dspark.history.ui.port=4000
-Dspark.history.retainedApplications=10
-Dspark.history.fs.logDirectory=$namenode_addr/spark_log"

# Enable Spark Master HA: ZooKeeper monitors master liveness and drives failover
export SPARK_DAEMON_JAVA_OPTS="
-Dspark.deploy.recoveryMode=ZOOKEEPER
-Dspark.deploy.zookeeper.url=$zookeeper_nodes
-Dspark.deploy.zookeeper.dir=$zookeeper_dir"
EOF

cat >>$spark_path/conf/spark-defaults.conf<<EOF
spark.eventLog.enabled   true
spark.eventLog.dir       $namenode_addr/spark_log
spark.eventLog.compress  true
EOF
hdfs dfs -mkdir -p /spark_log
xsync $spark_path
. /etc/profile.d/spark.sh
# clear stale election state (rmr is deprecated; ZooKeeper 3.5+ uses deleteall)
zkCli.sh rmr $zookeeper_dir &>/dev/null
spark-start-all.sh
}

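# modify_script <all-script> <master-script> <new-path>
# Renames Spark's start-all.sh/stop-all.sh (so they no longer collide with
# Hadoop's scripts of the same name), tags the master's output with its
# hostname, adds a standby master on node1, and appends handling for the
# history server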
modify_script()
{
mv $spark_path/sbin/$1 $3
# Pipe the master's output through sed so every line is prefixed "master: "
sed -i "/\/$2\$/ s#\$# | sed 's/^/master: /'#" $3
# Also start/stop a standby master on node1, prefixing its output with "node1: "
sed -i "/\/$2/a ssh node1 \"\${SPARK_HOME}/sbin\"/$2 | sed 's/^/node1: /'" $3

str=${1%-*}
cat >> $3 <<EOF

# $str history server
\${SPARK_HOME}/sbin/$str-history-server.sh | sed 's/^/master: /'
EOF
}
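
# For reference, assuming the stock stop-all.sh layout (which varies slightly
# across Spark versions), the generated spark-stop-all.sh ends up roughly as:
#
#   "${SPARK_HOME}/sbin"/stop-slaves.sh
#   "${SPARK_HOME}/sbin"/stop-master.sh | sed 's/^/master: /'
#   ssh node1 "${SPARK_HOME}/sbin"/stop-master.sh | sed 's/^/node1: /'
#
#   # stop history server
#   ${SPARK_HOME}/sbin/stop-history-server.sh | sed 's/^/master: /'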

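# Tear down an existing Spark deployment on every node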
uninstall()
{
# stop the cluster if Master/Worker daemons are still running
jps | grep -Eq "Worker|Master" && spark-stop-all.sh

# delete the env file and the install tree on every node
jpsall "rm -f /etc/profile.d/spark.sh" "rm -rf $spark_path" &>/dev/null
}

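# Full installation: unpack, publish env, generate the cluster-wide
# start/stop wrappers, then configure and launch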
install ()
{
decom_install_pack spark
conf_env
modify_script stop-all.sh  stop-master.sh $spark_path/sbin/spark-stop-all.sh
modify_script start-all.sh start-master.sh $spark_path/sbin/spark-start-all.sh
spark_config
}

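# Dispatch on the first command-line argument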
main ()
{
case "$1" in
install)
   install
   ;;
reinstall)
   uninstall
   install
   ;;
uninstall)
   uninstall
   ;;
*)
   echo $"Usage: $0 {install|reinstalled|uninstall}"
   exit 2
esac 
}
main "$@"
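
To use it, save the script (the filename below is just an example) and run it on the master node:

bash spark_deploy.sh install

Once it finishes, jps should show a Master on master and node1 and a Worker on all three nodes; the Master web UI listens on port 8900 and the history server UI on port 4000, as configured above.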