2024年网络安全最新Hadoop集群的启动脚本整理及守护线程源码_(1)

#!/usr/bin/env bash

Runs a Hadoop command as a daemon. 以守护进程的形式运行hadoop命令


…、

使用方法:command 就是要运行的 hadoop 指令,下面的 case 语句中有判断

# Usage string: <hadoop-command> is the daemon command to run (validated below).
usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] [--script script] (start|stop) <hadoop-command> <args...>"


# Load cluster-wide environment via libexec/hadoop-config.sh.
# HADOOP_LIBEXEC_DIR may be pre-set by the caller; otherwise fall back
# to the libexec dir next to this script.
DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/hadoop-config.sh

# Load user-overridable environment (JAVA_HOME, heap sizes, ...) from
# hadoop-env.sh, if the config dir provides one.
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi


# Dispatch on the requested action. Only the (start) branch is shown in
# this excerpt; the real hadoop-daemon.sh also handles (stop).
case $startStop in

  (start)

    # Ensure the PID directory exists and is writable.
    [ -w "$HADOOP_PID_DIR" ] || mkdir -p "$HADOOP_PID_DIR"

    # Refuse to start a second copy if a live process already owns the pid file.
    if [ -f $pid ]; then
      if kill -0 `cat $pid` > /dev/null 2>&1; then
        echo $command running as process `cat $pid`.  Stop it first.
        exit 1
      fi
    fi

    # Optionally sync the installation from a master node before starting.
    if [ "$HADOOP_MASTER" != "" ]; then
      echo rsync from $HADOOP_MASTER
      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HADOOP_MASTER/ "$HADOOP_PREFIX"
    fi

    hadoop_rotate_log $log
    echo starting $command, logging to $log
    cd "$HADOOP_PREFIX"

    # Decide which launcher runs $command: HDFS daemons/utilities go through
    # bin/hdfs (reading the config dir), anything else through the generic
    # hadoop script. The daemon is detached via nohup and logs to $log.
    case $command in
      namenode|secondarynamenode|datanode|journalnode|dfs|dfsadmin|fsck|balancer|zkfc)
        if [ -z "$HADOOP_HDFS_HOME" ]; then
          hdfsScript="$HADOOP_PREFIX"/bin/hdfs
        else
          hdfsScript="$HADOOP_HDFS_HOME"/bin/hdfs
        fi
        nohup nice -n $HADOOP_NICENESS $hdfsScript --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
        ;;
      (*)
        nohup nice -n $HADOOP_NICENESS $hadoopScript --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
        ;;
    esac
    ;;

esac


在hadoop-daemon.sh脚本中,同样读取了环境变量,然后依据传入的参数$@(上一个脚本中)来判断要启动的hadoop的守护进程($command),最后调用bin/hdfs指令 读取配置信息 并启动hadoop的守护进程。


#### 7、bin/hdfs


bin/hdfs 通常被当作一条指令来使用,但它本身其实也是一个shell脚本。我们可以发现,在启动hadoop集群时,不管使用什么脚本,最终都指向了bin/hdfs,那么它里面到底是什么呢,我们来看一下,就明白了。



# Resolve the absolute directory containing this script, following how it
# was invoked ($0), so relative paths below work from any CWD.
bin=`which $0`
bin=`dirname ${bin}`
bin=`cd "$bin" > /dev/null; pwd`

DEFAULT_LIBEXEC_DIR="$bin"/../libexec

# Load HDFS-specific environment via libexec/hdfs-config.sh; honor a
# caller-provided HADOOP_LIBEXEC_DIR override.
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
. $HADOOP_LIBEXEC_DIR/hdfs-config.sh

#除了上面继续加载环境变化外,这个函数其实就是提示我们在使用什么
#比如namenode -format 是格式化DFS filesystem
#再比如 namenode 说的是运行一个DFS namenode

我们往下看

# Print the hdfs command-line usage summary to stdout.
# Called when bin/hdfs is invoked with no arguments or with a help flag.
function print_usage(){
  echo "Usage: hdfs [--config confdir] [--loglevel loglevel] COMMAND"
  echo "       where COMMAND is one of:"
  echo "  dfs                  run a filesystem command on the file systems supported in Hadoop."
  echo "  classpath            prints the classpath"
  echo "  namenode -format     format the DFS filesystem"
  echo "  secondarynamenode    run the DFS secondary namenode"
  echo "  namenode             run the DFS namenode"
  echo "  journalnode          run the DFS journalnode"
  echo "  zkfc                 run the ZK Failover Controller daemon"
  echo "  datanode             run a DFS datanode"
  echo "  dfsadmin             run a DFS admin client"
  echo "  haadmin              run a DFS HA admin client"
  echo "  fsck                 run a DFS filesystem checking utility"
  echo "  balancer             run a cluster balancing utility"
  echo "  jmxget               get JMX exported values from NameNode or DataNode."
  echo "  mover                run a utility to move block replicas across"
  echo "                       storage types"
  echo "  oiv                  apply the offline fsimage viewer to an fsimage"
  echo "  oiv_legacy           apply the offline fsimage viewer to an legacy fsimage"
  echo "  oev                  apply the offline edits viewer to an edits file"
  echo "  fetchdt              fetch a delegation token from the NameNode"
  echo "  getconf              get config values from configuration"
  echo "  groups               get the groups which users belong to"
  echo "  snapshotDiff         diff two snapshots of a directory or diff the"
  echo "                       current directory contents with a snapshot"
  echo "  lsSnapshottableDir   list all snapshottable dirs owned by the current user"
  echo "                       Use -help to see options"
  echo "  portmap              run a portmap service"
  echo "  nfs3                 run an NFS version 3 gateway"
  echo "  cacheadmin           configure the HDFS cache"
  echo "  crypto               configure HDFS encryption zones"
  echo "  storagepolicies      list/get/set block storage policies"
  echo "  version              print the version"
  echo ""
  echo "Most commands print help when invoked w/o parameters."
  # There are also debug commands, but they don't show up in this listing.
}

# With no arguments there is nothing to run: show usage and exit 0.
if [ $# = 0 ]; then
  print_usage
  exit
fi

# First argument is the sub-command; shift it off so "$@" holds its args.
COMMAND=$1
shift

case $COMMAND in
  # usage flags
  --help|-help|-h)
    print_usage
    exit
    ;;
esac

# Determine if we're starting a secure datanode, and if so, redefine appropriate variables

if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
  （此处原文被截断:该分支会重新设置安全模式 DataNode 所需的用户、pid 和日志变量）

评论 6
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值