Test data preparation
sudo -u hdfs hadoop fs -mkdir /user/zll/20190722/
sudo -u hdfs hadoop fs -mkdir /user/zll/20190723/
sudo -u hdfs hadoop fs -put 2019072201.txt /user/zll/20190722/
sudo -u hdfs hadoop fs -put 2019072202.txt /user/zll/20190722/
sudo -u hdfs hadoop fs -put 2019072301.txt /user/zll/20190723/
sudo -u hdfs hadoop fs -put 2019072302.txt /user/zll/20190723/
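
After the uploads, a recursive listing can confirm that both date directories and their files are in place; a minimal check, assuming the same /user/zll path used above:

sudo -u hdfs hadoop fs -ls -R /user/zll/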
vi getfilename.sh
chmod a+x getfilename.sh
./getfilename.sh
getfilename.sh:

#!/bin/bash
source /etc/profile
# Work from the directory that the result files are checked against
cd /usr/local/test || exit 1

# Remove the result file left over from a previous run, then regenerate it
if [ -f "/usr/local/test/f1.txt" ]; then
    rm -f /usr/local/test/f1.txt
fi

# List the directory and drop the "Found N items" header line
sudo -u hdfs hadoop fs -ls /user/hive/warehouse/test/ > f1.txt
sed -i '1d' f1.txt

# The 8th column of "hadoop fs -ls" output is the HDFS path
cat ./f1.txt | while read line1
do
    echo "$line1"
    subdir=$(echo "$line1" | awk '{print $8}')

    if [ -f "/usr/local/test/f2.txt" ]; then
        rm -f /usr/local/test/f2.txt
    fi

    # List the entries inside the current subdirectory and print each one
    sudo -u hdfs hadoop fs -ls "$subdir" > f2.txt
    sed -i '1d' f2.txt
    cat ./f2.txt | while read line2
    do
        echo "$line2"
    done
done
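
For reference, each data line printed by hadoop fs -ls has eight whitespace-separated columns (permissions, replication, owner, group, size, date, time, path), preceded by a "Found N items" header; that is why the script drops the first line with sed and takes column 8 as the path. A shorter sketch of the same two-level walk, assuming the same /user/hive/warehouse/test/ layout, that prints only the paths:

sudo -u hdfs hadoop fs -ls /user/hive/warehouse/test/ | awk 'NR>1 {print $8}' | while read subdir
do
    # For each subdirectory, keep only the path column of its own listing
    sudo -u hdfs hadoop fs -ls "$subdir" | awk 'NR>1 {print $8}'
done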