#!/bin/bash
# Directory where the raw logs are stored
log_dir=/opt/user/log/log/
# Staging directory for logs waiting to be uploaded
log_upload_dir=/opt/user/log/upload/
# Target directory on HDFS
hadoop_log=/test/log/
num=0
# Create the log directory if it does not exist
if [ ! -d "$log_dir" ]
then
    mkdir -p "$log_dir"
fi
# Create the staging directory if it does not exist
if [ ! -d "$log_upload_dir" ]
then
    mkdir -p "$log_upload_dir"
fi
# Scan the log directory and stage every rolled access log for upload
ls "$log_dir" | while read -r line
do
    if [[ "$line" == access.log.* ]]
    then
        echo "$line"
        date=$(date +%Y_%m_%d)
        num=$((num + 1))
        logName="access.log_${num}_${date}"
        echo "$logName"
        echo "$log_dir$line"
        cp "$log_dir$line" "$log_upload_dir$logName"
        # Record the staged file in the willDoing manifest
        echo "$log_upload_dir$logName" >> "${log_upload_dir}willDoing"
    fi
done
echo "***************以下是需要上传至HDFS服务器的日志*****************"
cat $log_upload_dir"willDoing"
echo "************************开始上传HDFS****************************"
sleep 2s
# Process each willDoing manifest that is not already being copied or done
ls "$log_upload_dir" | grep will | grep -v "_DONE" | grep -v "_COPY" | while read -r line
do
    # Mark the manifest as in progress
    mv "$log_upload_dir$line" "$log_upload_dir${line}_COPY"
    hadoop fs -mkdir -p "$hadoop_log"
    # Upload every file listed in the manifest
    cat "$log_upload_dir${line}_COPY" | while read -r file
    do
        hadoop fs -put -f "$file" "$hadoop_log"
    done
    # $date from the first loop was set in a subshell and is not visible here, so recompute it
    echo "$(date +%Y_%m_%d)"
    # Mark the manifest as done
    mv "$log_upload_dir${line}_COPY" "$log_upload_dir${line}_DONE_$RANDOM"
done
echo "************************上传HDFS结束***************************"