#!/bin/bash

# --- Java environment ---
export JAVA_HOME=/opt/java/jdk1.8.0_144
export JRE_HOME="$JAVA_HOME/jre"
export CLASSPATH=".:$JAVA_HOME/lib:$JRE_HOME/lib"
export PATH="$JAVA_HOME/bin:$PATH"

# --- Hadoop environment ---
export HADOOP_HOME=/opt/hadoop/hadoop-2.8.1
export PATH="$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH"


# Directory that holds the freshly-rotated log files.
log_src_dir=/root/logs/log/

# Staging directory for files waiting to be uploaded.
log_waitUpload_dir=/root/logs/waitUpload/

# Target HDFS directory, derived from today's date so each day's logs
# land in their own folder (yymmdd, matching the original layout).
date=$(date +%y%m%d)
hdfs_dir=/data/log/$date/

# Create the target directory if it does not exist yet.
# -p creates missing parents too: a plain -mkdir fails on a fresh
# cluster where /data/log has not been created.
if ! hadoop fs -test -e "$hdfs_dir"; then
    hadoop fs -mkdir -p "$hdfs_dir"
    echo "mkdir:${hdfs_dir}"
fi

# Print environment info for the job log.
echo "envs: hadoop_home: $HADOOP_HOME"

# Scan the log directory and stage every rotated access log for upload.
# A glob is used instead of parsing `ls` so filenames with unusual
# characters are handled safely and the loop runs in the current shell.
echo "log_src_dir:$log_src_dir"
for filePath in "$log_src_dir"access.log.*; do
    # When nothing matches, the literal pattern remains — skip it.
    [ -e "$filePath" ] || continue
    fileName=${filePath##*/}
    # Per-file timestamp. A dedicated variable is used so the daily
    # $date that built $hdfs_dir is never clobbered.
    ts=$(date +%Y_%m_%d_%H_%M_%S)
    # Move the file into the staging directory under a unique name.
    echo "moving $log_src_dir$fileName to ${log_waitUpload_dir}log_$fileName$ts"
    mv "$log_src_dir$fileName" "${log_waitUpload_dir}log_$fileName$ts"
    # Record the staged path in a willDoing manifest for the uploader.
    # NOTE: the timestamp has second resolution, so each staged file
    # typically gets its own manifest — same as the original behavior.
    echo "${log_waitUpload_dir}log_$fileName$ts" >> "${log_waitUpload_dir}willDoing.$ts"
done

# Find the pending willDoing manifests: any name containing "will" that
# is not already marked in-progress (_UPLOADING_) or finished (_DONE_).
ls "$log_waitUpload_dir" | grep will | grep -v "_UPLOADING_" | grep -v "_DONE_" | while read -r doingList
do
    echo "waitUpload is in file:$doingList"
    # Rename the manifest to *_UPLOADING_ so a concurrent or repeated
    # run of this script skips it.
    mv "$log_waitUpload_dir$doingList" "$log_waitUpload_dir${doingList}_UPLOADING_"
    # Each line of the manifest is the full path of one staged log file.
    # A distinct variable name avoids shadowing the outer loop variable
    # (the original relied on the pipe subshell to hide that shadowing).
    while read -r logPath
    do
        echo "puting...$logPath to hdfs path....$hdfs_dir"
        hadoop fs -put "$logPath" "$hdfs_dir"
    done < "$log_waitUpload_dir${doingList}_UPLOADING_"
    # Mark the manifest as completed.
    mv "$log_waitUpload_dir${doingList}_UPLOADING_" "$log_waitUpload_dir${doingList}_DONE_"
done
# TODO: on upload failure, send an alert mail / SMS, e.g.:
#wget http://smssendplatform.com/sendsms?custom=cid&smscontent=xxx&receiver=132165312