#!/bin/bash
###
# author: hongxing.fan
# description: fetch the reg_log (login log) HDFS partition to local disk
# date: 2015-03-16 (Monday)
# usage: Pull_login.sh [operate_date(YYYYMMDD)]  (defaults to yesterday)
####

# stat() args: $1=$? $2=cluster $3=operate $4=table $5=partition
# Script arg: $1 = operate_date (YYYYMMDD); defaults to yesterday.
operate_date=$(date -d "-1 day" +%Y%m%d)
if [[ $# -eq 1 ]]; then
	operate_date=$1
fi
stat(){
	end_time=$(date '+%Y-%m-%d %H:%M:%S')
	if [[ $1 -eq 0 ]];then
		echo "$2	$end_time	$3	$4	$5	$6	$7	$8	$9	success" >> $log
	else
		echo "$2	$end_time	$3	$4	$5	$6	$7	$8	$9	error" >> $log
		return 1
	fi
	return 0
}

# Fixed cluster/table paths and per-day bookkeeping files.
hdfs_path="/app/passport/pass_data/pass_data.db/reg_log"
local_path="/home/work/workToHadoop/reg_log"
log="/home/work/workToHadoop/log/${operate_date}.log"
touch "$log" && chmod 666 "$log"
# List of files fetched this run; recreated from scratch every execution.
get_reg="/home/work/workToHadoop/log/get_reg_${operate_date}.txt"
rm -f "$get_reg"   # -f: do not error on the first run when the file is absent
touch "$get_reg"
source ~/.bashrc

# Wait until the HDFS partition for $operate_date exists:
# poll every 600s (10 minutes), up to 72 attempts (~12 hours), then give up.
tryTime=0
while true; do
	if /home/work/.jumbo/bin/hadoop dfs -ls "$hdfs_path/dt=$operate_date" >/dev/null; then
		break
	fi
	if [[ $tryTime -lt 72 ]]; then
		sleep 600
		tryTime=$((tryTime + 1))
	else
		#stat 1 "work" "get" "login_log" "dt=$operate_date" "tryTime=$tryTime"
		exit 1   # partition never appeared; explicit non-zero status for cron
	fi
done
# Pull every file of today's partition, largest first, logging each transfer.
local_dir="${local_path}/dt=${operate_date}"
mkdir -p "$local_dir"
cd "$local_dir" || exit 1
# Column 5 of `dfs -ls` is the size, column 8 the path; sort by size descending.
tpls=$(/home/work/.jumbo/bin/hadoop dfs -ls "$hdfs_path/dt=$operate_date"/* | awk '{print $5,$8}' | sort -k1 -nr | awk '{print $2}')
host=$(hostname)
for t in $tpls; do
	# Prefix-strip with parameter expansion instead of spawning awk per file.
	tpl=${t#*"dt=$operate_date/"}   # file name relative to the partition dir
	tpl_2=${t#*"reg_log/"}          # path relative to the table root
	begin_time=$(date '+%Y-%m-%d %H:%M:%S')
	# Run the command directly — the old $(...) wrapper tried to execute
	# `dfs -get`'s stdout as a command and only worked because it prints nothing.
	/home/work/.jumbo/bin/hadoop dfs -get "$hdfs_path/dt=$operate_date/$tpl" "$local_dir"
	stat $? "$begin_time" "reg_log" "$operate_date" "$host" "get" "" "$hdfs_path/$tpl_2" "$local_dir/$tpl"
	echo "$local_dir/$tpl" >> "$get_reg"
done
