#!/bin/bash
###
# author:       hongxing.fan
# description:  re-consolidate files — merge small files, split large files
# date:         2015-03-22 (Sunday)
# file:         Pull_ssnlog.sh
###


# Date of the partition to pull, in YYYYMMDD form.
# Defaults to yesterday; a single command-line argument overrides it.
operate_date=$(date -d "-1 day" +%Y%m%d)
if [[ $# -eq 1 ]]; then
	operate_date="$1"
fi
#######################################
# Append one tab-separated status record to the global $log file.
# Arguments: $1     - exit status of the reported operation (0 = success)
#            $2..$9 - caller-supplied fields (begin time, table, date,
#                     host, op, seq, src, dst)
# Outputs:   one line to $log, stamped with the current time and a
#            trailing "success"/"error" marker.
# Returns:   0 when $1 was 0, 1 otherwise.
#######################################
stat(){
	local rc=$1
	local stamp
	stamp=$(date '+%Y-%m-%d %H:%M:%S')
	shift
	if [[ $rc -eq 0 ]]; then
		printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\tsuccess\n' \
			"$1" "$stamp" "$2" "$3" "$4" "$5" "$6" "$7" "$8" >> "$log"
		return 0
	fi
	printf '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\terror\n' \
		"$1" "$stamp" "$2" "$3" "$4" "$5" "$6" "$7" "$8" >> "$log"
	return 1
}

source ~/.bashrc
# HDFS source table and local staging root for ssn_log.
hdfs_path="/app/passport/pass_data/pass_data.db/ssn_log"
local_path="/home/work/workToHadoop/ssn_log"
# Manifest of partitions pulled during this run.
get_ssn="/home/work/workToHadoop/log/get_ssn_${operate_date}.txt"
rm -f "$get_ssn"   # -f: do not complain on the first run when the file is absent
touch "$get_ssn"
log="/home/work/workToHadoop/log/${operate_date}.log"
touch "$log" && chmod 666 "$log"
# Poll HDFS until today's partitions appear: at most 120 tries,
# 600 s apart (up to ~20 hours of waiting).
tryTime=0
while true; do
	# stdout is discarded (it was captured into an unused variable before);
	# stderr still reaches the console so failures are visible.
	if /home/work/.jumbo/bin/hadoop dfs -ls "$hdfs_path"/tpl=*/dt="$operate_date" > /dev/null; then
		break # data is there — run next codes
	fi
	if [[ $tryTime -lt 120 ]]; then
		tryTime=$((tryTime + 1))
		#stat 1 "work" "sleep" "ssn_log" "dt=$operate_date" "tryTime=$tryTime"
		sleep 600 # 10 minutes between polls
	else
		#stat 1 "work" "get" "ssn_log" "dt=$operate_date" "tryTime=$tryTime"
		exit 1 # data never arrived — exit non-zero so schedulers can detect it
	fi
done
## Even when no directory matches, the pipeline's exit status is 0 (success),
## because the downstream commands succeed regardless.
# Partition directories, largest first (ls column 5 = size, column 8 = path).
tpls=$(/home/work/.jumbo/bin/hadoop dfs -ls "$hdfs_path"/tpl=*/dt="$operate_date" | awk '{print $5,$8}' | sort -k1 -nr | awk '{print $2}')
#tpls=(1 2 3 4)
errors="/home/work/workToHadoop/log/get_errors.log"
echo "" > "$errors"
id=0      # shard handled by this host (0 .. total-1)
num=0     # running 1-based ordinal over all partitions
total=3   # number of hosts sharing the work
host=$(hostname)
# Abort rather than silently download into whatever the current directory is.
cd "$local_path" || exit 1
# Fan out 'hadoop dfs -get' downloads for this host's share of the partitions.
# Work is sharded round-robin across $total hosts: this host takes the
# partitions whose ordinal modulo $total equals $id.
for t in ${tpls[*]};do
	#echo "tt--$t"
	num=$(expr $num + 1)
	i=$(expr $num % $total)
	# Skip partitions that belong to another host's shard.
	if [[ $i -ne $id ]]
	then
		continue
	fi
	# tpl   = template id parsed out of ".../tpl=<id>/dt=<date>"
	# tpl_2 = the "tpl=<id>/dt=<date>" suffix relative to the ssn_log root
	tpl=$(echo $t | awk -F"tpl=|/dt=" '{print $2}')
	tpl_2=$(echo $t | awk -F"ssn_log/" '{print $2}')
	# Create local directory
	local_dir=$local_path/tpl=${tpl}/dt=$operate_date
	mkdir -p $local_dir
	##cd $local_path/tpl=${tpl}/dt=$operate_date
	#echo "$j $wc_f $p_num"
	#p_num=$(ps axu | grep "Get_ssn" | wc -l)
	# Throttle: count processes whose command line contains "-get" and this
	# date, and wait until the count drops below 11 before launching more.
	# NOTE(review): the grep in the pipeline likely matches itself, so the
	# effective concurrency cap is approximate — confirm before tuning.
	p_num=$(ps aux | grep "\-get" | grep "dt=$operate_date" | wc -l | awk '{print $1}')
	while [[ $p_num -ge 11 ]];do
		sleep 5
		p_num=$(ps aux | grep "\-get" | grep "dt=$operate_date" | wc -l | awk '{print $1}')
		#p_num=$(ps axu | grep "Get_ssn" | wc -l)
	done
	# Each download runs in a backgrounded group so transfers overlap.
	{
		try=0
		flag=0   # set to 1 when the failure was "already exists" — treated as done
		begin_time=`date '+%Y-%m-%d %H:%M:%S'`
		# NOTE(review): the $( ... ) wrapper executes -get's stdout as a
		# command; it only works because -get prints nothing on success.
		$(/home/work/.jumbo/bin/hadoop dfs -get $t $local_dir 2>> $errors)
		s=$?
		# Retry up to 50 times, 2 s apart, unless the shared error log shows
		# the destination already exists (then keep the existing copy).
		while [[ $s -ne 0 && $try -lt 50 ]]
		do
			# NOTE(review): -eq 1 means two or more matching lines in the
			# shared $errors file do NOT set flag — verify this is intended.
			has=$(grep -Ec "$tpl_2.*already.*exists$" $errors)
			if [[ $has -eq 1 ]];then
				flag=1
				break
			fi
			sleep 2
			try=$(expr $try + 1)
			# Retries no longer append to $errors (stderr is not redirected).
			$(/home/work/.jumbo/bin/hadoop dfs -get $t $local_dir)
			s=$?
		done
		# Record the final outcome unless it was the already-exists case.
		if [[ $flag -eq 0 ]];then
			stat $s "$begin_time" "ssn_log" "$operate_date" "$host" "get" $num "$hdfs_path/$tpl_2" "$local_path/$tpl_2"
		fi
		# Manifest entry is written whether the transfer succeeded or not.
		echo "${local_path}/$tpl_2" >> $get_ssn
	} &
	sleep 1   # stagger launches so the ps-based throttle can keep up
done
# Barrier: block until every background download has finished.
wait
