#!/bin/bash
# ETL driver: derive the target partition date from $1 (YYYYMMDD) or
# default to yesterday when no argument is given.

if [[ $# -eq 0 ]]
then
	op_date=$(date +%Y%m%d -d "-1 day")
elif [[ $# -eq 1 ]]
then
	op_date=$1
else
	# Usage error: report on stderr (was stdout) and fail.
	echo "input 0 or 1 args" >&2
	exit 1
fi
echo "$op_date"

# Load the user environment (hadoop/hive/gsmsend tooling is expected on PATH).
source ~/.bashrc

# Alert recipients for sendMsg: space-separated mobile numbers.
phone='13426082139 13811063282 13261676918 13520011291'
# NOTE(review): this second assignment overrides the full list above, so only
# one number is ever notified — looks like a leftover debug override; confirm.
phone='13811063282'
# Send an alert SMS to every number in $phone. $1 is appended to each
# recipient address (number@$1). Reads globals: phone, op_date.
function sendMsg(){
  for num in $phone; do
    gsmsend-script $num@$1 /home/hadoop/udm/run_etlMR.sh is error: "dt=$op_date"
  done
}

in=/user/hive/warehouse/pass_data.db/ssn_log/tpl=*/dt=$op_date
out=/tmp/ssn_etl_$op_date

## Wait for the input partition to appear on HDFS: poll every 10 minutes,
## for at most 24h (24 * 6 attempts). The glob in $in is expanded by HDFS,
## not the local shell, so it is passed through quoted.
tryTime=$((24 * 6))
files=$(hdfs dfs -ls "$in")
stat=$?
while [[ $stat -ne 0 && $tryTime -gt 0 ]];do
	# Count DOWN the remaining attempts. The original incremented here,
	# which made the retry budget unbounded (infinite loop on failure).
	tryTime=$((tryTime - 1))
	sleep $((1 * 60 * 10))
	files=$(hdfs dfs -ls "$in")
	stat=$?
done
if [[ $stat -ne 0 ]];then
	# $host is only assigned later in the script, so it was empty here;
	# compute the hostname directly. Exit non-zero so callers see failure.
	sendMsg "$(hostname)"
	exit 1
fi
## start app
# Append every daily user_reg partition, from the day before op_date back
# to 2015-04-15, onto the comma-separated MR input list.
d=$(date +%Y%m%d -d "-1 day $op_date")
while [[ $d -ge 20150415 ]]; do
	in="${in},/user/hive/warehouse/pass_data.db/user_reg/dt=$d"
	d=$(date +%Y%m%d -d "-1 day $d")
done

source /etc/profile
source /home/hadoop/.bashrc

# Clear any previous output dir; failure (dir absent) is tolerated.
result=$(hdfs dfs -rm -r "$out")
echo "$result"

# Run the MR job directly. The original wrapped this in $(...), which would
# have tried to execute the job's stdout as a shell command.
hadoop jar udm-1.0-jar-with-dependencies.jar com.baidu.udm.etl.EtlUserModelMR --input "$in" --output "$out"
stat=$?

# Sanity check: alert when the output looks too small (job probably produced
# little or nothing). First field of `du -s` is the byte count.
size=$(hdfs dfs -du -s "$out" | cut -d" " -f1)
host=$(hostname)
# Default to 0 so the numeric test cannot error out when du printed nothing.
if [[ ${size:-0} -lt 10000 ]]
then
	sendMsg "$host"
	#exit 1   # alert only; deliberately continue to the Hive load below
fi

# Publish results into Hive only if the MR job succeeded. For each
# file-prefix/table pair, replace the op_date partition with fresh data.
# NOTE: the original ran `$(hive -e ...)`, which executed hive's stdout
# as a shell command; hive is now invoked directly.
if [[ $stat -eq 0 ]];then
	for pair in "login:user_login" "reg:user_reg"; do
		prefix=${pair%%:*}
		table=${pair#*:}
		# Drop first so a re-run does not duplicate the partition's data.
		echo "use pass_data; alter table $table drop partition(dt=$op_date);"
		hive -e "use pass_data; alter table $table drop partition(dt=$op_date);"
		echo "use pass_data; load data inpath 'hdfs://$out/$prefix*' into table $table partition(dt=$op_date);"
		hive -e "use pass_data; load data inpath 'hdfs://$out/$prefix*' into table $table partition(dt=$op_date);"
	done
fi
