#!/bin/bash
# Incremental import driver: sqoop-imports Oracle tables (listed in
# ./tables_clob_v1, one "table[,clob_col...]" per line) into HDFS for a
# [from_time, to_time) window, then runs the Hive append statements in
# ./sqls_clob_v1 through beeline.
# Usage: sh execute.sh '2018-12-01 00:00:00' '2019-01-01 00:00:00'
source /etc/profile
source /root/TDH-Client/init.sh

# Legacy manual SQOOP_HOME setup, superseded by init.sh above.
# (NB: "CILENT"/"sqooop" are typos preserved from the original.)
#SQOOP_LOCAL_DIR_IN_TDH_CILENT=sqooop
#export SQOOP_HOME=/root/TDH-Client/$SQOOP_LOCAL_DIR_IN_TDH_CILENT
#export PATH=$PATH:/root/TDH-Client/$SQOOP_LOCAL_DIR_IN_TDH_CILENT/bin

# --- configuration & argument validation ----------------------------------
tables=$(pwd)/tables_clob_v1            # CSV list: table[,clob_col...]
log_path=$(pwd)/log
time_log_path=${log_path}/time.log
result_log_path=${log_path}/result.log
cnt=0
from_time=$1                            # inclusive lower bound (Oracle date string)
to_time=$2                              # exclusive upper bound
# Both time bounds are mandatory; bail out before doing any work.
if [ "$#" -ne 2 ]; then
        echo "please set from_time and to_time like sh execute.sh '2018-12-01 00:00:00' '2019-01-01 00:00:00'" >&2
        exit 1
    else
        echo "start with from_time $from_time and to_time $to_time"
fi
# Fix: the log directory may not exist on a fresh run; every append below
# (>> $time_log_path etc.) would fail without it.
mkdir -p "$log_path"
head="--------start sqoop job at $(date '+%Y-%m-%d %H:%M:%S') with from_time $from_time and to_time $to_time--------"
echo "" >> "$time_log_path"
echo "" >> "$result_log_path"
echo "$head" >> "$time_log_path"
echo "$head" >> "$result_log_path"
# --- per-table sqoop import ------------------------------------------------
# Read the table list directly (no `cat |` subshell); IFS= and -r keep each
# line intact, including backslashes.
while IFS= read -r line; do
	# "table,clob1,clob2" -> array; fields after the first are CLOB columns
	# that sqoop must map to Java String.
	array=(${line//,/ })
	table_name=${array[0]}
	map_args=()
	if [ ${#array[@]} -ge 2 ]; then
		column_name="${array[1]}=String"
		for (( i = 2; i < ${#array[@]}; i++ )); do
			column_name="${column_name},${array[i]}=String"
		done
		# Array instead of an unquoted string keeps the two words intact
		# without relying on word splitting.
		map_args=(--map-column-java "$column_name")
	fi

	# Refresh the Kerberos ticket before each import; long runs can outlive it.
	kinit -kt /root/keytab/hdfs.keytab hdfs@TDH
	begin_time=$(date "+%s")
	echo "----------------------table_name:${table_name}---------------------"
	# SECURITY NOTE(review): the DB password is passed on the command line and
	# is visible in `ps`; prefer sqoop's --password-file option.
	sqoop import --username jrwz2_zx --password jrwz2_zx --connect jdbc:oracle:thin:@192.168.103.231:1521/NINVOICE \
		--query "select * from jrwz2_zx.$table_name where lrsj >= to_date('$from_time','yyyy-mm-dd hh24:mi:ss') \
		and lrsj < to_date('$to_time','yyyy-mm-dd hh24:mi:ss') and \$CONDITIONS" \
		--target-dir "/tmp/daydata/$table_name/" \
		-m 1 --hive-delims-replacement "" \
		"${map_args[@]}" \
		--fields-terminated-by '\001' \
		--delete-target-dir
	sqoop_append_result=$?
	end_time=$(date "+%s")
	if [ "$sqoop_append_result" -eq 0 ]; then
		echo "$(date '+%Y-%m-%d %H:%M:%S') ----- $table_name sqoop append execute success" >> "$result_log_path"
	else
		echo "$(date '+%Y-%m-%d %H:%M:%S') ----- $table_name  sqoop append execute fail" >> "$result_log_path"
	fi
	cnt=$((cnt + 1))
	time_distance=$((end_time - begin_time))
	echo "$table_name spend $time_distance" >> "$time_log_path"
	echo "###############"
	echo "------$cnt $table_name-----"
	echo "###############"
done < "$tables"

# --- prepare Hive append SQL ------------------------------------------------
sqls=$(pwd)/sqls_clob_v1
sql_log_path=${log_path}/sql_result.log
sql_time_log_path=${log_path}/sql_time.log
cnt=0

tmp_sqls=$(pwd)/tmp/sqls
# Fix: ./tmp may not exist on a fresh run; cp below would fail without it.
mkdir -p "$(pwd)/tmp"
rm -f "$tmp_sqls"
cp "$sqls" "$tmp_sqls"
# Substitute the ?1/?2 placeholders with the requested time window.
# NOTE(review): assumes the timestamps never contain '/', the sed delimiter
# — true for the 'YYYY-MM-DD HH:MM:SS' format this script expects.
sed -i "s/?1/$from_time/g" "$tmp_sqls"
sed -i "s/?2/$to_time/g" "$tmp_sqls"

sql_head="--------start append_sql at $(date '+%Y-%m-%d %H:%M:%S') with from_time $from_time and to_time $to_time--------"
echo "" >> "$sql_log_path"
echo "" >> "$sql_time_log_path"
echo "$sql_head" >> "$sql_log_path"
echo "$sql_head" >> "$sql_time_log_path"

# --- execute each append statement through beeline --------------------------
# Direct redirection (no `cat |` subshell); -r is essential here, otherwise
# `read` would strip backslashes out of the SQL text.
while IFS= read -r append_sql; do
	sql_begin_time=$(date "+%s")
	# SECURITY NOTE(review): hardcoded HiveServer2 credentials on the command
	# line; consider a JDBC URL with auth from a protected file.
	beeline -u jdbc:hive2://192.168.110.211:10000 -n hive -p hive123 -e "$append_sql"
	sql_append_result=$?
	sql_end_time=$(date "+%s")
	if [ "$sql_append_result" -eq 0 ]; then
		echo "$(date '+%Y-%m-%d %H:%M:%S')  execute success" >> "$sql_log_path"
	else
		echo "$(date '+%Y-%m-%d %H:%M:%S')  execute fail" >> "$sql_log_path"
	fi
	# The statement itself is logged in both branches; hoisted out of the if.
	echo "$append_sql" >> "$sql_log_path"
	cnt=$((cnt + 1))
	sql_time_distance=$((sql_end_time - sql_begin_time))
	echo "$sql_time_distance cost by $append_sql" >> "$sql_time_log_path"
	echo "###############"
	echo "------$cnt $append_sql-----"
	echo "###############"
done < "$tmp_sqls"

exit 0
