#!/bin/bash
# Sqoop export driver.
# Usage: script.sh [day YYYYMMDD] [config-file]
# Resolves the target day: defaults to yesterday when $1 is not given.
# NOTE: shebang changed sh -> bash; the script uses bashisms ([[ ]], $(( ))).

if [ -z "$1" ]; then
  # No explicit day: compute "yesterday", portable across GNU (Linux)
  # and BSD/macOS date, which use different relative-date flags.
  os_name=$(uname)
  if [ "Linux" = "$os_name" ]; then
    day=$(date -d "1 days ago" +%Y%m%d)
  else
    day=$(date -v -1d +%Y%m%d)
  fi
else
  day=$1
fi

# Optional $2: a single config file (path relative to the script directory)
# to run instead of every export_files/*/*.config.
filename=$2

# Destination tables are suffixed with the 2-digit year of the loaded day.
suffix=$(date -d "${day}" +%y)
partition_value=$day
CUR_DIR=$(cd "$(dirname "$0")"; pwd)
start=$(date +%s)

# Default: every per-table config under export_files/.
# NOTE(review): ls-based list kept because the loop below consumes a
# whitespace-separated scalar; config paths must not contain spaces.
files=$(ls "${CUR_DIR}"/export_files/*/*.config)
# BUG FIX: the condition previously ran a non-existent command `unknown`
# ($(unknown) != "" was always false), so the $2 override never applied.
# The intent — honor an explicitly passed config file — is restored here.
if [[ -n "$filename" ]]; then
  files=$(ls "${CUR_DIR}/${filename}")
fi
# Run one sqoop export per config file. Each sourced config is expected to
# define: jdbc_url, username, password, update_key, table_name,
# hive_database, hive_table, partition_key, columns.
for f in ${files}; do
  # shellcheck disable=SC1090 — config path is computed at runtime
  source "${f}"

  # The config's parent directory names the partition granularity:
  # day (default), week, month or year.
  type=$(echo "${f}" | awk -F '/' '{print $(NF-1)}')
  if [[ "$type" == "week" ]]; then
    # Partition on the Monday of the week containing $day.
    # BUG FIX: %w gives 0 for Sunday, making the old offset "--1 days"
    # (invalid); %u gives ISO weekday 1 (Mon) .. 7 (Sun), so Sunday
    # correctly maps back 6 days to its week's Monday.
    whichday=$(date -d "${day}" +%u)
    partition_value=$(date -d "${day} -$((whichday - 1)) days" +%Y%m%d)
  fi
  if [[ "$type" == "month" ]]; then
    partition_value=$(date -d "${day}" +%Y%m01)
  fi
  if [[ "$type" == "year" ]]; then
    partition_value=$(date -d "${day}" +%Y0101)
  fi

  echo "开始导入${day}号${table_name}分区为${partition_value}的数据"
  # Upsert (update + allowinsert) into the year-suffixed MySQL table from
  # the Hive partition selected above.
  if sqoop export \
    -D mapred.job.queue.name=spark -D sqoop.export.records.per.statement=100 -D sqoop.export.statements.per.transaction=100 \
    --connect "${jdbc_url}" \
    --username "${username}" \
    --password "${password}" \
    --update-key "${update_key}" \
    --update-mode allowinsert \
    --table "${table_name}_${suffix}" \
    --fields-terminated-by '\t' \
    --hcatalog-database "${hive_database}" \
    --hcatalog-table "${hive_table}" \
    --hive-partition-key "${partition_key}" \
    --hive-partition-value "${partition_value}" \
    --columns "${columns}"; then
    echo "${f}执行成功..."
  else
    echo "${f}执行失败..."
  fi
done
# Report wall-clock end time and the total run duration in seconds.
end=$(date +%s)
echo "ENDTIME：$(date '+%Y%m%d-%H%M%S')"
elapsed=$(( end - start ))
echo "DURATION(S): $elapsed"
