#!/bin/bash
# Export data from Hive into MySQL via sqoop.
CUR_DIR=$(cd "$(dirname "${BASH_SOURCE-$0}")"; pwd)

# Abort if system_env.sh fails to load.
# Fix: the status used to be checked two statements later, after a plain
# variable assignment that always succeeds, so the guard could never
# fire; test the `source` command directly instead.
source "${CUR_DIR}/system_env.sh" || exit 1

XML_FILES_DIR="${CUR_DIR}/exportFiles"

# sqoop export settings (DB connection values come from system_env.sh)
export SQOOP_EXPORT_PARAM1=" -D sqoop.export.records.per.statement=100 -D sqoop.export.statements.per.transaction=100  -D mapred.job.queue.name=spark "
export SQOOP_EXPORT_APP="sqoop export  \
 ${SQOOP_EXPORT_PARAM1} \
 --connect jdbc:mysql://${db_url}/${db_database}?useSSL=false --username ${db_username} --password ${db_pwd}  "

export SQOOP_EXPORT_COMMON_UPDATE_MODE=" --update-mode allowinsert "
export SQOOP_EXPORT_COMMON_OTHER="  "
doSqoopExport(){
	# Export one Hive table (optionally a single partition) into MySQL in
	# upsert mode (--update-mode allowinsert).  Parameters are read from
	# environment variables published by the caller:
	#   mysqlTable, mysqlUpdateKey, hiveTable,
	#   hivePartitionKeys, hivePartitionValues, columns
	echo "---------------------"
	echo "[mysqlTable]:${mysqlTable}"
	echo "[mysqlUpdateKey]:${mysqlUpdateKey}"
	echo "[hiveTable]:${hiveTable}"
	echo "[hivePartitionKeys]:${hivePartitionKeys}"
	echo "[hivePartitionValues]:${hivePartitionValues}"
	echo "[columns]:${columns}"
	echo "---------------------"

	if [ -n "${hivePartitionKeys}" ];then
	   HIVE_PARTITION_OPTION=" --hcatalog-partition-keys='${hivePartitionKeys}' \
	--hcatalog-partition-values='${hivePartitionValues}'"
	  # Append the partition columns at the end of --columns; each Hive
	  # partition column must share its name with the MySQL column.
	  columns="${columns},${hivePartitionKeys}"
	else
	  HIVE_PARTITION_OPTION=""
	fi

	# Assemble the full sqoop command line as a string; it is executed
	# through a child shell so the embedded single quotes are re-parsed.
	myexec="${SQOOP_EXPORT_APP} \
	${SQOOP_EXPORT_COMMON_OTHER} \
	${SQOOP_EXPORT_COMMON_UPDATE_MODE} \
	--update-key '${mysqlUpdateKey}' \
	--table '${mysqlTable}' \
	--hcatalog-database=${hive_db} \
	--hcatalog-table ${hiveTable} \
	  ${HIVE_PARTITION_OPTION} \
	--columns '${columns}'\
	"
	# Quote the expansion so the command is logged verbatim (SC2086).
	echo "$myexec"
	bash -c "$myexec"

}


# Accumulated Hive "load data" statements, collected (and deduplicated)
# by appendHiveLoadSql: one array entry per statement, plus the same
# statements joined by newlines for a single `hive -e` invocation.
aryHiveLoadSql=()
hiveLoadSql=""

# Export the table described by a single XML config file.
exportOneFile(){
	element=$1
	# Read every sqoop parameter out of the XML file and publish it in
	# the environment for doSqoopExport to pick up.
	local field
	for field in mysqlTable hiveTable mysqlUpdateKey \
			hivePartitionKeys hivePartitionValues columns
	do
		export "${field}=$(getXmlContentValue "$element" "$field")"
	done
	doSqoopExport
}

appendHiveLoadSql(){
	# Build the Hive "load data" statement for the partition described by
	# one XML config file and append it to hiveLoadSql/aryHiveLoadSql,
	# skipping statements that were already collected.
	#
	# Fix: the call sites pass the file as $1, but the function used to
	# ignore the argument and rely on the global $element; honour $1 when
	# given (falling back to the global keeps old callers working).
	element=${1:-$element}
	mysqlTable=$(getXmlContentValue  $element "mysqlTable")
    hiveTable=$(getXmlContentValue  $element "hiveTable")
    mysqlUpdateKey=$(getXmlContentValue  $element "mysqlUpdateKey")
    hivePartitionKeys=$(getXmlContentValue  $element "hivePartitionKeys")
    hivePartitionValues=$(getXmlContentValue  $element "hivePartitionValues")
    columns=$(getXmlContentValue  $element "columns")
    # Tables in the default database live directly under the warehouse
    # root; any other database adds a "<db>.db/" path component.
    if [ "${hive_db}" == "default" ];then
      hive_db_path=""
    else
      hive_db_path="${hive_db}.db/"
    fi
    strHiveLoadPatationData="load data inpath '${hive_warehouse_base_path}/${hive_db_path}${hiveTable}/${hivePartitionKeys}=${hivePartitionValues}'  INTO TABLE ${hive_db}.${hiveTable} PARTITION(${hivePartitionKeys}='${hivePartitionValues}');"

    # Skip the statement if an identical one was already recorded.
    find="false"
    for ((i = 0; i < ${#aryHiveLoadSql[@]}; i++))
    do
        if [[ "${aryHiveLoadSql[$i]}" == "${strHiveLoadPatationData}" ]]; then
            find="true"
            break
        fi
    done
    if [[ "false" == "${find}" ]]; then
        aryHiveLoadSql[${#aryHiveLoadSql[@]}]="${strHiveLoadPatationData}"
        hiveLoadSql="${hiveLoadSql}
${strHiveLoadPatationData}"
    fi
}
# If no XML file name is given, every table is exported.
# If an XML file name is given, only that table's data is exported.
oneXmlFileName=$1

# Run "$1 <xml-file>" for each selected XML config file: every file when
# oneXmlFileName is empty, otherwise only the first file whose basename
# matches it.  (The two original loops were identical except for the
# handler; factored into one helper.  Globbing replaces `ls` parsing,
# SC2045.)
forEachSelectedXml(){
	local handler=$1
	local element fileName
	for element in "${XML_FILES_DIR}"/*.xml
	do
		# Skip the literal pattern when the glob matches nothing.
		[ -e "$element" ] || continue
		if [ -z "${oneXmlFileName}" ];then
			echo "$element"
			"$handler" "$element"
		else
			fileName=$(basename "$element")
			if [[ ${fileName} =~ ${oneXmlFileName} ]]; then
				"$handler" "$element"
				break
			fi
		fi
	done
}

# Collect the Hive "load data" statements for the current partitions.
forEachSelectedXml appendHiveLoadSql

echo "-----hiveSql:${hiveLoadSql}"

# Hive data loading needs rework; disabled for now.
#hive -e "${hiveLoadSql}"

# Export the data.
forEachSelectedXml exportOneFile