#!/bin/bash

set -e

# Sync Hive data to Oracle (monthly departure/transfer timeliness route aggregate).

# --- Hive source configuration ---
hiveDataBase="jms_dm"
HiveTableName="dm_cn_departure_transfer_timely_route_agg_dt"
datacolfield="dt"
# Execution date rendered by Airflow templating (CST date string, e.g. 2023-05-01).
execData="{{ execution_date | cst_ds }}"

### Derive the sync window from the execution date:
### on the 1st of the month, reprocess the whole previous month;
### otherwise process only the execution date itself.
### NOTE(review): the original comment mentioned "day 1 or day 2" but the
### code only special-cases day 1 — confirm which is intended.
date_dd="$(date -d "${execData}" +%d)"
echo "${date_dd}"
# Force base-10 so day strings with a leading zero (e.g. "08") are not
# interpreted as octal by shell arithmetic (replaces the old `expr` call).
day=$((10#${date_dd}))
echo "${day}"
if [ "${day}" -eq 1 ]; then
  startDate="$(date -d "${execData} -1 month" +%Y-%m-01)"
  endDate="$(date -d "${execData} -1 day" +%Y-%m-%d)"
else
  startDate="${execData}"
  endDate="${execData}"
fi
echo "${startDate}"
###

# ---------------------------------------------------------------
#  Oracle target configuration
# ---------------------------------------------------------------
# Oracle schema/database name
oracle_db="jms_bigdata_report"
# Staging table loaded by spark-sql, then merged by the stored procedure
oracleTable_tmp="dm_cn_departure_transfer_timely_route_mth_dt_tmp"
# Final target table
oracleTable="dm_cn_departure_transfer_timely_route_mth_dt"
# Full JDBC connection URL
oracleUrl="jdbc:oracle:thin:@//pro-bigdatareport-2023-readwrite-ora.yl.com:1521/orcl"
# Connection pieces used to rebuild the URL for spark-sql and sqoop
oracle_host="pro-bigdatareport-2023-readwrite-ora.yl.com"
oracle_port="1521"
oracle_instance="orcl"
# Credentials
oracleUser="jms_bigdata_report"
# SECURITY(review): password is hard-coded and passed on command lines
# (visible via `ps`); consider loading it from a protected file/env var.
oraclePassword="H15DCte03YPW3B2pEA"
# Name of the Spark temporary JDBC view over the staging table
viewname="dm_cn_departure_transfer_timely_route_mth_dt_airflow_view"

# ---------------------------------------------------------------
#  Build the import SQL
# ---------------------------------------------------------------
# Registers a Spark JDBC temporary view over the Oracle staging table and
# overwrites it with the aggregated Hive rows for [startDate, endDate].
# NOTE(review): the final predicate date_format('${execData}','d')=1 means
# rows are only written when the job runs on the 1st of the month —
# presumably intentional for this monthly table; confirm.

sqlText="
CREATE TEMPORARY VIEW ${viewname}
    USING org.apache.spark.sql.jdbc
    OPTIONS (
      url 'jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/${oracle_instance}',
      dbtable '${oracleTable_tmp}',
      driver 'oracle.jdbc.driver.OracleDriver',
      user '${oracleUser}',
      password '${oraclePassword}',
      isolationLevel 'NONE',
      batchsize 10000,
      truncate true
    );
INSERT overwrite TABLE ${viewname}
select dt
       ,start_code
       ,order_source_code
       ,mage_region_code
       ,mage_region_name
       ,start_name
       ,order_source_name
       ,center_type
       ,start_agent_code
       ,start_agent_name
       ,send_account
       ,route_lack_account
       ,transfer_intime_account
       ,transfer_intime_rate
       ,without_arrival_account
       ,error_send_account
       ,opp_delay_account
       ,send_delay_account
       ,shipment_delay_account
       ,stop_delay_account
       ,all_line_transfer_intime_account
       ,all_line_transfer_intime_rate
       ,shipment_error_account
       ,temporary_dispatch_account
       ,reserve_route_account
       ,epidemic_hoarding_account
       ,store_count
       ,express_type_code
       ,express_type_name
       ,from_unixtime(unix_timestamp(),'yyyy-MM-dd HH:mm:ss')   as update_time     
  from jms_dm.dm_cn_departure_transfer_timely_route_agg_dt 
 where dt>='${startDate}' and dt<= '${endDate}'
   and date_format('${execData}','d')=1;
"

echo "${sqlText}"

# FIX: the old check tested $? of the echo above, which is always 0, so the
# ERROR branch could never fire. Validate the SQL text itself instead.
if [ -z "${sqlText}" ]; then
  echo -e "ERROR - 数据导入SQL加载失败 \n"
  exit 1
else
  echo -e "INFO - Execution Sql: ${sqlText} \n"
fi

# Run the import through spark-sql.
# FIX: Spark configuration keys are case-sensitive — the original key
# "spark.dynamicallocation.enabled" was silently ignored, so dynamic
# allocation was never actually enabled; the correct key is
# spark.dynamicAllocation.enabled (matching the other two dynamicAllocation
# settings below). Also: quoted the --name argument and use a portable
# positive exit code (exit -1 becomes 255 via wraparound).
spark-sql --driver-memory 4G \
          --executor-cores 4  \
          --num-executors  4 \
          --executor-memory 8g   \
          --conf spark.dynamicAllocation.enabled=true  \
          --conf spark.dynamicAllocation.maxExecutors=15  \
          --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=120 \
          --conf spark.sql.sources.partitionOverwriteMode=dynamic \
          --conf spark.shuffle.service.enabled=true  \
          --conf spark.driver.maxResultSize=12G  \
          --conf spark.sql.broadcastTimeout=3600 \
          --conf spark.executor.memoryOverhead=1G \
          --conf spark.sql.shuffle.partitions=1000 \
          --conf spark.sql.autoBroadcastJoinThreshold=104857600 \
          --hiveconf hive.exec.dynamic.partition=true  \
          --hiveconf hive.exec.dynamic.partition.mode=nonstrict  \
          --hiveconf hive.exec.max.dynamic.partitions.pernode=2000 \
          --queue route \
          --jars hdfs:///scheduler/jms/spark/oracle/ojdbc8-19.3.0.0.jar \
          --name "${oracleTable}_${execData}" \
          -e "${sqlText}" || exit 1

# +-------------------------
# |   Merge staged data into the target table
# +-------------------------


# NOTE(review): now_date and flag_date are computed but never used below
# (flag_date = now + $end_time minutes looks like a leftover timeout marker).
now_date=`date  "+%Y-%m-%d %H:%M:%S"`
end_time=30
run_time=`date  "+%Y%m%d%H%M%S"`
echo $run_time
flag_date=`date -d "$end_time minute"  "+%Y%m%d%H%M%S"`
echo $flag_date

# -------- Run the merge stored procedure via sqoop eval.
# -------- Fired asynchronously; the original note says "give up after 5 min
# -------- and continue", but the actual wait below is only 1 minute — confirm.


# Disable exit-on-error: from here on the merge is deliberately best-effort.
set +e
# Background call to pro_sr_into_ora_save(tmp_table, target_table, start, end, date_col).
# NOTE(review): stderr is discarded and `wait` is never called, so a failing
# merge is invisible to the scheduler — intentional fire-and-forget, but the
# password also appears on the command line (visible via `ps`).
sqoop eval \
--connect jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/${oracle_instance} \
--username ${oracleUser} \
--password ${oraclePassword} \
--query "begin pro_sr_into_ora_save('${oracleTable_tmp}','${oracleTable}','${startDate}','${endDate}','${datacolfield}'); end;" 2> /dev/null &

# Give the background job time to complete (fixed 1-minute grace period,
# not an actual wait on the job).
sleep 1m

echo "sqoop异步提交,开始日期:${startDate} 结束日期:${endDate}"

#----------------------------------end-------------------------------