#!/bin/bash

set -e

# Sync Hive data to Oracle: export a daily summary table from Hive into an
# Oracle staging table via Spark SQL JDBC, then merge it into the target
# table through an Oracle stored procedure (see the sections below).

# --- Hive source configuration ---
hiveDataBase="jms_dm"
HiveTableName="dm_cusc_entity_customer_network_province_kg_summary_day_dt"

# Partition/date column used by the merge procedure.
datacolfield="date_time"
dueDayNum="14"
dueDayNum2="10"
# Rendered by Airflow templating before execution (CST execution date).
execData1="{{ execution_date | cst_ds }}"

# Export window: [execution_date - 14 days, execution_date - 10 days].
# NOTE: 'date -d' is GNU coreutils specific; this requires a rendered
# execData1, so the script only runs correctly under Airflow.
startDate=$(date -d "${execData1} -${dueDayNum} days" "+%Y-%m-%d")
execData=$(date -d "${execData1} -${dueDayNum2} days" "+%Y-%m-%d")

echo "${startDate}"
echo "${execData}"

# +-------------------------
# |   Oracle configuration
# +-------------------------
# Oracle schema/database name
oracle_db="jms_bigdata_report"
# Oracle staging table that Spark writes into (truncated each run)
oracleTable_tmp="dm_cusc_entity_customer_network_province_kg_summary_day_dt_tmp"
# Oracle target table the stored procedure merges into
oracleTable="dm_cusc_entity_customer_network_province_kg_summary_day_dt"
# JDBC URL (not referenced elsewhere in this chunk; host/port/instance
# below are interpolated into the connection strings instead)
oracleUrl="jdbc:oracle:thin:@//pro-bigdatareport-2023-readwrite-ora.yl.com:1521/orcl"
# Host
oracle_host="pro-bigdatareport-2023-readwrite-ora.yl.com"
# Port
oracle_port="1521"
# Service/instance name
oracle_instance="orcl"
# Username
oracleUser="jms_bigdata_report"
# Password
# NOTE(review): plaintext credentials in the script (also passed on the
# sqoop command line, visible in `ps`) — move to env vars / a secrets store.
oraclePassword="H15DCte03YPW3B2pEA"
# Name of the Spark temporary JDBC view created for the export
viewname="dm_cusc_entity_customer_network_province_kg_summary_day_dt_airflow_view"

# +-------------------------
# |   Data import (Hive -> Oracle staging)
# +-------------------------

# Build the Spark SQL batch: create a JDBC temporary view over the Oracle
# staging table, then overwrite it with the selected Hive date partitions.
sqlText="
CREATE TEMPORARY VIEW ${viewname}
    USING org.apache.spark.sql.jdbc
    OPTIONS (
      url 'jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/${oracle_instance}',
      dbtable '${oracleTable_tmp}',
      driver 'oracle.jdbc.driver.OracleDriver',
      user '${oracleUser}',
      password '${oraclePassword}',
      isolationLevel 'NONE',
      batchsize 1000,
      truncate true
    );

INSERT overwrite TABLE ${viewname}
SELECT customer_entity_code
      ,customer_entity_name
      ,customer_network_code
      ,customer_network_name
      ,customer_agent_code
      ,customer_agent_name
      ,customer_virt_code
      ,customer_virt_name
      ,customer_code
      ,customer_name
      ,receiver_province_id
      ,receiver_province_name
      ,kg_part
      ,kg_part_two
      ,weight_total
      ,waybill_num
      ,customer_waybill_num
      ,entity_waybill_num
      ,date_time
      ,year_month
      ,year_week
      ,week_first_day
      ,month_fst_date
      ,update_time
  from jms_dm.dm_cusc_entity_customer_network_province_kg_summary_day_dt
 where dt>='${startDate}' and dt<='${execData}'
;
"
echo "${sqlText}"

# The original script tested \$? of the echo above ("if [ \$? -ne 0 ]") —
# that status is always 0, so the error branch (with its non-portable
# "exit -1") was unreachable dead code; only the INFO log remains.
echo -e "INFO - Execution Sql: ${sqlText} \n"

# Run the export in spark-sql; the Oracle JDBC driver jar comes from HDFS.
# FIX: Spark configuration keys are case-sensitive — the original passed
# "spark.dynamicallocation.enabled", which Spark silently ignores, so
# dynamic allocation (and the maxExecutors/idleTimeout tuning) never took
# effect. Corrected to spark.dynamicAllocation.enabled.
spark-sql --driver-memory 10G \
          --executor-cores 4  \
          --num-executors  10 \
          --executor-memory 10g   \
          --conf spark.dynamicAllocation.enabled=true  \
          --conf spark.dynamicAllocation.maxExecutors=15  \
          --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=120 \
          --conf spark.sql.sources.partitionOverwriteMode=dynamic \
          --conf spark.shuffle.service.enabled=true  \
          --conf spark.driver.maxResultSize=12G  \
          --conf spark.sql.broadcastTimeout=3600 \
          --conf spark.executor.memoryOverhead=1G \
          --conf spark.sql.shuffle.partitions=180 \
          --conf spark.sql.autoBroadcastJoinThreshold=104857600 \
          --hiveconf hive.exec.dynamic.partition=true  \
          --hiveconf hive.exec.dynamic.partition.mode=nonstrict  \
          --hiveconf hive.exec.max.dynamic.partitions.pernode=2000 \
          --queue route \
          --jars hdfs:///scheduler/jms/spark/oracle/ojdbc8-19.3.0.0.jar \
          --name "${oracleTable}_${execData}" \
          -e "${sqlText}" || exit 1

# +-------------------------
# |   Data merge (staging -> target via stored procedure)
# +-------------------------


# Timestamps for the polling deadline: run_time and flag_date are compared
# numerically (YYYYmmddHHMMSS) by the status-polling loop further below;
# the deadline is now + 70 minutes.
now_date=$(date "+%Y-%m-%d %H:%M:%S")
end_time=70
run_time=$(date "+%Y%m%d%H%M%S")
echo "$run_time"
flag_date=$(date -d "$end_time minute" "+%Y%m%d%H%M%S")
echo "$flag_date"

# Invoke the Oracle merge procedure asynchronously via sqoop eval.
# Errors are deliberately not fatal here (set +e): completion is detected
# by polling the monitoring table, not by waiting on this process.
set +e
sqoop eval \
  --connect "jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/${oracle_instance}" \
  --username "${oracleUser}" \
  --password "${oraclePassword}" \
  --query "begin pro_sr_into_ora_save_n('${oracleTable_tmp}','${oracleTable}','${startDate}','${execData}','${datacolfield}'); end;" 2> /dev/null &

# Give the stored procedure a head start before the first status check.
sleep 1m

# Check the stored procedure's execution status via the monitoring table.

# Status query: read the latest tab_job_monitor row for the stored
# procedure (today only, matching this target table) and map it to
#   2 = failed, 1 = finished (duration recorded), 0 = still running,
#   NULL (no row) = no execution record.
check_result_sql="
select case when failed_reason is not null then 2
            when duration is null then 0
            when NVL(duration,0) >0 then 1
            else null end status
from
(
        select a.*,
               row_number() over(partition by a.procedure_name order by a.createdate desc) as rn
        from tab_job_monitor a
        where upper(a.procedure_name) = upper('pro_sr_into_ora_save_n')
              and createdate>= trunc(sysdate)
              and description like '%"${oracleTable}"%'
) t
where rn =1
"
# FIX: quote the expansion — the original unquoted 'echo $check_result_sql'
# collapsed all line breaks/indentation, logging the SQL on a single line.
echo "$check_result_sql"

# Poll the monitoring table until the stored procedure reports success (1)
# or failure (2), or until the 70-minute deadline (flag_date) expires.
while (( run_time <= flag_date ))
do
   echo "进入循环"
   # Run the status query via sqoop eval and reduce its ASCII-table output
   # to a single status digit: strip '-' characters (table ruling), delete
   # blank lines, skip the first 3 header lines, then grab any digit left.
   flag=`sqoop eval --connect jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/${oracle_instance} --username ${oracleUser} --password ${oraclePassword} --query "$check_result_sql" | awk '{gsub(/-/, "");print}' | sed ''/^$/d'' |awk 'NR >3 {print}' | grep -o '[0-9]' `

   echo '存储过程结果:' $flag
   # NOTE(review): inside (( )) an empty/unset $flag evaluates as 0, so a
   # failed sqoop call (no output) lands in the "still running" branch and
   # keeps polling; the final else only fires for other digits — confirm
   # this is the intended behavior.
   if((flag==1));
     then break
    elif((flag==2));
     then
     echo '调用失败'
     break
    elif((flag==0));
    then echo "执行中"
    else echo "存储过程无执行记录，请检查存储过程"
     break
   fi
   echo '未执行完成：开始睡眠'
   sleep 1m
   run_time=`date  "+%Y%m%d%H%M%S"`
   echo $run_time
done

# Final verdict: succeed only when the polling loop saw status 1;
# every other outcome (failure, timeout, missing record) exits non-zero
# so the scheduler marks the task as failed.
if (( flag == 1 )); then
  echo "正常退出,数据同步成功,succeed!"
  exit 0
fi

echo "异常退出,数据同步失败,failed!"
exit 1

#----------------------------------end-------------------------------

