#!/bin/bash
# Sync a Hive daily summary partition into Oracle: spark-sql loads a staging
# table over JDBC, then an Oracle stored procedure merges staging -> target,
# polled via sqoop.
# NOTE: the script uses bashisms (echo -e, (( )) arithmetic), so the
# interpreter must be bash — the previous '#!/bin/sh' shebang was wrong on
# systems where /bin/sh is dash/ash.

# Directory this script lives in (the '&&' aborts instead of running pwd in
# the wrong place if the cd fails).
BASE_DIR="$(cd "$(dirname "$0")" && pwd)"
echo -e "INFO - Base Directory: ${BASE_DIR} \n"

# +-------------------------
# |   Oracle configuration
# +-------------------------
# Oracle schema / database name
oracle_db="jms_bigdata_report"
# Oracle staging (temporary) table — spark-sql overwrites this table
oracle_tb_tmp="dm_weight_flow_daily_summary_mt_tmp"
# Oracle final target table — filled by the merge stored procedure
oracle_tb="dm_weight_flow_daily_summary_mt"

# Oracle host
oracle_host="pro-bigdatareport-2023-readwrite-ora.yl.com"
# Oracle port
oracle_port="1521"
# Oracle username
oracle_user="jms_bigdata_report"
# Oracle password
# NOTE(review): hardcoded credential, also passed on command lines below where
# it is visible to `ps`; prefer an env var / credential store — confirm with ops.
oracle_pass="H15DCte03YPW3B2pEA"

# NOTE(review): the six variables below duplicate the settings above and are
# not referenced anywhere else in this script — presumably leftovers from an
# earlier version; confirm before removing.
hiveDataBase="jms_dm"
HiveTableName="dm_weight_flow_daily_summary_mt"
oracleTable="dm_weight_flow_daily_summary_mt"

oracleUrl="jdbc:oracle:thin:@//pro-bigdatareport-2023-readwrite-ora.yl.com:1521/orcl"
oracleUser="jms_bigdata_report"
oraclePassword="H15DCte03YPW3B2pEA"

# +-------------------------
# |   Data import (Hive partition -> Oracle staging table)
# +-------------------------
# spark-sql script: expose the Oracle staging table as a JDBC temporary view,
# then overwrite it with one day's rows from the Hive summary table.
# 'truncate true' keeps the Oracle table definition and only clears the rows;
# the dt filter is an Airflow template rendered before execution.
sqlText="
  CREATE OR REPLACE TEMPORARY VIEW jdbcTable
    USING org.apache.spark.sql.jdbc
    OPTIONS (
      url 'jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/orcl',
      dbtable '${oracle_tb_tmp}',
      driver 'oracle.jdbc.driver.OracleDriver',
      user '${oracle_user}',
      password '${oracle_pass}',
      isolationLevel 'NONE',
      batchsize 1000,
      truncate true
    );
  INSERT OVERWRITE TABLE jdbcTable SELECT statistic_date,customer_code,customer_name,send_agent_id,send_agent_code,send_agent_name,send_network_code,send_network_name,receiver_province_id,receiver_province_name,segment_flag,waybill_num,customer_fee,cost_weight,sales_price,piece_fee_cost,transfer_cost,operation_cost,center_packge_cost,distribu_depart_cost,distribu_opera_cost,distribu_packge_cost,duration_whole,signed_num,update_time,market_staff_code,market_staff_name,staff_belong_type,customer_source_id FROM jms_dm.dm_weight_flow_daily_summary_mt WHERE dt = '{{ execution_date | cst_ds }}';
"

# The previous version tested \$? right after the assignment above; a plain
# variable assignment always exits 0, so that error branch was dead code.
# Just log the SQL that will be executed.
echo -e "INFO - Execution Sql: ${sqlText} \n"

# Submit the load job. Fails the script (exit 1) if spark-sql returns non-zero.
# Fixes vs. previous version:
#  - spark.dynamicallocation.enabled -> spark.dynamicAllocation.enabled:
#    Spark conf keys are case-sensitive, so the lowercase key was silently
#    ignored and dynamic allocation was never actually enabled (the other
#    spark.dynamicAllocation.* settings on the next lines had no effect).
#  - --name argument quoted; 'exit -1' replaced by the portable 'exit 1'.
spark-sql --driver-memory 10G \
          --executor-cores 4  \
          --num-executors 10  \
          --executor-memory 4g   \
          --conf spark.dynamicAllocation.enabled=true  \
          --conf spark.dynamicAllocation.maxExecutors=20  \
          --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=120 \
          --conf spark.sql.sources.partitionOverwriteMode=dynamic \
          --conf spark.shuffle.service.enabled=true  \
          --conf spark.driver.maxResultSize=12G  \
          --conf spark.sql.broadcastTimeout=3600 \
          --conf spark.executor.memoryOverhead=1G \
          --conf spark.sql.shuffle.partitions=200 \
          --conf spark.sql.autoBroadcastJoinThreshold=104857600 \
          --hiveconf hive.exec.dynamic.partition=true  \
          --hiveconf hive.exec.dynamic.partition.mode=nonstrict  \
          --hiveconf hive.exec.max.dynamic.partitions.pernode=2000 \
          --queue route \
          --jars hdfs:///scheduler/jms/spark/oracle/ojdbc8-19.3.0.0.jar \
          --name "${oracle_tb}_{{ execution_date | cst_ds }}" \
          -e "${sqlText}" || exit 1

# +-------------------------
# |   Data merge
# +-------------------------

# Timestamps driving the polling loop below: we poll from run_time until
# flag_date, i.e. for at most end_time (40) minutes from now.
now_date=$(date "+%Y-%m-%d %H:%M:%S")
end_time=40
run_time=$(date "+%Y%m%d%H%M%S")
echo "$run_time"
flag_date=$(date -d "$end_time minute" "+%Y%m%d%H%M%S")
echo "$flag_date"

# -------- sqoop: invoke the Oracle merge stored procedure
# -------- Runs asynchronously in the background; if it outlives the polling
# -------- window the script stops waiting and continues. (The original note
# -------- said "5 min", but the window computed above is 40 min — stale
# -------- comment, confirm which is intended.)
# NOTE(review): stderr is discarded and the background job's status is never
# collected with `wait`, so a sqoop failure here is silent — the polling loop
# below is the only error detection. Confirm this best-effort behavior.

set +e
sqoop eval \
--connect jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/orcl \
--username ${oracle_user} \
--password ${oracle_pass} \
--query "begin pro_merge_temp_to_result('${oracle_tb_tmp}','${oracle_tb}','{{ execution_date | add_month(-3) | cst_month_dash }}-01 00:00:00','{{ execution_date | cst_month_dash }}-01 23:59:59'); end;" 2> /dev/null &


# Give the stored procedure a head start before polling its status.
sleep 1m

# Status query for the stored procedure run.
# Reads today's most recent tab_job_monitor row for pro_merge_temp_to_result
# whose description mentions the staging table, and maps it to a status code:
#   2 = failed (failed_reason populated)
#   0 = still running (duration is null) or date marker not found yet
#   1 = finished (description contains the run's start-date marker)

check_result_sql="
select case when failed_reason is not null then 2
            when duration is null then 0
            when instr(description,'开始时间:'||to_char(start_time,'yyyy-mm-dd'))>0 then 1
            else 0 end status
from
(
        select a.*,
               row_number() over(partition by a.procedure_name order by a.createdate desc,a.start_time asc) as rn
        from tab_job_monitor a
        where upper(a.procedure_name) = upper('pro_merge_temp_to_result')
              and createdate>= trunc(sysdate)
              and description like '%dm_weight_flow_daily_summary_mt_tmp%'
) t
where rn =1
"
# Deliberately unquoted: word-splitting collapses the SQL onto one log line.
echo $check_result_sql

# Poll the stored procedure's status every minute until it succeeds (exit 0),
# fails (exit 1), or the deadline flag_date passes.
while (( run_time <= flag_date ))
do
   echo "进入循环"
   # Run the status query through sqoop and strip sqoop's ASCII table
   # decoration (dashes, blank lines, 3 header lines) down to the status
   # digit. `head -n 1` guards against the pipeline matching more than one
   # digit — a multi-line value would make the (( )) tests below a syntax
   # error (bug in the previous version).
   flag=$(sqoop eval --connect jdbc:oracle:thin:@//${oracle_host}:${oracle_port}/orcl --username ${oracle_user} --password ${oracle_pass} --query "$check_result_sql" | awk '{gsub(/-/, "");print}' | sed '/^$/d' | awk 'NR >3 {print}' | grep -o '[0-9]' | head -n 1)

   echo '存储过程结果:' $flag
   # Empty flag (e.g. sqoop connection failure) evaluates as 0 in (( )),
   # i.e. "not finished yet" — we keep retrying until the deadline.
   if((flag==1));then
    echo "正常退出,数据同步成功,succeed!"
    exit 0
   elif((flag==2));then
    echo "异常退出,数据同步失败,failed!"
    exit 1
   elif((flag==0));then
    echo '未执行完成：开始睡眠'
    sleep 1m
    run_time=$(date "+%Y%m%d%H%M%S")
    echo $run_time
   else
    echo "存储过程无执行记录，请检查存储过程"
    break
   fi
done

#----------------------------------end-------------------------------