#!/bin/sh
# Hourly Airflow task: renders the SQL templates with the batch timestamp,
# runs them via spark-sql, and pushes a Feishu (Lark) report.
# The {{ ... }} expressions are Airflow/Jinja macros expanded BEFORE this
# script runs; by execution time they are plain literal strings.

# Directory holding the SQL templates and the alerting python helper.
script_location="/usr/local/airflow/dags/ai_ageing_cusc_hi/dm/dm_complaints_warning_indicator_monitor_hi"

# Render execute_1.sql: substitute the placeholder tokens with the concrete
# batch hour (execution_date shifted +6h) and the previous day's batch hour.
# The first pattern cannot clobber "execution_pre_date" because
# "execution_date" is not a substring of it.
sql_1=$(cat "${script_location}/execute_1.sql" \
              | sed 's/execution_date/{{ execution_date | hour_add(6) | cst_hour }}/g' \
              | sed 's/execution_pre_date/{{ execution_date | hour_add(6) | date_add(-1) | cst_hour }}/g')

# Run the rendered task-1 SQL and capture its console output for the
# Feishu report assembled further down.
# FIX: Spark configuration keys are case-sensitive — the previous
# "spark.dynamicallocation.enabled" spelling was silently ignored by Spark;
# the correct key is spark.dynamicAllocation.enabled.
content_1=$(spark-sql --driver-memory 4G \
          --executor-cores 4  \
          --num-executors 10  \
          --executor-memory 4g   \
          --conf spark.dynamicAllocation.enabled=true  \
          --conf spark.dynamicAllocation.maxExecutors=20  \
          --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=120 \
          --conf spark.sql.sources.partitionOverwriteMode=dynamic \
          --conf spark.shuffle.service.enabled=true  \
          --conf spark.driver.maxResultSize=12G  \
          --conf spark.sql.broadcastTimeout=3600 \
          --conf spark.executor.memoryOverhead=1G \
          --conf spark.sql.shuffle.partitions=200 \
          --conf spark.sql.autoBroadcastJoinThreshold=104857600 \
          --hiveconf hive.exec.dynamic.partition=true  \
          --hiveconf hive.exec.dynamic.partition.mode=nonstrict  \
          --hiveconf hive.exec.max.dynamic.partitions.pernode=2000 \
          --queue route \
          --name dm_complaints_warning_indicator_monitor1_hi_{{ execution_date | hour_add(6) | cst_hour }} \
          -e "${sql_1}")

# $? is the exit status of spark-sql (a plain assignment preserves the
# command substitution's status). Abort the whole batch on failure.
if [ $? -ne 0 ];then
  echo "任务1执行失败~"
  exit 1
else
  echo "任务1执行成功~"
fi

#sql_2=$(cat ${script_location}/execute_2.sql | sed 's/execution_date/{{ execution_date | hour_add(6) | cst_hour }}/g')
#content_2 = `spark-sql --driver-memory 4G \
#          --executor-cores 4  \
#          --num-executors 10  \
#          --executor-memory 4g   \
#          --conf spark.dynamicallocation.enabled=true  \
#          --conf spark.dynamicAllocation.maxExecutors=20  \
#          --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=120 \
#          --conf spark.sql.sources.partitionOverwriteMode=dynamic \
#          --conf spark.shuffle.service.enabled=true  \
#          --conf spark.driver.maxResultSize=12G  \
#          --conf spark.sql.broadcastTimeout=3600 \
#          --conf spark.executor.memoryOverhead=1G \
#          --conf spark.sql.shuffle.partitions=200 \
#          --conf spark.sql.autoBroadcastJoinThreshold=104857600 \
#          --hiveconf hive.exec.dynamic.partition=true  \
#          --hiveconf hive.exec.dynamic.partition.mode=nonstrict  \
#          --hiveconf hive.exec.max.dynamic.partitions.pernode=2000 \
#          --queue route \
#          --name dm_complaints_warning_indicator_monitor2_hi_{{ execution_date | cst_ds }} \
#          -e "${sql_2}"`
#
#if [ $? -ne 0 ];then
#  echo "任务2执行失败~"
#  exit 1
#else
#  echo "任务2执行成功~"
#fi

# Render execute_3.sql. The \b word boundaries (a GNU sed extension) keep
# the "execution_date" pattern from also matching inside
# "execution_date_yesterday" (underscore counts as a word character).
sql_3=$(cat "${script_location}/execute_3.sql" \
              | sed 's/\bexecution_date\b/{{ execution_date | hour_add(6) | cst_hour }}/g' \
              | sed 's/\bexecution_date_yesterday\b/{{ execution_date | hour_add(6) | date_add(-1) | cst_ds }}/g')

# Run the rendered task-3 SQL; its console output (content_3) is parsed
# below to extract the high/medium/low distribution.
# FIX: conf key case — spark.dynamicAllocation.enabled (the old lowercase
# spelling was ignored by Spark, see task 1).
# NOTE(review): the --name still says "monitor1" — presumably a copy-paste
# that should read "monitor3"; confirm before renaming the Spark app.
content_3=$(spark-sql --driver-memory 4G \
          --executor-cores 4  \
          --num-executors 10  \
          --executor-memory 4g   \
          --conf spark.dynamicAllocation.enabled=true  \
          --conf spark.dynamicAllocation.maxExecutors=20  \
          --conf spark.dynamicAllocation.cachedExecutorIdleTimeout=120 \
          --conf spark.sql.sources.partitionOverwriteMode=dynamic \
          --conf spark.shuffle.service.enabled=true  \
          --conf spark.driver.maxResultSize=12G  \
          --conf spark.sql.broadcastTimeout=3600 \
          --conf spark.executor.memoryOverhead=1G \
          --conf spark.sql.shuffle.partitions=200 \
          --conf spark.sql.autoBroadcastJoinThreshold=104857600 \
          --hiveconf hive.exec.dynamic.partition=true  \
          --hiveconf hive.exec.dynamic.partition.mode=nonstrict  \
          --hiveconf hive.exec.max.dynamic.partitions.pernode=2000 \
          --queue route \
          --name dm_complaints_warning_indicator_monitor1_hi_{{ execution_date | hour_add(6) | cst_hour }} \
          -e "${sql_3}")

# Abort the batch if spark-sql failed.
if [ $? -ne 0 ];then
  echo "任务3执行失败~"
  exit 1
else
  echo "任务3执行成功~"
fi

# Extract the integer part of the "high" percentage from a report string
# containing a fragment like: 高中低分布：60.12%，30.45%，9.43%
# Prints a plain decimal integer on stdout; prints 0 when no match is found.
extract_high_percent() {
  # Join the (possibly multi-line) report into one line so sed can match
  # across it (the old unquoted `echo ${content_3}` did this implicitly,
  # but also glob-expanded the text — printf|tr avoids that hazard).
  _hp_str=$(printf '%s' "$1" | tr '\n' ' ' \
          | sed -r "s/.*高中低分布：(.*?)\.[0-9]{2}%，.*%，.*%/\1/g")
  # Accept only a pure decimal number: on a failed match sed leaves the
  # whole line intact, and the old $(( )) would then die with an arithmetic
  # syntax error (and would read leading zeros as octal).
  case "${_hp_str}" in
    ''|*[!0-9]*) echo 0 ;;
    *) echo "${_hp_str}" ;;
  esac
}

echo "content_3 is '${content_3}'"
atList=""
high_percent_number=$(extract_high_percent "${content_3}")
echo "high_percent_number is [${high_percent_number}]"
# Mention the on-call users when the "high" share reaches 60%.
if [ "${high_percent_number}" -ge 60 ]; then
  atList="01095754,01087542"
fi
echo "atList is '${atList}'"

# Send the Feishu (Lark) message.
# Internal relay endpoint plus the raw bot webhook used for alerting.
url="https://bgdmapi.jtexpress.com.cn/monitor/feishu/send"
alert_url="https://open.feishu.cn/open-apis/bot/v2/hook/e320e6a1-e9d9-42f3-9855-75e3ed7e64c0"

# Key-indicator monitoring: push the report via the python helper.
# Positional args: url, alert_url, title, task-1 output, a format spec
# ("2,1,1,2,1,2" — meaning defined by the python script), task-3 output,
# and the comma-separated @-mention list.
# NOTE(review): ${content_1} and ${content_3} are expanded UNQUOTED, so any
# whitespace in the spark-sql output splits them into multiple argv entries.
# That is only correct if the python script re-joins sys.argv — confirm;
# otherwise these should be quoted like "${atList}".
/usr/local/anaconda/bin/python3 ${script_location}/dm_complaints_warning_indicator_monitor_hi.py \
    ${url} \
    ${alert_url} \
    "{{ execution_date | hour_add(6) | cst_hour }}批次重点指标报告" \
    ${content_1} \
    "2,1,1,2,1,2" \
    ${content_3} \
    "${atList}"

# Fail the Airflow task if the alert could not be delivered.
if [ $? -ne 0 ];then
  echo "任务1飞书告警失败~"
  exit 1
else
  echo "任务1飞书告警成功~"
fi

#非重点指标监控
#/usr/local/anaconda/bin/python3 ${script_location}/dm_complaints_warning_indicator_monitor_hi.py ${alert_url} "{{ execution_date | cst_hour }}批次非重点指标报告" ${content_2} "2,1,1,1,1,1"

#if [ $? -ne 0 ];then
#  echo "任务2飞书告警失败~"
#  exit 1
#else
#  echo "任务2飞书告警成功~"
#fi