#!/bin/sh
#
# Run the dm_frm_network_summary_monitor_dt monitoring SQL on Spark and
# push the result to Feishu webhooks. This file is rendered by Airflow:
# the {{ ... }} expression below is substituted before execution.

script_location="/usr/local/airflow/dags/jms_dm_todo/dm/dm_frm_network_summary_monitor_dt"
# CST-formatted logical date injected by Airflow templating; quoted so a
# rendered value containing whitespace cannot word-split the assignment.
execution_date="{{ execution_date | cst_ds }}"
# Fail fast with a clear message if the SQL file is missing or unreadable,
# instead of silently handing spark-sql an empty query.
sql_1=$(cat "${script_location}/execute_1.sql") || { echo "无法读取 ${script_location}/execute_1.sql"; exit 1; }
# Execute the monitoring SQL on Spark and capture its stdout (the metric
# rows) for the Feishu notification below.
content_1=$(spark-sql --driver-memory 4G \
          --executor-cores 4  \
          --num-executors 10  \
          --executor-memory 4g   \
          --conf spark.driver.maxResultSize=8G  \
          --conf spark.executor.memoryOverhead=1G \
          --conf spark.sql.shuffle.partitions=200 \
          --queue route \
          --name "jms_dm__dm_frm_network_summary_monitor_dt_${execution_date}" \
          -d "execution_date=${execution_date}" \
          -e "${sql_1}")
# Capture the exit status immediately: $? reflects the last command run,
# and any statement inserted between the job and the test would clobber it.
rc=$?

if [ "${rc}" -ne 0 ]; then
  echo "任务1执行失败~"
  exit 1
else
  echo "任务1执行成功~"
fi

# Quote the expansion so newlines/whitespace in the query output are
# preserved instead of being collapsed by word-splitting (and to stop
# any '*' in the result from globbing).
echo "${content_1}"

post_url="https://bgdmapi.jtexpress.com.cn/monitor/feishu/send"
# Feishu webhook: personal channel.
alert_url="https://open.feishu.cn/open-apis/bot/v2/hook/b0938a0a-8ceb-4884-8ab7-1d02ca779365"
# Feishu webhook: 兔do monitoring group.
alert_url_pro="https://open.feishu.cn/open-apis/bot/v2/hook/af9b272b-85e0-4a10-9433-5bc3ad0dc055"

# Key-metric monitoring: push the query result to both webhooks.
#
# BUG FIX: the original wrapped these calls in `set -e`, so a failing call
# exited the script *before* the `if [ $? -ne 0 ]` check ever ran — the
# failure message was dead code, and $? only ever reflected the second
# call anyway. Check each invocation explicitly instead.
#
# ${content_1} is quoted so the query output is passed as a single argv
# element — NOTE(review): confirm python_monitor_feishu.py expects the
# content as one argument (the original unquoted expansion would have
# split it into many).
for hook_url in "${alert_url}" "${alert_url_pro}"; do
  if ! /usr/local/anaconda/bin/python3 "${script_location}/python_monitor_feishu.py" \
        "${post_url}" "兔do数据质量" "${content_1}" "2,1,1,1,1,1,1,1,1,1,1,1" "${hook_url}"; then
    echo "任务1飞书告警失败~"
    exit 1
  fi
done
echo "任务1飞书告警成功~"