#!/bin/bash
# Fail the task on any error, unset variable, or pipeline failure
# so Airflow correctly marks the run as failed.
set -euo pipefail

# Rendered by Airflow templating: execution date in CST (YYYY-MM-DD).
dt='{{ execution_date | cst_ds }}'
# Report name; used in temp file names and the OSS upload path.
name='菜鸟多多补推出仓汇总'

# Remove every intermediate file on ANY exit path (success or failure).
# The .txt file is included here so an abort before the final cleanup
# step does not leave a stale file in /tmp.
cleanup() {
  rm -f "/tmp/${name}_${dt}.zip" "/tmp/${name}_${dt}.csv" "/tmp/${name}_${dt}.txt" \
    && echo 'removed tmp files'
}
trap cleanup EXIT

# Pre-create the txt report file.
touch "/tmp/${name}_${dt}.txt"
# Write the Chinese header row to the top of the file,
# then append the query result (tab-separated, 9 columns) below it.
{
  # Header must have exactly 9 fields to match the SELECT below.
  # (A previous version had a trailing \t which produced a spurious
  # empty 10th column in the header row only.)
  echo -e "网点\t日期\t网点业务量\t菜鸟驿站补推量\t多多代收点补推量\t菜鸟驿站最早补推量\t多多代收点最早补推量\t菜鸟最早补推占比\t多多最早补推占比">"/tmp/${name}_${dt}.txt";
  spark-sql --driver-memory 8G --num-executors 8 --executor-memory 12g --executor-cores 6  --conf spark.executor.memoryOverhead=4g --conf spark.driver.maxResultSize=3g -S -e "
  -- Deduplicate delivery scans: keep the earliest scan per
  -- (waybill, site, scanner-type) combination.
  with tmp_deliver as (
      select
      *
      from (
          select
              *
             ,row_number() over(partition by waybill_no,scan_site_code,pistol_id order by scan_time) as rn
          from (
              select
                 waybill_no  --waybill number
                ,scan_site   --scan site name
                ,scan_site_code
                ,scan_time   --scan time
                ,case when pistol_id = 'HTKY' then '菜鸟扫描'
                      when pistol_id = 'DDGP20220907001' then '多多扫描'
                      else '网点扫描'
                 end as pistol_id --scanner type: Cainiao / Duoduo / site device
              from jms_dwd.dwd_tab_barscan_deliver_base_dt
              where dt >= date_add('{{ execution_date |  cst_ds }}',-3) and dt <= '{{ execution_date |  cst_ds }}'
              and date(scan_time) = '{{ execution_date |  cst_ds }}'
              and network_type = 4
          ) tmp
      ) tmp where rn = 1
  )

  -- Per-site aggregation: total waybills, re-push counts per channel,
  -- and how often each channel scanned EARLIER than the site device.
  ,tmp_result as (
      select
          max(scan_site) as scan_site,
          '{{ execution_date |  cst_ds }}' as wangdian_scan_time,
          count(distinct waybill_no) as cnt_total,   --total volume
          sum(case when cainiao_scan_time is not null then 1 else 0 end) as cainiao_cnt,  --Cainiao station re-push count
          sum(case when duoduo_scan_time is not null then 1 else 0 end) as duoduo_cnt,  --Duoduo re-push count
          sum(case when cainiao_scan_time is not null and wangdian_scan_time is not null and cainiao_scan_time < wangdian_scan_time then 1
                   when wangdian_scan_time is null and cainiao_scan_time is not null then 1
              else 0 end
             ) as cainiao_erly_cnt,  --earliest-Cainiao re-push count
          sum(case when duoduo_scan_time is not null and wangdian_scan_time is not null and duoduo_scan_time < wangdian_scan_time  then 1
                   when wangdian_scan_time is null and duoduo_scan_time is not null then 1
              else 0 end
             ) as duoduo_erly_cnt  --earliest-Duoduo re-push count
      from (
          select
               waybill_no
              ,scan_site_code
              ,max(scan_site) as scan_site
              ,max(case when pistol_id = '网点扫描' then scan_time else null end ) as wangdian_scan_time --site-device scan time
              ,max(case when pistol_id = '菜鸟扫描' then scan_time else null end ) as cainiao_scan_time  --Cainiao scan time
              ,max(case when pistol_id = '多多扫描' then scan_time else null end ) as duoduo_scan_time   --Duoduo scan time
          from tmp_deliver
          group by waybill_no,scan_site_code
      ) tmp group by scan_site_code
  )

  select
      scan_site,
      wangdian_scan_time,
      cnt_total,
      cainiao_cnt,
      duoduo_cnt,
      cainiao_erly_cnt,
      duoduo_erly_cnt,
      cainiao_erly_cnt/cnt_total as cainiao_erly_cnt_rate,
      duoduo_erly_cnt/cnt_total as duoduo_erly_cnt_rate
  from tmp_result
  ;
 " >>"/tmp/${name}_${dt}.txt"
}

# Convert the tab-separated report to CSV.
# NOTE: plain 'tr', not 'tr -s' — '-s' squeezes runs of the replacement
# character, which silently merges adjacent empty fields and shifts columns.
tr '\t' ',' < "/tmp/${name}_${dt}.txt" > "/tmp/${name}_${dt}.csv"
# Zip the csv (-j: junk dir paths, -m: delete the csv after zipping).
zip -jrm "/tmp/${name}_${dt}.zip" "/tmp/${name}_${dt}.csv"
# Upload the archive to OSS via the shared helper script.
/usr/local/python3/bin/python3 /usr/local/airflow/dags/utils/operators/upload_oss.py "${name}/${name}_${dt}.zip" "/tmp/${name}_${dt}.zip"
# The zip file is cleaned up automatically on process exit.

# Human-readable size of the uploaded archive (5th field of ls -lh).
data_size=$(ls -lh "/tmp/${name}_${dt}.zip" | awk '{print $5}')
# Notification text (runtime string — kept verbatim).
Message="${name}_${dt}.zip 上传成功 大小: $data_size"
# DingTalk webhook notification. '|| true' keeps a webhook outage from
# failing the whole task after the upload already succeeded.
# SECURITY NOTE(review): the access token is hardcoded in the URL — it
# should be moved to an Airflow Variable/Connection or an env secret.
curl 'https://oapi.dingtalk.com/robot/send?access_token=816a8f35fd84432ffa6bff9d7714abc322628bb4a8ded86908550d2335aba613' \
   -H 'Content-Type: application/json' \
   -d "
  {\"msgtype\": \"text\",
    \"text\": {
        \"content\": \"监控内容:$Message\"
     }
  }" || true
# Remove the intermediate txt report (also covered by the EXIT trap).
rm -f "/tmp/${name}_${dt}.txt"