#!/bin/bash
# Daily Pinduoduo-channel collection-point summary export.
# Fail fast: exit on error, on unset variable, and on any pipeline failure.
set -euo pipefail
# Airflow-templated execution date (CST) and report name (used in file names).
dt='{{ execution_date | cst_ds }}'
name='拼多多渠道代收点汇总'
# Clean up ALL temp files on any exit path (success or failure).
# NB: the .txt file was previously removed only at the very end of the
# script, so it leaked whenever an earlier step failed.
trap "rm -f '/tmp/${name}_${dt}.zip' '/tmp/${name}_${dt}.csv' '/tmp/${name}_${dt}.txt' && echo 'removed tmp files'" EXIT
# Create the tab-separated staging file.
touch "/tmp/${name}_${dt}.txt"
# Write the Chinese column header, then append the spark-sql result (TSV).
# Fixes vs the previous version:
#  - the header used to end with a trailing \t, adding an empty 10th column
#    to the header row only;
#  - the header promises 存放地址 (storage address) as column 7, but the
#    query emitted scan_site_code there and never exposed storage_address.
#    The outer SELECT now returns max(storage_address) and keeps
#    scan_site_code only in GROUP BY, so data columns match the header.
{
  echo -e "加盟商名称\t省\t市\t区县\t网点名称\t合作伙伴名称\t存放地址\t运单数量\t统计日期">"/tmp/${name}_${dt}.txt";
  spark-sql --driver-memory 8G --num-executors 8 --executor-memory 12g --executor-cores 6  --conf spark.executor.memoryOverhead=4g --conf spark.driver.maxResultSize=3g -S -e "
select
     nvl(max(franchisee_name),'空网点') as franchisee_name
    ,nvl(max(provider_desc),'空网点')   as provider_desc
    ,nvl(max(city_desc),'空网点')       as city_desc
    ,nvl(max(area_desc),'空网点')       as area_desc
    ,max(scan_site)                    as scan_site
    ,partner_name
    ,max(storage_address)              as storage_address
    ,count(1)                          as bill_cnt
    ,input_time
from (
    select
     t1.waybill_no
    ,t1.scan_site_code
    ,t1.scan_site
    ,t3.franchisee_name
    ,t3.provider_desc
    ,t3.city_desc
    ,t3.area_desc
    ,t1.partner_name
    ,t1.storage_address
    ,'{{ execution_date | cst_ds }}' as input_time
    from (
        select
         waybill_no
        ,scan_type
        ,nvl(scan_site_code,'0000') as scan_site_code
        ,nvl(scan_site,'其他')      as scan_site
        ,input_time
        ,partner_name
        ,storage_address
        from jms_dwd.dwd_tab_barscan_collect_base_dt --代收表
        where dt = '{{ execution_date | cst_ds }}'
        and scan_type = '入库扫描'
    ) t1
    left join (
        select
         waybill_id
        ,order_source_code
        from jms_dwd.dwd_yl_oms_oms_order_dt
        where dt between date_add('{{ execution_date | cst_ds }}', -15) and '{{ execution_date | cst_ds }}'
        and order_source_code='D09'
    ) t2  on t1.waybill_no = t2.waybill_id and waybill_id is not null
    left join jms_dim.dim_sys_network_detail_dt t3
    on t1.scan_site_code = t3.code
    and t3.dt = '{{ execution_date | cst_ds }}'
)
group by scan_site_code,partner_name,input_time
;
 " >>"/tmp/${name}_${dt}.txt"
}
# NOTE(review): the LEFT JOIN to t2 (D09 orders) selects no columns and a
# LEFT JOIN cannot filter rows, so it currently has no effect on the output.
# If the intent was to restrict to Pinduoduo (D09) waybills, it should be an
# inner/semi join — confirm with the report owner before changing.

# Convert TSV to CSV.
# NB: the previous 'tr -s' squeezed consecutive tabs into one comma, so any
# empty field (e.g. a NULL storage_address) collapsed and shifted every
# following column; plain 'tr' preserves field positions.
tr '\t' ',' <"/tmp/${name}_${dt}.txt" >"/tmp/${name}_${dt}.csv"
# Zip the CSV: -j drops the directory path, -m deletes the source file
# after archiving ('-r' removed — it is a no-op for a single file).
zip -jm "/tmp/${name}_${dt}.zip" "/tmp/${name}_${dt}.csv"
# Upload the archive to OSS under <name>/<name>_<dt>.zip.
/usr/local/python3/bin/python3 /usr/local/airflow/dags/utils/operators/upload_oss.py "${name}/${name}_${dt}.zip" "/tmp/${name}_${dt}.zip"
# The zip file is cleaned up automatically by the EXIT trap.

# Human-readable archive size for the notification message.
data_size=$(ls -lh "/tmp/${name}_${dt}.zip" | awk '{print $5}')
# Notification text (kept byte-identical to the original message format).
message="${name}_${dt}.zip 上传成功 大小: $data_size"
# DingTalk webhook alert. '|| true' keeps a notification failure from
# failing the whole job.
# NOTE(review): the access token was hardcoded here; it can now be supplied
# via the DINGTALK_TOKEN environment variable (legacy value kept as a
# backward-compatible fallback — rotate it and move it out of the repo).
dingtalk_token="${DINGTALK_TOKEN:-816a8f35fd84432ffa6bff9d7714abc322628bb4a8ded86908550d2335aba613}"
curl -sS "https://oapi.dingtalk.com/robot/send?access_token=${dingtalk_token}" \
   -H 'Content-Type: application/json' \
   -d "
  {\"msgtype\": \"text\",
    \"text\": {
        \"content\": \"监控内容:$message\"
     }
  }" || true
rm -f "/tmp/${name}_${dt}.txt"