# -*- coding: utf-8 -*-


from datetime import timedelta
from jms.aigroup.ai_dwd.cn_mysql_lmdm_sys_network_electronic_fence_change import \
    jms_ai_dwd__cn_mysql_lmdm_sys_network_electronic_fence_change
from jms.ods import jms_ods__yl_cn_mysql_lmdm_sys_network, jms_ods__yl_oms_oms_waybill, \
    jms_ods__yl_oms_interceptorpiece, jms_ods__tab_end_piece, jms_ods__yl_oms_oms_order, jms_ods__tab_barscan_difficult
from jms.ods.tab.tab_barscan_deliver import jms_ods__tab_barscan_deliver
from jms.ods.lmdm.yl_lmdm_sys_tail_code import jms_ods__yl_lmdm_sys_tail_code
from jms.ods.lmdm.yl_lmdm_sys_tail_code_staff import jms_ods__yl_lmdm_sys_tail_code_staff

from utils.operators.cluster_for_spark_sql_operator import SparkSqlOperator

# def kwargs():
#     kwargs = {
#         "db": "ai_dwd",
#         "table": "yl_ml_clean_address_day",
#         "desc": "全国日更新地址数据",
#         "taskid": "10040",
#         "ifprivacy": 0,
#         "warnignore": 0,
#     }
#     return kwargs

# Daily nationwide cleaned-address build (ai_dw.yl_ml_clean_address_day).
# Runs execute.sql on the 'pyspark' YARN queue with dynamic allocation
# enabled (10 executors minimum, capped at 27).
ai_dw__yl_ml_clean_address_day = SparkSqlOperator(
    task_id='ai_dw__yl_ml_clean_address_day',
    email=['yushuo@jtexpress.com', 'yl_bigdata@yl-scm.com'],
    conn_id='spark_default',
    pool_slots=4,
    task_concurrency=1,  # set to 1 because this task does not support concurrent runs
    name='ai_dw__yl_ml_clean_address_day_{{ execution_date | date_add(1) | cst_ds }}',
    email_on_retry=True,
    sql='jms/aigroup/ai_dw/yl_ml_clean_address_day/execute.sql',
    yarn_queue='pyspark',
    driver_memory='4G',
    executor_memory='16G',
    executor_cores=4,
    num_executors=10,  # with spark.dynamicAllocation.enabled=true this is the minimum executor count
    conf={'spark.executor.memoryOverhead': '4G',
          'spark.dynamicAllocation.enabled': 'true',
          'spark.shuffle.service.enabled': 'true',  # required for dynamic allocation
          'spark.dynamicAllocation.maxExecutors': 27,
          'spark.sql.sources.partitionOverwriteMode': 'dynamic',
          # NOTE(review): removed the all-lowercase duplicate key
          # 'spark.dynamicallocation.enabled' — Spark conf keys are
          # case-sensitive, so that entry was inert and merely duplicated
          # the correctly-cased key above.
          'spark.dynamicAllocation.cachedExecutorIdleTimeout': 120,  # seconds
          'spark.sql.shuffle.partitions': 600
          },
    hiveconf={'hive.exec.dynamic.partition': 'true',
              'hive.exec.dynamic.partition.mode': 'nonstrict',
              'hive.exec.max.dynamic.partitions.pernode': 5000,
              'hive.exec.max.dynamic.partitions': 30000
              },
    execution_timeout=timedelta(hours=3),
    # on_success_callback=yl_threeSegCodeOnSuccess(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_info"),
    # on_failure_callback=yl_threeSegCodeOnFailure(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_alert"),
    # sla=timedelta(hours=4),
)

# Upstream dependencies: the address-cleaning task runs only after all of
# its ODS / DWD source tables have been refreshed for the day.
[
    jms_ods__yl_oms_oms_waybill,
    jms_ods__yl_oms_oms_order,
    jms_ai_dwd__cn_mysql_lmdm_sys_network_electronic_fence_change,
    jms_ods__yl_cn_mysql_lmdm_sys_network,
    jms_ods__yl_oms_interceptorpiece,
    jms_ods__tab_end_piece,
    jms_ods__tab_barscan_difficult,
    jms_ods__tab_barscan_deliver,
    jms_ods__yl_lmdm_sys_tail_code,
    jms_ods__yl_lmdm_sys_tail_code_staff,
] >> ai_dw__yl_ml_clean_address_day
