# -*- coding: utf-8 -*-


from jms.aigroup.ai_dwd.cn_mysql_lmdm_sys_network_electronic_fence_change import \
    jms_ai_dwd__cn_mysql_lmdm_sys_network_electronic_fence_change

from jms.ods import jms_ods__yl_cn_mysql_lmdm_sys_network, jms_ods__yl_oms_oms_waybill, \
    jms_ods__yl_oms_interceptorpiece, jms_ods__tab_end_piece,jms_ods__tab_barscan_difficult
from utils.alerts.yl_threeSegCodeOnFailue import yl_threeSegCodeOnFailure
from utils.alerts.yl_threeSegCodeOnSuccess import yl_threeSegCodeOnSuccess
from utils.operators.spark_submit import SparkSubmitOperator




# def kwargs():
#     kwargs = {
#         "db": "ai_dwd",
#         "table": "yl_ml_clean_address_day",
#         "desc": "全国日更新地址数据",
#         "taskid": "10040",
#         "ifprivacy": 0,
#         "warnignore": 0,
#     }
#     return kwargs

# Daily Spark job producing the cleaned-address dataset
# (ai_dwd.yl_ml_clean_address_day — "nationwide daily-updated address data"
# per the commented-out metadata above).
# NOTE(review): task id carries a "_tmp" suffix — presumably transitional; confirm.
jms_ai_dwd__yl_ml_clean_address_day_tmp = SparkSubmitOperator(
    task_id='jms_ai_dwd__yl_ml_clean_address_day_tmp',
    conn_id='spark_default',
    email=['yushuo@jtexpress.com','yl_bigdata@yl-scm.com'],  # failure-alert recipients
    pool_slots=4,
    # depends_on_past=True,  # set depends_on_past=True if the task depends on yesterday's run of the same task
    task_concurrency=1,  # the task does not support concurrent runs, so cap concurrency at 1
    name='jms_ai_dwd__yl_ml_clean_address_day_tmp_{{ execution_date | date_add(1) | cst_ds }}',  # YARN application name
    driver_memory='4G',
    executor_memory='8G',
    executor_cores=5,
    num_executors=10,
    conf={'spark.executor.memoryOverhead': 2048,
          'spark.core.connection.ack.wait.timeout': 300,
          'spark.default.parallelism': 600,
          'spark.locality.wait': 60,
          },
    hiveconf={'hive.exec.dynamic.partition': 'true',  # enable dynamic partitioning
              'hive.exec.dynamic.partition.mode': 'nonstrict',
              'hive.exec.max.dynamic.partitions': 100000,
              'hive.exec.max.dynamic.partitions.pernode': 3,  # 3 partitions generated per day
              },
    java_class='com.yunlu.bigdata.jobs.ml.CleanAddressForMLUD',  # Spark main class
    # application='hdfs:///user/spark/work/aigroup/yl_ml_clean_address_day/jobs-1.0-SNAPSHOT-jar-with-dependencies.jar',  # previous jar location
    application='hdfs:///scheduler/jms/spark/lyx/ml/yl_ml_clean_address_day/jobs-1.0-SNAPSHOT-jar-with-dependencies.jar',
    # Spark application jar on HDFS
    application_args=['{{ execution_date | cst_ds_nodash }}'],  # single arg: dt partition, e.g. 20201026
    # on_success_callback=yl_threeSegCodeOnSuccess(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_info"),
    # on_failure_callback=yl_threeSegCodeOnFailure(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_alert"),

)

# Upstream dependencies: run the clean-address job only after all of its
# source ODS/DWD tables have been loaded for the day.
# (`set_upstream([...])` is exactly what `task << [...]` dispatches to.)
jms_ai_dwd__yl_ml_clean_address_day_tmp.set_upstream([
    jms_ods__yl_oms_oms_waybill,
    jms_ai_dwd__cn_mysql_lmdm_sys_network_electronic_fence_change,
    jms_ods__yl_cn_mysql_lmdm_sys_network,
    jms_ods__yl_oms_interceptorpiece,
    jms_ods__tab_end_piece,
    jms_ods__tab_barscan_difficult,
])
