# -*- coding: utf-8 -*-
from datetime import timedelta
from jms.time_sensor import time_after_01_30
from utils.operators.cluster_for_spark_sql_operator import SparkSqlOperator
from utils.alerts.yl_threeSegCodeOnFailue import yl_threeSegCodeOnFailure
from utils.alerts.yl_threeSegCodeOnSuccess import yl_threeSegCodeOnSuccess
def kwargs():
    """Build the metadata payload passed to the Dingding alert callbacks.

    Returns:
        dict: task metadata — target db/table, a human-readable description,
        the task id, a privacy flag (1 = contains privacy-sensitive data),
        and a warn-ignore flag (0 = do not suppress warnings).
    """
    # Return a fresh dict each call so callbacks can't mutate shared state.
    return {
        "db": "ai_dwd",
        "table": "yl_threesegcode_pdd_waybill",
        "desc": "PDD流量运单数据",
        "taskid": "10080",
        "ifprivacy": 1,
        "warnignore": 0,
    }

# Spark SQL task that builds the PDD waybill table (ai_dwd.yl_threesegcode_pdd_waybill).
# Success/failure are reported to Dingding via the yl_threeSegCode callbacks.
jms_ai_dwd__yl_threesegcode_pdd_waybill = SparkSqlOperator(
    task_id='jms_ai_dwd__yl_threesegcode_pdd_waybill',
    email=['yushuo@jtexpress.com', 'yl_bigdata@yl-scm.com'],
    pool_slots=4,
    # depends_on_past=True,  # set to True if this task depends on the previous day's run
    name='jms_ai_dwd__yl_threesegcode_pdd_waybill_{{ execution_date | date_add(1) | cst_ds }}',  # YARN application name
    email_on_retry=True,
    retries=1,
    sql='jms/aigroup/ai_dwd/yl_threesegcode_pdd_waybill/execute.sql',
    yarn_queue='pyspark',
    driver_memory='8G',
    driver_cores=2,
    executor_memory='4G',
    executor_cores=3,
    pool='unlimited_pool',
    # With spark.dynamicAllocation.enabled, num_executors is the minimum executor count.
    num_executors=5,
    conf={
        'spark.executor.memoryOverhead': '5G',
        # Dynamic allocation requires the external shuffle service.
        'spark.dynamicAllocation.enabled': 'true',
        'spark.shuffle.service.enabled': 'true',
        'spark.dynamicAllocation.maxExecutors': 6,
        # Needed so INSERT OVERWRITE only replaces the written partitions.
        'spark.sql.sources.partitionOverwriteMode': 'dynamic',
        # NOTE(review): a lowercase duplicate key 'spark.dynamicallocation.enabled'
        # was removed — Spark config keys are case-sensitive, so it had no effect.
        'spark.dynamicAllocation.cachedExecutorIdleTimeout': 120,
        'spark.sql.shuffle.partitions': 400,
        'spark.port.maxRetries': 100,
    },
    hiveconf={
        'hive.exec.dynamic.partition': 'true',
        'hive.exec.dynamic.partition.mode': 'nonstrict',
        'hive.exec.max.dynamic.partitions.pernode': 5000,
        'hive.exec.max.dynamic.partitions': 30000,
    },
    execution_timeout=timedelta(hours=2),
    priority_weight=20,
    on_success_callback=yl_threeSegCodeOnSuccess(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_info"),
    on_failure_callback=yl_threeSegCodeOnFailure(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_alert"),
)

# Wire upstream dependency: wait for the 01:30 time sensor before running.
jms_ai_dwd__yl_threesegcode_pdd_waybill.set_upstream([time_after_01_30])
