# -*- coding: utf-8 -*-
from datetime import timedelta
#from utils.operators.spark_submit import SparkSubmitOperator
from utils.operators.cluster_for_spark_sql_hook_test_for_airflow_bug_operator import SparkSqlOperator
#from jms.dwd.dwd_ep_create_institution_detail_base import jms_dwd__dwd_ep_create_institution_detail_base
#from jms.dwd.dwd_ep_create_institution_base import jms_dwd__dwd_ep_create_institution_base
#from jms.dwd.tab.dwd_tab_barscan_centerarrival_base_dt import jms_dwd__dwd_tab_barscan_centerarrival_base_dt
#from jms.dwd.tab.dwd_tab_barscan_centersend_base_dt import jms_dwd__dwd_tab_barscan_centersend_base_dt
#from jms.dwd.tab.dwd_tab_barscan_bagging_base_dt import jms_dwd__dwd_tab_barscan_bagging_base_dt
#from jms.dwd.tab.dwd_tab_electronic_package_list_base_dt import jms_dwd__dwd_tab_electronic_package_list_base_dt
#from jms.dwd.dwd_ass_scan_small_upper_base_dt import jms_dwd__dwd_ass_scan_small_upper_base_dt
#from jms.dwd.dwd_warhouse.dwd_wide_unsign_summary_waybill_dt import jms_dwd__dwd_wide_unsign_summary_waybill_dt
#from jms.dim.ide.dim_ide_ep_center_classes_base import jms_dim__dim_ide_ep_center_classes_base
#from jms.dwd.tab.dwd_tab_reback_transfer_express_base import jms_dwd__dwd_tab_reback_transfer_express_base
#from jms.time_sensor.time_after_04_30 import time_after_04_30

# Downstream consumers download this data at 07:00, so this task must finish before then.
# Spark SQL task producing dm_employee_operate_num_detail.
# Fixed-size cluster: 50 executors x 4 cores x 10G (dynamic allocation disabled).
dm__dm_employee_operate_num_detail = SparkSqlOperator(
    task_id='dm__dm_employee_operate_num_detail',
    pool_slots=1,
    task_concurrency=1,  # task does not support concurrent runs, so cap at 1
    name='dm__dm_employee_operate_num_detail_{{ execution_date | date_add(1) | cst_ds }}',
    driver_memory='4G',
    executor_memory='10G',
    executor_cores=4,
    num_executors=50,
    email=['guoruiling@jtexpress.com','yl_bigdata@yl-scm.com'],
    sql='jms_route_test/dm/dm_employee_operate_num_detail/execute.hql',
    conf={'spark.executor.memoryOverhead': 2048,
          'spark.core.connection.ack.wait.timeout': 5000,
          'spark.locality.wait': 60,
          # FIX: was the bare number 20 — Spark parses unit-less size strings for
          # this key as bytes, which would reject virtually any collected result.
          # Use an explicit size string (20g assumed from the original value;
          # confirm against the job's actual collect sizes).
          'spark.driver.maxResultSize': '20g',
          'spark.dynamicAllocation.enabled': 'false',  # dynamic allocation DISABLED — fixed 50 executors
          'spark.network.timeout': 300,  # default timeout for all network interactions (Spark default: 120s)
          'spark.sql.shuffle.partitions': 500,
          # NOTE(review): with dynamicAllocation disabled this setting is a no-op;
          # kept so it takes effect if allocation is ever re-enabled.
          'spark.dynamicAllocation.maxExecutors': 50,
          'spark.executor.extraJavaOptions': '-XX:+UseG1GC -XX:ParallelGCThreads=4',
          },
    hiveconf={'hive.exec.dynamic.partition': 'true',  # enable dynamic partitioning
              'hive.exec.dynamic.partition.mode': 'nonstrict',
              # FIX: both limits were set to 10, which is BELOW the Hive defaults
              # (1000 overall / 100 per node) and contradicted the original inline
              # notes ("usually set larger, e.g. 1000"); any load creating more
              # than 10 partitions would have errored out.
              # Max dynamic partitions one statement may create overall; exceeding it raises an error.
              'hive.exec.max.dynamic.partitions': 1000,
              # Max dynamic partitions each mapper/reducer may create; exceeding it raises an error.
              'hive.exec.max.dynamic.partitions.pernode': 1000,
              },
    yarn_queue='pro',
    execution_timeout=timedelta(minutes=90),
)

# Upstream dependencies are currently all disabled; to re-enable one,
# uncomment it here and restore the matching import at the top of the file.
_upstream_tasks = [
    #jms_dwd__dwd_ep_create_institution_detail_base,
    #jms_dwd__dwd_ep_create_institution_base,
    #jms_dwd__dwd_tab_barscan_centerarrival_base_dt,
    #jms_dwd__dwd_tab_barscan_centersend_base_dt,
    #jms_dwd__dwd_tab_barscan_bagging_base_dt,
    #jms_dwd__dwd_tab_electronic_package_list_base_dt,
    #jms_dwd__dwd_ass_scan_small_upper_base_dt,
    #jms_dwd__dwd_wide_unsign_summary_waybill_dt,
    #jms_dim__dim_ide_ep_center_classes_base,
    #jms_dwd__dwd_tab_reback_transfer_express_base,
    #time_after_04_30
]
dm__dm_employee_operate_num_detail.set_upstream(_upstream_tasks)
