# -*- coding: utf-8 -*-
from datetime import timedelta

from utils.operators.spark_submit import SparkSubmitOperator

# from jms.dim.dim_network_whole_massage import jms_dim__dim_network_whole_massage
# from jms.dim.tms.dim_tms_branch_delivery_network_base_dt import jms_dim__dim_tms_branch_delivery_network_base_dt
# from jms.dim.tms.dim_tms_main_delivery_network_base_dt import jms_dim__dim_tms_main_delivery_network_base_dt
# from jms.dim.tms.dim_yl_tmsnew_tms_in_warehouse_shi_base_dt import jms_dim__dim_yl_tmsnew_tms_in_warehouse_shi_base_dt
# from jms.dim.tms.dim_yl_tmsnew_tms_out_warehouse_sh_base_dt import jms_dim__dim_yl_tmsnew_tms_out_warehouse_sh_base_dt
# from jms.dim.tms.dim_yl_tms_branch_line_base_dt import jms_dim__dim_yl_tms_branch_line_base_dt
# from jms.dim.tms.dim_yl_tms_branch_time_effective_base_dt import jms_dim__dim_yl_tms_branch_time_effective_base_dt
# from jms.dim.tms.dim_yl_tms_cs_shift_base_dt import jms_dim__dim_yl_tms_cs_shift_base_dt
# from jms.dim.tms.dim_tab_tmsnew_time_effective_base_dt import jms_dim__dim_tab_tmsnew_time_effective_base_dt
# from jms.dim.tms.dim_yl_tmsnew_tms_transfer_shift_base_dt import jms_dim__dim_yl_tmsnew_tms_transfer_shift_base_dt
# from jms.dim.tms.dim_yl_tmsnew_tms_vehicle_line_base_dt import jms_dim__dim_yl_tmsnew_tms_vehicle_line_base_dt
# from jms.dim.tms.dim_yl_tmsnew_tms_vehicle_point_base_dt import jms_dim__dim_yl_tmsnew_tms_vehicle_point_base_dt

# Spark configuration for the local-city trace export job.
_LOCAL_CITY_SPARK_CONF = {
    'spark.dynamicAllocation.enabled': 'true',  # enable dynamic resource allocation
    'spark.shuffle.service.enabled': 'true',  # external shuffle service (required by dynamic allocation)
    'spark.dynamicAllocation.maxExecutors': 80,  # upper bound on executors when scaling out
    'spark.dynamicAllocation.cachedExecutorIdleTimeout': 60,  # release idle cached executors after this timeout (s)
    'spark.sql.sources.partitionOverwriteMode': 'dynamic',  # allow overwriting existing partitions
    'spark.executor.memoryOverhead': '4G',  # off-heap memory per executor
    'spark.sql.shuffle.partitions': 1000,
    'spark.sql.auto.repartition': 'true',
}

# Daily export task: submits the LocalCityTraceExport Spark job, passing the
# CST-formatted execution date as its single application argument.
dm__dm_route_local_city_tmp1 = SparkSubmitOperator(
    conn_id='spark_route',
    task_id='dm__dm_route_local_city_tmp1',
    pool_slots=6,
    email=['zhangqinglin@jtexpress.com', 'yl_bigdata@yl-scm.com'],
    name='dm__dm_route_local_city_tmp1',
    driver_memory='20G',
    executor_memory='20G',
    executor_cores=4,
    num_executors=80,
    conf=_LOCAL_CITY_SPARK_CONF,
    # Implicit string concatenation; the joined value is identical to the
    # original single-line comma-separated jar list.
    jars=(
        'hdfs:///route/package/graphframes-0.7.0-spark2.3-s_2.11.jar,'
        'hdfs:///route/package/common-1.0-SNAPSHOT.jar'
    ),
    java_class='com.yunlu.bigdata.jobs.route.export.LocalCityTraceExport',
    application='hdfs:///route/package/local_city_test/original-jobs-1.0-SNAPSHOT.jar',
    application_args=['{{ execution_date | cst_ds }}'],
    execution_timeout=timedelta(hours=3),
)

# dm__dm_route_local_city_tmp1 << [
#     jms_dim__dim_tms_branch_delivery_network_base_dt,
#     jms_dim__dim_yl_tms_branch_line_base_dt,
#     jms_dim__dim_yl_tms_branch_time_effective_base_dt,
#     jms_dim__dim_network_whole_massage,
#     jms_dim__dim_yl_tms_cs_shift_base_dt,
#     jms_dim__dim_tms_main_delivery_network_base_dt,
#     jms_dim__dim_tab_tmsnew_time_effective_base_dt,
#     jms_dim__dim_yl_tmsnew_tms_transfer_shift_base_dt,
#     jms_dim__dim_yl_tmsnew_tms_vehicle_line_base_dt,
#     jms_dim__dim_yl_tmsnew_tms_in_warehouse_shi_base_dt,
#     jms_dim__dim_yl_tmsnew_tms_out_warehouse_sh_base_dt,
#     jms_dim__dim_yl_tmsnew_tms_vehicle_point_base_dt,
# ]
