# # -*- coding: utf-8 -*-
# from datetime import timedelta
#
# from jms.dim.dim_network_whole_massage import jms_dim__dim_network_whole_massage
# from jms.dwd.dwd_s03_barscan_operations_hist_dt import jms_dwd__dwd_s03_barscan_operations_hist_dt
# from jms.dwd.tms.dwd_tmsnew_shipment_stop_union_base_dt import jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt
# from jms.dwd.tms.dwd_tmsnew_shipment_union_base_dt import jms_dwd__dwd_tmsnew_shipment_union_base_dt
# from utils.operators.spark_submit import SparkSubmitOperator
# from airflow.operators.dummy_operator import DummyOperator
#
# # 这个程序底层基于运单号进行分组，然后基于运单所有的操作进行排序，此程序对线程数的要求很高，调优主要基于增加核数，增加任务并发
# # 核心调优参数：executor_cores ↑   num_executors ↑   spark.sql.shuffle.partitions ↑  spark.default.parallelism ↑
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110 = SparkSubmitOperator(
#     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110',
#     pool_slots=9,
#     task_concurrency=1,  # 如果任务不支持并发，则将 task_concurrency 设为 1
#     name='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110_{{ execution_date | date_add(1) | cst_ds }}',  # yarn 任务名称
#     driver_memory='32G',
#     executor_memory='28G',
#     executor_cores=12,
#     retries=0,
#     num_executors=100,
#     # yarn_queue='pro',
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     conf={'spark.executor.memoryOverhead': 3072,
#           'spark.core.connection.ack.wait.timeout': 3000,
#           'spark.default.parallelism': 3600,
#           'spark.sql.shuffle.partitions': 3600,
#           'spark.locality.wait': 60,
#           'spark.network.timeout': 300,
#           'spark.driver.maxResultSize': 20,
#           'spark.driver.cores': 8,
#           'spark.yarn.queue': 'default',
#           'spark.shuffle.service.enabled': 'false',
#           'spark.dynamicAllocation.enabled': 'false',
#           'spark.sql.parquet.compression.codec': 'gzip'
#     },
#     java_class='com.yunlu.bigdata.jobs.udaf.YlBarscanToTraceNew',  # spark 主类
#     application='hdfs:///scheduler/jms/spark/dwd_warhouse/dwd_wide_rank_basic_scaninfo_tms_dt/jobs-1.0-SNAPSHOT.jar',
#     # spark jar 包
#     application_args=['{{ execution_date | cst_ds }}', '9', '0'],
#     # 参数dt 2020-10-26
#     execution_timeout=timedelta(hours=2),
# )
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120 = SparkSubmitOperator(
#     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120',
#     pool_slots=9,
#     task_concurrency=1,  # 如果任务不支持并发，则将 task_concurrency 设为 1
#     name='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120_{{ execution_date | date_add(1) | cst_ds }}',  # yarn 任务名称
#     driver_memory='32G',
#     executor_memory='28G',
#     executor_cores=12,
#     retries=0,
#     num_executors=100,
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     conf={'spark.executor.memoryOverhead': 3072,
#           'spark.core.connection.ack.wait.timeout': 3000,
#           'spark.default.parallelism': 3600,
#           'spark.sql.shuffle.partitions': 3600,
#           'spark.locality.wait': 60,
#           'spark.network.timeout': 300,
#           'spark.driver.maxResultSize': 20,
#           'spark.driver.cores': 8,
#           'spark.yarn.queue': 'default',
#           'spark.shuffle.service.enabled': 'false',
#           'spark.dynamicAllocation.enabled': 'false',
#           'spark.sql.parquet.compression.codec': 'gzip'
#           },
#     java_class='com.yunlu.bigdata.jobs.udaf.YlBarscanToTraceNew',  # spark 主类
#     application='hdfs:///scheduler/jms/spark/dwd_warhouse/dwd_wide_rank_basic_scaninfo_tms_dt/jobs-1.0-SNAPSHOT.jar',
#     # spark jar 包
#     application_args=['{{ execution_date | cst_ds }}', '19', '10'],
#     # 参数dt 2020-10-26
#     execution_timeout=timedelta(hours=2),
# )
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130 = SparkSubmitOperator(
#     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130',
#     pool_slots=9,
#     task_concurrency=1,  # 如果任务不支持并发，则将 task_concurrency 设为 1
#     name='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130_{{ execution_date | date_add(1) | cst_ds }}',  # yarn 任务名称
#     driver_memory='32G',
#     executor_memory='28G',
#     executor_cores=12,
#     retries=0,
#     num_executors=100,
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     conf={'spark.executor.memoryOverhead': 3072,
#           'spark.core.connection.ack.wait.timeout': 3000,
#           'spark.default.parallelism': 3600,
#           'spark.sql.shuffle.partitions': 3600,
#           'spark.locality.wait': 60,
#           'spark.network.timeout': 300,
#           'spark.driver.maxResultSize': 20,
#           'spark.driver.cores': 8,
#           'spark.yarn.queue': 'default',
#           'spark.shuffle.service.enabled': 'false',
#           'spark.dynamicAllocation.enabled': 'false',
#           'spark.sql.parquet.compression.codec': 'gzip'
#           },
#     java_class='com.yunlu.bigdata.jobs.udaf.YlBarscanToTraceNew',  # spark 主类
#     application='hdfs:///scheduler/jms/spark/dwd_warhouse/dwd_wide_rank_basic_scaninfo_tms_dt/jobs-1.0-SNAPSHOT.jar',
#     # spark jar 包
#     application_args=['{{ execution_date | cst_ds }}', '29', '20'],
#     # 参数dt 2020-10-26
#     execution_timeout=timedelta(hours=2),
# )
#
# # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140 = SparkSubmitOperator(
# #     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140',
# #     pool_slots=9,
# #     task_concurrency=1,  # 如果任务不支持并发，则将 task_concurrency 设为 1
# #     name='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140_{{ execution_date | date_add(1) | cst_ds }}',  # yarn 任务名称
# #     driver_memory='32G',
# #     executor_memory='28G',
# #     executor_cores=12,
# #     retries=0,
# #     num_executors=100,
# #     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
# #     conf={'spark.executor.memoryOverhead': 3072,
# #           'spark.core.connection.ack.wait.timeout': 3000,
# #           'spark.default.parallelism': 3600,
# #           'spark.sql.shuffle.partitions': 3600,
# #           'spark.locality.wait': 60,
# #           'spark.network.timeout': 300,
# #           'spark.driver.maxResultSize': 20,
# #           'spark.driver.cores': 8,
# #           'spark.yarn.queue': 'default',
# #           'spark.shuffle.service.enabled': 'false',
# #           'spark.dynamicAllocation.enabled': 'false',
# #           'spark.sql.parquet.compression.codec': 'gzip'
# #           },
# #     java_class='com.yunlu.bigdata.jobs.udaf.YlBarscanToTraceNew',  # spark 主类
# #     application='hdfs:///scheduler/jms/spark/dwd_warhouse/dwd_wide_rank_basic_scaninfo_tms_dt/jobs-1.0-SNAPSHOT.jar',
# #     # spark jar 包
# #     application_args=['{{ execution_date | cst_ds }}', '39', '30'],
# #     # 参数dt 2020-10-26
# #     execution_timeout=timedelta(hours=2),
# # )
#
# # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150 = SparkSubmitOperator(
# #     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150',
# #     pool_slots=9,
# #     task_concurrency=1,  # 如果任务不支持并发，则将 task_concurrency 设为 1
# #     name='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150_{{ execution_date | date_add(1) | cst_ds }}',  # yarn 任务名称
# #     driver_memory='15G',
# #     executor_memory='20G',
# #     executor_cores=10,
# #     retries=0,
# #     num_executors=80,
# #     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
# #     conf={'spark.executor.memoryOverhead': 3072,
# #           'spark.core.connection.ack.wait.timeout': 3000,
# #           'spark.default.parallelism': 3600,
# #           'spark.sql.shuffle.partitions': 3600,
# #           'spark.locality.wait': 60,
# #           'spark.network.timeout': 300,
# #           'spark.driver.maxResultSize': 20,
# #           'spark.driver.cores': 14,
# #           'spark.yarn.queue': 'default',
# #           'spark.shuffle.service.enabled': 'false',
# #           'spark.dynamicAllocation.enabled': 'false',
# #           'spark.sql.parquet.compression.codec': 'gzip'
# #           },
# #     java_class='com.yunlu.bigdata.jobs.udaf.YlBarscanToTraceNew',  # spark 主类
# #     application='hdfs:///scheduler/jms/spark/dwd_warhouse/dwd_wide_rank_basic_scaninfo_tms_dt/jobs-1.0-SNAPSHOT.jar',
# #     # spark jar 包
# #     application_args=['{{ execution_date | cst_ds }}', '44', '40'],
# #     # 参数dt 2020-10-26
# #     execution_timeout=timedelta(hours=2),
# # )
# #
# # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049 = SparkSubmitOperator(
# #     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049',
# #     pool_slots=9,
# #     task_concurrency=1,  # 如果任务不支持并发，则将 task_concurrency 设为 1
# #     name='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049_{{ execution_date | date_add(1) | cst_ds }}',  # yarn 任务名称
# #     driver_memory='8G',
# #     executor_memory='20G',
# #     executor_cores=6,
# #     retries=0,
# #     num_executors=50,
# #     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
# #     conf={'spark.executor.memoryOverhead': 3072,
# #           'spark.core.connection.ack.wait.timeout': 3000,
# #           'spark.default.parallelism': 1200,
# #           'spark.sql.shuffle.partitions': 1200,
# #           'spark.locality.wait': 60,
# #           'spark.network.timeout': 300,
# #           'spark.driver.maxResultSize': 20,
# #           'spark.driver.cores': 14,
# #           'spark.yarn.queue': 'default',
# #           },
# #     java_class='com.yunlu.bigdata.jobs.udaf.YlBarscanToTraceNew2049',  # spark 主类
# #     application='hdfs:///scheduler/jms/spark/dwd_warhouse/dwd_wide_rank_basic_scaninfo_tms_dt/jobs-1.0-SNAPSHOT.jar',
# #     # spark jar 包
# #     application_args=['{{ execution_date | cst_ds }}', '9', '0'],
# #     # 参数dt 2020-10-26
# #     execution_timeout=timedelta(hours=2),
# # )
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_a15 = DummyOperator(
#     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_a15',
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     retries=0,
#     priority_weight=0,
# )
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_b15 = DummyOperator(
#     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_b15',
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     retries=0,
#     priority_weight=0,
# )
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt = DummyOperator(
#     task_id='jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt',
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     retries=0,
#     priority_weight=0,
# )
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110  << [
#     jms_dwd__dwd_s03_barscan_operations_hist_dt,
#     jms_dim__dim_network_whole_massage,
#     jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt,
#     jms_dwd__dwd_tmsnew_shipment_union_base_dt,
# ]
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120  << [
#     jms_dwd__dwd_s03_barscan_operations_hist_dt,
#     jms_dim__dim_network_whole_massage,
#     jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt,
#     jms_dwd__dwd_tmsnew_shipment_union_base_dt,
# ]
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130  << [
#     jms_dwd__dwd_s03_barscan_operations_hist_dt,
#     jms_dim__dim_network_whole_massage,
#     jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt,
#     jms_dwd__dwd_tmsnew_shipment_union_base_dt,
# ]
#
# # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140  << [
# #     jms_dwd__dwd_s03_barscan_operations_hist_dt,
# #     jms_dim__dim_network_whole_massage,
# #     jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt,
# #     jms_dwd__dwd_tmsnew_shipment_union_base_dt,
# # ]
#
# # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150  << [
# #     jms_dwd__dwd_s03_barscan_operations_hist_dt,
# #     jms_dim__dim_network_whole_massage,
# #     jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt,
# #     jms_dwd__dwd_tmsnew_shipment_union_base_dt,
# # ]
# #
# # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049  << [
# #     jms_dwd__dwd_s03_barscan_operations_hist_dt,
# #     jms_dim__dim_network_whole_massage,
# #     jms_dwd__dwd_tmsnew_shipment_stop_union_base_dt,
# #     jms_dwd__dwd_tmsnew_shipment_union_base_dt,
# # ]
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_a15 << [
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110,
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120,
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049,
# ]
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_b15 << [
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110,
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120,
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049,
# ]
#
# jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt << [
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_0110,
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_1120,
#     jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2130,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_3140,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_4150,
#     # jms_dwd__dwd_wide_rank_basic_scaninfo_tms_dt_2049,
# ]