# -*- coding: utf-8 -*-
from utils.operators.spark_submit import SparkSubmitOperator
from datetime import timedelta

from jms.ods.tab.tab_barscan_taking import jms_ods__tab_barscan_taking
from jms.ods.tab.tab_barscan_warehousing import jms_ods__tab_barscan_warehousing
from jms.ods.tab.tab_barscan_unloading import jms_ods__tab_barscan_unloading
from jms.ods.tab.tab_barscan_loading import jms_ods__tab_barscan_loading
from jms.ods.tab.tab_barscan_bagging import jms_ods__tab_barscan_bagging
from jms.ods.tab.tab_barscan_sitesend import jms_ods__tab_barscan_sitesend
from jms.ods.tab.tab_barscan_centersend import jms_ods__tab_barscan_centersend
from jms.ods.tab.tab_barscan_sitearrival import jms_ods__tab_barscan_sitearrival
from jms.ods.tab.tab_barscan_centerarrival import jms_ods__tab_barscan_centerarrival
from jms.ods.tab.tab_barscan_deliver import jms_ods__tab_barscan_deliver
from jms.ods.tab.tab_barscan_sign import jms_ods__tab_barscan_sign
from jms.ods.tab.tab_barscan_collect import jms_ods__tab_barscan_collect

# Spark runtime tuning: shuffle/parallelism sized for the ~100-executor fleet below.
_scan_detail_spark_conf = {
    'spark.sql.shuffle.partitions': 600,
    'spark.executor.memoryOverhead': 2048,
    'spark.core.connection.ack.wait.timeout': 300,
    'spark.default.parallelism': 600,
    'spark.locality.wait': 60,
}

# Hive settings: allow dynamic-partition inserts for the daily dt partition writes.
_scan_detail_hive_conf = {
    'hive.exec.dynamic.partition': 'true',
    'hive.exec.dynamic.partition.mode': 'nonstrict',
    'hive.exec.max.dynamic.partitions': 300,
    'hive.exec.max.dynamic.partitions.pernode': 300,
}

# Spark-submit task that builds the DWD scan-detail daily table from the
# ODS barcode-scan feeds imported above.
jms_dwd__dwd_scan_detail_dt = SparkSubmitOperator(
    task_id='jms_dwd__dwd_scan_detail_dt',
    email=['suning@jtexpress.com', 'yl_bigdata@yl-scm.com'],
    # Run name carries the dt being processed (execution_date + 1 day, CST date string).
    name='jms_dwd__dwd_scan_detail_dt_{{ execution_date | date_add(1) | cst_ds }}',
    conn_id='spark_route',
    pool_slots=5,
    driver_memory='4G',
    driver_cores=2,
    executor_memory='16G',
    executor_cores=6,
    num_executors=100,
    conf=_scan_detail_spark_conf,
    hiveconf=_scan_detail_hive_conf,
    # Dependency jar shipped alongside the application jar.
    jars='hdfs:///scheduler/jms/spark/chk/report/dynamic_route_data/common-1.0-SNAPSHOT.jar',
    # Spark main class.
    java_class='com.yunlu.bigdata.jobs.report.ScanDetailData',
    # Spark application jar.
    application='hdfs:///scheduler/jms/spark/chk/report/dynamic_route_data/original-jobs-1.0-SNAPSHOT.jar',
    # Single job argument: dt, the day whose data is processed (execution_date + 1, CST).
    application_args=['{{ execution_date  | date_add(1)| cst_ds }}'],
    execution_timeout=timedelta(hours=4),
)

# Wire upstream dependencies: the DWD scan-detail build may only start after
# every ODS barcode-scan ingest task has completed.
_scan_detail_upstream_tasks = [
    jms_ods__tab_barscan_taking,
    jms_ods__tab_barscan_warehousing,
    jms_ods__tab_barscan_sitesend,
    jms_ods__tab_barscan_centersend,
    jms_ods__tab_barscan_sitearrival,
    jms_ods__tab_barscan_centerarrival,
    jms_ods__tab_barscan_unloading,
    jms_ods__tab_barscan_loading,
    jms_ods__tab_barscan_bagging,
    jms_ods__tab_barscan_deliver,
    jms_ods__tab_barscan_sign,
    jms_ods__tab_barscan_collect,
]
for _ods_task in _scan_detail_upstream_tasks:
    _ods_task >> jms_dwd__dwd_scan_detail_dt
