# -*- coding: utf-8 -*-
from utils.operators.spark_submit import SparkSubmitOperator

from jms.ods import jms_ods__tab_barscan_deliver, \
    jms_ods__tab_barscan_collect, \
    jms_ods__tab_barscan_centersend, \
    jms_ods__tab_barscan_centerarrival, \
    jms_ods__tab_barscan_sign

# CST-formatted run date (e.g. 20201226), used for both the YARN app name
# and the job's dt argument.
_CST_DS = '{{ execution_date | cst_ds }}'

# DM-layer export: runs the Spark job that exports agent point data.
jms_dm__agent_point = SparkSubmitOperator(
    task_id='jms_dm__agent_point',
    email=['zhengwenjin@yl-scm.com', 'yl_bigdata@yl-scm.com'],
    pool_slots=3,
    # The job does not support concurrent runs, so cap concurrency at 1.
    task_concurrency=1,
    # YARN application name, suffixed with the run's CST date.
    name='jms_dm__agent_point_' + _CST_DS,
    driver_memory='2G',
    executor_memory='4G',
    executor_cores=4,
    num_executors=12,
    jars='hdfs:///scheduler/jms/spark/zwj/route/common-1.0-SNAPSHOT.jar',
    application='hdfs:///scheduler/jms/spark/zwj/route/original-jobs-1.0-SNAPSHOT.jar',
    # Spark main class of the export job.
    java_class='com.yunlu.bigdata.jobs.export.ExportAgentPointData',
    # Single positional argument: the dt partition, e.g. 20201226.
    application_args=[_CST_DS],
)

# Wire upstream dependencies: the export must wait for every barscan ODS load.
_upstream_ods_tasks = [
    jms_ods__tab_barscan_deliver,
    jms_ods__tab_barscan_collect,
    jms_ods__tab_barscan_centersend,
    jms_ods__tab_barscan_centerarrival,
    jms_ods__tab_barscan_sign,
]
jms_dm__agent_point.set_upstream(_upstream_ods_tasks)
