# -*- coding: utf-8 -*-
from datetime import timedelta
from utils.operators.spark_submit import SparkSubmitOperator
from airflow.operators.bash import BashOperator
from jms.dwd.dwd_transfer_interrupted_detail import jms_dwd__dwd_transfer_interrupted_detail

# Spark job that exports "transfer interrupted" detail records for OSS upload.
# NOTE(review): removed stale commented-out jar/application paths that pointed
# at the old /user/... HDFS locations; the active paths live under
# /scheduler/jms/spark/hj/.
oss__transfer_interrupted_detail = SparkSubmitOperator(
    task_id='oss__transfer_interrupted_detail',
    email=['hejian@yl-scm.com','yl_bigdata@yl-scm.com'],
    name='oss__transfer_interrupted_detail',
    pool_slots=2,  # occupies two pool slots per run
    driver_memory='4G',
    executor_memory='4G',
    executor_cores=2,
    num_executors=8,
    # Shared SQL-execution helper jar added to the Spark classpath.
    jars='hdfs:///scheduler/jms/spark/hj/TransferInterrupted/Common_SparkSqlExecute.jar',
    java_class='com.yunlu.bigdata.jobs.export.TransferInterruptedNew',
    application='hdfs:///scheduler/jms/spark/hj/TransferInterruptedNew/Job_TransferInterrupted.jar',
    # Single argument: the run's logical date rendered through the custom
    # `cst_ds` template filter (presumably CST-converted date string — confirm).
    application_args=['{{ execution_date | cst_ds }}'],
    execution_timeout=timedelta(hours=3),
)

# Shell step that uploads the exported detail data; runs the project-local
# TransferInterrupted.sh script (path is relative to the DAG bundle).
upload__transfer_interrupted_detail = BashOperator(
    task_id='upload__transfer_interrupted_detail',
    email=['hejian@yl-scm.com','yl_bigdata@yl-scm.com'],
    bash_command="jms/oss/TransferInterrupted/TransferInterrupted.sh",
    pool='unlimited_pool',  # runs in the unthrottled pool rather than the default
    priority_weight=10,  # scheduled ahead of default-weight tasks
)

# Execution order: DWD build -> Spark export -> OSS upload.
jms_dwd__dwd_transfer_interrupted_detail >> oss__transfer_interrupted_detail >> upload__transfer_interrupted_detail
