
from datetime import timedelta
from jms.ods.mysql.yl_cn_mysql_lmdm_sys_network_electronic_fence import jms_ods__yl_cn_mysql_lmdm_sys_network_electronic_fence
from utils.operators.cluster_for_spark_sql_operator import SparkSqlOperator

# Spark conf for the DWD build: dynamic allocation capped at 4 executors,
# dynamic partition overwrite so only touched partitions are rewritten.
_FENCE_BASE_DT_SPARK_CONF = {
    'spark.dynamicAllocation.enabled': 'true',
    'spark.shuffle.service.enabled': 'true',
    'spark.dynamicAllocation.maxExecutors': 4,
    'spark.dynamicAllocation.cachedExecutorIdleTimeout': 30,
    'spark.sql.sources.partitionOverwriteMode': 'dynamic',
    'spark.executor.memoryOverhead': '2G',
}

# DWD daily (base_dt) build for lmdm sys_network_electronic_fence, submitted
# to YARN queue 'pro' with a 20-minute execution timeout.
jms_dwd__dwd_yl_cn_mysql_lmdm_sys_network_electronic_fence_base_dt = SparkSqlOperator(
    task_id='jms_dwd__dwd_yl_cn_mysql_lmdm_sys_network_electronic_fence_base_dt',
    # Failure-alert recipients.
    email=['rabie.zhuang@jtexpress.com', 'yl_bigdata@yl-scm.com'],
    master='yarn',
    # Spark app name is suffixed with the partition date rendered from the
    # execution date shifted by one day (date_add/cst_ds are project filters).
    name='jms_dwd__dwd_yl_cn_mysql_lmdm_sys_network_electronic_fence_base_dt_{{ execution_date | date_add(1) | cst_ds }}',
    # NOTE(review): path segment 'sqs' may be a typo for 'sql' — confirm
    # against the actual repository layout before touching it.
    sql='jms/dwd/sqs/dwd_yl_cn_mysql_lmdm_sys_network_electronic_fence_base_dt/execute.sql',
    pool_slots=2,
    executor_cores=2,
    executor_memory='2G',
    num_executors=3,
    conf=_FENCE_BASE_DT_SPARK_CONF,
    yarn_queue='pro',
    execution_timeout=timedelta(minutes=20),
)

# Run only after the upstream ODS extract of the same table has completed.
jms_dwd__dwd_yl_cn_mysql_lmdm_sys_network_electronic_fence_base_dt << [
    jms_ods__yl_cn_mysql_lmdm_sys_network_electronic_fence,
]

