# -*- coding: utf-8 -*-
# NOTE(review): this entire file is commented out — the spmi_ods__spmi_network_relation
# sync task is fully disabled. Prefer deleting dead code (version control preserves the
# history) or add a comment explaining why the disabled definition is being kept.
# from datetime import timedelta
# from utils.operators.spark_submit import SparkSubmitOperator
#
# jdbcUrl = '{{ var.json.mysql_spmi.db03_spmi }}'
# username = '{{ var.json.mysql_spmi.username }}'
# password = '{{ var.json.mysql_spmi.password }}'
# nowdt = '{{ execution_date | cst_ds }}'
# nextdt = '{{ execution_date | date_add(1) | cst_ds }}'
# table = "spmi_network_relation"
# env = '{{ var.value.env_sync }}'
#
# jsonpara = """{
# "reader":{
# "connect":{
# "url":"jdbcUrlpara",
# "username":"usernamepara",
# "password":"passwordpara",
# "driver":"com.mysql.cj.jdbc.Driver"
# },
# "dbtype":"mysql",
# "tableName":"tablepara",
# "subTableList":"",
# "where":"1=1",
# "query":"",
# "splitColumn":"",
# "equalitySectioning":0,
# "containsnull":0,
# "fetchsize":"1024",
# "threadNumber":0
# },
# "channel":{
# "filterAbnormalCharacter":0
# },
# "writer":{
# "dbtype":"hive",
# "tableName":"tablepara",
# "database":"spmi_ods",
# "writeMode": "overwrite",
# "partitionColumn":"dt",
# "partitionValue":"nowdt"},
# NOTE(review): "settting" looks like a typo for "setting" — confirm against the
# DataSynchDriver JSON schema before re-enabling; the jar may expect this exact key.
# "settting":{
# "env":"envpara"}
# }""".replace("jdbcUrlpara", jdbcUrl).replace("usernamepara", username).replace("passwordpara", password). \
#     replace("nowdt", nowdt).replace("nextdt", nextdt). \
#     replace("tablepara", table). \
#     replace("envpara", env)
#
#
# spmi_ods__spmi_network_relation = SparkSubmitOperator(
#     task_id='spmi_ods__spmi_network_relation',
#     email=['rongguangfan@jtexpress.com','yl_bigdata@yl-scm.com'],
#     name='spmi_ods__spmi_network_relation_{{ execution_date | date_add(1) | cst_ds }}',
#     pool='spmi_transfer',
#     pool_slots=2,
#     execution_timeout=timedelta(hours=2),
#     driver_memory='4G',
#     executor_memory='4G',
#     executor_cores=2,
#     num_executors=2,
#     java_class='com.yunlu.bigdata.jobs.synchrotool.DataSynchDriver',  # spark 主类
#     application='hdfs:///scheduler/jms/spark/sync/mysql/spark_sync.jar',  # spark jar 包
#     application_args=[jsonpara,],
# )
