# -*- coding: utf-8 -*-
from datetime import timedelta
from utils.operators.spark_submit import SparkSubmitOperator

from utils.alerts.yl_threeSegCodeOnFailue import yl_threeSegCodeOnFailure
from utils.alerts.yl_threeSegCodeOnSuccess import yl_threeSegCodeOnSuccess
from ..tab_barscan_collect__yestoday import jms_ods__tab_barscan_collect__yestoday

# Jinja-templated source-connection settings; Airflow renders these at runtime.
jdbcUrl = '{{ var.json.oracle_bigdata_19c51.url }}'
username = '{{ var.json.oracle_bigdata_19c51.username }}'
password = '{{ var.json.oracle_bigdata_19c51.password }}'
nowdt = '{{ execution_date | cst_ds }}'
nextdt = '{{ execution_date | date_add(1) | cst_ds }}'
table = "tab_barscan_collect"
env = '{{ var.value.env_sync }}'


# The source table lives on the Oracle side and its data is only fully
# updated around 01:30, so the sync is scheduled to start after 01:30.

# Job description consumed by the sync jar. Placeholder tokens (e.g.
# "jdbcUrlpara", "nowdt") are substituted below before submission.
# NOTE(review): the writer "connect" credentials appear unused for the hive
# writer and the "settting" key is spelled as the jar expects — kept verbatim.
_TEMPLATE = """{
"reader":{
"connect":{
"url":"jdbcUrlpara",
"username":"usernamepara",
"password":"passwordpara",
"driver":"oracle.jdbc.driver.OracleDriver"
},
"dbtype":"oracle",
"tableName":"tablepara",
"where":"inputtime>=to_date('nowdt 00:00:00','yyyy-mm-dd hh24:mi:ss') and inputtime<to_date('nextdt 00:00:00','yyyy-mm-dd hh24:mi:ss')",
"query":"",
"splitColumn":"billcode",
"equalitySectioning":1,
"containsnull":0,
"fetchsize":"1000",
"threadNumber":5
},
"channel":{
"filterAbnormalCharacter":0
},
"writer":{
"connect":{
"url":"jdbc:oracle:thin:@10.33.130.100:1521:orcl",
"username":"test_bi",
"password":"123456",
"driver":""
},
"dbtype":"hive",
"tableName":"tablepara",
"database":"jms_ods",
"writeMode": "overwrite",
"partitionColumn":"dt",
"partitionValue":"nowdt"},
"settting":{
"env":"envpara"}
}"""

# Ordered (placeholder, value) pairs, applied in this exact order so the
# result matches the previous chained str.replace() implementation.
_REPLACEMENTS = (
    ("jdbcUrlpara", jdbcUrl),
    ("usernamepara", username),
    ("passwordpara", password),
    ("nowdt", nowdt),
    ("nextdt", nextdt),
    ("tablepara", table),
    ("envpara", env),
)

jsonpara = _TEMPLATE
for _placeholder, _value in _REPLACEMENTS:
    jsonpara = jsonpara.replace(_placeholder, _value)

def kwargs():
    """Return the task metadata dict passed to the DingTalk alert callbacks."""
    return {
        "db": "jms_ods",
        "table": "tab_barscan_collect",
        "desc": "ods层collect表",
        "taskid": "10180",
        "ifprivacy": 1,
        "warnignore": 0,
    }

# Spark runtime configuration: dynamic allocation capped at 6 executors and
# dynamic partition overwrite so only the synced dt partition is replaced.
_SPARK_CONF = {
    'spark.dynamicAllocation.enabled': 'true',
    'spark.shuffle.service.enabled': 'true',
    'spark.dynamicAllocation.maxExecutors': 6,
    'spark.dynamicAllocation.cachedExecutorIdleTimeout': 30,
    'spark.sql.sources.partitionOverwriteMode': 'dynamic',
    'spark.executor.memoryOverhead': '1G',
}

# Oracle -> Hive sync task for jms_ods.tab_barscan_collect, executed by the
# generic DataSynchDriver jar using the JSON job description built above.
jms_ods__tab_barscan_collect = SparkSubmitOperator(
    task_id='jms_ods__tab_barscan_collect',
    email=['chenhongping@yl-scm.com', 'yl_bigdata@yl-scm.com'],
    name='jms_ods__tab_barscan_collect_{{ execution_date | date_add(1) | cst_ds }}',
    pool='oracle_tab',
    pool_slots=1,
    execution_timeout=timedelta(hours=2),
    driver_memory='2G',
    executor_memory='3G',
    executor_cores=2,
    num_executors=1,
    conf=_SPARK_CONF,
    java_class='com.yunlu.bigdata.jobs.synchrotool.DataSynchDriver',  # Spark main class
    application='hdfs:///scheduler/jms/spark/sync/spark_sync.jar',  # sync-tool jar on HDFS
    application_args=[jsonpara],
    sla=timedelta(hours=2),
    on_success_callback=yl_threeSegCodeOnSuccess(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_info"),
    on_failure_callback=yl_threeSegCodeOnFailure(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_alert"),
)

# This sync must run only after the "yestoday" snapshot task has completed.
jms_ods__tab_barscan_collect << jms_ods__tab_barscan_collect__yestoday



# NOTE(review): retired SparkSqlOperator-based implementation kept below for reference only.
# from utils.operators.spark_sql_operator import SparkSqlOperator
# from datetime import timedelta
# from jms.ods.tab.tab_barscan_collect_check import jms_ods__tab_barscan_collect_check
# from utils.alerts.yl_threeSegCodeOnFailue import yl_threeSegCodeOnFailure
# from utils.alerts.yl_threeSegCodeOnSuccess import yl_threeSegCodeOnSuccess
#
# def kwargs():
#     kwargs = {
#         "db": "jms_ods",
#         "table": "tab_barscan_collect",
#         "desc": "ods层collect表",
#         "taskid": "10180",
#         "ifprivacy": 1,
#         "warnignore": 0,
#     }
#     return kwargs
#
# jms_ods__tab_barscan_collect = SparkSqlOperator(
#     task_id='jms_ods__tab_barscan_collect',
#     email=['rongguangfan@jtexpress.com', 'yl_bigdata@yl-scm.com'],
#     master='yarn',
#     name='jms_ods__tab_barscan_collect_{{ execution_date | date_add(1) | cst_ds }}',
#     sql='jms/ods/tab/tab_barscan_collect/execute.sql',
#     pool_slots=2,
#     driver_memory='2G',
#     driver_cores=2,
#     executor_cores=2,
#     executor_memory='3G',
#     num_executors=2,
#     conf={'spark.dynamicAllocation.enabled'                  : 'true',
#           'spark.shuffle.service.enabled'                    : 'true',
#           'spark.dynamicAllocation.maxExecutors'             : 32,
#           'spark.dynamicAllocation.cachedExecutorIdleTimeout': 30,
#           'spark.sql.sources.partitionOverwriteMode'         : 'dynamic',
#           'spark.executor.memoryOverhead'                    : '1G',
#           },
#     hiveconf={'hive.exec.dynamic.partition'             : 'true',  # 动态分区
#               'hive.exec.dynamic.partition.mode'        : 'nonstrict',
#               'hive.exec.max.dynamic.partitions'        : 300,  # 最大分区
#               'hive.exec.max.dynamic.partitions.pernode': 300,  # 最大分区
#               },
#     yarn_queue='pro',
#     execution_timeout=timedelta(minutes=30),
#     on_success_callback=yl_threeSegCodeOnSuccess(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_info"),
#     on_failure_callback=yl_threeSegCodeOnFailure(kwargs(), dingding_conn_id="dingding_ThreeSeg_etl_alert"),
# )
#
# jms_ods__tab_barscan_collect << [jms_ods__tab_barscan_collect_check]

