# -*- coding: utf-8 -*-
from datetime import timedelta
from utils.operators.spark_submit import SparkSubmitOperator

# Jinja-templated connection / schedule values. These are rendered by Airflow
# at task runtime (via the templated operator field); the placeholder
# substitution below happens earlier, at DAG-parse time, and only splices the
# raw "{{ ... }}" expressions into the JSON text.
jdbcUrl = '{{ var.json.oracle_bigdata_19c50.url }}'
username = '{{ var.json.oracle_bigdata_19c50.username }}'
password = '{{ var.json.oracle_bigdata_19c50.password }}'
nowdt = '{{ execution_date | cst_ds }}'
nextdt = '{{ execution_date | date_add(1) | cst_ds }}'
table = "yl_oms_order_delivery_record"
env = '{{ var.value.env_sync }}'

# Raw job spec handed to the Spark sync driver. Tokens such as "jdbcUrlpara"
# and the bare "nowdt"/"nextdt" date markers are plain-text placeholders
# filled in below.
# NOTE(review): the "where" value holds many ';'-separated condition groups —
# presumably the sync tool splits on ';' to run parallel extracts; the list
# ends with a trailing ';', so confirm the tool tolerates an empty final
# segment.
# NOTE(review): the "settting" key is spelled with three t's — presumably it
# matches what DataSynchDriver reads; confirm before "fixing" the typo.
_JSON_TEMPLATE = """{
"reader":{
"connect":{
"url":"jdbcUrlpara",
"username":"usernamepara",
"password":"passwordpara",
"driver":"oracle.jdbc.driver.OracleDriver"
},
"dbtype":"oracle",
"tableName":"tablepara",
"where":"
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 08:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 08:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 09:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 09:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 09:30:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 09:30:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 10:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 10:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 11:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 11:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 12:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 12:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 13:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 13:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 14:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 14:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 15:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 15:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 16:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 16:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 17:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 17:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nowdt 20:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 20:00:00', 'yyyy-mm-dd hh24:mi:ss') and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-1 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-1) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nowdt 15:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-1 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-1) and ((update_time>=to_date('nowdt 15:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nowdt 20:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-1 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-1) and ((update_time>=to_date('nowdt 20:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-2 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-2) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-3 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-3) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-6 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-4) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-12 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-7) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-25 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-13) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-38 and input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-26) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
(input_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss')-39) and ((update_time>=to_date('nowdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss') and update_time<to_date('nextdt 00:00:00', 'yyyy-mm-dd hh24:mi:ss'))) ;
",
"query":"",
"splitColumn":"",
"equalitySectioning":0,
"containsnull":0,
"fetchsize":"1024",
"threadNumber":-1
},
"channel":{
"filterAbnormalCharacter":1
},
"writer":{
"dbtype":"hive",
"tableName":"tablepara",
"database":"jms_ods",
"writeMode": "overwrite",
"partitionColumn":"dt",
"partitionValue":"nowdt"},
"settting":{
"env":"envpara"}
}"""

# Placeholder -> replacement, applied strictly in this order (mirrors the
# original chained .replace() sequence; no token is a substring of another
# token or of an earlier-inserted value, so the order is safe).
_SUBSTITUTIONS = (
    ("jdbcUrlpara", jdbcUrl),
    ("usernamepara", username),
    ("passwordpara", password),
    ("nowdt", nowdt),
    ("nextdt", nextdt),
    ("tablepara", table),
    ("envpara", env),
)

jsonpara = _JSON_TEMPLATE
for _token, _value in _SUBSTITUTIONS:
    jsonpara = jsonpara.replace(_token, _value)

from airflow.operators.dummy_operator import DummyOperator

# jms_ods__yl_oms_order_delivery_record = DummyOperator(
#     task_id='jms_ods__yl_oms_order_delivery_record',
#     email='chenhongping@yl-scm.com',
#     retries=0,
#     priority_weight=0,
#     # sla=timedelta(hours=2),
# )

# Spark runtime tuning for the Oracle -> Hive sync task. Dynamic allocation is
# on with a small executor cap; dynamic partition overwrite lets the writer
# replace only the dt partition it loads.
_SPARK_CONF = {
    'spark.dynamicAllocation.enabled': 'true',
    'spark.shuffle.service.enabled': 'true',
    'spark.dynamicAllocation.maxExecutors': 5,
    'spark.dynamicAllocation.cachedExecutorIdleTimeout': 30,
    'spark.sql.sources.partitionOverwriteMode': 'dynamic',
    'spark.executor.memoryOverhead': '1G',
}

# Submits the generic sync driver with the JSON job spec assembled above as
# its single program argument. The name suffix is a Jinja expression rendered
# by Airflow per run.
jms_ods__yl_oms_order_delivery_record = SparkSubmitOperator(
    task_id='jms_ods__yl_oms_order_delivery_record',
    name='jms_ods__yl_oms_order_delivery_record_{{ execution_date | date_add(1) | cst_ds }}',
    email=['yl_etl@yl-scm.com', 'yl_bigdata@yl-scm.com'],
    pool='oracle_tab',
    pool_slots=6,
    execution_timeout=timedelta(minutes=80),
    retries=1,
    driver_memory='2G',
    executor_memory='2G',
    executor_cores=2,
    num_executors=2,
    conf=_SPARK_CONF,
    java_class='com.yunlu.bigdata.jobs.synchrotool.DataSynchDriver',  # Spark main class
    application='hdfs:///user/hive/work/sparksync/spark_sync.jar',  # sync driver jar on HDFS
    application_args=[jsonpara],
)
