# -*- coding: utf-8 -*-
from time import sleep
from datetime import timedelta, datetime
from utils.operators.spark_submit import SparkSubmitOperator

# --- Connection / scheduling parameters -------------------------------------
# All single-quoted values are Airflow Jinja templates, rendered at run time.
jdbcUrl = '{{ var.json.oracle_tms_pro.url }}'
username = '{{ var.json.oracle_tms_pro.username }}'
password = '{{ var.json.oracle_tms_pro.password }}'
nowdt = '{{ execution_date | cst_ds }}'     # CST date of this run; used as the Hive partition value
nextdt = '{{ execution_date | date_add(1) | cst_ds }}'  # CST date of the following day
table = "yl_tmsnew_branch_shipment_stop"
env = '{{ var.value.env_sync }}'

# Raw job description consumed by the Spark sync jar.  Tokens such as
# "jdbcUrlpara" / "nowdt" are placeholders substituted below; everything else
# in this string is runtime data and must stay byte-identical.
# NOTE(review): the "settting" key is spelled with three t's — presumably the
# consumer jar expects exactly that spelling; confirm before "fixing" it.
_TEMPLATE = """{
"reader":{
"connect":{
"url":"jdbcUrlpara",
"username":"usernamepara",
"password":"passwordpara",
"driver":"oracle.jdbc.driver.OracleDriver"
},
"dbtype":"oracle",
"tableName":"tablepara",
"where":"1=1",
"query":"select id ,shipment_id     ,shipment_no     ,sort_num ,network_id      ,network_code     ,network_name     ,roadsection_id    ,planned_arrival_day   ,planned_arrival_hour  ,planned_arrival_time  ,actual_arrival_time   ,planned_departure_day  ,planned_departure_hour  ,planned_departure_time  ,actual_departure_time  ,stop_time      ,usetime ,to_char(mileage) as mileage ,to_char(speed) as speed  ,batch_time      ,handling_type     ,load_start_time    ,load_end_time     ,unload_start_time   ,unload_end_time    ,app_arrival_time    ,app_departure_time   ,gps_arrival_time    ,gps_departure_time   ,lock_time      ,unlock_time     ,weight_arrival_time   ,weight_departure_time  ,arrival_gross_weight  ,arrival_tare_weight   ,arrival_net_weight   ,departure_gross_weight  ,departure_tare_weight  ,departure_net_weight  ,estimate_arrival_time  ,plan_driving_time   ,actual_driving_time   ,actual_arrival_time_source      ,actual_departure_time_source     ,estimate_batch_time   ,actual_batch_time   ,create_by      ,create_by_name    ,create_time     ,update_by      ,update_by_name    ,update_time     ,simple_name     ,gps_depart_mileage   ,gps_arrival_mileage   ,financial_center_id   ,financial_center_code  ,financial_center_desc  ,clock_time   from yl_tmsnew_branch_shipment_stop",
"splitColumn":"id",
"equalitySectioning":1,
"containsnull":0,
"fetchsize":"1024",
"threadNumber":10
},
"channel":{
"filterAbnormalCharacter":1
},
"writer":{
"dbtype":"hive",
"tableName":"tablepara",
"database":"jms_ods",
"writeMode": "overwrite",
"partitionColumn":"dt",
"partitionValue":"nowdt"},
"settting":{
"env":"envpara"}
}"""

# Placeholder -> value map, applied in this order (same order as the original
# hand-chained .replace() calls).  A single data-driven pass replaces the
# fragile chain: adding a new placeholder is one dict entry, and the token
# ordering is visible in one place instead of spread across line continuations.
# No substituted value contains another placeholder token, so the pass is safe.
_SUBSTITUTIONS = {
    "jdbcUrlpara": jdbcUrl,
    "usernamepara": username,
    "passwordpara": password,
    "nowdt": nowdt,
    "nextdt": nextdt,  # kept for parity with the original chain; token currently unused in _TEMPLATE
    "tablepara": table,
    "envpara": env,
}

jsonpara = _TEMPLATE
for _token, _value in _SUBSTITUTIONS.items():
    jsonpara = jsonpara.replace(_token, _value)

# Local/dry-run stand-in: uncomment this DummyOperator (and comment out the
# SparkSubmitOperator below) to test DAG wiring without submitting a Spark job.
# from airflow.operators.dummy_operator import DummyOperator
# jms_ods__yl_tmsnew_branch_shipment_stop = DummyOperator(
#     task_id='jms_ods__yl_tmsnew_branch_shipment_stop',
#     email='chenhongping@yl-scm.com',
#     retries=0,
#     priority_weight=0,
#     # sla=timedelta(hours=2),
# )
# Daily sync of Oracle table yl_tmsnew_branch_shipment_stop into the jms_ods
# Hive database, driven by the generic Spark data-sync jar with the JSON job
# description built above.
jms_ods__yl_tmsnew_branch_shipment_stop = SparkSubmitOperator(
    # -- task identity & alerting --
    task_id='jms_ods__yl_tmsnew_branch_shipment_stop',
    name='jms_ods__yl_tmsnew_branch_shipment_stop_{{ execution_date | date_add(1) | cst_ds }}',
    email=['chenhongping@yl-scm.com','yl_bigdata@yl-scm.com'],
    # -- scheduling constraints --
    pool='oracle_tms',
    pool_slots=3,
    execution_timeout=timedelta(hours=1),
    # -- Spark resource sizing --
    num_executors=5,
    executor_cores=2,
    executor_memory='2G',
    driver_memory='2G',
    # -- job payload --
    java_class='com.yunlu.bigdata.jobs.synchrotool.DataSynchDriver',  # Spark main class
    application='hdfs:///scheduler/jms/spark/sync/spark_sync.jar',  # Spark jar
    application_args=[jsonpara],
)
