# -*- coding: utf-8 -*-
from datetime import timedelta
from time import time
import pendulum
from utils.operators.rich_sql_sensor import RichSqlSensor
from jms.dm.dm_mng_complaint_daliy_mth_exclude_mt import jms_dm__dm_mng_complaint_daliy_mth_exclude_mt

from airflow.exceptions import AirflowSkipException
cst = pendulum.timezone('Asia/Shanghai')
class BranchRichSqlSensor(RichSqlSensor):
    """RichSqlSensor that only actually runs on selected days of the month.

    On any other day the task raises AirflowSkipException from pre_execute,
    so the run is marked as skipped rather than executed.
    """

    def pre_execute(self, context):
        # Convert the execution_date to CST and shift one day forward to get
        # the calendar day this run represents.
        day = cst.convert(context['ti'].execution_date) + timedelta(days=1)

        # Days of month (two-digit strings) on which the load should run.
        schedule_date = ['03']

        day_of_month = day.strftime('%d')
        if day_of_month in schedule_date:
            print(f'{day_of_month} in {schedule_date}, run now')
            super().pre_execute(context)
        else:
            print(f'{day_of_month} not in {schedule_date}, should skip')
            # Run the parent hook first, then mark this task instance skipped.
            super().pre_execute(context)
            raise AirflowSkipException()


# Doris target table name
doris_table = "dm_sqs_workorder_report_exclude_detail_upt"
# Unique broker-load label: table name + unix timestamp
label = f"{doris_table}_{int(time())}"
# Source table name in Hive
hive_table = "dm_sqs_workorder_report_exclude_detail_upt"
# HDFS root of the Hive external warehouse (namespace comes from an Airflow Variable)
hdfs_path = "hdfs://{{var.value.hadoop_namespace}}/dw/hive/jms_dm.db/external/"
timeout = timedelta(hours=2).seconds
# Quoted glob used to check whether the current day's partition exists
file_path = f'"{hdfs_path}{hive_table}/dt={{{{ execution_date | cst_ds }}}}/*"'

# Build the last 10 days of Doris partition names and the matching HDFS file
# globs. Day 0 uses the plain execution date; earlier days go through the
# date_add(-i) template filter.
partition_lists = ["p{{ execution_date | cst_ds_nodash }}"]
path_lists = [file_path]
for offset in range(1, 10):
    partition_lists.append(
        "p{{ execution_date | date_add(-" + str(offset) + ") | cst_ds_nodash }}"
    )
    path_lists.append(
        f'"{hdfs_path}{hive_table}/dt={{{{ execution_date | date_add(-{offset}) | cst_ds }}}}/*"'
    )

partition_str = ",".join(partition_lists)
# Reverse so the oldest day comes first in the DATA INFILE list
path_str = ",".join(path_lists[::-1])
# Sensor task: truncates the last 10 day-partitions of the Doris table, then
# submits a broker LOAD of the matching Hive/HDFS parquet files and polls
# SHOW LOAD until the job reaches a final state. Only actually runs on day 03
# of each month (see BranchRichSqlSensor.pre_execute); other days it self-skips.
doris_jms_dm__dm_sqs_workorder_report_exclude_detail_upt_mth = BranchRichSqlSensor(
    task_id='doris_jms_dm__dm_sqs_workorder_report_exclude_detail_upt_mth',
    # Shared pool serializes broker loads across DAGs.
    pool='broker_load_pool',
    email=['jimmyk.ji@jtexpress.com','yl_bigdata@yl-scm.com'],
    # Never run two loads of this table at once.
    task_concurrency=1,
    conn_id='doris',
    # pre_sql: clear the target partitions, then start the broker load job.
    pre_sql=f"""
                 TRUNCATE TABLE jms_dm.{doris_table} PARTITION ({partition_str});
                 LOAD LABEL jms_dm.{label} (
                     DATA INFILE({path_str})
                     INTO TABLE {doris_table}
                     FORMAT AS 'PARQUET'
                 )
                 WITH BROKER '{{{{ var.json.doris_brokers | random_choice }}}}'
                 PROPERTIES ('timeout'='{timeout}', 'max_filter_ratio'='0.0')""",
    # Poll the newest load job carrying our label.
    poke_sql=f"SHOW LOAD FROM jms_dm WHERE label = '{label}' ORDER BY CreateTime DESC LIMIT 1",
    sql_on_kill=f"CANCEL LOAD FROM jms_dm WHERE LABEL = '{label}'",
    # In the SHOW LOAD row, r[2] is State and r[7] is ErrorMsg. An empty
    # source directory ("No source file in this table") counts as success.
    success=lambda r: (r[2] == 'FINISHED' or ( 'No source file in this table' in str(r[7]) )),
    # NOTE(review): this lambda returns a 2-tuple (flag, message); a non-empty
    # tuple is always truthy if evaluated directly, so confirm that
    # RichSqlSensor's `failure` callback expects (bool, reason) and not a bare
    # bool — if the latter, every poke would be treated as a failure.
    failure=lambda r: (r[2] is not None and r[2] == 'CANCELLED' and ( 'No source file in this table' not in str(r[7]) ), str(r[7])),
    poke_interval=60,
    # Give the broker load its full SQL timeout plus a small grace period.
    execution_timeout=timedelta(seconds=timeout + 120), )

# Upstream dependency: load into Doris only after the Hive DM table is built.
doris_jms_dm__dm_sqs_workorder_report_exclude_detail_upt_mth << [jms_dm__dm_mng_complaint_daliy_mth_exclude_mt]
        
        
        