# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import asyncio
import datetime
from typing import Any, Dict, Tuple
from datetime import datetime, timedelta

from airflow.triggers.base import BaseTrigger, TriggerEvent
from airflow.utils import timezone
import re
import logging
import time
import datetime
from utils.operators.my_sql_conn import MyPymysqlPool


def get_pymysql_data(sql: str) -> str:
    """
    Run *sql* against the Airflow metastore (MySQL) and return the first
    column of the first row.

    Connection settings are read from the Airflow Variable
    ``airflow_metastore`` — a JSON object with ``host``, ``username``,
    ``password`` and ``db`` keys. The port is fixed at 3306.

    :param sql: SQL statement to execute. NOTE(review): the statement is
        executed as-is; callers must not pass untrusted input here.
    :return: value of the first column of the first result row.
    :raises IndexError: if the query returns no rows.
    """
    import pymysql
    import json
    from contextlib import closing
    from airflow.models import Variable

    airflow_customize_config = json.loads(Variable.get(key='airflow_metastore', default_var=''))
    conn = pymysql.connect(host=airflow_customize_config.get("host"),
                           user=airflow_customize_config.get("username"),
                           password=airflow_customize_config.get("password"),
                           port=3306,  # metastore MySQL port
                           database=airflow_customize_config.get("db"),
                           charset='utf8')
    # Close the connection as well as the cursor — the original closed only
    # the cursor and leaked one DB connection per call.
    with closing(conn), closing(conn.cursor()) as cur:
        cur.execute(sql)
        result = cur.fetchall()
        return result[0][0]

class TaskStatusTrigger(BaseTrigger):
    """
    Trigger that polls the Airflow metadata DB until an upstream task
    reaches the allowed state, then fires a single :class:`TriggerEvent`.

    Every 60 seconds it:

    1. shifts ``external_execution_date_time`` by ``execution_delta``
       (interpreted in ``execution_delta_time_unit``),
    2. looks up the matching ``dag_run.run_id`` for ``external_dag_id``,
    3. reads the state of ``external_task_id`` for that run, and
    4. fires once the state equals ``allowed_states``.

    Timestamp granularity is inferred from the number of ``:`` characters
    in ``external_execution_date_time`` (2 → seconds, 1 → minutes,
    0 → date only).
    """

    def __init__(self, external_dag_id: str,
                 external_task_id: str,
                 external_execution_date_time: str,
                 execution_delta: int,
                 execution_delta_time_unit: str,
                 allowed_states: str,):
        # The original skipped super().__init__(); BaseTrigger (and its
        # LoggingMixin, which provides self.log) should be initialised.
        super().__init__()
        self.external_dag_id = external_dag_id
        self.external_task_id = external_task_id
        self.external_execution_date_time = external_execution_date_time
        self.execution_delta = execution_delta
        self.execution_delta_time_unit = execution_delta_time_unit
        self.allowed_states = allowed_states

    def serialize(self) -> Tuple[str, Dict[str, Any]]:
        """Serialize constructor arguments so the triggerer can rebuild us."""
        return (
            "utils.operators.asyn_trigger.TaskStatusTrigger",
            {
                "external_dag_id": self.external_dag_id,
                "external_task_id": self.external_task_id,
                "external_execution_date_time": self.external_execution_date_time,
                "execution_delta": self.execution_delta,
                "execution_delta_time_unit": self.execution_delta_time_unit,
                "allowed_states": self.allowed_states,
            },
        )

    def _shifted_execution_date(self) -> Tuple[str, str]:
        """
        Apply ``execution_delta`` to ``external_execution_date_time``.

        :return: ``(formatted_date_time, mysql_date_format)`` where the
            second element is the MySQL ``date_format`` pattern matching
            the detected granularity.
        :raises ValueError: if the timestamp does not match the expected
            format for its granularity (aborts this poll iteration).
        """
        raw = self.external_execution_date_time
        colon_count = len(re.findall(":", raw))
        if colon_count >= 2:
            py_fmt, mysql_fmt = "%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%i:%s"
            hours_shift_supported = True
        elif colon_count == 1:
            py_fmt, mysql_fmt = "%Y-%m-%d %H:%M", "%Y-%m-%d %H:%i"
            hours_shift_supported = False
        else:
            py_fmt, mysql_fmt = "%Y-%m-%d", "%Y-%m-%d"
            hours_shift_supported = False

        # Validates the raw string; raises ValueError on mismatch.
        time.strptime(raw, py_fmt)
        origin_date_time = datetime.datetime.strptime(raw, py_fmt)

        formatted_date_time = ''
        if self.execution_delta_time_unit == "days":
            formatted_date_time = (
                origin_date_time + timedelta(days=self.execution_delta)
            ).strftime(py_fmt)
        elif self.execution_delta_time_unit == "hours":
            if hours_shift_supported:
                formatted_date_time = (
                    origin_date_time + timedelta(hours=self.execution_delta)
                ).strftime(py_fmt)
            else:
                # NOTE(review): for minute- and date-granularity inputs an
                # "hours" delta is ignored and the raw string is used
                # unchanged — preserved from the original code; confirm
                # this is intended and not a missing timedelta(hours=...).
                formatted_date_time = raw
        # Any other unit leaves formatted_date_time == '' (original behavior).
        return formatted_date_time, mysql_fmt

    def _query_run_id(self, mysql_pool, formatted_date_time: str, mysql_fmt: str) -> str:
        """Look up the run_id of the upstream DAG run for the shifted date."""
        # NOTE(review): SQL built by string concatenation. Inputs come from
        # DAG configuration, but a parameterized query would be safer if the
        # pool helper supports it.
        sql = ("select run_id from airflow.dag_run where date_format(execution_date,'"
               + mysql_fmt + "') = '" + formatted_date_time
               + "' and dag_id = '" + self.external_dag_id + "' limit 1 ")
        self.log.info("开始查询数据库 dag_run :" + sql)
        return mysql_pool.getOne(sql)['run_id'].decode('utf-8')

    def _query_task_state(self, mysql_pool, run_id: str) -> str:
        """Read the state of the upstream task instance for *run_id*."""
        sql = ("select state from airflow.task_instance where task_id = '"
               + self.external_task_id + "' and dag_id = '" + self.external_dag_id
               + "' and run_id = '" + run_id + "'")
        self.log.info("开始查询数据库 task_instance :" + sql)
        return mysql_pool.getOne(sql)['state'].decode('utf-8')

    async def run(self):
        """
        Poll the metadata DB every 60 seconds until the upstream task's
        state equals ``allowed_states``, then yield one TriggerEvent with
        the serialized constructor arguments as its payload.

        Any per-iteration error (bad timestamp, missing run, DB failure)
        is logged and retried on the next cycle.
        """
        while True:
            # A fresh pool per iteration so a broken connection never
            # poisons subsequent polls; disposed in the finally block.
            mysql_pool = MyPymysqlPool("airflow_metastore")

            try:
                self.log.info("-------------------开始检查上游dag任务状态-------------------")
                self.log.info("上游dag信息为 - external_dag_id: %s"
                              "| external_task_id: %s "
                              "| external_execution_date_time: %s "
                              "| execution_delta: %s"
                              "| allowed_states: %s"
                              "| execution_delta_time_unit: %s", self.external_dag_id, self.external_task_id
                              , self.external_execution_date_time, self.execution_delta, self.allowed_states,
                              self.execution_delta_time_unit)

                formatted_date_time, mysql_fmt = self._shifted_execution_date()
                run_id = self._query_run_id(mysql_pool, formatted_date_time, mysql_fmt)
                self.log.info("获取的 runId:%s", run_id)

                state = self._query_task_state(mysql_pool, run_id)
                self.log.info("获取的 state:%s", state)

                state_matches = (state == self.allowed_states)
                self.log.info("获取的 state: %s", state_matches)

                if state_matches:
                    break

            except Exception as e:
                # log.exception keeps the traceback; the original logged the
                # bare exception at INFO, hiding the failure's origin.
                self.log.exception(e)

            finally:
                self.log.info("-------------------检查上游dag任务状态结束-------------------")
                mysql_pool.dispose()
            await asyncio.sleep(60)

        # Upstream reached the allowed state — send our single event.
        yield TriggerEvent({"external_dag_id": self.external_dag_id,
                            "external_task_id": self.external_task_id,
                            "external_execution_date_time": self.external_execution_date_time,
                            "execution_delta": self.execution_delta,
                            "execution_delta_time_unit": self.execution_delta_time_unit,
                            "allowed_states": self.allowed_states})

