from typing import TYPE_CHECKING, Any, Sequence
from datetime import datetime, timedelta
from airflow.sensors.base import BaseSensorOperator
from utils.operators.asyn_trigger import TaskStatusTrigger
from airflow.utils import timezone

from airflow.utils.context import Context


def get_pymysql_data(sql: str) -> str:
    """Run *sql* against the Airflow metastore MySQL database and return the
    first column of the first result row.

    Connection settings (host/username/password/db) are read from the Airflow
    Variable ``airflow_metastore``, which must contain a JSON document.

    :param sql: SQL text executed as-is. Callers must NOT interpolate
        untrusted input into it (no escaping is performed here).
    :return: value of the first column of the first row.
    :raises IndexError: if the query returns no rows.
    """
    import pymysql
    import json
    from contextlib import closing
    from airflow.models import Variable
    airflow_customize_config = json.loads(Variable.get(key='airflow_metastore', default_var=''))
    # Wrap the connection in closing() as well -- the original code closed
    # only the cursor and leaked the connection on every call.
    with closing(pymysql.connect(host=airflow_customize_config.get("host"),
                                 user=airflow_customize_config.get("username"),
                                 password=airflow_customize_config.get("password"),
                                 port=3306,  # MySQL default port
                                 database=airflow_customize_config.get("db"),
                                 charset='utf8')) as conn:
        with closing(conn.cursor()) as cur:
            cur.execute(sql)
            result = cur.fetchall()
            return result[0][0]


"""
 TODO 需要做入参校验

"""


class ExternalDagSensor(BaseSensorOperator):
    """Sensor that checks the state of a task in an upstream (external) DAG.

    The upstream DAG run is located by shifting ``external_execution_date_time``
    by ``execution_delta`` units of ``execution_delta_time_unit`` ("days" or
    "hours"), then the state of ``external_task_id`` in that run is compared
    against ``allowed_states``.
    """

    template_fields: Sequence[str] = ["external_dag_id", "external_task_id", "external_execution_date_time",
                                      "execution_delta", "execution_delta_time_unit", "allowed_states"]

    def __init__(self, *, external_dag_id: str,
                 external_task_id: str,
                 external_execution_date_time: str,
                 execution_delta: int,
                 execution_delta_time_unit: str,
                 allowed_states: str,
                 **kwargs: Any) -> None:
        """
        :param external_dag_id: dag_id of the upstream DAG.
        :param external_task_id: task_id inside the upstream DAG to check.
        :param external_execution_date_time: base execution datetime, formatted
            as ``%Y-%m-%d %H:%M:%S``.
        :param execution_delta: offset applied to the base datetime.
        :param execution_delta_time_unit: unit of the offset, ``"days"`` or
            ``"hours"``.
        :param allowed_states: the single state value that counts as success
            (e.g. ``"success"``).
        """
        super().__init__(**kwargs)
        self.external_dag_id = external_dag_id
        self.external_task_id = external_task_id
        self.external_execution_date_time = external_execution_date_time
        self.execution_delta = execution_delta
        self.execution_delta_time_unit = execution_delta_time_unit
        self.allowed_states = allowed_states

    def _target_execution_date(self) -> str:
        """Return the upstream run's execution date as ``%Y-%m-%d %H:%M:%S``.

        :raises ValueError: if ``external_execution_date_time`` is malformed
            or ``execution_delta_time_unit`` is not ``"days"``/``"hours"``
            (the original code silently produced an empty date string for an
            unknown unit, yielding a query that never matched).
        """
        base = datetime.strptime(self.external_execution_date_time, '%Y-%m-%d %H:%M:%S')
        if self.execution_delta_time_unit == "days":
            delta = timedelta(days=self.execution_delta)
        elif self.execution_delta_time_unit == "hours":
            delta = timedelta(hours=self.execution_delta)
        else:
            raise ValueError(
                "unsupported execution_delta_time_unit: %r (expected 'days' or 'hours')"
                % self.execution_delta_time_unit)
        return (base + delta).strftime("%Y-%m-%d %H:%M:%S")

    def poke(self, context: "Context") -> bool:
        """Return True when the upstream task's state equals ``allowed_states``.

        Any failure (missing DAG run / task instance, DB error, bad
        parameters) is logged with its traceback and reported as "not yet
        ready" (False) so the sensor keeps poking instead of crashing.
        """
        try:
            self.log.info("-------------------开始检查上游dag任务状态-------------------")
            self.log.info("上游dag信息为 - external_dag_id: %s"
                          "| external_task_id: %s "
                          "| external_execution_date_time: %s "
                          "| execution_delta: %s"
                          "| allowed_states: %s"
                          "| execution_delta_time_unit: %s",
                          self.external_dag_id, self.external_task_id,
                          self.external_execution_date_time, self.execution_delta,
                          self.allowed_states, self.execution_delta_time_unit)

            # Uses the module-level `from datetime import datetime`; the old
            # code re-imported the datetime module inside poke(), shadowing it.
            formatted_date_time = self._target_execution_date()

            # NOTE(review): both queries are built by string concatenation
            # from templated fields -- SQL-injection prone if DAG parameters
            # come from untrusted input. Switch to parameterized queries if
            # the pool API supports them.
            run_sql = ("select run_id from airflow.dag_run where execution_date = '"
                       + formatted_date_time + "' and dag_id = '" + self.external_dag_id + "'")
            self.log.info("开始查询数据库 dag_run :" + run_sql)

            from utils.operators.my_sql_conn import MyPymysqlPool
            mysql_pool = MyPymysqlPool("airflow_metastore")

            run_row = mysql_pool.getOne(run_sql)
            if not run_row:
                # Upstream run not created yet -- keep poking. The old code
                # raised TypeError on the None subscript and relied on the
                # broad except to return False.
                self.log.info("未找到上游 dag_run, dag_id=%s, execution_date=%s",
                              self.external_dag_id, formatted_date_time)
                return False
            run_id = run_row['run_id'].decode('utf-8')
            self.log.info("获取的 runId:%s", run_id)

            state_sql = ("select state from airflow.task_instance where task_id = '" + self.external_task_id
                         + "' and dag_id = '" + self.external_dag_id + "' and run_id = '" + run_id + "'")
            self.log.info("开始查询数据库 task_instance :" + state_sql)

            state_row = mysql_pool.getOne(state_sql)
            if not state_row:
                self.log.info("未找到上游 task_instance, task_id=%s, run_id=%s",
                              self.external_task_id, run_id)
                return False
            state = state_row['state'].decode('utf-8')
            self.log.info("获取的 state:%s", state)

            return state == self.allowed_states
        except Exception:
            # log.exception keeps the traceback; the old `log.info(e)` lost it.
            self.log.exception("检查上游dag任务状态失败")
            return False
        finally:
            self.log.info("-------------------检查上游dag任务状态结束-------------------")


class ExternalDagSensorAsync(ExternalDagSensor):
    """Deferrable (async) variant of :class:`ExternalDagSensor`.

    Validates ``external_execution_date_time`` and then defers to
    :class:`TaskStatusTrigger`, which polls the upstream task state without
    occupying a worker slot.
    """

    def execute(self, context: Context):
        """Validate the datetime parameter, then defer to the trigger.

        :raises RuntimeError: if ``external_execution_date_time`` matches none
            of the accepted formats (``%Y-%m-%d``, ``%Y-%m-%d %H:%M`` or
            ``%Y-%m-%d %H:%M:%S``).
        """
        import time

        external_execution_date_time = self.external_execution_date_time
        # Pick the expected format by counting ":" separators:
        # 2 -> with seconds, 1 -> minutes only, 0 -> date only.
        colon_count = external_execution_date_time.count(":")
        if colon_count >= 2:
            fmt = "%Y-%m-%d %H:%M:%S"
        elif colon_count == 1:
            fmt = "%Y-%m-%d %H:%M"
        else:
            fmt = "%Y-%m-%d"

        try:
            time.strptime(external_execution_date_time, fmt)
        except (ValueError, TypeError) as e:
            # Chain the parsing error (`from e`); the original dropped it.
            raise RuntimeError(
                "{0}>>>>>>>：不是时间格式,直接终止校验流程".format(external_execution_date_time)) from e

        self.defer(
            trigger=TaskStatusTrigger(external_dag_id=self.external_dag_id,
                                      external_task_id=self.external_task_id,
                                      external_execution_date_time=self.external_execution_date_time,
                                      execution_delta=self.execution_delta,
                                      execution_delta_time_unit=self.execution_delta_time_unit,
                                      allowed_states=self.allowed_states),
            method_name="execute_complete",
        )

    def execute_complete(self, context, event=None):
        """Callback for when the trigger fires - returns immediately."""
        return None
