# -*- coding: utf-8 -*-
import pendulum
from typing import Iterable, Union, List, Callable
from datetime import timedelta,datetime
from airflow.exceptions import AirflowSkipException
cst = pendulum.timezone('Asia/Shanghai')


def week_day_only(days_of_week: Union[int, Iterable[int]]):
    """
    Skip the task on non-matching weekdays. Assign the returned callable as an
    Operator's ``pre_execute`` to get the skip behaviour:

        some_operator.pre_execute = week_day_only(1)          # run on weekday 1 only
        some_operator.pre_execute = week_day_only([1, 3, 5])  # run on weekdays 1/3/5 only

    :param days_of_week: a weekday number or an iterable of them; 0 and 7 both
        mean Sunday (7 is normalized to 0 to match ``strftime('%w')``).
    :raises ValueError: if any value falls outside [0, 7].
    """
    if not isinstance(days_of_week, Iterable):
        days_of_week = [days_of_week]

    if not all(0 <= day <= 7 for day in days_of_week):
        # BUG FIX: the message used to claim [0, 6] while the check accepts [0, 7]
        raise ValueError('week_day_only param ERROR: `days_of_week` should be provided and between [0, 7]')

    # normalize to the strings strftime('%w') produces ('0' == Sunday)
    days_of_week = [str(0 if day == 7 else day) for day in days_of_week]

    def _skip_if_not_match(context):
        print('in pre execute')
        # execution_date is the start of the schedule interval; +1 day gives the
        # date the run actually covers (converted to Asia/Shanghai first).
        day = cst.convert(context['ti'].execution_date) + timedelta(days=1)
        if day.strftime('%w') not in days_of_week:
            print(f'{day.strftime("%w")} not in {days_of_week}, should skip')
            raise AirflowSkipException()
        else:
            print(f'{day.strftime("%w")} in {days_of_week}, run now')

    return _skip_if_not_match


def month_day_only(days_of_month: Union[int, Iterable[int]]):
    """
    Skip the task except on the given days of the month. Assign the returned
    callable as an Operator's ``pre_execute``:

        some_operator.pre_execute = month_day_only(1)          # run on the 1st only
        some_operator.pre_execute = month_day_only([1, 3, 5])  # run on the 1st/3rd/5th only

    :param days_of_month: a day-of-month or an iterable of them, each in [1, 31].
    :raises ValueError: if any value falls outside [1, 31].
    """
    if not isinstance(days_of_month, Iterable):
        days_of_month = [days_of_month]

    if not all(1 <= day <= 31 for day in days_of_month):
        raise ValueError('month_day_only param ERROR: `days_of_month` should be provided and between [1, 31]')

    # zero-pad to two digits to match strftime('%d') output ('01' .. '31')
    days_of_month = [str(n).zfill(2) for n in days_of_month]

    def _skip_if_not_match(context):
        print('in pre execute')
        # execution_date is the start of the schedule interval; +1 day gives the
        # date the run actually covers (converted to Asia/Shanghai first).
        day = cst.convert(context['ti'].execution_date) + timedelta(days=1)
        if day.strftime('%d') not in days_of_month:
            # BUG FIX: logs used %w (weekday) while the check uses %d (day of month)
            print(f'{day.strftime("%d")} not in {days_of_month}, should skip')
            raise AirflowSkipException()
        else:
            print(f'{day.strftime("%d")} in {days_of_month}, run now')

    return _skip_if_not_match


def combine(funcs: List[Callable], ignore_error: bool = False):
    """
    Chain several callbacks into one callable (e.g. multiple failure handlers).

    Before running the chained functions, task-run info is collected via
    ``update_airflow_runInfo`` with the first positional argument (the Airflow
    context dict).

    :param funcs: callbacks executed in order; the last one's return value wins.
    :param ignore_error: when True, an exception from an individual callback is
        printed and swallowed so the remaining callbacks still run.
    :return: a single callable forwarding ``*args, **kwargs`` to every callback.
    """
    # snapshot so later mutation of the caller's list cannot change the chain
    funcs = funcs.copy()
    # BUG FIX: removed a leftover debug print that ran at factory (DAG-parse)
    # time and misleadingly logged a failure message on every combine() call.

    def combined(*args, **kwargs):
        print("任务失败：开始采集")
        update_airflow_runInfo(args[0])
        print("任务失败：采集完成")
        last_res = None
        for fn in funcs:
            try:
                last_res = fn(*args, **kwargs)
            except Exception as e:
                if not ignore_error:
                    raise  # bare raise keeps the original traceback intact
                print(e)

        return last_res

    return combined


def call_external_dag_tasks(context):
    """
    Clear (re-trigger) downstream tasks in other DAGs that depend on this task.

    Steps:
    1. Read the current task's dag_id / task_id / execution_date from the context.
    2. Query the batch-lineage table for downstream tasks registered against it.
    3. For each downstream record, call the Airflow REST API
       ``clearTaskInstances`` endpoint; ``only_failed=True`` means only task
       instances that actually failed get re-run.
    """
    print("*******************clear_airflow_task_start*********************")
    jinja_dag_env = context['dag'].get_template_env()
    dag_id = jinja_dag_env.from_string("{{ dag_run.dag_id }}").render(context).strip()
    task_id = jinja_dag_env.from_string("{{ ti.task_id }}").render(context).strip()
    run_id = jinja_dag_env.from_string("{{ run_id }}").render(context).strip()
    from pytz import timezone
    from airflow.providers.http.hooks.http import HttpHook
    import json
    # the lineage table stores parent_execution_date in Asia/Shanghai local time
    execution_date = context['execution_date'].replace(tzinfo=timezone('UTC')).astimezone(timezone('Asia/Shanghai'))
    from utils.operators.my_sql_conn import MyPymysqlPool
    mysql_pool = MyPymysqlPool("airflow_metastore_ex")
    # TODO batch this lookup if the number of downstream records grows large.
    # NOTE(review): SQL built by str.format — values come from Airflow metadata,
    # not user input, but a parameterized query would still be safer.
    select_sql = """
    select * from t_ds_external_dag_relation_instance where parent_dag_id = '{parent_dag_id}'
    and parent_task_id = '{parent_task_id}' and parent_execution_date = '{parent_execution_date}'
    """.format(parent_dag_id=dag_id
               , parent_task_id=task_id
               , parent_execution_date=execution_date.strftime('%Y-%m-%d %H:%M:%S'))
    print(f"sql:{select_sql}")

    downstream_task_list = mysql_pool.getAll(select_sql)

    mysql_pool.dispose()

    # BUG FIX: the original wrote `if list == EmptyList:`, comparing the builtin
    # `list` type to [] — always False, so the empty-result early return never
    # happened and the loop below ran against an empty/None result.
    if not downstream_task_list:
        print("空列表，不做处理")
        return

    http = HttpHook(http_conn_id="airflow_rest_api")
    extra = http.get_connection("airflow_rest_api").extra_dejson

    for item in downstream_task_list:
        external_dag_id = item['dag_id']
        external_task_id = item['task_id']
        external_run_id = item['run_id']
        external_execution_date = item['utc_execution_date']
        # reuse the hook created above instead of rebuilding one per record.
        # NOTE(review): .decode("utf-8") implies the pool returns bytes for these
        # columns — confirm against MyPymysqlPool's cursor configuration.
        http_call_req = {
            'endpoint': extra.get('path') or f'/api/v1/dags/{external_dag_id.decode("utf-8")}/clearTaskInstances',
            'headers': {'Authorization': extra.get("Authorization"), 'Content-Type': 'application/json'},
            'data': json.dumps(
                {"dry_run": False, "end_date": external_execution_date.strftime('%Y-%m-%dT%H:%M:%S.%f' + '+00:00'),
                 "include_parentdag": False,
                 "include_subdags": False, "only_failed": True, "only_running": False,
                 "reset_dag_runs": True,
                 "start_date": external_execution_date.strftime('%Y-%m-%dT%H:%M:%S.%f' + '+00:00'),
                 "task_ids": [external_task_id.decode("utf-8")]})}
        print(f'request-parm:{http_call_req}')
        http_call_res = http.run(**http_call_req)
        print('http call res:\n', http_call_res)
    print("*******************clear_airflow_task_end*********************")


def external_task_success_callback(context):
    """Record the upstream/downstream batch linkage for cross-DAG sensors.

    Meant to run as a task success callback. If the finished task is an
    ``ExternalDagSensorAsync``, upsert one row into
    ``t_ds_external_dag_relation_instance`` linking the sensed (parent)
    dag/task/execution_date to the current dag/task/run. Any other operator
    type returns immediately.
    """
    print("*******************update_external_dag_task_info*********************")
    """
    如果当前是跨dag算子，将上下游的批次号信息落库
    """

    jinja_dag_env = context['dag'].get_template_env()
    jinja_task_env = context['task'].get_template_env()

    # Only cross-DAG async sensors are recorded; everything else is a no-op.
    if jinja_task_env.from_string("{{ task.task_type }}").render(context).strip() != 'ExternalDagSensorAsync':
        return

    # Sensor attributes describing the upstream ("parent") task it waits on.
    parent_dag_id = jinja_task_env.from_string("{{ task.external_dag_id }}").render(context).strip()
    parent_task_id = jinja_task_env.from_string("{{ task.external_task_id }}").render(context).strip()

    # NOTE(review): the trailing space inside the template strings below is
    # removed by .strip() immediately after rendering — looks accidental;
    # confirm before tidying.
    origin_date_time = jinja_task_env.from_string("{{ task.external_execution_date_time }} ").render(context).strip()
    execution_delta_time_unit = jinja_task_env.from_string("{{ task.execution_delta_time_unit }} ").render(context).strip()
    execution_delta = jinja_task_env.from_string("{{ task.execution_delta }} ").render(context).strip()
    from textwrap import dedent
    # external_execution_date_time is itself a template string; render it a
    # second time against the DAG environment to get the concrete timestamp.
    origin_execution_date_str = dedent(jinja_dag_env.from_string(origin_date_time).render(context))

    # Re-assemble the parent execution date with the same delta logic the
    # sensor used, so the stored key matches what the sensor waited on.
    from datetime import datetime, timedelta
    origin_execution_date = datetime.strptime(origin_execution_date_str, '%Y-%m-%d %H:%M:%S')
    formatted_date_time = ''
    # NOTE(review): any unit other than days/hours leaves formatted_date_time
    # empty, which is then written to parent_execution_date — confirm intended.
    if execution_delta_time_unit == "days":
        formatted_date_time = (origin_execution_date + timedelta(days=int(execution_delta))).strftime("%Y-%m-%d %H:%M:%S")
    elif execution_delta_time_unit == "hours":
        formatted_date_time = (origin_execution_date + timedelta(hours=int(execution_delta))).strftime("%Y-%m-%d %H:%M:%S")

    # Identity of the current (downstream) task instance.
    dag_id = jinja_dag_env.from_string("{{ dag_run.dag_id }}").render(context).strip()
    task_id = jinja_dag_env.from_string("{{ ti.task_id }}").render(context).strip()
    run_id = jinja_dag_env.from_string("{{ run_id }}").render(context).strip()
    utc_execution_date_str = context['execution_date'].strftime('%Y-%m-%d %H:%M:%S.%f')
    from pytz import timezone
    # Same instant expressed in Asia/Shanghai, stored alongside the UTC value.
    execution_date = context['execution_date'].replace(tzinfo=timezone('UTC')).astimezone(timezone('Asia/Shanghai'))

    print("获取后的数据信息为 ==> parent_dag_id:", parent_dag_id, "parent_task_id:", parent_task_id
          , "parent_execution_date:", formatted_date_time,
          "dag_id:", dag_id, "task_id:", task_id,
          "run_id:", run_id, "execution_date", execution_date.strftime('%Y-%m-%d %H:%M:%S.%f'))

    from utils.operators.my_sql_conn import MyPymysqlPool
    mysql_pool = MyPymysqlPool("airflow_metastore_ex")

    try:
        # NOTE(review): SQL built via str.format — values come from Airflow
        # metadata, not user input, but parameterized queries would be safer.
        sql = """
        INSERT INTO t_ds_external_dag_relation_instance
        (parent_dag_id, parent_task_id, parent_execution_date, dag_id, task_id, run_id, cts_execution_date, utc_execution_date, execution_delta, execution_delta_time_unit)
        VALUES('{parent_dag_id}', '{parent_task_id}', '{parent_execution_date}', '{dag_id}', '{task_id}', '{run_id}', '{cts_execution_date}', '{utc_execution_date}', '{execution_delta}', '{execution_delta_time_unit}')
        on duplicate key update parent_dag_id   = ('{parent_dag_id}'),
                            parent_task_id= ('{parent_task_id}'),
                            parent_execution_date=('{parent_execution_date}'),
                            dag_id = ('{dag_id}'),
                            task_id  = ('{task_id}'),
                            run_id=('{run_id}')
        """.format(parent_dag_id=parent_dag_id, parent_task_id=parent_task_id
                   , parent_execution_date=formatted_date_time, dag_id=dag_id
                   , task_id=task_id, run_id=run_id, utc_execution_date=utc_execution_date_str, cts_execution_date=execution_date.strftime('%Y-%m-%d %H:%M:%S.%f'), execution_delta=execution_delta, execution_delta_time_unit=execution_delta_time_unit)

        print("执行的sql是：", sql)
        mysql_pool.insert(sql)

    except Exception as err:
        # Swallow DB errors so a bookkeeping failure never fails the task itself.
        print("sql执行失败 ,err: {}".format(err))
    finally:
        # Release the pooled connection.
        mysql_pool.dispose()

def update_airflow_runInfo(context):
    """
    Upsert the current task instance's run info into ``bgdm.airflow_task_run_info``.

    Intended as a task callback: state, timings, retry count, email, lineage
    (up/downstream task ids) and — for Spark tasks — executor resources are
    rendered from the Airflow context and written for monitoring. DB errors are
    printed and swallowed so a metrics failure never fails the task itself.
    """
    print("*******************update_airflow_runInfo*********************")

    jinja_env = context['dag'].get_template_env()
    dag_id = jinja_env.from_string("{{ dag_run.dag_id }}").render(context).strip()
    task_id = jinja_env.from_string("{{ ti.task_id }}").render(context).strip()
    run_id = jinja_env.from_string("{{ run_id }}").render(context).strip()
    start_date = jinja_env.from_string("{{ ti.start_date }} ").render(context).strip()
    end_date = jinja_env.from_string("{{ ti.end_date }} ").render(context).strip()
    state = jinja_env.from_string("{{ ti.state }} ").render(context).strip()
    try_number = jinja_env.from_string("{{ ti.try_number }} ").render(context).strip()
    # quotes are stripped because the rendered values are spliced into SQL below
    email = jinja_env.from_string("{{ task.email }} ").render(context).strip().replace("'",'')
    upstream_task_ids = jinja_env.from_string("{{ task.upstream_task_ids }} ").render(context).strip().replace("'",'')
    downstream_task_ids = jinja_env.from_string("{{ task.downstream_task_ids }} ").render(context).strip().replace("'",'')
    # an empty set renders as the literal text "set()" — store it as empty
    if upstream_task_ids=="set()":
        upstream_task_ids=""
    if downstream_task_ids=="set()":
        downstream_task_ids=""
    executor_memory=''
    executor_cores=''
    operator = jinja_env.from_string("{{ ti.operator }}").render(context).strip()
    task = context['task']
    upstream_dag_ids=''
    if operator=="SparkSqlOperator":
        # Spark resource settings live on private attributes of the operator
        executor_memory = getattr(task, "_executor_memory")
        executor_cores = getattr(task, "_executor_cores")
    if operator=="ExternalDagSensor":
        # for cross-DAG sensors the real upstream is the external dag/task
        upstream_task_ids = getattr(task, "external_task_id","").replace("'",'')
        upstream_dag_ids = getattr(task, "external_dag_id","").replace("'",'')
        print(upstream_dag_ids,"====",upstream_task_ids)

    print("获取的信息是：-operator",operator,"--dag_id:",dag_id,"task_id:",task_id,"run_id:",run_id,"start_date:",start_date,"end_date:",end_date,"state:",state,"try_number:",try_number,"email:",email,"upstream_task_ids:",upstream_task_ids,"downstream_task_ids:",downstream_task_ids)

    # SECURITY(review): credentials are hardcoded in source; move them into an
    # Airflow Connection / secrets backend.
    import pymysql
    db = pymysql.connect(host="10.33.20.8",
                         user="airflow_dispatch",
                         passwd="PpY2ewd4hJrhM5DLIu39_",
                         port=3306,
                         charset="utf8",
                         autocommit=True,
                         cursorclass=pymysql.cursors.DictCursor,
                         database='bgdm'
                         )
    cursor = db.cursor()

    try:
        # NOTE(review): SQL built via str.format — values come from Airflow
        # metadata, not user input, but parameterized queries would be safer.
        if try_number=='1':
            # first attempt: also seed first_start_date
            sql = """ insert into bgdm.airflow_task_run_info (dag_id ,task_id ,run_id ,try_number ,state ,first_start_date ,last_start_date ,last_end_date,executor_memory,executor_cores,email,upstream_task_ids,downstream_task_ids ,expand1 ,expand2 ,expand3 ,expand4 ,expand5 ,create_user_id,update_user_id)
                  values ("{dag_id}", "{task_id}", "{run_id}","{try_number}","{state}","{start_date}","{start_date}","{end_date}","{executor_memory}","{executor_cores}","{email}","{upstream_task_ids}","{downstream_task_ids}","{upstream_dag_ids} "," "," "," "," ","airflow","airflow")
                on duplicate key update state   = ('{state}'),
                                        try_number= ('{try_number}'),
                                        first_start_date=('{start_date}'),
                                        last_start_date = ('{start_date}'),
                                        last_end_date  = ('{end_date}'),
                                        update_time=(now()),
                                        executor_memory=('{executor_memory}'),
                                        executor_cores=('{executor_cores}'),
                                        email=('{email}'),
                                        upstream_task_ids=('{upstream_task_ids}'),
                                        downstream_task_ids=('{downstream_task_ids}'),
                                        expand1=('{upstream_dag_ids}')
                   """.format(dag_id=dag_id, task_id=task_id,run_id=run_id,try_number=try_number,state=state,start_date=start_date,end_date=end_date,email=email,upstream_task_ids=upstream_task_ids,downstream_task_ids=downstream_task_ids,executor_memory=executor_memory,executor_cores=executor_cores,upstream_dag_ids=upstream_dag_ids)
        else:
            # Airflow increments try_number once an attempt finishes; subtract 1
            # for terminal states so the stored attempt matches the run that ended
            if "running"!=state:
                try_number=int(try_number)-1
            sql = """ insert into bgdm.airflow_task_run_info (dag_id ,task_id ,run_id ,try_number ,state ,first_start_date ,last_start_date ,last_end_date,executor_memory,executor_cores,email,upstream_task_ids,downstream_task_ids ,expand1 ,expand2 ,expand3 ,expand4 ,expand5 ,create_user_id,update_user_id)
                  values ("{dag_id}", "{task_id}", "{run_id}","{try_number}","{state}","{start_date}","{start_date}","{end_date}","{executor_memory}","{executor_cores}","{email}","{upstream_task_ids}","{downstream_task_ids}"," {upstream_dag_ids}"," "," "," "," ","airflow","airflow")
                on duplicate key update state   = ('{state}'),
                                        try_number= ('{try_number}'),
                                        last_start_date = ('{start_date}'),
                                        last_end_date  = ('{end_date}'),
                                        update_time=(now()),
                                        executor_memory=('{executor_memory}'),
                                        executor_cores=('{executor_cores}'),
                                        email=('{email}'),
                                        upstream_task_ids=('{upstream_task_ids}'),
                                        downstream_task_ids=('{downstream_task_ids}'),
                                        expand1=('{upstream_dag_ids}') 
             """.format(dag_id=dag_id, task_id=task_id,run_id=run_id,try_number=try_number,state=state,start_date=start_date,end_date=end_date,email=email,upstream_task_ids=upstream_task_ids,downstream_task_ids=downstream_task_ids,executor_memory=executor_memory,executor_cores=executor_cores,upstream_dag_ids=upstream_dag_ids)
        print("执行的sql是：", sql)
        cursor.execute(sql)
        db.commit()

    except Exception as err:
        # swallow so a metrics failure never fails the task callback itself
        print("sql执行失败 ,err: {}".format(err))
        db.rollback()
    finally:
        # BUG FIX: the original closed cursor/db inside the except branch and
        # then again unconditionally afterwards, so any SQL error was followed
        # by a secondary "Already closed" error. Close exactly once, always.
        cursor.close()
        db.close()