import datetime
from airflow.providers.dingding.operators.dingding import DingdingHook
from airflow.providers.presto.hooks.presto import PrestoHook
from airflow.models import TaskInstance

from utils.macros import cst_ds_sfm, cst_ds_nodash, date_add, cst_ds
from airflow.hooks.postgres_hook import PostgresHook


def subtime(date1, date2):
    """Return the difference ``date2 - date1`` as a ``datetime.timedelta``.

    Both arguments are timestamp strings in ``%Y-%m-%d %H:%M:%S`` format.
    """
    fmt = "%Y-%m-%d %H:%M:%S"
    start, end = (datetime.datetime.strptime(s, fmt) for s in (date1, date2))
    return end - start


def yl_threeSegCodeOnSuccess(kwargs, dingding_conn_id="dingding_ThreeSeg_etl_alert", at_mobiles=None, at_all=None):
    """Build an Airflow on-success callback for a monitored ETL table.

    The returned callable:
      1. counts today's partition rows via Presto,
      2. pulls historical min/max/avg row counts from a Postgres log table,
      3. appends a new log row to ``public.ai_offline_etl_log``,
      4. sends a DingTalk markdown message — to the alert robot when the
         day-over-average ratio deviates by more than 15%, otherwise to the
         info robot.

    Args:
        kwargs: dict with keys ``db``, ``table``, ``desc``, ``taskid``,
            ``ifprivacy`` (truthy hides the row count in the message) and
            ``warnignore`` (truthy suppresses threshold alerting).
        dingding_conn_id: DingTalk connection id used for the ALERT message
            (the info message always uses ``dingding_ThreeSeg_etl_info``).
            Fix: this parameter was previously accepted but silently
            overwritten by a hardcoded value.
        at_mobiles: passed through to ``DingdingHook``.
        at_all: passed through to ``DingdingHook``.

    Returns:
        A callback ``f(ctx)`` suitable for ``on_success_callback``; it returns
        the result of ``DingdingHook.send()``.
    """
    # Fallback row count used when the Presto query fails.
    # TODO(review): looks like a debugging leftover — confirm this is the
    # intended best-effort value before relying on threshold alerts.
    FALLBACK_COUNT = 10567011

    def _yl_threeSegCodeOnSuccess(ctx):
        # --- run/environment info ---
        currentEndTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        rundt = datetime.datetime.now().strftime("%Y-%m-%d")
        current_dag: TaskInstance = ctx["ti"]
        env = ctx['var']['value'].env
        execution_date = cst_ds_nodash(ctx["execution_date"])

        # --- monitored-table parameters ---
        # db/table/desc identify the table; taskid is its unique monitor id;
        # ifprivacy: 1 = mask row count in the message, 0 = show it.
        db = kwargs["db"]
        table = kwargs["table"]
        desc = kwargs["desc"]
        taskid = kwargs["taskid"]
        ifprivacy = kwargs["ifprivacy"]
        warnignore = kwargs["warnignore"]

        # --- today's partition row count via Presto (best-effort) ---
        # Broad except is deliberate: a Presto outage must not fail the DAG's
        # success callback; we fall back to a sentinel count and log the error.
        try:
            presql = f"select count(1) as cnt from {db}.{table} where dt='{execution_date}'"
            preconn = PrestoHook(presto_conn_id='presto_default')
            hivecnt = preconn.get_first(presql)[0]
        except Exception as err:
            hivecnt = FALLBACK_COUNT
            print("error: ", err)

        # --- reference windows for threshold stats ---
        # NOTE(review): despite the names, "_30" is a 7-day window and "_7" is
        # a 3-day window; the DingTalk message labels match the actual spans.
        # NOTE(review): executiondate is written in nodash form below but
        # compared here against dashed cst_ds values — confirm the stored
        # format actually matches these boundaries.
        execution_date_last_30 = cst_ds(date_add(ctx["execution_date"], -7))
        execution_date_last_7 = cst_ds(date_add(ctx["execution_date"], -3))
        execution_date_last_1 = cst_ds(date_add(ctx["execution_date"], -1))

        posconn = PostgresHook(postgres_conn_id='postgresql_pro')

        def _window_stat(agg_expr, start_date):
            """Latest-per-day aggregate of count_daily over [start_date, D-1],
            restricted to successful runs of this taskid. Values are passed as
            bind parameters instead of being interpolated into the SQL."""
            sql = f"""
                select {agg_expr} from (
                    select taskid, rundt, count_daily,
                           row_number() over (partition by rundt order by endtime desc) rnk
                    from public.ai_offline_etl_log
                    where executiondate >= %s and executiondate <= %s
                      and isok = 'success' and taskid = %s
                ) t where rnk = 1
            """
            return posconn.get_first(sql, parameters=(start_date, execution_date_last_1, taskid))[0]

        count_max_30 = _window_stat("max(count_daily)", execution_date_last_30)
        count_min_30 = _window_stat("min(count_daily)", execution_date_last_30)
        count_avg_30 = _window_stat("round(avg(count_daily))", execution_date_last_30)
        count_avg_7 = _window_stat("round(avg(count_daily))", execution_date_last_7)

        # --- log-row fields ---
        starttime = cst_ds_sfm(current_dag.start_date)
        elpasetime = subtime(starttime, currentEndTime)
        isok = current_dag.state

        # Deviation ratios vs the window averages. Guard against both missing
        # (None) and zero averages — the original only guarded None, which
        # raised ZeroDivisionError on an all-empty reference window.
        avg_30_ratio = 0 if not count_avg_30 else round((hivecnt - count_avg_30) / count_avg_30, 4)
        avg_7_ratio = 0 if not count_avg_7 else round((hivecnt - count_avg_7) / count_avg_7, 4)

        # --- append the monitoring log row to Postgres ---
        cols_list = ["taskid", "dbname", "tablename", "starttime", "endtime", "elpasetime", "executiondate",
                     "count_daily", "count_max_30", "count_min_30", "count_avg_30", "count_avg_7", "avg_30_ratio",
                     "avg_7_ratio", "isok", "rundt"]
        values_str = (
            taskid, db, table, starttime, currentEndTime, elpasetime, execution_date, hivecnt, count_max_30,
            count_min_30, count_avg_30, count_avg_7, avg_30_ratio, avg_7_ratio, isok, rundt)
        posconn.insert_rows("public.ai_offline_etl_log", [values_str], cols_list)

        # --- alerting decision ---
        # Alert when either deviation exceeds 15%, unless alerts are ignored.
        alert_flag = (abs(avg_30_ratio) > 0.15 or abs(avg_7_ratio) > 0.15) and not warnignore
        # Privacy handling: mask the row count for sensitive tables.
        dailycount = "******" if ifprivacy else hivecnt

        # Abnormal data goes to the alert robot, otherwise to the info robot.
        if alert_flag:
            conn_id = dingding_conn_id
            title = f'<font color=#EA3323>[{env.upper()}]: {current_dag.task_id.upper()} 数据量异常告警</font>'
        else:
            conn_id = "dingding_ThreeSeg_etl_info"
            title = f'[{env.upper()}]: {current_dag.task_id.upper()}_ETL_INFO'

        # --- DingTalk markdown body ---
        text = f"""
# {title}
   - *表全名:* {db}.{table}
   - *任务状态:*  {isok}
   - *分区:* {execution_date}
   - *分区数据量:* {dailycount}
   - *分区数据量3天均值同环比:* {avg_7_ratio * 100}%
   - *分区数据量7天均值同环比:* {avg_30_ratio * 100}%
   - *任务开始时间:* {starttime}
   - *任务结束时间:* {currentEndTime}
   - *任务耗时:* {elpasetime}
   - *表描述:* {desc}
"""

        return DingdingHook(dingding_conn_id=conn_id,
                            message_type='markdown',  # text, link, markdown, actionCard, feedCard
                            message={'title': title, 'text': text},
                            at_mobiles=at_mobiles,
                            at_all=at_all, ).send()

    return _yl_threeSegCodeOnSuccess
