import datetime
import os
import traceback

import numpy as np
import pandas as pd
import requests
from sklearn.linear_model import LinearRegression
from sqlalchemy import create_engine

from air_web.config.config import config
from air_web.dw.logger import init_log
from air_web.web_flask.dal.base_dal import EsBaseDal
from air_web.web_flask.tools.big_air import (
    get_city_cycle_coad,
    get_no_excute_cons,
)
from air_web.web_flask.tools.kputil_old import get_baseline_data

# Ensure the log directory exists before initialising the logger.
# os.makedirs with exist_ok=True also creates missing parents and avoids
# the check-then-create race of os.path.exists() + os.mkdir().
_LOG_DIR = "/home/zshield/logs/dw/"
os.makedirs(_LOG_DIR, exist_ok=True)
log = init_log(_LOG_DIR)


# When True, the last two quarters of today's curve are extrapolated with a
# linear model (see get_air_condition_load_execute / run_baseline_data).
is_predict = True
# Snapshot of "today" taken once at import time (YYYY-MM-DD).
now = datetime.date.today().strftime("%Y-%m-%d")
year = datetime.datetime.now().year
# Elasticsearch access layer for hourly weather data.
es_dal = EsBaseDal(config["ES_HOST"])
# Shared SQLAlchemy engine for the business MySQL database.
connect = create_engine(
    "mysql+pymysql://{}:{}@{}:{}/{}".format(
        config.get("MYSQL_USER"),
        config.get("MYSQL_PASSWORD"),
        config.get("MYSQL_HOST"),
        config.get("MYSQL_PORT"),
        config.get("MYSQL_DB_NAME"),
    )
)


def get_baseline_day(day_time):
    """
    Pick a day of last month to serve as the 96-point air-conditioning
    baseline. The chosen day is the one whose province-wide daily max
    temperature is closest to, and not below, today's max temperature;
    if no day last month was that hot, fall back to the closest cooler day.
    (E.g. today's max is 32 degrees but last July has no 32-degree day:
    take the nearest value above, 33.)

    :param day_time: target day, "YYYY-MM-DD"
    :return: baseline day as "YYYY-MM-DD"
    """
    index_name = config.get("ACLR_WEATHER_HOUR", "aclr_weather_hour")

    def _daily_max_tmp(date_str):
        # Province-wide max temperature of one day, from Elasticsearch.
        query_rules = [
            ("datetime", "query", "=", date_str),
            ("tmp", "stat", "max", ">", "0"),
        ]
        hits = es_dal.get_group_vector(
            query_rules, index_name, doc_time_field="datetime"
        )
        return round(hits[0][1], 2) if len(hits) > 0 else None

    first_of_month = datetime.datetime.strptime(
        day_time, "%Y-%m-%d"
    ).replace(day=1)
    prev_month_end = first_of_month - datetime.timedelta(days=1)
    prev_month_start = prev_month_end.replace(day=1)

    # One max-temperature record per day of last month.
    records = []
    for date in pd.date_range(
        start=prev_month_start, end=prev_month_end, freq="1d"
    ).strftime("%Y-%m-%d"):
        day_tmp = _daily_max_tmp(date)
        if day_tmp is None:
            print(f"缺少天气数据:{date}")
        else:
            records.append({"date": date, "tmp": day_tmp})
    base_tmp_df = pd.DataFrame(records)

    # Today's max temperature (no guard: missing data raises, as before).
    today_hits = es_dal.get_group_vector(
        [
            ("datetime", "query", "=", day_time),
            ("tmp", "stat", "max", ">", "0"),
        ],
        index_name,
        doc_time_field="datetime",
    )
    today_tmp = round(today_hits[0][1], 2)

    at_or_above = base_tmp_df[base_tmp_df["tmp"] >= today_tmp]
    if not at_or_above.empty:
        # Closest day that was at least as hot as today.
        baseline_day = at_or_above.sort_values("tmp").iloc[0]["date"]
    else:
        # Nothing as hot as today: take the closest cooler day.
        below = base_tmp_df[base_tmp_df["tmp"] < today_tmp]
        baseline_day = below.sort_values("tmp", ascending=False).iloc[0][
            "date"
        ]
    print(f"日期:{day_time}, 温度：{today_tmp}，基线日:{baseline_day}")
    return baseline_day


def get_on5_al_type(day_time=now):
    """
    Map each city org (on5) that has an alert on *day_time* to its
    load-retention ratio: alert_type 1 -> 0.8, alert_type 2 -> 0.9,
    anything else -> 0.

    :param day_time: day to query, "YYYY-MM-DD" (defaults to today)
    :return: dict {on5: ratio}
    """
    # Closed mapping from alert_type to retention ratio.
    ratio_by_alert = {1: 0.8, 2: 0.9}
    alter_df = pd.read_sql(
        f"""
                select on5, alert_type from city_alert where alert_time = '{day_time}'
            """,
        connect,
    )
    on5_al_type = {
        row["on5"]: ratio_by_alert.get(row["alert_type"], 0)
        for row in alter_df.to_dict("records")
    }
    print(on5_al_type)
    return on5_al_type


def get_on5_alter_low_sum(day_time, baseline_day):
    """
    Province-wide air-conditioning load reduction of *day_time* versus
    *baseline_day*, summed over the city orgs listed below.

    For every 15-minute quarter the SQL joins the summed real load of
    *day_time* against the summed load of *baseline_day*; the reduction
    per quarter is base_load - real_load.

    :param day_time: day under evaluation, "YYYY-MM-DD"
    :param baseline_day: baseline day, "YYYY-MM-DD"
    :return: (avg_reduction, max_reduction) in kW; (0, 0) when no data.
             Fix: previously the empty-list path returned a DataFrame and
             the empty-result path a bare 0, both of which crashed callers
             that unpack two values.
    """
    # Static list of the Sichuan city-level org codes (previously derived
    # from get_on5_al_type(day_time); kept hard-coded on purpose).
    on5_list = [
        51401,
        51402,
        51403,
        51404,
        51405,
        51406,
        51407,
        51408,
        51409,
        51410,
        51411,
        51412,
        51413,
        51414,
        51415,
        51416,
        51417,
        51418,
        51419,
        51420,
        51421,
        51422,
        51428,
    ]
    print(on5_list)
    if not len(on5_list):
        # Consistent two-tuple shape so callers can always unpack.
        return 0, 0
    baseline_end_day = (
        datetime.datetime.strptime(baseline_day, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")
    after_day = (
        datetime.datetime.strptime(day_time, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")
    if len(on5_list) == 1:
        on5_s = "org_no =  {}".format(on5_list[0])
    else:
        on5_s = "org_no in   ({}) ".format(",".join(map(str, on5_list)))
    # %%H:%%i is DB-API escaping so MySQL sees %H:%i (the quarter label).
    s_sql = f"""   
           SELECT a.*, b.base_load FROM
           ((
                               SELECT
               DATE_FORMAT( data_time, '%%H:%%i' ) quarter,
               sum(p_kt_sum) real_load
           FROM
               orgno_typeid_15min 
           WHERE
                data_time >=  '{day_time}'  and data_time <  '{after_day}' 
               AND type_id = 0 
           AND {on5_s} group by quarter
           ) a
           left join (	SELECT
               DATE_FORMAT( data_time, '%%H:%%i' ) quarter,
               sum(p_kt_sum) base_load
           FROM
               orgno_typeid_15min 
           WHERE
                data_time >=  '{baseline_day}'  and data_time <  '{baseline_end_day}' 
               AND type_id = 0 
           AND {on5_s}  group by quarter) b on a.quarter = b.quarter) 
                   """
    # print(s_sql)
    res_df = pd.read_sql(s_sql, connect)
    if not len(res_df):
        # No load rows at all: same two-tuple shape (was a bare 0).
        return 0, 0
    res_df["kt_low"] = res_df["base_load"] - res_df["real_load"]
    print(res_df[["quarter", "base_load", "real_load", "kt_low"]])
    print(f"get low data {on5_list}")
    # Drop the two largest reductions before averaging to damp outliers.
    ktl = sorted(res_df["kt_low"].to_list())
    if len(ktl) > 3:
        ktl = ktl[:-2]
    avg_ktl = sum(ktl) / len(ktl)
    max_ktl = max(ktl)
    # Client requirement: the value must not be <= 0 and not exceed
    # 1,000,000 kW; 100 kW is used as the practical floor.
    max_ktl = max(100, min(max_ktl, 1000000))
    return avg_ktl, max_ktl


def get_air_condition_summary(day_time, baseline_day):
    """
    Build the province-level daily summary record: business consumer count
    and max load, cycle (reduction) load versus the baseline day, and the
    city control (bidding) statistics.

    All load figures are scaled from kW to 万kW (/10000).

    :param day_time: day under evaluation, "YYYY-MM-DD"
    :param baseline_day: baseline day, "YYYY-MM-DD"
    :return: one-element list with the summary dict, or None on error
    """
    try:
        business_cons_max_load = 0
        business_cons_count = 0
        # Defaults in case city_control has no rows for the day.
        city_count = join_bid = already_bid = 0
        after_day = (
            datetime.datetime.strptime(day_time, "%Y-%m-%d")
            + datetime.timedelta(days=1)
        ).strftime("%Y-%m-%d")

        business_cons_count_df = pd.read_sql(
            f"""
        select cons_num as business_cons_count from area_cons_num 
        where org_no = 51101  and year = {year} """,
            connect,
        )
        if len(business_cons_count_df):
            business_cons_count = business_cons_count_df.to_dict("records")[0][
                "business_cons_count"
            ]
        business_data = pd.read_sql(
            f"""   	
        SELECT
            max(p_kt_sum) business_cons_max_load 
        FROM
            orgno_typeid_15min 
        WHERE
            data_time >= '{day_time}' 
            AND data_time < '{after_day}' 
            AND org_no = 51101 
            AND type_id = 0
                     """,
            connect,
        )
        if len(business_data):
            # SQL max() yields NULL (-> None/NaN) when no rows match the
            # window; keep the 0 default so the /10000 below cannot fail.
            max_load = business_data.to_dict("records")[0][
                "business_cons_max_load"
            ]
            if max_load is not None and not pd.isna(max_load):
                business_cons_max_load = max_load

        city_control_df = pd.read_sql(
            f"""                    
               SELECT
                   '{day_time}' day,
                   sum( t1.n_cons_total ) join_bid,
                   sum( t1.n_cons_st ) already_bid ,
                   count(on5) city_count
               FROM
                   city_control t1 
               WHERE
                   t1.control_time = '{day_time}' 
               GROUP BY
                   day
               """,
            connect,
        )

        on5_alter_low_sum, max_ktl = get_on5_alter_low_sum(
            day_time, baseline_day
        )
        result = {
            "business_cons_max_load": business_cons_max_load / 10000,
            "cycle_load": on5_alter_low_sum / 10000,
            "max_cycle_load": max_ktl / 10000,
            "day": day_time,
            "business_cons_count": business_cons_count / 10000,
            "city_count": city_count,
            "join_bid": join_bid,
            "already_bid": already_bid,
        }
        if len(city_control_df):
            # Overwrite defaults with the real bidding statistics.
            result.update(city_control_df.to_dict("records")[0])
        return [result]
    except Exception:
        # Best-effort job step: log the traceback and return None so the
        # caller ends up posting nothing for this dataset.
        print(traceback.format_exc())


def get_city_complete_rate_data(day_time=now):
    """
    Per-city bidding completion rate for *day_time*.

    :param day_time: day to query, "YYYY-MM-DD" (defaults to today)
    :return: list of record dicts with shortened city names
    """
    city_control_df = pd.read_sql(
        f"""                    
    SELECT
        t1.on5 city_org_no,
        t1.city_name city_org_name,
        '{day_time}'  day,
        sum( t1.n_cons_total ) join_bid,
        sum( t1.n_cons_st ) already_bid,
        ( t1.n_cons_st / t1.n_cons_total ) city_complete_rate 
    FROM
        city_control t1 
    WHERE
        t1.control_time = '{day_time}' 
    GROUP BY
        on5
    """,
        connect,
    )
    records = city_control_df.to_dict("records")
    # Strip company boilerplate from the display name.
    for record in records:
        short_name = record["city_org_name"]
        for token in ("国网", "供电公司", "新区"):
            short_name = short_name.replace(token, "")
        record["city_org_name"] = short_name
    return records


def get_air_condition_load_execute(day_time, baseline_day):
    """
    Build per-city 96-point curves for *day_time*: real load, baseline load
    (from *baseline_day*) and an alert target load, smoothed with a 4-point
    rolling mean. The last two observed quarters are discarded and, when the
    module flag ``is_predict`` is set, re-filled from a per-city linear
    regression of real load on baseline load.

    :param day_time: day under evaluation, "YYYY-MM-DD"
    :param baseline_day: baseline day, "YYYY-MM-DD"
    :return: list of record dicts ready to POST (may be empty)
    """
    # Exclusive upper bound of the baseline day's SQL time range.
    baseline_end_day = (
        datetime.datetime.strptime(baseline_day, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")

    # Hard-coded Sichuan city-level org codes.
    on5_list = (
        51401,
        51402,
        51403,
        51404,
        51405,
        51406,
        51407,
        51408,
        51409,
        51410,
        51411,
        51412,
        51413,
        51414,
        51415,
        51416,
        51417,
        51418,
        51419,
        51420,
        51421,
        51422,
        51428,
    )

    # Retention ratio per alerted city: alert_type 1 -> 0.8, 2 -> 0.9,
    # otherwise 0. (Duplicates the logic of get_on5_al_type.)
    alter_df = pd.read_sql(
        f"""
            select on5, alert_type from city_alert where alert_time = '{day_time}'
        """,
        connect,
    )
    on5_al_type = {}
    if len(alter_df):
        for i in alter_df.to_dict("records"):
            at = i["alert_type"]
            if at == 1:
                at_r = 0.8
            elif at == 2:
                at_r = 0.9
            else:
                at_r = 0
            on5_al_type[i["on5"]] = at_r
    print(on5_al_type)
    # on5_list = list(on5_al_type.keys())

    # Exclusive upper bound of the evaluated day's SQL time range.
    after_day = (
        datetime.datetime.strptime(day_time, "%Y-%m-%d")
        + datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")

    def get_on5_data(on5_d):
        # One city's quarter-by-quarter curve. RIGHT JOIN keeps every
        # baseline quarter even when today's real load is still missing
        # (real_load comes back NULL/NaN for those rows).
        s_sql = f"""   
        SELECT b.*, a.real_load FROM
        ((
                            SELECT
            DATE_FORMAT( data_time, '%%H:%%i' ) quarter,
            p_kt_sum real_load
        FROM
            orgno_typeid_15min
        WHERE
             data_time >=  '{day_time}'  and data_time <  '{after_day}' 
            AND type_id = 0 
        AND org_no = {on5_d} 
        ) a
        right join (	SELECT
            org_no city_org_no,
            org_name city_org_name,
            DATE_FORMAT( data_time, '%%H:%%i' ) quarter,
            p_kt_sum base_load
        FROM
            orgno_typeid_15min
        WHERE
             data_time >=  '{baseline_day}'  and data_time <  '{baseline_end_day}' 
            AND type_id = 0 
        AND org_no = {on5_d} ) b on a.quarter = b.quarter) 
                """
        # print(s_sql)
        condition_control_df = pd.read_sql(s_sql, connect)
        if len(condition_control_df) > 5:
            # Smooth both curves with a 4-quarter rolling mean; bfill()
            # fills the first three NaN rows the window produces.
            condition_control_df = condition_control_df.sort_values(
                by="quarter"
            )
            condition_control_df[["base_load", "real_load"]] = (
                condition_control_df[["base_load", "real_load"]]
                .rolling(window=4)
                .mean()
                .bfill()
            )

        if on5_d in on5_al_type:
            # Target = baseline scaled by the alert retention ratio.
            condition_control_df["target_load"] = condition_control_df[
                "base_load"
            ] * on5_al_type.get(on5_d, 0)
        else:
            condition_control_df["target_load"] = 0
        return condition_control_df

    df_list = []
    for on5 in on5_list:
        df = get_on5_data(on5)
        if len(df):
            df_list.append(df)
    print(len(df_list))
    if not len(df_list):
        return []
    condition_df = pd.concat(df_list)
    print(len(condition_df))

    # Quarters for which at least one city already has real data.
    real_time_list = sorted(
        list(
            set(condition_df.loc[~condition_df["real_load"].isna(), "quarter"])
        )
    )
    if len(real_time_list) > 3:
        res_df = pd.DataFrame()
        # Drop the two most recent observed quarters (assumed unreliable /
        # partial) and optionally re-predict them per city.
        drop_time_list = real_time_list[-2:]
        exist_time_list = real_time_list[:-2]
        print(f"开始过滤:{len(real_time_list)}, {drop_time_list}")

        for on5, group_df in condition_df.groupby("city_org_no"):
            group_df.loc[
                group_df["quarter"].isin(drop_time_list), "real_load"
            ] = np.nan

            if is_predict:
                # Linear fit: real_load ~ base_load on the kept quarters.
                # NOTE(review): X is the rows whose quarter is in
                # exist_time_list while y is real_load.dropna(); if a city
                # has NaN real_load inside exist_time_list the lengths
                # disagree and fit() raises — confirm upstream guarantees.
                model = LinearRegression()
                model.fit(
                    group_df.loc[
                        group_df["quarter"].isin(exist_time_list), "base_load"
                    ].values.reshape(-1, 1),
                    group_df["real_load"].dropna(),
                )

                past_features = group_df.loc[
                    group_df["quarter"].isin(drop_time_list), "base_load"
                ]
                predictions = model.predict(
                    past_features.values.reshape(-1, 1)
                )
                group_df.loc[
                    group_df["quarter"].isin(drop_time_list), "real_load"
                ] = predictions
                print(f"预测:{predictions}")
            res_df = pd.concat([res_df, group_df])
    else:
        res_df = condition_df

    # NaN -> None so the JSON payload serializes as null.
    res_df = res_df.replace({np.nan: None})
    res_df["day"] = day_time
    res_df["baseDay"] = baseline_day
    res = res_df.to_dict("records")
    for i in res:
        try:
            # Strip company boilerplate from the display name.
            i["city_org_name"] = (
                i["city_org_name"]
                .replace("国网", "")
                .replace("供电公司", "")
                .replace("新区", "")
            )
        # NOTE(review): bare except — presumably guards city_org_name being
        # None; consider narrowing to AttributeError.
        except:
            print(i)
    return res


def post_data(url, data):
    """
    POST *data* as JSON to *url*, retrying up to 3 times on failure.

    :param url: target endpoint
    :param data: JSON-serializable payload; nothing is sent when falsy
    """
    if not data:
        # Hoisted out of the retry loop: an empty payload never changes.
        log.info(f" data is null : {data}")
        return
    for attempt in range(3):
        log.info(f"start request {url}")
        try:
            resp = requests.post(url=url, json=data)
        except requests.RequestException:
            # Network errors previously crashed the job instead of using
            # the remaining retries; log and try again.
            log.info(traceback.format_exc())
            continue
        print(resp.content)
        if resp.status_code == 200:
            return
    # Exhausted all attempts without a 200: leave a trace in the log.
    log.info(f"post_data failed after 3 attempts: {url}")


# Host:port of the downstream reporting service (production address).
base_url = "25.214.217.116:20081"
# base_url = '192.168.83.221:18008'

# One endpoint per dataset; keys match the run_* helpers below.
url_map = {
    "air_condition_summary": f"http://{base_url}/airconditionSummary",
    "air_condition_load_execute": f"http://{base_url}/airConditionerLoadExecute",
    "city_complete_rate_data": f"http://{base_url}/cityCompleteRate",
    "baseline_data": f"http://{base_url}/zhixiang/addDataAircondition",
    "no_excute_cons": f"http://{base_url}/NotExecuteCons",
    "city_cycle_coad": f"http://{base_url}/CityCycleCoad",
}


def run_airconditionSummary(day_time, baseline_day):
    """Build the province summary for *day_time* and push it downstream."""
    log.info("start #### air_condition_summary")
    payload = get_air_condition_summary(day_time, baseline_day)
    log.info(payload)
    post_data(url_map["air_condition_summary"], payload)


def run_air_condition_load_execute(day_time, baseline_day):
    """Build the per-city load-execution curves and push them downstream."""
    log.info("start #### air_condition_load_execute")
    payload = get_air_condition_load_execute(day_time, baseline_day)
    # Log only a sample plus the total count — the full list is large.
    log.info(payload[:10])
    log.info("$$$$$$$$$$$$$ {}".format(len(payload)))
    post_data(url_map["air_condition_load_execute"], payload)


def run_city_complete_rate_data(day_time):
    """Build the per-city completion rates and push them downstream."""
    log.info("start #### city_complete_rate_data")
    payload = get_city_complete_rate_data(day_time=day_time)
    log.info(payload)
    post_data(url_map["city_complete_rate_data"], payload)


def run_baseline_data(
    day_time, baseline_day, is_delete=True, is_predict=False
):
    """Build the 96-point baseline dataset and push it downstream.

    Note: the *is_predict* parameter intentionally shadows the module-level
    flag of the same name; callers pass it explicitly.
    """
    log.info("start #### baseline_data")
    payload = get_baseline_data(
        day_time, baseline_day, is_delete, is_predict
    )
    post_data(url_map["baseline_data"], payload)


def run_city_cycle_coad(day_time, baseline_day):
    """Build the per-city cycle-load data and push it downstream."""
    # Use the module logger like every other run_* helper (was print()).
    log.info("start #### city_cycle_coad")
    city_res_list = get_city_cycle_coad(day_time, baseline_day)
    log.info(city_res_list)
    post_data(url_map["city_cycle_coad"], city_res_list)


def run_no_excute_cons(day_time, baseline_day):
    """Build the list of non-executing consumers and push it downstream."""
    log.info("start #### no_excute_cons")
    payload = get_no_excute_cons(day_time, baseline_day)
    post_data(url_map["no_excute_cons"], payload)


def sichuan_big_main():
    """Daily job: compute and push every Sichuan air-conditioning dataset.

    Skips silently when the database is not a Sichuan deployment (no row
    for org 51101 in real_org_no).
    """
    org_df = pd.read_sql(
        "select * from real_org_no where org_no = 51101 ", connect
    )
    if org_df.empty:
        print("没有四川供电所信息,应该非四川,跳过")
        return
    today = datetime.date.today().strftime("%Y-%m-%d")
    base_day = get_baseline_day(today)

    run_airconditionSummary(today, base_day)
    run_city_complete_rate_data(today)
    run_air_condition_load_execute(today, base_day)
    run_baseline_data(today, base_day, is_predict=is_predict)
    run_city_cycle_coad(today, base_day)
    run_no_excute_cons(today, base_day)


def sichuan_big_main_yesterday():
    """Re-push yesterday's baseline data without deleting existing rows.

    Same Sichuan-deployment guard as sichuan_big_main().
    """
    org_df = pd.read_sql(
        "select * from real_org_no where org_no = 51101 ", connect
    )
    if org_df.empty:
        print("没有四川供电所信息,应该非四川,跳过")
        return
    yesterday = (
        datetime.date.today() - datetime.timedelta(days=1)
    ).strftime("%Y-%m-%d")
    run_baseline_data(
        yesterday, get_baseline_day(yesterday), is_delete=False
    )


def test_main():
    """Manual debugging entry: resolve today's baseline day only.

    Uncomment individual run_* calls below to exercise a single task.
    """
    day_time = datetime.date.today().strftime("%Y-%m-%d")
    baseline_day = get_baseline_day(day_time)
    # run_airconditionSummary(day_time, baseline_day)
    # run_city_complete_rate_data(day_time)
    # run_air_condition_load_execute(day_time, baseline_day)
    # run_baseline_data(day_time, baseline_day)
    # run_city_cycle_coad(day_time, baseline_day)
    # run_no_excute_cons(day_time, baseline_day)


if __name__ == "__main__":
    # Production entry point is sichuan_big_main(); test_main() only
    # resolves the baseline day for manual debugging.
    # sichuan_big_main()
    test_main()
