from collections import OrderedDict
import numpy as np

from app.models.bo.gaolu.gaolu_reason_input_bo import GaoLuReasonInputBO
from app.models.po.gaolu.gaolu_predict_po import GaoLuPredictPO
from app.services.business import gaolu_biz_service
from app.services.dal import gaolu_dao_service
from app.utils import simple_util, sys_utils, date_util

from app.exts import ironman_redis as redis
from app.database import convert
from app.models.bo.hanfan.hanfan_input_param_bo import HanFanInputParamBO
from app.models.po.lugang.lugang_original_po import LuGangDataPO
from sqlalchemy import and_
from datetime import datetime, timedelta
from typing import List
import pandas as pd
import json
from typing import Dict
from app.services.models.gaolu_reason_model import gaolu_reason_model

# Sensor tag groups, ordered by furnace section.  Each key is "s<number>";
# the numeric part is used as the display tag downstream (lugang_temp_change_rate
# strips the leading "s") and presumably encodes the section elevation in mm —
# TODO confirm against the instrumentation map.  An OrderedDict keeps a stable
# section order for get_lugang_matrix's flattened output.
struct = OrderedDict(
    s11700=["CG_LT_GL_GL04_LGWDBG10500TE1247",
            "CG_LT_GL_GL04_LGWDBG11500TE1249",
            "CG_LT_GL_GL04_LGWDBG11500TE1251",
            "CG_LT_GL_GL04_LGWDBG10500TE1245",
            "CG_LT_GL_GL04_LGWDBG11500TE1255"],

    s10200=["CG_LT_GL_GL04_LGWDBG9500TE1235", "CG_LT_GL_GL04_LGWDBG10500TE1237",
            "CG_LT_GL_GL04_LGWDBG10500TE1241", "CG_LT_GL_GL04_LGWDBG9500TE1233", "CG_LT_GL_GL04_LGWDBG10500TE1243"],

    s9200=["CG_LT_GL_GL04_LGWDBG8500TE1223",
           "CG_LT_GL_GL04_LGWDBG9500TE1225",
           "CG_LT_GL_GL04_LGWDBG9500TE1229",
           "CG_LT_GL_GL04_LGWDBG9500TE1227",
           "CG_LT_GL_GL04_LGWDBG8500TE1221",
           "CG_LT_GL_GL04_LGWDBG9500TE1231"],

    s8700=["CG_LT_GL_GL04_LGWDBG8000TE1207",
           "CG_LT_GL_GL04_LGWDBG8000TE1205",
           "CG_LT_GL_GL04_LGWDBG8500TE1211",
           "CG_LT_GL_GL04_LGWDBG8500TE1209",
           "CG_LT_GL_GL04_LGWDBG8500TE1217",
           "CG_LT_GL_GL04_LGWDBG8500TE1213",
           "CG_LT_GL_GL04_LGWDBG8500TE1219"],

    s8200=["CG_LT_GL_GL04_LGWDBG7500TE1191",
           "CG_LT_GL_GL04_LGWDBG7500TE1190",
           "CG_LT_GL_GL04_LGWDBG8000TE1195",
           "CG_LT_GL_GL04_LGWDBG8000TE1193",
           "CG_LT_GL_GL04_LGWDBG8000TE1199",
           "CG_LT_GL_GL04_LGWDBG8000TE1201",
           "CG_LT_GL_GL04_LGWDBG8000TE1203", ],

    s7700=["CG_LT_GL_GL04_LGWDBG7000TE1175",
           "CG_LT_GL_GL04_LGWDBG7000TE1173",
           "CG_LT_GL_GL04_LGWDBG7500TE1179",
           "CG_LT_GL_GL04_LGWDBG7500TE1177",
           "CG_LT_GL_GL04_LGWDBG7500TE1185",
           "CG_LT_GL_GL04_LGWDBG7500TE1181",
           "CG_LT_GL_GL04_LGWDBG7500TE1187", ],

    s7200=["CG_LT_GL_GL04_LGWDBG7000TE1161",
           "CG_LT_GL_GL04_LDWDBG6500TE1159",
           "CG_LT_GL_GL04_LGWDBG7000TE1165",
           "CG_LT_GL_GL04_LGWDBG7000TE1163",
           "CG_LT_GL_GL04_LGWDBG7000TE1169",
           "CG_LT_GL_GL04_LGWDBG7000TE1167",
           "CG_LT_GL_GL04_LDWDBG6500TE1157",
           "CG_LT_GL_GL04_LGWDBG7000TE1171"],

    s6693=["CG_LT_GL_GL04_LDWDBG6200TE1134",
           "CG_LT_GL_GL04_LDWDBG6200TE1137",
           "CG_LT_GL_GL04_LDWDBG6200TE1140",
           "CG_LT_GL_GL04_LDWDBG6200TE1144", ],

    s5693=[
        "CG_LT_GL_GL04_LDWDBG5700TE1109",
        "CG_LT_GL_GL04_LDWDBG5700TE1112",
        "CG_LT_GL_GL04_LDWDBG5700TE1115",
        "CG_LT_GL_GL04_LDWDBG5700TE1119", ],

)


def get_lugang_matrix(value_dict):
    """Flatten *value_dict* readings into a list ordered by `struct`.

    Returns (section keys of `struct`, flat list of readings rounded to 2
    decimals).  A sensor key containing commas is treated as a group of
    sub-keys whose values are averaged; missing keys default to 0.
    """
    readings = []
    for sensor_keys in struct.values():
        for sensor_key in sensor_keys:
            # A comma-joined key denotes several tags averaged into one cell.
            if sensor_key.find(",") > 0:
                parts = sensor_key.split(",")
                mean_value = np.mean([float(value_dict.get(p, 0)) for p in parts])
                readings.append(round(mean_value, 2))
            else:
                readings.append(round(float(value_dict.get(sensor_key, 0)), 2))

    return list(struct.keys()), readings


def _upsert_lugang(name, value, date_time, level, now):
    """Insert a LuGangDataPO row keyed by (date_time, name, level), or update
    its value (and gmt_modified) when the row already exists."""
    existing: LuGangDataPO = convert.query(LuGangDataPO,
                                           and_(LuGangDataPO.date_time == date_time,
                                                LuGangDataPO.name == name,
                                                LuGangDataPO.level == level),
                                           ).first()
    if existing is None:
        record: LuGangDataPO = LuGangDataPO()
        record.level = level
        record.name = name
        record.value = value
        record.date_time = date_time
        record.gmt_create = now
        record.gmt_modified = now
        convert.add_one(record)
    else:
        convert.update_model(existing, dict(value=value, gmt_modified=datetime.now()))


def dump_lugang_data():
    """Persist the latest redis "lugang" sensor snapshot into the DB.

    For each sensor the function upserts the minute-level reading, then
    refreshes the week-level average (minute rows from Monday 00:00:00 to
    Sunday 23:59:59 of the current week, stored at the Sunday timestamp) and
    the hour-level average (minute rows of the current hour, stored at
    HH:59:59).  When the current hour has no minute samples yet, the last
    hourly value cached in the "lugang_hourly" redis hash is reused.
    """
    value_dict = redis.hget_all_str_decode("lugang")
    time_dict = redis.hget_all_str_decode("lugang_update")

    now = datetime.now()

    # Fix: the original `pass` fell through and crashed on value_dict.items()
    # when redis held no data; bail out instead.
    if value_dict is None:
        return

    # These bounds depend only on `now`; compute them once instead of on
    # every loop iteration as the original did.
    monday = get_monday(now)
    weekend = get_weekend(now)
    hour_start = datetime(now.year, now.month, now.day, now.hour, 0, 0)
    hour_end = datetime(now.year, now.month, now.day, now.hour, 59, 59)

    for key, value in value_dict.items():
        # Timestamp published alongside the value in the companion hash.
        date_time = datetime.strptime(time_dict.get(key), "%Y-%m-%d %H:%M:%S")

        # Minute-level raw reading.
        _upsert_lugang(key, value, date_time, "min", now)

        # All minute-level rows of the current week feed the weekly average.
        histories = convert.query(LuGangDataPO,
                                  and_(LuGangDataPO.date_time >= monday,
                                       LuGangDataPO.name == key,
                                       LuGangDataPO.date_time <= weekend,
                                       LuGangDataPO.level == 'min'),
                                  ).all()
        if histories:
            weekly_avg = sum(h.value for h in histories) / len(histories)
            _upsert_lugang(key, weekly_avg, weekend, "week", now)

        # Minute-level rows within the current hour feed the hourly average.
        hourly_histories = [h for h in histories
                            if hour_start <= h.date_time <= hour_end]
        if hourly_histories:
            hourly_avg = sum(h.value for h in hourly_histories) / len(hourly_histories)
            _upsert_lugang(key, hourly_avg, hour_end, "hour", now)
            redis.hset("lugang_hourly", key, hourly_avg)
        else:
            # No fresh samples this hour: fall back to the cached hourly value.
            cached = redis.hget("lugang_hourly", key)
            if cached is None:
                continue
            _upsert_lugang(key, cached, hour_end, "hour", now)


def get_history(name, time):
    """Return (current hourly value, distribution dict) for sensor *name*.

    When *time* is None the live hourly cache in redis is read; otherwise the
    hour-level DB record stored at HH:59:59 of *time* is looked up.  The
    distribution comes from the "lugang_param_dist" redis hash (JSON-decoded),
    or None when absent.
    """
    raw_dist = redis.hget("lugang_param_dist", name)

    if time is None:
        current = redis.hget("lugang_hourly", name)
    else:
        current = None
        hour_end = datetime(time.year, time.month, time.day, time.hour, 59, 59)
        row = convert.query(LuGangDataPO,
                            LuGangDataPO.level == 'hour',
                            LuGangDataPO.name == name,
                            LuGangDataPO.date_time == hour_end).first()
        if row is not None:
            current = row.value

    dist = json.loads(raw_dist) if raw_dist is not None else None

    return current, dist


def lugang_calculate_dist(start, end, bin_num):
    """Recompute the per-sensor hourly-value distribution and cache it in redis.

    Hour-level records with timestamps in [start, end) are grouped by sensor
    name; each group's values are cut into *bin_num* equal-width bins and a
    {bin midpoint: percentage} mapping is stored as JSON in the
    "lugang_param_dist" hash.  No-op when the range holds no records.
    """
    records = convert.query(LuGangDataPO,
                            LuGangDataPO.date_time >= start,
                            LuGangDataPO.date_time < end,
                            LuGangDataPO.level == 'hour').all()

    if not records:
        return

    df = pd.DataFrame(simple_util.result_dict(records))
    for sensor_name, grp in df.groupby("name"):
        binned, edges = pd.cut(grp['value'], bins=bin_num, retbins=True, include_lowest=True)
        # Percentage of samples falling into each bin, in bin order.
        percentages = binned.value_counts(sort=False, normalize=True).mul(100).array
        # Midpoint of each bin, rounded to 2 decimals, used as the dict key.
        midpoints = [round((edges[i] + edges[i + 1]) / 2, 2) for i in range(bin_num)]
        dist = dict(zip(midpoints, percentages))
        redis.hset("lugang_param_dist", sensor_name, json.dumps(dist))

def lugang_temp_change_rate_recommend(current: datetime):
    """Build adjustment recommendations for a fixed set of furnace parameters.

    Looks up the prediction record at *current*; returns [] when none exists.
    Otherwise pairs each watched parameter's status with its recommended
    adjustment scope from the reason model.
    """
    cur_record: GaoLuPredictPO = gaolu_dao_service.get_gaolu_result_one(current)
    if cur_record is None:
        return []

    statistic = gaolu_biz_service.get_auto_score_by_record(cur_record)
    status_dict = statistic.get('statusDict', {})
    status = statistic.get("status", {})

    reason_input = GaoLuReasonInputBO()
    sys_utils.copy_properties(cur_record, reason_input)
    scope = gaolu_reason_model.get_reason_scope(reason_input)

    watched = ['冷风流量', '富氧流量', '热风温度', '热风压力', '焦比', '煤比']
    recommend = []
    for key, desc in status_dict.items():
        if desc in watched:
            recommend.append(dict(desc=desc,
                                  status=status.get(key, ""),
                                  scope=scope.get(key, "")))

    return recommend


def lugang_temp_change_rate(time: datetime):
    """Return per-section week-over-week deltas for every sensor in `struct`.

    Week-level aggregates at the Sunday 23:59:59 timestamps of *time*'s week
    and the week before are compared; each section dict carries the numeric
    tag plus group1..groupN values (previous week minus current week, rounded
    to 2 decimals, missing readings treated as 0).
    """
    this_weekend = get_weekend(time)
    current_rows = convert.query(LuGangDataPO, LuGangDataPO.date_time == this_weekend,
                                 LuGangDataPO.level == "week").all()
    # Week-level data of the current week.
    current_values = {row.name: row.value for row in current_rows}

    prev_weekend = get_weekend(this_weekend - timedelta(days=7))
    prev_rows = convert.query(LuGangDataPO, LuGangDataPO.date_time == prev_weekend,
                              LuGangDataPO.level == "week").all()
    # Week-level data of the previous week.
    prev_values = {row.name: row.value for row in prev_rows}

    # One output entry per `struct` section.
    groups = []
    for tag, sensor_keys in struct.items():
        entry = dict(tag=tag.replace("s", ""))
        for idx, sensor_key in enumerate(sensor_keys):
            prev_v = prev_values.get(sensor_key, 0)
            cur_v = current_values.get(sensor_key, 0)
            entry['group' + str(idx + 1)] = round(prev_v - float(cur_v), 2)
        groups.append(entry)
    return groups


def get_weekend(dt):
    """Return the Sunday of *dt*'s week, pinned to 23:59:59."""
    days_until_sunday = 6 - dt.weekday()
    sunday = dt + timedelta(days=days_until_sunday)
    return datetime(sunday.year, sunday.month, sunday.day, 23, 59, 59)


def get_monday(dt):
    """Return the Monday of *dt*'s week at midnight (00:00:00)."""
    monday = dt - timedelta(days=dt.weekday())
    return datetime(monday.year, monday.month, monday.day)


def get_data_by_time(time: datetime) -> Dict:
    """Fetch all hour-level sensor values for the hour containing *time*.

    The lookup timestamp is normalised to HH:59:59, matching how hourly
    aggregates are stored.  Returns {sensor name: value}.
    """
    hour_end = datetime(time.year, time.month, time.day, time.hour, 59, 59)
    rows = convert.query(LuGangDataPO,
                         LuGangDataPO.date_time == hour_end,
                         LuGangDataPO.level == "hour").all()

    return {row.name: row.value for row in rows}


if __name__ == "__main__":
    # Ad-hoc manual check: print the Monday (00:00:00) of the current week.
    print(get_monday(datetime.now()))
