from datetime import datetime

from sqlalchemy import and_
import math
import numpy as np
from typing import Dict, List

from app.database import convert
from app.exts import ironman_redis as redis
from app.models.po.lugang.lugang_qinshi_po import lugang_qinshi_column
from app.models.po.lugang.lugang_original_po import LuGangDataPO, LuGangQinshiDataPO, LuGangQinshiResultPO
from app.utils import date_util
import logging as log

# Thermocouple tag names for the hearth-erosion ("qinshi") sensors.
# Functions such as get_avg() and get_qinshi_avg_status() index into this
# list BY POSITION, so the order matters and entries must not be inserted,
# removed, or reordered.
# NOTE(review): the entry ' CG_LT_GL_GL04_LGWDBG10500TE1248' (index 50) has a
# leading space and duplicates the TE1248 tag at index 41; lookups through
# group.get() for index 50 therefore always fall back to 0. Presumably the
# space is a paste typo, but trimming it would change get_avg() results --
# confirm the intended tag before fixing.
columns = ['CG_LT_GL_GL04_LDWDBG5700TE1103', 'CG_LT_GL_GL04_LGWDBG8500TE1210', 'CG_LT_GL_GL04_LDWDBG5700TE1120',
           'CG_LT_GL_GL04_LDWDBG5700TE1113', 'CG_LT_GL_GL04_LGWDBG9500TE1230', 'CG_LT_GL_GL04_LDWDBG5700TE1107',
           'CG_LT_GL_GL04_LGWDBG7500TE1186', 'CG_LT_GL_GL04_LGWDBG6500TE1158', 'CG_LT_GL_GL04_LDWDBG5700TE1109',
           'CG_LT_GL_GL04_LDWDBG5700TE1115', 'CG_LT_GL_GL04_LGWDBG7000TE1176', 'CG_LT_GL_GL04_LGWDBG8500TE1216',
           'CG_LT_GL_GL04_LDWDBG6200TE1128', 'CG_LT_GL_GL04_LDWDBG5700TE1114', 'CG_LT_GL_GL04_LGWDBG8000TE1208',
           'CG_LT_GL_GL04_LGWDBG6500TE1160', 'CG_LT_GL_GL04_LGWDBG8000TE1196', 'CG_LT_GL_GL04_LGWDBG8500TE1220',
           'CG_LT_GL_GL04_LGWDBG11500TE1250', 'CG_LT_GL_GL04_LGWDBG7000TE1162', 'CG_LT_GL_GL04_LGWDBG7000TE1170',
           'CG_LT_GL_GL04_LGWDBG11500TE1254', 'CG_LT_GL_GL04_LGWDBG10500TE1242', 'CG_LT_GL_GL04_LGWDBG9500TE1232',
           'CG_LT_GL_GL04_LGWDBG9500TE1236', 'CG_LT_GL_GL04_LGWDBG7500TE1184', 'CG_LT_GL_GL04_LDWDBG5700TE1104',
           'CG_LT_GL_GL04_LDWDBG5700TE1122', 'CG_LT_GL_GL04_LGWDBG9500TE1226', 'CG_LT_GL_GL04_LGWDBG8000TE1204',
           'CG_LT_GL_GL04_LDWDBG6200TE1126', 'CG_LT_GL_GL04_LGWDBG10500TE1240', 'CG_LT_GL_GL04_LGWDBG9500TE1234',
           'CG_LT_GL_GL04_LDWDBG6200TE1127', 'CG_LT_GL_GL04_LGWDBG7500TE1180', 'CG_LT_GL_GL04_LGWDBG7000TE1172',
           'CG_LT_GL_GL04_LGWDBG8500TE1224', 'CG_LT_GL_GL04_LGWDBG8000TE1202', 'CG_LT_GL_GL04_LGWDBG8500TE1218',
           'CG_LT_GL_GL04_LDWDBG5700TE1105', 'CG_LT_GL_GL04_LGWDBG7500TE1192', 'CG_LT_GL_GL04_LGWDBG10500TE1248',
           'CG_LT_GL_GL04_LGWDBG7500TE1188', 'CG_LT_GL_GL04_LGWDBG8000TE1200', 'CG_LT_GL_GL04_LGWDBG10500TE1244',
           'CG_LT_GL_GL04_LDWDBG5700TE1121', 'CG_LT_GL_GL04_LGWDBG10500TE1246', 'CG_LT_GL_GL04_LGWDBG8500TE1222',
           'CG_LT_GL_GL04_LDWDBG5700TE1108', 'CG_LT_GL_GL04_LGWDBG10500TE1238', ' CG_LT_GL_GL04_LGWDBG10500TE1248',
           'CG_LT_GL_GL04_LDWDBG5700TE1106', 'CG_LT_GL_GL04_LGWDBG11500TE1252']


def get_qinshi_history(name, start, end, direction='90') -> List[LuGangQinshiResultPO]:
    """Fetch day-level erosion model results for one name within [start, end].

    :param name: result/sensor name stored in LuGangQinshiResultPO.name
    :param start: inclusive window start
    :param end: inclusive window end
    :param direction: furnace direction key (defaults to '90')
    :return: matching day-level result rows
    """
    filters = (
        LuGangQinshiResultPO.name == name,
        LuGangQinshiResultPO.date_time >= start,
        LuGangQinshiResultPO.date_time <= end,
        LuGangQinshiResultPO.direction == direction,
        LuGangQinshiResultPO.level == "day",
    )
    return convert.query(LuGangQinshiResultPO, *filters).all()


def _upsert_qinshi_data(name, value, date_time, level, now):
    """Insert or update one LuGangQinshiDataPO row keyed by (name, date_time, level)."""
    content: LuGangQinshiDataPO = convert.query(LuGangQinshiDataPO,
                                                and_(LuGangQinshiDataPO.date_time == date_time,
                                                     LuGangQinshiDataPO.name == name,
                                                     LuGangQinshiDataPO.level == level),
                                                ).first()
    # Insert when absent, otherwise only update the value and modified stamp.
    if content is None:
        record: LuGangQinshiDataPO = LuGangQinshiDataPO()
        record.level = level
        record.name = name
        record.value = value
        record.date_time = date_time
        record.gmt_create = now
        record.gmt_modified = now
        convert.add_one(record)
    else:
        convert.update_model(content, dict(value=value, gmt_modified=datetime.now()))


def _rollup_qinshi_data(name, src_level, start, end, target_level, target_time, redis_key, now):
    """Average src_level rows of `name` in [start, end], upsert the mean as a
    target_level row stamped target_time, and cache it in the redis hash redis_key."""
    histories = convert.query(LuGangQinshiDataPO,
                              and_(LuGangQinshiDataPO.date_time >= start,
                                   LuGangQinshiDataPO.name == name,
                                   LuGangQinshiDataPO.date_time <= end,
                                   LuGangQinshiDataPO.level == src_level),
                              ).all()
    if not histories:
        return
    avg = sum(h.value for h in histories) / len(histories)
    _upsert_qinshi_data(name, avg, target_time, target_level, now)
    redis.hset(redis_key, name, avg)


def dump_lugang_qinshi_data():
    """Persist the latest minute-level qinshi readings from redis and roll them up.

    For every configured sensor tag:
      * upsert the raw reading as a 'min'-level row, timestamped from the
        "lugang_qinshi_update" hash;
      * average this hour's 'min' rows into an 'hour' row (stamped at the end
        of the hour) and cache it in redis hash "lugang_qinshi_hourly";
      * average today's 'hour' rows into a 'day' row (stamped at the start of
        the day) and cache it in redis hash "lugang_qinshi_daily".
    """
    value_dict = redis.hget_all_str_decode("lugang_qinshi")
    time_dict = redis.hget_all_str_decode("lugang_qinshi_update")
    now = datetime.now()

    # Period boundaries are the same for every key -- compute them once
    # instead of once per key as before.
    hour_start = datetime(now.year, now.month, now.day, now.hour, 0, 0)
    hour_end = datetime(now.year, now.month, now.day, now.hour, 59, 59)
    day_start = datetime(now.year, now.month, now.day, 0, 0, 0)
    day_end = datetime(now.year, now.month, now.day, 23, 59, 59)

    # Renamed from `columns` to avoid shadowing the module-level tag list.
    qinshi_columns = lugang_qinshi_column

    for key, value in value_dict.items():
        if key not in qinshi_columns:
            continue

        date_time_str = time_dict.get(key)
        if date_time_str is None:
            # A value without a matching update timestamp previously crashed
            # strptime with TypeError; skip it instead.
            log.warning("no update time for qinshi key %s, skipped", key)
            continue
        date_time = datetime.strptime(date_time_str, "%Y-%m-%d %H:%M:%S")

        _upsert_qinshi_data(key, value, date_time, "min", now)
        # hourly average row is stored with the hour-end timestamp
        _rollup_qinshi_data(key, 'min', hour_start, hour_end, 'hour', hour_end,
                            "lugang_qinshi_hourly", now)
        # daily average row (over hourly rows) is stored with the day-start timestamp
        _rollup_qinshi_data(key, 'hour', day_start, day_end, 'day', day_start,
                            "lugang_qinshi_daily", now)


def _upsert_qinshi_result(name, value, direction, date_time, level, now):
    """Insert or update one LuGangQinshiResultPO row keyed by
    (name, direction, date_time, level)."""
    record = convert.query(LuGangQinshiResultPO,
                           LuGangQinshiResultPO.date_time == date_time,
                           LuGangQinshiResultPO.name == name,
                           LuGangQinshiResultPO.direction == direction,
                           LuGangQinshiResultPO.level == level).first()
    # Insert when absent, otherwise only update the value and modified stamp.
    if record is None:
        record: LuGangQinshiResultPO = LuGangQinshiResultPO()
        record.level = level
        record.name = name
        record.direction = direction
        record.value = value
        record.date_time = date_time
        record.gmt_create = now
        record.gmt_modified = now
        convert.add_one(record)
    else:
        convert.update_model(record, dict(value=value, direction=direction,
                                          gmt_modified=datetime.now()))


def dump_qinshi_model_data():
    """Persist the (mocked) hearth-erosion model output per direction.

    Hour-level rows are stamped at the end of the current hour, day-level rows
    at the start of the current day.

    NOTE(review): the original also read "lugang_qinshi_hourly" /
    "lugang_qinshi_daily" from redis into a variable that was never used (the
    mock generator ignores live values); those dead reads were removed.
    """
    now = datetime.now()
    hour_end = datetime(now.year, now.month, now.day, now.hour, 59, 59)
    day_start = datetime(now.year, now.month, now.day, 0, 0, 0)

    for level, stamp in (('hour', hour_end), ('day', day_start)):
        group_with_direction = get_lugang_qinshi_status_group_mock(None, level)
        for direction, group in group_with_direction.items():
            for key, value in group.items():
                _upsert_qinshi_result(key, value, direction, stamp, level, now)


def get_qinshi_model_data(time, direction):
    """Hour-level erosion model values for one direction at an exact timestamp.

    Cleanup: removed an unused `now = datetime.now()` local and a large block
    of commented-out redis/mock fallback code.

    :param time: exact hour-level timestamp the results were stored with
    :param direction: direction key, e.g. '90'
    :return: {result name: value rounded to 2 decimals}
    """
    records = convert.query(LuGangQinshiResultPO,
                            LuGangQinshiResultPO.date_time == time,
                            LuGangQinshiResultPO.direction == direction,
                            LuGangQinshiResultPO.level == 'hour').all()
    return {record.name: round(record.value, 2) for record in records}


def get_qinshi_model_data_area(time):
    """Hour-level erosion model values across all directions at an exact timestamp.

    :param time: exact hour-level timestamp the results were stored with
    :return: {result name: value}
    """
    records = convert.query(LuGangQinshiResultPO,
                            LuGangQinshiResultPO.date_time == time,
                            LuGangQinshiResultPO.level == 'hour').all()
    # Fix: query() returns ORM objects, not dicts -- the original subscripted
    # record['name'] / record['value'], which raises TypeError; use attribute
    # access like every other function in this module (cf. get_qinshi_model_data).
    return {record.name: record.value for record in records}


# Index groups (positions in the module-level `columns` list) for the area
# ratios below. The three shared denominators were previously rebuilt and
# re-averaged inline for every single area.
_SIDE_ALL_IDX = [41, 24, 36, 14, 40, 10, 19, 18, 49, 28, 38, 37, 6, 20, 46, 32, 47, 17, 29, 42, 7]
_BOTTOM_ALL_IDX = [0, 26, 39, 51]
_RING_ALL_IDX = [50, 24, 36, 14, 40, 10, 19, 15, 18, 49, 28, 16, 34, 21, 22, 4, 38, 37, 6, 20, 46, 32, 47, 17, 29, 42, 7]

# Area letter -> indices of the sensors belonging to that area.
_AREA_IDX = {
    'A': [41, 24, 36, 14, 40, 10, 19],
    'B': [18, 49, 28, 16, 34, 1, 20],
    'C': [52, 31, 22, 11, 43, 25, 20],
    'D': [44, 23, 35, 46, 17, 29, 42],
    'E': [5, 48, 8],
    'F': [3, 13, 9],
    'G': [2, 45, 27],
    'H': [30, 33, 12],
    'I': [41, 18, 21, 46],
    'J': [24, 49, 22, 32],
    'K': [36, 28, 4, 47],
    'L': [14, 1, 38, 17],
    'M': [40, 16, 37, 29],
    'N': [10, 34, 6, 42],
    'O': [19, 15, 20, 7],
}


def get_qinshi_avg_status(start, end):
    """Per-area average erosion ratios over the window [start, end).

    Hour-level rows are grouped by sensor name and averaged over time; each
    area's sensor-group average is then expressed as a ratio of its reference
    group (areas A-D against _SIDE_ALL_IDX, E-H against _BOTTOM_ALL_IDX,
    I-O against _RING_ALL_IDX).

    :param start: inclusive window start
    :param end: exclusive window end
    :return: {area letter: ratio rounded to 2 decimals}
    """
    group = {}
    histories: List[LuGangQinshiDataPO] = convert.query(LuGangQinshiDataPO,
                                                        LuGangQinshiDataPO.date_time >= start,
                                                        LuGangQinshiDataPO.date_time < end,
                                                        LuGangQinshiDataPO.level == 'hour').all()
    for history in histories:
        group.setdefault(history.name, []).append(history.value)

    group = {k: _avg(k, v) for k, v in group.items()}

    # Denominators are loop-invariant; compute each once instead of per area.
    side_avg = get_avg(_SIDE_ALL_IDX, group)
    bottom_avg = get_avg(_BOTTOM_ALL_IDX, group)
    ring_avg = get_avg(_RING_ALL_IDX, group)

    result = {}
    for area, indexes in _AREA_IDX.items():
        if area in ('A', 'B', 'C', 'D'):
            denominator = side_avg
        elif area in ('E', 'F', 'G', 'H'):
            denominator = bottom_avg
        else:
            denominator = ring_avg
        result[area] = get_rate(get_avg(indexes, group), denominator)

    return result


def _cal(out_value, in_value):
    if out_value == in_value or (in_value == 0 and in_value == 0):
        return 0
    ta = float(out_value)
    tb = float(in_value)
    s = 1000 * ((1150 - (((0.45) * (10 * (tb - ta) / 0.12)) / 10 + tb)) * 0.3 / (10 * (tb - ta) / 0.12) + 0.6)
    return round(float(s), 2)


def _cal2(out_value, in_value):
    if out_value == in_value and in_value == 0:
        return 0
    ta = float(out_value)
    tb = float(in_value)

    s = 1000 * ((1150 - (((0.5) * (10 * (tb - ta) / 1)) / 10 + tb)) * 0.3 / (
            10 * (tb - ta) / 1) + 7)

    return round(float(s), 2)


def _avg(k, lst):
    log.info(k)
    return sum(lst) / len(lst)


def get_avg(index_range: list, group):
    """Average the readings of the sensors selected by position.

    Each index selects a tag from the module-level `columns` list; tags
    absent from `group` contribute 0.

    Fix: the original accumulated into a local named `sum`, shadowing the
    builtin; now uses builtin sum() over a generator.

    :param index_range: positions into `columns` (must be non-empty)
    :param group: {tag name: averaged value}
    :return: mean of the selected values
    """
    total = sum(group.get(columns[i], 0) for i in index_range)
    return total / len(index_range)


def get_rate(n, d, default=0):
    """Return n / d rounded to 2 decimals, or ``default`` when d is 0 or
    either operand is None."""
    if n is None or d is None or d == 0:
        return default
    return round(float(n / d), 2)


def get_lugang_qinshi_status_group(v):
    """
    Hearth-erosion status values for each furnace direction.

    Each area pairs an outer thermocouple tag with an inner one and converts
    the readings into an erosion figure: _cal for side-wall areas A-G,
    _cal2 for bottom areas H-J.

    :param v: dict of thermocouple tag -> latest reading; missing tags
              default to 0 (presumably the decoded "lugang_qinshi" redis
              hash -- confirm against callers)
    :return: {'90' | '180' | '270' | '360': {area letter: value}}
    """
    group = {}
    # direction 90
    A = _cal(v.get("CG_LT_GL_GL04_LGWDBG10500TE1247", 0), v.get("CG_LT_GL_GL04_LGWDBG10500TE1248", 0))
    B = _cal(v.get("CG_LT_GL_GL04_LGWDBG9500TE1235", 0), v.get("CG_LT_GL_GL04_LGWDBG9500TE1236", 0))
    C = _cal(v.get("CG_LT_GL_GL04_LGWDBG8500TE1223", 0), v.get("CG_LT_GL_GL04_LGWDBG8500TE1224", 0))
    D = _cal(v.get("CG_LT_GL_GL04_LGWDBG8000TE1207", 0), v.get("CG_LT_GL_GL04_LGWDBG8000TE1208", 0))
    E = _cal(v.get("CG_LT_GL_GL04_LGWDBG7500TE1191", 0), v.get("CG_LT_GL_GL04_LGWDBG7500TE1192", 0))
    F = _cal(v.get("CG_LT_GL_GL04_LGWDBG7000TE1175", 0), v.get("CG_LT_GL_GL04_LGWDBG7000TE1176", 0))
    G = _cal(v.get("CG_LT_GL_GL04_LGWDBG7000TE1161", 0), v.get("CG_LT_GL_GL04_LGWDBG7000TE1162", 0))
    H = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1108", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1133", 0))
    I = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1109", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1134", 0))
    J = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1116", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1141", 0))

    group['90'] = dict(A=A, B=B, C=C, D=D, E=E, F=F, G=G, H=H, I=I, J=J)

    # direction 180
    A = _cal(v.get("CG_LT_GL_GL04_LGWDBG11500TE1249", 0), v.get("CG_LT_GL_GL04_LGWDBG10500TE1248", 0))
    B = _cal(v.get("CG_LT_GL_GL04_LGWDBG10500TE1237", 0), v.get("CG_LT_GL_GL04_LGWDBG9500TE1236", 0))
    C = _cal(v.get("CG_LT_GL_GL04_LGWDBG9500TE1225", 0), v.get("CG_LT_GL_GL04_LGWDBG8500TE1224", 0))
    D = _cal(v.get("CG_LT_GL_GL04_LGWDBG8500TE1209", 0), v.get("CG_LT_GL_GL04_LGWDBG8000TE1208", 0))
    E = _cal(v.get("CG_LT_GL_GL04_LGWDBG8000TE1195", 0), v.get("CG_LT_GL_GL04_LGWDBG7500TE1192", 0))
    F = _cal(v.get("CG_LT_GL_GL04_LGWDBG7500TE1179", 0), v.get("CG_LT_GL_GL04_LGWDBG7000TE1176", 0))
    G = _cal(v.get("CG_LT_GL_GL04_LGWDBG7000TE1169", 0), v.get("CG_LT_GL_GL04_LGWDBG7000TE1162", 0))
    H = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1108", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1132", 0))
    I = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1109", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1133", 0))
    J = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1116", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1134", 0))

    group['180'] = dict(A=A, B=B, C=C, D=D, E=E, F=F, G=G, H=H, I=I, J=J)

    # direction 270
    # NOTE(review): C pairs a 9500-series outer tag (TE1225, also used for
    # '180' C) with a TE1230 inner, unlike the sibling rows which pair
    # consecutive tags of the same series -- confirm the tag list.
    A = _cal(v.get("CG_LT_GL_GL04_LGWDBG11500TE1253", 0), v.get("CG_LT_GL_GL04_LGWDBG11500TE1254", 0))
    B = _cal(v.get("CG_LT_GL_GL04_LGWDBG10500TE1241", 0), v.get("CG_LT_GL_GL04_LGWDBG10500TE1242", 0))
    C = _cal(v.get("CG_LT_GL_GL04_LGWDBG9500TE1225", 0), v.get("CG_LT_GL_GL04_LGWDBG9500TE1230", 0))
    D = _cal(v.get("CG_LT_GL_GL04_LGWDBG8500TE1213", 0), v.get("CG_LT_GL_GL04_LGWDBG8500TE1214", 0))
    E = _cal(v.get("CG_LT_GL_GL04_LGWDBG8000TE1199", 0), v.get("CG_LT_GL_GL04_LGWDBG8000TE1200", 0))
    F = _cal(v.get("CG_LT_GL_GL04_LGWDBG7500TE1183", 0), v.get("CG_LT_GL_GL04_LGWDBG7500TE1184", 0))
    G = _cal(v.get("CG_LT_GL_GL04_LGWDBG7000TE1167", 0), v.get("CG_LT_GL_GL04_LGWDBG7000TE1168", 0))
    H = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1108", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1133", 0))
    I = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1109", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1134", 0))
    J = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1116", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1141", 0))

    group['270'] = dict(A=A, B=B, C=C, D=D, E=E, F=F, G=G, H=H, I=I, J=J)

    # direction 360
    A = _cal(v.get("CG_LT_GL_GL04_LGWDBG10500TE1245", 0), v.get("CG_LT_GL_GL04_LGWDBG10500TE1246", 0))
    B = _cal(v.get("CG_LT_GL_GL04_LGWDBG9500TE1233", 0), v.get("CG_LT_GL_GL04_LGWDBG9500TE1234", 0))
    C = _cal(v.get("CG_LT_GL_GL04_LGWDBG8500TE1221", 0), v.get("CG_LT_GL_GL04_LGWDBG8500TE1222", 0))
    D = _cal(v.get("CG_LT_GL_GL04_LGWDBG8500TE1213", 0), v.get("CG_LT_GL_GL04_LGWDBG8500TE1220", 0))
    E = _cal(v.get("CG_LT_GL_GL04_LGWDBG8000TE1203", 0), v.get("CG_LT_GL_GL04_LGWDBG8000TE1204", 0))
    F = _cal(v.get("CG_LT_GL_GL04_LGWDBG7500TE1187", 0), v.get("CG_LT_GL_GL04_LGWDBG7500TE1188", 0))
    G = _cal(v.get("CG_LT_GL_GL04_LGWDBG6500TE1157", 0), v.get("CG_LT_GL_GL04_LGWDBG6500TE1158", 0))
    H = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1108", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1133", 0))
    I = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1109", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1134", 0))
    J = _cal2(v.get("CG_LT_GL_GL04_LDWDBG5700TE1116", 0), v.get("CG_LT_GL_GL04_LDWDBG6200TE1141", 0))

    group['360'] = dict(A=A, B=B, C=C, D=D, E=E, F=F, G=G, H=H, I=I, J=J)

    return group


def get_lugang_qinshi_status_group_mock(dt: datetime, level: str):
    """Mocked hearth-erosion status per direction ('90'/'180'/'270'/'360').

    Values are pseudo-random but deterministic per time bucket: the RNG is
    seeded with the start of the `level` period containing `dt`, so repeated
    calls within the same hour/day return identical numbers.

    The four near-identical blocks of the original were collapsed into one
    helper; the randint call order (A..F, then H, I, J per direction) is
    preserved, so the seeded sequence -- and therefore every returned value --
    is unchanged.

    :param dt: reference time; defaults to now when None
    :param level: time bucket level passed to date_util.get_start_of_time_level
                  (e.g. 'hour' or 'day')
    :return: {direction: {area letter: value}}
    """
    if dt is None:
        dt = datetime.now()

    bucket_start = date_util.get_start_of_time_level(dt, level)
    import random
    random.seed(bucket_start.timestamp())

    spread = 10

    def jitter(base):
        # one randint per area, +/- spread around the base value
        return random.randint(base - spread, base + spread)

    def direction_values(a, b, c, d, e, f, h, i, j):
        # G has no mock base value in the original (its randint was commented
        # out); it stays fixed at 0.
        return dict(A=jitter(a), B=jitter(b), C=jitter(c), D=jitter(d),
                    E=jitter(e), F=jitter(f), G=0,
                    H=jitter(h), I=jitter(i), J=jitter(j))

    return {
        '90': direction_values(1421, 1402, 1795, 1931, 2213, 2651, 7951, 7834, 7962),
        '180': direction_values(1433, 1390, 1752, 1972, 2251, 2615, 7848, 7946, 7931),
        '270': direction_values(1415, 1425, 1798, 1952, 2231, 2617, 7852, 7918, 7851),
        '360': direction_values(1428, 1443, 1736, 1915, 2219, 2631, 7916, 7895, 7812),
    }
