import os

from tqdm import tqdm

from config.config import config
from graph.JsonWR import write_groups_to_json
from group import Group
from utils.feature import calculate_similarity
from utils.interval import partition_interval
from utils.read import read_pkl


def set_value(groups, row, item, simi):
    """Accumulate one similarity sample into the two symmetric interval groups.

    Parameters
    ----------
    groups : dict
        Maps "left:right" interval-prefix keys to Group objects
        (see init_group).
    row, item : mapping
        Records carrying the "<attr>_interval" column named by
        config["handler"]["attr"].
    simi : float
        Similarity value; bucketed into 0.0001-wide, left-closed /
        right-open bins.
    """
    col = "%s_interval" % config["handler"]["attr"]
    # Bucket index for the half-open interval [k * 0.0001, (k + 1) * 0.0001).
    index = int(simi / 0.0001)
    # The interval label's prefix (text before the first "_") identifies the bin.
    rowKey = row[col].split("_")[0]
    itemKey = item[col].split("_")[0]
    # Update both orderings so (a, b) and (b, a) stay in sync; when the
    # prefixes are equal this intentionally counts the sample twice for the
    # same group, matching the symmetric bookkeeping.
    for key in (rowKey + ":" + itemKey, itemKey + ":" + rowKey):
        group = groups.get(key)
        group.simiSumArray[index] += simi
        group.countArray[index] += 1


def statistic(featuresData, groupsT, groupsF, groupsTF):
    """Accumulate pairwise similarity statistics over one batch of rows.

    Every unordered pair of rows in featuresData is compared exactly once;
    the similarity is routed into one of three group maps depending on the
    pair's mask labels (mask == 1: no mask, mask == 2: masked).

    Parameters
    ----------
    featuresData : pandas.DataFrame
        Rows with at least "feature" and "mask" columns.
    groupsT, groupsF, groupsTF : dict
        Group maps (see init_group) for masked/masked, unmasked/unmasked
        and mixed pairs respectively.
    """
    visited = []
    for _, row in featuresData.iterrows():
        # Iterating an empty `visited` is a no-op, so no emptiness guard needed.
        for item in visited:
            simi = calculate_similarity(row["feature"], item["feature"])
            if 1.0 - simi <= 0.001:
                # Effectively identical images are excluded from the statistics.
                continue
            # Route the sample by the pair's mask combination.
            if row["mask"] == 1 and item["mask"] == 1:
                # Neither wears a mask.
                set_value(groupsF, row, item, simi)
            elif row["mask"] == 2 and item["mask"] == 2:
                # Both wear masks.
                set_value(groupsT, row, item, simi)
            else:
                # One masked, one not.
                set_value(groupsTF, row, item, simi)
        visited.append(row)


def init_group():
    """Create a Group for every ordered pair of interval-label prefixes.

    Returns a dict keyed "left:right", where left and right are the part of
    each interval label before the first underscore (see partition_interval).
    """
    labels = partition_interval()
    prefixes = [label.split("_")[0] for label in labels]
    return {
        "%s:%s" % (a, b): Group(a, b)
        for a in prefixes
        for b in prefixes
    }


def main_single():
    """Single-person statistics pipeline.

    Loads the attribute-selected feature pickle, groups rows by
    gt_person_id, accumulates pairwise similarity statistics per person
    (split three ways by mask status) and writes the group maps to JSON.
    """
    # 1. Load the data.
    projectPath = config["project_path"]
    dataDirPath = "%s/%s" % (projectPath, config["cluster"]["attrs_select_result_dir"])
    # Base name of the ground-truth data file, without directory or extension.
    gtDataFileName = os.path.split(config["input"]["gt_data_file_path"])[1].split(".")[0]
    selectMode = config["cluster"]["select_mode"]
    dataPath = r"%s/%s_select_%s_%s.pkl" % (dataDirPath, gtDataFileName, selectMode, config["handler"]["attr"])
    data = read_pkl(dataPath)
    # print(data.info())
    # print(data[["blur", "blur_interval"]])
    data = data.dropna(axis=0)  # drop rows with any missing value

    # 2. Statistics.
    # 2.1 Initialise the three group maps, one per mask combination.
    groupsT = init_group()  # both masked
    groupsF = init_group()  # both unmasked (original comment truncated)
    groupsTF = init_group()  # one masked, one not
    personIds = data["gt_person_id"]
    personIds = personIds.drop_duplicates()
    print("同人统计.....")
    print("personIds size: %d" % len(personIds))
    # Process each person independently; only same-person pairs are compared.
    with tqdm(total=len(personIds)) as bar:
        for personId in personIds:
            personData = data[data["gt_person_id"] == personId]
            # print(personData["mask"])
            # print("allFeaturesData size: %d" % len(personData))
            # Accumulate pairwise similarity stats for this person's images.
            statistic(personData, groupsT, groupsF, groupsTF)
            bar.update(1)

    # Persist the group maps.
    # NOTE(review): the message below reports the configured result directory,
    # but the files are written to the hard-coded "data/" path — confirm which
    # location is actually intended.
    print("统计结果存储路径:%s/%s" % (config["project_path"], config["cluster"]["attrs_select_result_dir"]))
    write_groups_to_json("data/group_%s_%s.json" % ("single", "T"), groupsT)
    write_groups_to_json("data/group_%s_%s.json" % ("single", "F"), groupsF)
    write_groups_to_json("data/group_%s_%s.json" % ("single", "TF"), groupsTF)


# Script entry point.
if __name__ == '__main__':
    main_single()
