from concurrent.futures import ThreadPoolExecutor
import json
import os
import threading

# create a lock guarding the shared prob_* result lists
lock = threading.Lock()
# import matplotlib.pyplot as plt
import torch

from Efficientnet.distance import calculate_confidence_interval
from model import densenet121
from PIL import Image
from torchvision import transforms
from concurrent.futures import ThreadPoolExecutor, wait
import pandas as pd
import numpy as np

# create model and select device (GPU if available, else CPU)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
model = densenet121(num_classes=5).to(device)
# load model weights
model_weight_path = "../model_weights/Densenet/new_weights/model-29.pth"
model.load_state_dict(torch.load(model_weight_path, map_location=device))
model.eval()

# read class-index -> class-name mapping
json_path = 'class_indices.json'
assert os.path.exists(
    json_path), "file: '{}' does not exist.".format(json_path)
with open(json_path, "r") as f:
    class_indict = json.load(f)

# scan the image directory: each subfolder of ``path`` is one condition group
path = r"../data/four_data"
dirs = os.listdir(path)
imgs_source = []
imgs_Dclose1 = []
imgs_Dclose2 = []
imgs_Ddistance = []
for sub_dir in dirs:  # iterate over group subfolders
    group_path = os.path.join(path, sub_dir)
    if not os.path.isdir(group_path):
        continue
    for file in os.listdir(group_path):
        img_path = os.path.join(group_path, file)
        # BUG FIX: the original used ``dir in "source"`` etc., which is a
        # substring test and also matches names like "s", "our" or "Dclose";
        # compare the folder name for equality instead.
        if sub_dir == "source":
            imgs_source.append(img_path)
        elif sub_dir == "Dclose1":
            imgs_Dclose1.append(img_path)
        elif sub_dir == "Dclose2":
            imgs_Dclose2.append(img_path)
        elif sub_dir == "Ddistance":
            imgs_Ddistance.append(img_path)

# [train_size, val_size] per model variant
img_size = {"s": [300, 384],
            "m": [384, 480],
            "l": [384, 480]}
num_model = "s"

data_transform = transforms.Compose(
    [transforms.Resize(img_size[num_model][1]),
     transforms.CenterCrop(img_size[num_model][1]),
     transforms.ToTensor(),
     transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])])

# per-group result lists of top-3 probability strings, appended to by
# main() under ``lock``
prob_source = []
prob_Dclose1 = []
prob_Dclose2 = []
prob_Ddistance = []


def main(img_path):
    """Classify one image and record its top-3 class probabilities.

    Runs the global ``model`` on the image at *img_path*, sorts the
    softmax probabilities in descending order, joins the top three as a
    comma-separated string, and appends it (under ``lock``) to the
    ``prob_*`` list matching the image's parent-directory name.

    Errors during prediction are reported and swallowed so one bad image
    does not abort the thread-pool run.
    """
    assert os.path.exists(
        img_path), "file: '{}' does not exist.".format(img_path)
    try:
        # Convert to RGB so grayscale/RGBA inputs survive the 3-channel
        # Normalize transform instead of raising inside data_transform.
        img = Image.open(img_path).convert("RGB")
        # group name is the parent directory: "source", "Dclose1", ...
        group = os.path.basename(os.path.dirname(img_path))
        img = data_transform(img)
        # expand batch dimension: [C, H, W] -> [N, C, H, W]
        img = torch.unsqueeze(img, dim=0)

        with torch.no_grad():
            # predict class probabilities for the single image
            output = torch.squeeze(model(img.to(device))).cpu()
            predict = torch.softmax(output, dim=0)

        # top-3 probabilities, largest first, rendered as "p1,p2,p3"
        pre_five = np.sort([element.numpy() for element in predict])[::-1]
        result_string = ','.join(map(str, pre_five[:3]))
        with lock:
            if group == "source":
                prob_source.append(result_string)
            elif group == "Dclose1":
                prob_Dclose1.append(result_string)
            elif group == "Dclose2":
                prob_Dclose2.append(result_string)
            elif group == "Ddistance":
                prob_Ddistance.append(result_string)
    except Exception as e:
        # narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; include the cause so failures are diagnosable
        print("pre error: " + img_path + " (" + repr(e) + ")")


def sortDataAndConInterval(data, type):
    """Sort *data* ascending, print it, and print 95% confidence
    intervals for its mean.

    Args:
        data: sequence of numeric values.
        type: label used only in the printed header.

    Returns:
        A new list containing ``data`` sorted in ascending order.
    """
    # ``sorted(data, key=lambda x: x)`` in the original was a no-op key;
    # plain sorted() is equivalent and clearer
    data = sorted(data)
    print("Sorted and Top Three ", type, ": \n", data)
    # 95% confidence interval for the mean using the t-distribution
    a_t, b_t = calculate_confidence_interval(data, method='t')
    # 95% confidence interval for the mean using the normal distribution
    c_norm, d_norm = calculate_confidence_interval(data, method='normal')
    print("T-Distribution Interval:", a_t, b_t)
    print("Normal Distribution Interval:", c_norm, d_norm)
    return data


def is_file_exists(file_name):
    """Report and return whether *file_name* exists in the current
    working directory.

    Prints a human-readable message either way and returns True/False.
    """
    full_path = os.path.join(os.getcwd(), file_name)
    exists = os.path.exists(full_path)
    if exists:
        print(f"The file {file_name} exists in the current directory.")
    else:
        print(f"The file {file_name} does not exist in the current directory.")
    return exists


def get_data_from_path(filename_path, imgs_source, imgs_Dclose2, imgs_Ddistance, filename):
    """Run main() over three image groups in a thread pool and save the
    collected probabilities to an Excel file under ``filename_path``.

    Args:
        filename_path: output directory (created if missing).
        imgs_source / imgs_Dclose2 / imgs_Ddistance: lists of image paths.
        filename: currently unused — the workbook name is hard-coded below.

    Side effects: drains and clears the global prob_source, prob_Dclose2
    and prob_Ddistance lists.
    """
    with ThreadPoolExecutor(10) as t:
        print("Running in Roses")
        # submit every group; the original's three copy-pasted wait/handle
        # stanzas are collapsed into one loop over the future groups
        future_groups = [
            [t.submit(main, img_path=img) for img in imgs_Ddistance],
            [t.submit(main, img_path=img) for img in imgs_source],
            [t.submit(main, img_path=img) for img in imgs_Dclose2],
        ]
        # wait() blocks until all futures finish, so not_done is normally
        # empty; report anything that somehow did not complete
        for futures in future_groups:
            _done, not_done = wait(futures)
            for _ in not_done:
                print("有错误")
        print("End")
        print("starting process")

        # assemble the three result columns into one table
        datasets = [prob_source, prob_Dclose2, prob_Ddistance]
        df = pd.DataFrame(datasets).transpose()
        df.columns = ['prob_source', 'prob_Dclose2', 'prob_Ddistance']
        os.makedirs(f'./{filename_path}', exist_ok=True)
        # NOTE(review): output name is hard-coded rather than derived from
        # the ``filename`` parameter — confirm this is intentional
        df.to_excel(f'./{filename_path}/(unknown).xlsx', index=False)

        # reset the shared accumulators for the next run
        prob_source.clear()
        prob_Dclose2.clear()
        prob_Ddistance.clear()
