import os.path
import random
import threading
import time
from concurrent.futures import ThreadPoolExecutor, wait
from queue import Queue
from typing import Any

import numpy as np
import torch
import torchio
from torch.utils.data import DataLoader

from utils.CustomDataset import CustomDataset, KiPA2022Dataset
from utils.DataAugmentation import CustomPreprocessModule
from utils.DataUtils import get_all_data, get_patch_with_mask
from utils.LogUtil import my_logger


class KiPA2022DataLoaderGenerator:
    """Builds train/valid DataLoaders over the KiPA2022 dataset path list.

    The loaders are cached only when ``configure.if_load_all_data`` is set
    (they are then built eagerly in ``__init__``); otherwise every call to
    ``get_data_loader`` produces a fresh random train/valid split.
    """

    def __init__(self, all_data_path_list, configure):
        self.configure = configure
        # Debug mode trims the dataset to five samples for fast iteration.
        if self.configure.if_debug:
            self.group_data_lists = all_data_path_list[0:5]
        else:
            self.group_data_lists = all_data_path_list
        self.train_data_loader = None
        self.valid_data_loader = None
        if self.configure.if_load_all_data:
            self.train_data_loader, self.valid_data_loader = self.put_data_loader()

    def get_data_loader(self):
        return self.put_data_loader()

    def put_data_loader(self):
        """Return the cached loader pair, or build a new random split.

        NOTE(review): outside the ``if_load_all_data`` path the freshly built
        loaders are returned but never cached, so each call re-splits the data
        — presumably intentional (re-sampling per epoch); confirm.
        """
        if self.train_data_loader is not None and self.valid_data_loader is not None:
            return self.train_data_loader, self.valid_data_loader
        my_logger.info(threading.current_thread().name + ' start to run put_data_loader function')
        train_index_list = random.sample(range(0, len(self.group_data_lists)),
                                         int(len(self.group_data_lists) * self.configure.train_val_rate))
        # Set gives O(1) membership for the valid-split filter below (was O(n) per item).
        train_index_set = set(train_index_list)
        train_data_path_list = [self.group_data_lists[i] for i in train_index_list]
        valid_data_path_list = [path for i, path in enumerate(self.group_data_lists)
                                if i not in train_index_set]
        common_train_data_loader = DataLoader(
            dataset=KiPA2022Dataset(train_data_path_list,
                                    self.configure,
                                    ),
            batch_size=self.configure.batch_size, shuffle=True)
        valid_data_loader = DataLoader(
            dataset=KiPA2022Dataset(valid_data_path_list,
                                    self.configure,
                                    "valid", ),
            batch_size=self.configure.valid_batch_size, shuffle=True)
        return common_train_data_loader, valid_data_loader


class ParseDataLoaderGenerator:
    """Splits the Parse2022 sample paths into train/valid streaming loaders.

    The split is deterministic: the first ``train_val_rate`` fraction of the
    (unshuffled) path list becomes the training set, the remainder validation.
    """

    def __init__(self, all_sample_path_list, configure):
        # Debug mode keeps just two samples so a full pipeline run stays cheap.
        if configure.if_debug is True:
            self.all_sample_path_list = all_sample_path_list[0:2]
        else:
            self.all_sample_path_list = all_sample_path_list
        self.group_num = 1
        self.configure = configure
        split_point = int(len(self.all_sample_path_list) * configure.train_val_rate)
        train_paths = self.all_sample_path_list[:split_point]
        valid_paths = self.all_sample_path_list[split_point:]
        self.train_data_loader = Parse2022DataLoader(train_paths, self.configure)
        self.valid_data_loader = Parse2022DataLoader(valid_paths, self.configure, "valid")

    def get_data_loader(self):
        """Return the (train, valid) loader pair built at construction time."""
        return self.train_data_loader, self.valid_data_loader


class Parse2022DataLoader(object):
    """Streaming patch loader for the Parse2022 dataset.

    Whole volumes are read on background threads (via a ThreadPoolExecutor)
    and cropped into patches that are pushed into paired image/label queues;
    ``__next__`` assembles a batch by popping ``batch_size`` patches from
    those queues, refilling them when they run low.
    """

    def __init__(self, sample_path_list, configure, dataset_type="train", ):
        self.image_list = []
        self.label_list = []
        # Index of the next volume to read; guarded by sample_index_mutex.
        self.current_sample_index = 0
        # Refill trigger: top up the queues once fewer than batch_size * 5 patches remain.
        self.add_threshold = configure.batch_size * 5
        self.configure = configure
        # Shuffle the sample order once up front (random permutation of indices).
        self.sample_path_list = [sample_path_list[i] for i in
                                 random.sample(range(0, len(sample_path_list)), len(sample_path_list))]

        self.dataset_type = dataset_type
        # self.queue_max_size = 2  # The number of original data, not cropped data
        self.myThreadPool = ThreadPoolExecutor(max_workers=2)
        self.queue_operator_mutex = threading.Lock()
        self.sample_index_mutex = threading.Lock()
        self.img_queue = Queue()
        self.label_queue = Queue()
        # Pre-fill the queues synchronously so the first batch is available immediately.
        self.add_data_to_queue()
        self.normal_transform = torchio.ZNormalization()
        self.batch_size = configure.batch_size
        if configure.if_data_augmentation:
            self.preprocessModule = CustomPreprocessModule(configure.patch_size, configure.aug_probability)

    def __len__(self):
        # NOTE(review): hard-coded batches-per-epoch — presumably tuned for a
        # specific dataset/patch configuration; confirm against the training loop.
        return 400

    def __iter__(self):
        # Start a new epoch: reset the read cursor and reshuffle the sample order.
        self.current_sample_index = 0
        self.sample_path_list = [self.sample_path_list[i] for i in
                                 random.sample(range(0, len(self.sample_path_list)), len(self.sample_path_list))]
        return self

    def get_next_batch(self):
        """Pop ``batch_size`` patches from the queues and stack them into tensors.

        Returns an (image_batch, label_batch) pair of tensors with a leading
        batch axis and a channel axis added to each patch.
        """
        tmp_img_list = []
        tmp_label_list = []
        for i in range(self.batch_size):
            if self.configure.if_data_augmentation and self.dataset_type == "train":
                # Add batch and channel axes in front of the raw patch.
                tmp_img = np.expand_dims(np.expand_dims(self.img_queue.get(), axis=0), axis=0)
                tmp_label = np.expand_dims(np.expand_dims(self.label_queue.get(), axis=0), axis=0)
                # tmp_img, tmp_label = self.preprocessModule.process(tmp_img, tmp_label)
                # Z-normalize the image; the batch axis is squeezed off first.
                # NOTE(review): assumes ZNormalization accepts/returns a numpy
                # array here (torch.from_numpy follows) — confirm torchio version.
                tmp_img = self.normal_transform(np.squeeze(tmp_img, axis=0))
                tmp_img_list.append(torch.from_numpy(tmp_img).unsqueeze(0))
                tmp_label_list.append(torch.from_numpy(tmp_label))
            else:
                tmp_img_list.append(torch.from_numpy(self.img_queue.get()).unsqueeze(0).unsqueeze(0))
                tmp_label_list.append(torch.from_numpy(self.label_queue.get()).unsqueeze(0).unsqueeze(0))
        return torch.cat(tmp_img_list, 0), torch.cat(tmp_label_list, 0)

    def __next__(self):
        try:
            # one epoch finish
            # Queues nearly empty: either the epoch is over or more data must be read.
            if self.img_queue.qsize() < self.batch_size and self.label_queue.qsize() < self.batch_size:
                self.sample_index_mutex.acquire()
                if self.current_sample_index >= len(self.sample_path_list):
                    self.sample_index_mutex.release()
                    raise StopIteration()
                else:
                    self.sample_index_mutex.release()
                    # Kick off an asynchronous refill while serving this batch.
                    future = self.myThreadPool.submit(self.add_data_to_queue)
                    img_batch, label_batch = self.get_next_batch()
                    return img_batch, label_batch
            # The rest is enough
            elif self.img_queue.qsize() >= self.batch_size and self.label_queue.qsize() >= self.batch_size:
                img_batch, label_batch = self.get_next_batch()
                if self.img_queue.qsize() < self.add_threshold:
                    future = self.myThreadPool.submit(self.add_data_to_queue)
                    # Block until the refill finishes before handing back the batch.
                    wait([future])
                return img_batch, label_batch
            # NOTE(review): when exactly one queue holds >= batch_size items,
            # neither branch matches and this method implicitly returns None —
            # confirm whether that mixed state is reachable; callers would
            # receive None instead of a batch.
        except KeyboardInterrupt:
            print("catch the control c order!")
            self.myThreadPool.shutdown()
            # NOTE(review): after shutdown this path also falls through and
            # implicitly returns None rather than re-raising/StopIteration.

        # my_logger.info(str(len(tmp_data_list)) + " patches has been added to list")

    def add_data_to_queue(self):
        """Read one volume (under the index lock), crop it into patches, and
        push every patch onto the image/label queues (under the queue lock)."""
        self.sample_index_mutex.acquire()
        sample_index = self.current_sample_index
        self.current_sample_index = self.current_sample_index + 1
        my_logger.info(
            self.dataset_type + ": The current sample index is " + str(self.current_sample_index) + "/" + str(
                len(self.sample_path_list)))
        self.sample_index_mutex.release()

        sample_name = os.path.basename(self.sample_path_list[sample_index])
        my_logger.info(self.dataset_type + ": " + threading.current_thread().name + " start to read " + sample_name)
        tmp_image_list, tmp_label_list = get_patch_with_mask(self.sample_path_list[sample_index],
                                                             self.configure.patch_size,
                                                             self.configure.crop_method)
        self.queue_operator_mutex.acquire()
        my_logger.info(
            self.dataset_type + ": " + threading.current_thread().name + " start to put " + sample_name + " to queue")
        for tmp_img, tmp_label in zip(tmp_image_list, tmp_label_list):
            self.img_queue.put(tmp_img)
            self.label_queue.put(tmp_label)
        self.queue_operator_mutex.release()


class MyDataLoaderGenerator:
    """Builds train/valid DataLoaders from a list of data paths.

    Loaders are cached only when ``configure.if_load_all_data`` is set; the
    cached pair is rewound (when supported) and reused on subsequent calls.

    NOTE(review): ``CustomReadDataset`` is not imported anywhere in this file
    view — confirm it is provided by one of the ``utils`` modules.
    """

    def __init__(self, all_data_path_list, configure):
        self.configure = configure
        # Debug mode trims the dataset to five samples for fast iteration.
        if self.configure.if_debug:
            self.group_data_lists = all_data_path_list[0:5]
        else:
            self.group_data_lists = all_data_path_list
        self.group_num = 1
        self.train_data_loader = None
        self.valid_data_loader = None
        if self.configure.if_load_all_data:
            self.train_data_loader, self.valid_data_loader = self.put_data_loader()

    def get_data_loader(self):
        return self.put_data_loader()

    def put_data_loader(self):
        """Return the cached loader pair (rewound), or build a new random split."""
        if self.train_data_loader is not None and self.valid_data_loader is not None:
            # Fix: torch's DataLoader has no restart(), so the unconditional call
            # raised AttributeError whenever the cached loaders were torch ones.
            # Rewind only loader types that actually support it.
            if hasattr(self.train_data_loader, "restart"):
                self.train_data_loader.restart()
            if hasattr(self.valid_data_loader, "restart"):
                self.valid_data_loader.restart()
            return self.train_data_loader, self.valid_data_loader
        my_logger.info(threading.current_thread().name + ' start to run put_data_loader function')
        train_index_list = random.sample(range(0, len(self.group_data_lists)),
                                         int(len(self.group_data_lists) * self.configure.train_val_rate))
        # Set gives O(1) membership for the valid-split filter below.
        train_index_set = set(train_index_list)
        train_data_path_list = [self.group_data_lists[i] for i in train_index_list]
        valid_data_path_list = [path for i, path in enumerate(self.group_data_lists)
                                if i not in train_index_set]
        common_train_data_loader = DataLoader(
            dataset=CustomReadDataset(train_data_path_list,
                                      self.configure.crop_method,
                                      ),
            batch_size=self.configure.batch_size, shuffle=True)
        valid_data_loader = DataLoader(
            dataset=CustomReadDataset(valid_data_path_list,
                                      self.configure.crop_method,
                                      "valid", ),
            batch_size=self.configure.valid_batch_size, shuffle=True)
        return common_train_data_loader, valid_data_loader


class MyDataLoader:
    """Minimal sequential loader yielding one (img, label) pair per step.

    Each item from ``dataset`` is returned with a leading batch axis of 1
    (the effective batch size is always 1, regardless of ``batch_size``).
    """

    def __init__(self, dataset, batch_size, shuffle):
        self.DataSet = dataset
        self.stop = len(self.DataSet)
        self.index_list = [i for i in range(0, len(self.DataSet))]
        self.batch_size = batch_size  # kept for interface compatibility; batches are size 1
        self.shuffle = shuffle
        self.current = 0
        if self.shuffle:
            # Fix: the shuffle flag was previously ignored (placeholder `pass`).
            random.shuffle(self.index_list)

    def __len__(self):
        return self.stop

    def __iter__(self):
        return self

    def restart(self):
        """Rewind to the first sample; reshuffle the order when enabled."""
        self.current = 0
        if self.shuffle:
            random.shuffle(self.index_list)

    def __next__(self) -> Any:
        if self.current >= self.stop:
            raise StopIteration()
        # Fix: route access through index_list so shuffling takes effect
        # (index_list was previously built but never used).
        img, label = self.DataSet[self.index_list[self.current]]
        self.current += 1
        # The batch size must be 1: add the leading batch axis.
        return img.unsqueeze(0), label.unsqueeze(0)


class TestDataLoader:
    """Quick-test loader that always works on the first five data paths only."""

    def __init__(self, all_data_path_list, configure):
        self.group_data_lists = all_data_path_list[0:5]
        self.true_queue_size = 0
        self.current_group_index = 0
        self.configure = configure
        self.group_num = 1

    def get_data_loader(self):
        """Advance the (single-)group cursor and build a fresh loader pair."""
        self.current_group_index = int((self.current_group_index + 1) % self.group_num)
        return self.put_data_loader()

    def put_data_loader(self):
        """Randomly split the five samples into train/valid DataLoaders."""
        my_logger.info(threading.current_thread().name + ' start to run put_data_loader function')
        sample_count = len(self.group_data_lists)
        train_size = int(sample_count * self.configure.train_val_rate)
        picked = random.sample(range(0, sample_count), train_size)
        train_paths = [self.group_data_lists[idx] for idx in picked]
        valid_paths = [self.group_data_lists[idx] for idx in range(sample_count) if idx not in picked]
        train_loader = DataLoader(
            dataset=CustomReadDataset(train_paths, self.configure),
            batch_size=self.configure.batch_size,
            pin_memory=True,
            shuffle=True)
        valid_loader = DataLoader(
            dataset=CustomReadDataset(valid_paths, self.configure, "valid"),
            batch_size=self.configure.valid_batch_size,
            pin_memory=True,
            shuffle=True)
        return train_loader, valid_loader


class SingleDataLoader:
    """Data loader for small datasets that fit entirely in memory.

    All patches are read up front via ``get_all_data`` and converted to
    tensors once; ``put_data_loader`` then wraps a random train/valid split
    of the in-memory tensors in torch DataLoaders.
    """

    def __init__(self, all_data_path_list, configure):
        self.group_data_lists = all_data_path_list
        self.true_queue_size = 0
        self.current_group_index = 0
        self.data_loader_pair = None
        self.class_num = configure.class_num
        self.configure = configure
        self.group_num = 1
        self.img_list, self.label_list = get_all_data(self.group_data_lists, configure.patch_size,
                                                      configure.crop_method, configure.if_mask)
        self._convert_lists_to_tensors()

    def _convert_lists_to_tensors(self):
        """Convert img/label numpy arrays to tensors in place.

        Binary case (class_num == 1): labels get a channel axis and long dtype.
        Multi-class (class_num >= 2): labels are one-hot encoded and permuted
        to channel-first shape (C, D, H, W).
        """
        if self.class_num == 1:
            for i in range(len(self.img_list)):
                self.img_list[i] = torch.from_numpy(np.expand_dims(self.img_list[i], axis=0))
                self.label_list[i] = torch.from_numpy(np.expand_dims(self.label_list[i], axis=0)).long()
        elif self.class_num >= 2:
            for i in range(len(self.img_list)):
                self.img_list[i] = torch.from_numpy(np.expand_dims(self.img_list[i], axis=0))
                one_hot = torch.nn.functional.one_hot(torch.from_numpy(self.label_list[i]).long(),
                                                      num_classes=self.class_num)
                self.label_list[i] = one_hot.permute(3, 0, 1, 2)

    def regenerate_data_loader(self):
        """Re-read all patches from disk and rebuild the cached loader pair."""
        # Drop the old tensors before reloading to keep peak memory down.
        if self.img_list is not None:
            del self.img_list
        if self.label_list is not None:
            del self.label_list
        self.img_list, self.label_list = get_all_data(self.group_data_lists, self.configure.patch_size,
                                                      self.configure.crop_method, self.configure.if_mask)
        self._convert_lists_to_tensors()
        self.put_data_loader()

    def get_data_loader(self):
        """Return the cached (train, valid) pair, building it on first use."""
        self.current_group_index = int((self.current_group_index + 1) % self.group_num)
        if self.data_loader_pair is None:
            self.put_data_loader()
        return self.data_loader_pair

    def put_data_loader(self):
        """Build a fresh random train/valid split and cache it in data_loader_pair."""
        my_logger.info(threading.current_thread().name + ' start to run put_data_loader function')
        train_index_list = random.sample(range(0, len(self.img_list)),
                                         int(len(self.img_list) * self.configure.train_val_rate))
        # Set gives O(1) membership for the valid-split filter below.
        train_index_set = set(train_index_list)
        valid_index_list = [i for i in range(0, len(self.img_list)) if
                            i not in train_index_set]
        my_logger.info("The index of train list is:")
        # Route through the logger instead of print() so the split is recorded in the log.
        my_logger.info(str(train_index_list))
        my_logger.info("start constructing the " + "data loader of train ......")
        common_train_data_loader = DataLoader(
            dataset=CustomDataset([self.img_list[i] for i in train_index_list],
                                  [self.label_list[i] for i in train_index_list]),
            batch_size=self.configure.batch_size, shuffle=True)
        my_logger.info("start constructing the " + "data loader of valid ......")
        valid_data_loader = DataLoader(
            dataset=CustomDataset([self.img_list[i] for i in valid_index_list],
                                  [self.label_list[i] for i in valid_index_list],
                                  "valid"),
            batch_size=self.configure.valid_batch_size, shuffle=True)
        self.data_loader_pair = common_train_data_loader, valid_data_loader


class SingleTransDataLoader:
    """Data loader built on torchio for augmentation and patch sampling.

    Whole volumes are loaded into memory, wrapped as torchio Subjects, and
    served through a torchio Queue that samples uniform patches; training
    subjects additionally pass through a random augmentation pipeline.
    """

    def __init__(self, all_data_path_list, configure, type="train"):
        self.type = type
        self.group_num = 1
        self.subject_list = []
        self.group_data_lists = all_data_path_list
        self.true_queue_size = 0
        self.current_group_index = 0
        self.data_loader_pair = None
        self.class_num = configure.class_num
        self.configure = configure
        self.queue = None
        self.samples_per_volume = 2
        self.queue_max_size = len(self.group_data_lists) * self.samples_per_volume
        self.sampler = torchio.sampler.UniformSampler(patch_size=configure.patch_size)
        self.normal_transform = torchio.ZNormalization()
        # Pick at most one augmentation per subject, applied with aug_probability.
        self.transform2 = torchio.OneOf({
            torchio.RandomAffine(scales=(0.9, 1.2), degrees=15): 0.25,  # random rotation/scaling
            torchio.RandomBlur(): 0.25,  # blur with a random-size Gaussian kernel
            torchio.RandomGamma(): 0.25,  # random contrast change
            torchio.RandomBiasField(): 0.25  # add a random bias-field artifact
        }, p=configure.aug_probability)
        self.compose = torchio.Compose([self.transform2])

    def get_data_loader(self):
        """Read all volumes, build the subject list and return a patch DataLoader."""
        my_logger.info("Start to read original data")
        self.img_list, self.label_list = get_all_data(self.group_data_lists, self.configure.patch_size,
                                                      self.configure.crop_method, self.configure.if_mask)
        my_logger.info("Start to make subject list")
        for i in range(len(self.img_list)):
            img_tensor = self.normal_transform(torch.from_numpy(np.expand_dims(self.img_list[i], axis=0)))
            if self.class_num == 1:
                # Fix: np.long was removed in NumPy 1.24 — use np.int64 explicitly.
                label_tensor = torch.from_numpy(np.expand_dims(self.label_list[i], axis=0).astype(np.int64))
            else:
                # Fix: the multi-class branch previously converted the lists but
                # never built any Subject, leaving subject_list empty.
                one_hot = torch.nn.functional.one_hot(torch.from_numpy(self.label_list[i]).long(),
                                                      num_classes=self.class_num)
                label_tensor = one_hot.permute(3, 0, 1, 2)
            subject = torchio.Subject(
                train_img=torchio.ScalarImage(tensor=img_tensor),
                # LabelMap (was ScalarImage) makes spatial transforms such as
                # RandomAffine use nearest-neighbour interpolation, so label
                # values are not blurred by augmentation.
                label=torchio.LabelMap(tensor=label_tensor)
            )
            self.subject_list.append(subject)

        my_logger.info("Start to initialize queue object")
        # Only training subjects get the random augmentation pipeline.
        if self.type == "train":
            subjectDataset = torchio.SubjectsDataset(self.subject_list,
                                                     transform=self.compose)
        else:
            subjectDataset = torchio.SubjectsDataset(self.subject_list)
        self.queue = torchio.Queue(
            subjectDataset,
            self.queue_max_size,
            self.samples_per_volume,
            self.sampler,
            num_workers=2,
            verbose=True,

        )
        my_logger.info("Start to initialize pytorch data loader")
        # The length of self.queue depends on the number of train data(such as XunFei has 19 samples)
        dataloader = torch.utils.data.DataLoader(
            self.queue,
            batch_size=self.configure.batch_size,
            num_workers=0,  # The official document said that this param must be 0
            drop_last=True
        )
        return dataloader


class MultiThreadDataLoader:
    """Producer/consumer loader generator: worker threads build train/valid
    DataLoader pairs for successive sample groups and park them in bounded
    queues so the training loop never waits for data loading.
    """

    def __init__(self, all_data_path_list, configure, queue_max_size=2):
        # One worker per queue slot plus one spare for the synchronous cold-start fill.
        self.myThreadPool = ThreadPoolExecutor(max_workers=queue_max_size + 1)
        self.train_data_loader_queue = Queue(maxsize=queue_max_size)
        self.valid_data_loader_queue = Queue(maxsize=queue_max_size)
        self.group_data_lists = []
        self.group_size = 3  # The number of original data, not cropped data
        # Mirrors the queue fill level; guarded by queue_size_mutex.
        self.true_queue_size = 0
        self.current_group_index = 0
        self.queue_size_mutex = threading.Lock()
        self.group_index_mutex = threading.Lock()
        self.configure = configure
        # divide all samples into several groups
        # NOTE(review): the loop mixes self.group_size (start offset and group
        # count) with self.configure.group_size (end bound, trailing-group check)
        # — presumably these are meant to be the same value; confirm, otherwise
        # groups can be mis-sized.
        for i in range(int(np.ceil(len(all_data_path_list) / self.group_size))):
            start = i * self.group_size
            end = len(all_data_path_list) if (i + 1) * self.configure.group_size >= len(all_data_path_list) else (
                                                                                                                         i + 1) * self.configure.group_size
            self.group_data_lists.append(all_data_path_list[start:end])
        # Drop a short trailing group so every group has a uniform size.
        if len(self.group_data_lists[-1]) < configure.group_size:
            self.group_data_lists = self.group_data_lists[0:-1]
        self.group_num = len(self.group_data_lists)
        print("good")

        # if not self.train_data_loader_queue.full() and not self.valid_data_loader_queue.full():
        #     future = self.myThreadPool.submit(self.put_data_loader)

    def get_data_loader(self):
        """Fetch one (train, valid) DataLoader pair, refilling queues as needed."""
        # Cold start: nothing queued yet — build the first pair synchronously.
        if self.true_queue_size == 0 and self.train_data_loader_queue.empty():
            self.queue_size_mutex.acquire()
            self.true_queue_size += 1
            self.queue_size_mutex.release()
            self.group_index_mutex.acquire()
            self.current_group_index = int((self.current_group_index + 1) % self.group_num)
            self.group_index_mutex.release()
            self.put_data_loader(self.current_group_index)

        # Busy-wait while producers claim all slots but nothing has landed yet.
        while self.true_queue_size == self.train_data_loader_queue.maxsize and self.train_data_loader_queue.empty():
            time.sleep(0.5)
            my_logger.info("sleep 0.5s")

        self.queue_size_mutex.acquire()
        self.true_queue_size -= 1
        self.queue_size_mutex.release()
        train_data_loader = self.train_data_loader_queue.get()
        valid_data_loader = self.valid_data_loader_queue.get()
        print(threading.current_thread().name)
        # Refill: schedule background builds until the bookkeeping count is full.
        # NOTE(review): true_queue_size is incremented before the overflow check,
        # so on the break path the count can exceed maxsize without a matching
        # submit — confirm this imbalance is intended/benign.
        while self.true_queue_size < self.train_data_loader_queue.maxsize:
            self.queue_size_mutex.acquire()
            self.true_queue_size += 1
            if self.true_queue_size > self.train_data_loader_queue.maxsize:
                self.queue_size_mutex.release()
                break
            self.group_index_mutex.acquire()
            self.current_group_index = int((self.current_group_index + 1) % self.group_num)
            future = self.myThreadPool.submit(self.put_data_loader, self.current_group_index)
            self.group_index_mutex.release()
            self.queue_size_mutex.release()

        return train_data_loader, valid_data_loader

    def put_data_loader(self, current_group_index_buffer):
        """Build a train/valid DataLoader pair for one group and enqueue both.

        NOTE(review): the body reads self.current_group_index (shared, mutable)
        instead of the current_group_index_buffer snapshot it was handed, so a
        concurrent index update can make two workers build the same group —
        confirm whether the buffer parameter was meant to be used throughout.
        """
        my_logger.info(
            threading.current_thread().name + ' start to run put_data_loader function, and the group index is ' + str(
                self.current_group_index))
        train_index_list = random.sample(range(0, len(self.group_data_lists[self.current_group_index])),
                                         int(len(self.group_data_lists[
                                                     self.current_group_index]) * self.configure.train_val_rate))
        train_data_path_list = [self.group_data_lists[self.current_group_index][i] for i in train_index_list]
        valid_data_path_list = [i for i in self.group_data_lists[self.current_group_index] if
                                i not in train_data_path_list]
        my_logger.info("start constructing the " + str(current_group_index_buffer) + " data loader of train ......")
        common_train_data_loader = DataLoader(
            dataset=CustomReadDataset(train_data_path_list, self.configure),
            batch_size=self.configure.batch_size,
            # pin_memory=True,
            shuffle=True)
        my_logger.info("start constructing the " + str(current_group_index_buffer) + " data loader of valid ......")
        valid_data_loader = DataLoader(
            dataset=CustomReadDataset(valid_data_path_list, self.configure, "valid"),
            batch_size=self.configure.valid_batch_size,
            # pin_memory=True,
            shuffle=True)
        # Queue.put blocks when full, providing the back-pressure for producers.
        self.train_data_loader_queue.put(common_train_data_loader)
        self.valid_data_loader_queue.put(valid_data_loader)