import torch.utils.data as data
from PIL import Image
from torchvision.transforms import Compose
import os
import numpy as np

class SingleDataSet(data.Dataset):
    """Minimal list-backed dataset base class.

    Subclasses store arbitrary items in ``self.data`` and must override
    ``__getitem__`` to define how a stored item becomes a sample.
    """

    def __init__(self):
        # Backing store for dataset items; subclasses add via add_data().
        self.data = []

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        # Bug fix: the original *returned* the NotImplementedError class
        # instead of raising it, so indexing silently yielded an exception
        # class object. Abstract methods must raise.
        raise NotImplementedError

    def set_data(self, new_data: list):
        """Replace the entire backing list with *new_data* (kept by reference)."""
        self.data = new_data

    def add_data(self, obj):
        """Append a single item to the dataset."""
        self.data.append(obj)

    def clear_all(self):
        """Drop all stored items."""
        self.data = []


class SingleImageDataSet(SingleDataSet):
    """Dataset of image file paths; images are loaded lazily on access
    and passed through a torchvision transform pipeline."""

    def __init__(self):
        super(SingleImageDataSet, self).__init__()
        # Identity pipeline by default; replace via set_image_transformer().
        self.transformers = Compose([])

    def push_image_folder(self, folder_path: str):
        """Recursively add every file found under *folder_path*.

        Fix: ``os.walk`` yields directories and files in arbitrary,
        filesystem-dependent order; sorting both makes the dataset ordering
        deterministic across runs and machines.
        NOTE(review): no extension filtering is done — a non-image file will
        only fail later inside read_image_data; confirm folders hold images.
        """
        for root, dirs, files in os.walk(folder_path):
            dirs.sort()  # in-place sort steers os.walk's descent order
            for file_name in sorted(files):
                self.push_single_image(os.path.join(root, file_name))

    def push_single_image(self, image_path: str):
        """Add one image path (the image itself is read lazily)."""
        self.add_data(image_path)

    def read_image_data(self, image_path: str):
        """Open *image_path* with PIL and force 3-channel RGB."""
        return Image.open(image_path).convert('RGB')

    def __getitem__(self, index):
        image_path = self.data[index]
        image_data = self.read_image_data(image_path)
        return self.transformers(image_data)

    def set_image_transformer(self, transformer: Compose):
        """Replace the transform pipeline applied in __getitem__."""
        self.transformers = transformer


class SingleTextDataSet(SingleDataSet):
    """Dataset of raw text strings, returned unchanged on indexing."""

    def push_text(self, new_text: str):
        """Store a single text sample."""
        self.add_data(new_text)

    def push_texts(self, texts: list):
        """Store every text sample from *texts*, preserving order."""
        for text in texts:
            self.add_data(text)

    def __getitem__(self, index):
        return self.data[index]


class SingleNumberDataSet(SingleDataSet):
    """Dataset of plain numbers, returned unchanged on indexing."""

    def push_number(self, number):
        """Store a single numeric sample."""
        self.add_data(number)

    def __getitem__(self, index):
        return self.data[index]


class MultiDataSet(data.Dataset):
    """Zips several equal-length SingleDataSets into one dataset.

    Indexing returns a list holding the index-th item of each appended
    dataset, in append order.
    """

    def __init__(self):
        self._datasets = []
        self.__length = 0  # shared length of every appended dataset (0 = empty)

    def append(self, new_dataset: SingleDataSet):
        """Add *new_dataset*; its length must match datasets already added.

        Raises:
            LengthNotMatchError: if a previously appended dataset has a
                different length.
        """
        # Fix: use len() rather than calling __len__ directly, and drop the
        # unreachable ``return None`` that followed the raise in the original.
        new_length = len(new_dataset)
        if self.__length > 0 and self.__length != new_length:
            raise LengthNotMatchError(new_length, self.__length)
        self.__length = new_length
        self._datasets.append(new_dataset)

    def __len__(self):
        return self.__length

    def __getitem__(self, index):
        # One entry per child dataset, in the order they were appended.
        return [dataset[index] for dataset in self._datasets]


class LengthNotMatchError(AttributeError):
    """Raised when a dataset appended to MultiDataSet has a mismatched length.

    Kept as an AttributeError subclass for backward compatibility with any
    existing ``except AttributeError`` handlers.
    """

    def __init__(self, new_length: int, old_length: int):
        super().__init__()
        self.new_length = new_length  # length of the offending dataset
        self.old_length = old_length  # length already established in MultiDataSet

    def __str__(self):
        # Bug fix: the original printed the message and implicitly returned
        # None, which made str(exc) raise TypeError; return the string instead.
        return "新添加的DataSet长度(%d)与MultiDataSet中已有DataSet长度(%d)不一致" % (self.new_length, self.old_length)


class SingleOneHotDataSet(SingleDataSet):
    """Dataset of one-hot vectors of a fixed dimension."""

    def __init__(self, dimension: int):
        # Bug fix: the original skipped super().__init__(), so self.data was
        # never created and add_item()/len() raised AttributeError.
        super().__init__()
        self.dimension = dimension  # length of every one-hot vector

    def __getitem__(self, index):
        return self.data[index]

    def add_item(self, number: int):
        """Encode *number* as a one-hot list and store it."""
        self.add_data(self.__one_hot__(number))

    def __one_hot__(self, number: int):
        """Return a list of ``dimension`` ints: 1 at position *number*, 0 elsewhere."""
        return [1 if index == number else 0 for index in range(self.dimension)]
