from torch.utils.data import Dataset
import numpy as np
from pathlib import Path
import pickle

def unpickle(file):
    """Deserialize one CIFAR batch file.

    The official CIFAR batches were pickled under Python 2, so
    ``encoding='bytes'`` is required and all dict keys come back as
    ``bytes`` (e.g. ``b'data'``, ``b'labels'``).

    Args:
        file: path to the pickled batch file.

    Returns:
        The unpickled dict with ``bytes`` keys.
    """
    # NOTE(security): pickle.load executes arbitrary code on malicious
    # input — only use on trusted CIFAR files.
    with open(file, 'rb') as fo:
        batch = pickle.load(fo, encoding='bytes')  # renamed from `dict` (shadowed builtin)
    return batch

class CIFARDataset(Dataset):
    """CIFAR dataset backed by the original pickled batch files.

    Expects the standard CIFAR-10 layout under ``set_path``:
    ``batches.meta``, ``data_batch_1`` .. ``data_batch_{block_num}``
    and ``test_batch``.
    """

    def __init__(self, set_path, block_num=5, mode="train"):
        """
        Args:
            set_path: directory containing the CIFAR batch files.
            block_num: number of training batches to load (CIFAR-10 ships 5).
            mode: ``"train"`` loads the training batches; any other value
                loads the test batch (kept permissive for backward
                compatibility with existing callers).
        """
        super().__init__()

        self.base_path = Path(set_path)
        self.block_num = block_num

        # Human-readable class names, decoded from bytes.
        self.meta = self._load_meta()

        if mode == "train":
            self.data, self.labels = self._load_train_data()
        else:
            self.data, self.labels = self._load_test_data()

    @staticmethod
    def _unpickle(path):
        """Load one Python-2-pickled CIFAR batch (bytes keys)."""
        with open(path, 'rb') as fo:
            return pickle.load(fo, encoding='bytes')

    def _load_meta(self):
        """Return the label names as a list of decoded ``str``."""
        raw = self._unpickle(self.base_path / "batches.meta")[b'label_names']
        return [name.decode() for name in raw]

    def _load_train_data(self):
        """Stack all training batches.

        Returns:
            ``(data, labels)`` where ``data`` is an ``(N, 3072)`` array and
            ``labels`` is an ``(N,)`` ndarray.
        """
        datas = []
        labels = []
        for i in range(self.block_num):
            batch = self._unpickle(self.base_path / "data_batch_{}".format(i + 1))
            datas.append(batch[b"data"])
            labels.append(batch[b"labels"])

        return np.vstack(datas), np.concatenate(labels)

    def _load_test_data(self):
        """Load the test batch.

        Returns:
            ``(data, labels)`` with ``labels`` as an ndarray — the original
            returned a plain list here but an ndarray in train mode, so
            ``__getitem__`` yielded inconsistent label types between modes.
        """
        batch = self._unpickle(self.base_path / "test_batch")
        return batch[b"data"], np.asarray(batch[b"labels"])

    def __len__(self):
        # One flat 3072-byte row per image.
        return self.data.shape[0]

    def __getitem__(self, idx):
        """Return ``(image, label)``: image is float32 CHW in ``[0, 1]``.

        Converts to float32 before dividing, avoiding the original's
        intermediate float64 array.
        """
        img = self.data[idx].reshape(3, 32, 32).astype(np.float32) / 255.0
        return img, self.labels[idx]