"""
Use the default sampler:
    if shuffle:
        sampler = RandomSampler(dataset, generator=generator)
    else:
        sampler = SequentialSampler(dataset)

Use the default 'DataLoader'.
"""


from torch.utils.data import DataLoader, TensorDataset
from torch import tensor
import torch
import numpy as np


class Loader(object):
    """Wrap a 5-way data split into PyTorch DataLoaders or plain numpy arrays.

    Parameters
    ----------
    dataset : tuple
        Exactly five array-likes, in the order
        ``(x_train, y_train, x_valid, y_valid, x_test)``.
    batch_size : int
        Mini-batch size for the train/validation loaders (tensor mode only).
    shuffle : bool
        Whether the training loader shuffles each epoch; the validation
        loader never shuffles.
    tensor_loader : bool
        If True, convert every split to float32 tensors and expose
        ``self.loader = (train_loader, valid_loader)``.
        If False, expose the squeezed numpy arrays directly as
        ``self.loader = (x_train, y_train, x_valid, y_valid, x_test)``.
    """

    dataset: tuple
    batch_size: int
    shuffle: bool

    def __init__(self, dataset: tuple, batch_size: int = 12, shuffle: bool = False, tensor_loader: bool = True) -> None:
        # Store the configuration so the class-level annotations above
        # correspond to real instance attributes.
        self.dataset = dataset
        self.batch_size = batch_size
        self.shuffle = shuffle

        if tensor_loader:
            # Convert every split (labels included) to float32 tensors.
            (self.x_train, self.y_train,
             self.x_valid, self.y_valid,
             self.x_test) = (tensor(item, dtype=torch.float32) for item in dataset)
            self.train_loader = DataLoader(TensorDataset(self.x_train, self.y_train), batch_size=batch_size, shuffle=shuffle)
            # Validation is never shuffled so evaluation order is reproducible.
            self.valid_loader = DataLoader(TensorDataset(self.x_valid, self.y_valid), batch_size=batch_size, shuffle=False)
            self.loader = (self.train_loader, self.valid_loader)
        else:
            # Drop singleton dimensions from every split; x_test is then
            # flattened into a single row (assumes one test sample — the
            # original code hard-codes this shape).
            (self.x_train, self.y_train,
             self.x_valid, self.y_valid,
             self.x_test) = (np.squeeze(item) for item in dataset)
            self.x_test = self.x_test.reshape(1, -1)
            self.loader = (self.x_train, self.y_train, self.x_valid, self.y_valid, self.x_test)
