import torch
from torch.utils.data import Dataset
import torchvision.transforms as transforms
import torchvision.datasets as dsets
from datasets.augment.randaugment import RandomAugment
from datasets.augment.autoaugment_extra import CIFAR10Policy
from datasets.augment.cutout import Cutout
from datasets.utils_algo import generate_instance_dependent_candidate_labels
from global_var import DATA_ROOT


def load_cifar10(ds, batch_size, annotation_type=None, noisy_rate=None, partial_rate=None, has_eval_train_loader=False, device=None, split_seed=40):
    """Build CIFAR-10 data loaders with inaccurate (candidate-label) supervision.

    Splits the 50k training images 90/10 into train/valid with a fixed seed,
    generates an instance-dependent candidate-label matrix for the training
    split, and returns the loaders plus dataset metadata.

    Args:
        ds: dataset identifier forwarded to the candidate-label generator.
        batch_size: mini-batch size for the training loaders.
        annotation_type: one of 'noisy', 'noisy_asym', 'partial',
            'partial_ins', 'unreliable'. Only 'partial_ins' is implemented.
        noisy_rate: reserved for the unimplemented noisy types.
        partial_rate: reserved for the unimplemented partial type.
        has_eval_train_loader: if True, additionally return a plain
            (un-augmented) loader over the training split.
        device: device passed to the candidate-label generator.
        split_seed: RNG seed for the train/valid split (same seed for both
            views so the splits line up index-for-index).

    Returns:
        [inaccurate_train_loader, valid_loader, test_loader, dim, K]
        plus eval_train_loader appended when has_eval_train_loader is True.

    Raises:
        NotImplementedError: for recognized but unimplemented annotation types.
        ValueError: for an unrecognized annotation_type.
    """
    test_transform = transforms.Compose(
            [transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261))])

    temp_train = dsets.CIFAR10(root=DATA_ROOT, train=True, download=True, transform=transforms.ToTensor())
    temp_valid = dsets.CIFAR10(root=DATA_ROOT, train=True, transform=test_transform)
    data_size = len(temp_train)
    train_size = int(data_size * 0.9)
    # Split twice with the same generator seed: the raw-tensor view (for
    # materializing train tensors) and the normalized view (for the
    # generation/validation loaders) then select identical indices.
    train_dataset, _ = torch.utils.data.random_split(
        temp_train, [train_size, data_size - train_size],
        torch.Generator().manual_seed(split_seed))
    train_dataset_for_generation, valid_dataset = torch.utils.data.random_split(
        temp_valid, [train_size, data_size - train_size],
        torch.Generator().manual_seed(split_seed))

    # Materialize the entire training split as a single tensor batch.
    full_train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=len(train_dataset), shuffle=False, num_workers=0)
    traindata, trainlabels = next(iter(full_train_loader))
    trainlabels = trainlabels.long()

    train_loader_for_generation = torch.utils.data.DataLoader(dataset=train_dataset_for_generation, batch_size=batch_size, shuffle=False, num_workers=8)
    valid_loader = torch.utils.data.DataLoader(dataset=valid_dataset, batch_size=len(valid_dataset), shuffle=False, num_workers=8)
    test_dataset = dsets.CIFAR10(root=DATA_ROOT, train=False, transform=test_transform)
    test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=len(test_dataset), shuffle=False, num_workers=8)

    if annotation_type == "partial_ins":
        given_label_matrix, avgC = generate_instance_dependent_candidate_labels(ds, train_loader_for_generation, trainlabels, device)
        print('Average candidate num: ', avgC)
    elif annotation_type in ('noisy', 'noisy_asym', 'partial', 'unreliable'):
        # These branches were empty `pass` statements before, which let the
        # code fall through to an opaque NameError on `given_label_matrix`.
        # Fail loudly and early instead.
        raise NotImplementedError(f"annotation_type '{annotation_type}' is not implemented")
    else:
        raise ValueError(f"unknown annotation_type: {annotation_type}")

    # generate partial label dataset
    inaccurate_train_dataset = CIFAR10_Augmentention(traindata, given_label_matrix.float(), trainlabels.float())
    inaccurate_train_loader = torch.utils.data.DataLoader(dataset=inaccurate_train_dataset,
                                                        batch_size=batch_size,
                                                        shuffle=True,
                                                        num_workers=8,
                                                        # NOTE(review): the original passed prefetch_factor=8*batch_size,
                                                        # but prefetch_factor counts *batches per worker*, not samples —
                                                        # that value would queue thousands of batches per worker and
                                                        # exhaust memory. Use the standard small value.
                                                        prefetch_factor=2,
                                                        drop_last=True)
    dim = 32 * 32 * 3   # flattened image dimensionality
    K = 10              # number of CIFAR-10 classes
    return_list = [inaccurate_train_loader, valid_loader, test_loader, dim, K]
    if has_eval_train_loader:
        # Plain (un-augmented) view over the same training tensors.
        eval_train_dataset = CIFAR10(traindata, given_label_matrix.float(), trainlabels.float())
        eval_train_loader = torch.utils.data.DataLoader(dataset=eval_train_dataset,
                                                                batch_size=batch_size,
                                                                shuffle=True,
                                                                num_workers=8,
                                                                drop_last=False)
        return_list.append(eval_train_loader)
    return return_list


class CIFAR10_Augmentention(Dataset):
    """Training view of CIFAR-10 for candidate-label learning.

    Each item yields a list of image views — one plainly normalized view
    followed by `weak_transform_times` weakly and `strong_transform_times`
    strongly augmented views — together with the candidate-label row, the
    true label, and the sample index.
    """

    def __init__(self, images, given_label_matrix, true_labels, strong_transform_times=2, weak_transform_times=0):
        self.images = images
        # Candidate-label matrix: one row of user-defined (partial) labels
        # per training image.
        self.given_label_matrix = given_label_matrix
        self.true_labels = true_labels
        self.weak_transform_times = weak_transform_times
        self.strong_transform_times = strong_transform_times

        # Plain view: normalize only.
        self.transform = transforms.Compose([
            transforms.ToPILImage(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261)),
        ])

        def _augmentation_pipeline():
            # flip + crop + cutout + AutoAugment policy, then normalize.
            # NOTE(review): inputs are expected to be tensors here (Cutout runs
            # before ToPILImage) — confirm against the caller's data format.
            return transforms.Compose([
                transforms.RandomHorizontalFlip(),
                transforms.RandomCrop(32, 4, padding_mode='reflect'),
                Cutout(n_holes=1, length=16),
                transforms.ToPILImage(),
                CIFAR10Policy(),
                transforms.ToTensor(),
                transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
            ])

        # The weak and strong pipelines are currently configured identically;
        # they are kept as separate attributes so they can diverge later.
        self.weak_transform = _augmentation_pipeline()
        self.strong_transform = _augmentation_pipeline()

    def __len__(self):
        return len(self.true_labels)

    def __getitem__(self, index):
        raw_image = self.images[index]

        views = [self.transform(raw_image)]
        for _ in range(self.weak_transform_times):
            views.append(self.weak_transform(raw_image))
        for _ in range(self.strong_transform_times):
            views.append(self.strong_transform(raw_image))

        return views, self.given_label_matrix[index], self.true_labels[index], index

class CIFAR10(Dataset):
    """Un-augmented evaluation view over pre-loaded CIFAR-10 tensors.

    Each item yields the normalized image, its candidate-label row
    (user-defined partial labels), the true label, and the sample index.
    """

    def __init__(self, images, given_label_matrix, true_labels):
        self.images = images
        # user-defined label (partial labels), one row per image
        self.given_label_matrix = given_label_matrix
        self.true_labels = true_labels
        # Normalize only — no augmentation on this view.
        self.transform = transforms.Compose([
            transforms.ToPILImage(),
            transforms.ToTensor(),
            transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261)),
        ])

    def __len__(self):
        return len(self.true_labels)

    def __getitem__(self, index):
        normalized = self.transform(self.images[index])
        return normalized, self.given_label_matrix[index], self.true_labels[index], index
    
if __name__ == "__main__":
    # No standalone behavior: this module is meant to be imported for
    # load_cifar10(); the guard exists only so importing has no side effects.
    pass