import torch
import numpy as np
from PIL import Image
import torchvision.transforms as transforms
import torch.utils.data as torchdata


class MyDataset(torchdata.Dataset):
    """CIFAR-10 dataset backed by buffers serialized with ``torch.save``.

    Images are stored flat (3*32*32 values per sample) and reshaped to a
    32x32 RGB PIL image on access; labels are loaded as a long tensor with
    one row per sample.
    """

    def __init__(self, imgs_file='cifar10_file/train_imgs', labels_file='cifar10_file/train_labels_onehot',
                 labels2vec_file='cifar10_file/train_labels2vec',
                 transform=None, target_transform=None):
        """Load the image and label buffers from disk.

        Args:
            imgs_file: path to the serialized image buffer.
            labels_file: path to the serialized (one-hot) label tensor.
            labels2vec_file: unused; kept for caller compatibility.
            transform: optional callable applied to each PIL image.
            target_transform: optional callable applied to each label.
        """
        self.transform = transform
        self.target_transform = target_transform
        self.images = torch.load(imgs_file)
        self.labels = torch.load(labels_file).long()

    def __getitem__(self, index):
        """Return the ``(image, label)`` pair at ``index``.

        The flat image is reshaped to CHW, transposed to HWC and wrapped in
        a PIL image so torchvision transforms can be applied.
        """
        image = self.images[index]
        label = self.labels[index]

        # Generalization: torch.load may hand back a torch tensor instead of
        # a numpy array; the tuple-style transpose and Image.fromarray below
        # require numpy, so convert defensively (no-op for numpy input).
        if isinstance(image, torch.Tensor):
            image = image.numpy()
        image = image.reshape((3, 32, 32))
        image = image.transpose((1, 2, 0))  # CHW -> HWC for PIL
        image = Image.fromarray(image)

        if self.transform is not None:
            image = self.transform(image)
        # Bug fix: target_transform was accepted in __init__ but never used.
        if self.target_transform is not None:
            label = self.target_transform(label)
        return image, label

    def __len__(self):
        """Number of samples (rows of the label tensor)."""
        return self.labels.shape[0]

def init_voc_dataloader(args):
    """Build the train/test/database dataloaders for CIFAR-10.

    Args:
        args: namespace providing ``batch_size`` and, optionally,
            ``num_workers`` (defaults to 4 when absent, preserving the
            previous hard-coded behavior).

    Returns:
        Tuple ``(train_loader, test_loader, db_loader)``.
    """
    # Generalized: worker count was hard-coded to 4; honor args.num_workers
    # when the caller supplies it.
    num_workers = getattr(args, 'num_workers', 4)

    # Per-channel CIFAR-10 statistics used for normalization.
    mean = (0.4914, 0.4822, 0.4465)
    std = (0.2023, 0.1994, 0.2010)
    transform_train = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(mean=mean, std=std)
    ])
    transform_test = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        transforms.Normalize(mean=mean, std=std)
    ])

    train_imgs_dir = '../cifar10_file/train_imgs'
    train_labels_dir = '../cifar10_file/train_labels_onehot'
    train_labels2vec_dir = '../cifar10_file/train_labels2vec'
    test_imgs_dir = '../cifar10_file/test_imgs'
    test_labels_dir = '../cifar10_file/test_labels_onehot'
    test_labels2vec_dir = '../cifar10_file/test_labels2vec'
    database_imgs_dir = '../cifar10_file/database_imgs_all'
    database_labels_dir = '../cifar10_file/database_labels_all_onehot'
    database_labels2vec_dir = '../cifar10_file/database_labels2vec'

    # Only the training loader shuffles; test/database order must stay fixed
    # so retrieved codes line up with their labels.
    train_set = MyDataset(imgs_file=train_imgs_dir, labels_file=train_labels_dir,
                          labels2vec_file=train_labels2vec_dir, transform=transform_train)
    train_loader = torchdata.DataLoader(train_set, batch_size=args.batch_size,
                                        shuffle=True, num_workers=num_workers)
    test_set = MyDataset(imgs_file=test_imgs_dir, labels_file=test_labels_dir,
                         labels2vec_file=test_labels2vec_dir, transform=transform_test)
    test_loader = torchdata.DataLoader(test_set, batch_size=args.batch_size,
                                       shuffle=False, num_workers=num_workers)
    db_set = MyDataset(imgs_file=database_imgs_dir, labels_file=database_labels_dir,
                       labels2vec_file=database_labels2vec_dir, transform=transform_test)
    db_loader = torchdata.DataLoader(db_set, batch_size=args.batch_size,
                                     shuffle=False, num_workers=num_workers)

    return train_loader, test_loader, db_loader



# meta ---------------------------------------------------------
class Meta_Dataset(torchdata.Dataset):
    """CIFAR-10 dataset for the meta-learning pipeline.

    Same on-disk layout as ``MyDataset``: flat 3*32*32 image buffers plus a
    long label tensor, both serialized with ``torch.save``.
    """

    def __init__(self, imgs_file='cifar10_file/train_imgs', labels_file='cifar10_file/train_labels_onehot',
                 transform=None, target_transform=None):
        """Load the image and label buffers from disk.

        Args:
            imgs_file: path to the serialized image buffer.
            labels_file: path to the serialized (one-hot) label tensor.
            transform: optional callable applied to each PIL image.
            target_transform: optional callable applied to each label.
        """
        self.transform = transform
        self.target_transform = target_transform
        self.images = torch.load(imgs_file)
        self.labels = torch.load(labels_file).long()

    def __getitem__(self, index):
        """Return the ``(image, label)`` pair at ``index``.

        The flat image is reshaped to CHW, transposed to HWC and wrapped in
        a PIL image so torchvision transforms can be applied.
        """
        image = self.images[index]
        label = self.labels[index]

        # Generalization: torch.load may hand back a torch tensor instead of
        # a numpy array; the tuple-style transpose and Image.fromarray below
        # require numpy, so convert defensively (no-op for numpy input).
        if isinstance(image, torch.Tensor):
            image = image.numpy()
        image = image.reshape((3, 32, 32))
        image = image.transpose((1, 2, 0))  # CHW -> HWC for PIL
        image = Image.fromarray(image)

        if self.transform is not None:
            image = self.transform(image)
        # Bug fix: target_transform was accepted in __init__ but never used.
        if self.target_transform is not None:
            label = self.target_transform(label)
        return image, label

    def __len__(self):
        """Number of samples (rows of the label tensor)."""
        return self.labels.shape[0]

def init_voc_Meta_dataloader(args):
    """Build train/meta-train/test/database dataloaders for CIFAR-10.

    Args:
        args: namespace providing ``batch_size`` and, optionally,
            ``num_workers`` (defaults to 4 when absent, preserving the
            previous hard-coded behavior).

    Returns:
        Tuple ``(train_loader, meta_train_loader, test_loader, db_loader)``.
    """
    # Generalized: worker count was hard-coded to 4; honor args.num_workers
    # when the caller supplies it.
    num_workers = getattr(args, 'num_workers', 4)

    # Per-channel CIFAR-10 statistics used for normalization.
    mean = (0.4914, 0.4822, 0.4465)
    std = (0.2023, 0.1994, 0.2010)
    transform_train = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(mean=mean, std=std)
    ])
    transform_test = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        transforms.Normalize(mean=mean, std=std)
    ])

    # NOTE(review): these paths are relative to the working directory, while
    # init_voc_dataloader uses '../cifar10_file/...' — confirm which layout
    # the training scripts expect.
    train_imgs_dir = 'cifar10_file/train_imgs'
    train_labels_dir = 'cifar10_file/train_labels_onehot'
    meta_train_imgs_dir = 'procedure_cifar10_file/meta_train_imgs'
    meta_train_labels_dir = 'procedure_cifar10_file/meta_train_labels'
    test_imgs_dir = 'cifar10_file/test_imgs'
    test_labels_dir = 'cifar10_file/test_labels_onehot'
    database_imgs_dir = 'cifar10_file/database_imgs_all'
    database_labels_dir = 'cifar10_file/database_labels_all_onehot'

    # Train and meta-train loaders shuffle; test/database order must stay
    # fixed so retrieved codes line up with their labels.
    train_set = Meta_Dataset(imgs_file=train_imgs_dir, labels_file=train_labels_dir,
                             transform=transform_train)
    train_loader = torchdata.DataLoader(train_set, batch_size=args.batch_size,
                                        shuffle=True, num_workers=num_workers)

    meta_train_set = Meta_Dataset(imgs_file=meta_train_imgs_dir, labels_file=meta_train_labels_dir,
                                  transform=transform_train)
    meta_train_loader = torchdata.DataLoader(meta_train_set, batch_size=args.batch_size,
                                             shuffle=True, num_workers=num_workers)

    test_set = Meta_Dataset(imgs_file=test_imgs_dir, labels_file=test_labels_dir,
                            transform=transform_test)
    test_loader = torchdata.DataLoader(test_set, batch_size=args.batch_size,
                                       shuffle=False, num_workers=num_workers)
    db_set = Meta_Dataset(imgs_file=database_imgs_dir, labels_file=database_labels_dir,
                          transform=transform_test)
    db_loader = torchdata.DataLoader(db_set, batch_size=args.batch_size,
                                     shuffle=False, num_workers=num_workers)

    return train_loader, meta_train_loader, test_loader, db_loader



# Splitting the CIFAR-10 dataset (one-off scratch code, kept for reference)
# # ------  step 1: obtain train_ind and test_ind  --------------------------
# train_ind = []
# for i in range(10):
#     class_ind = []
#     train_ind.append(class_ind)


# for i in range(50000):
#     if trainset[i][1] == 0:
#         train_ind[0].append(i)
#     elif trainset[i][1] == 1:
#         train_ind[1].append(i)
#     elif trainset[i][1] == 2:
#         train_ind[2].append(i)
#     elif trainset[i][1] == 3:
#         train_ind[3].append(i)
#     elif trainset[i][1] == 4:
#         train_ind[4].append(i)
#     elif trainset[i][1] == 5:
#         train_ind[5].append(i)
#     elif trainset[i][1] == 6:
#         train_ind[6].append(i)
#     elif trainset[i][1] == 7:
#         train_ind[7].append(i)
#     elif trainset[i][1] == 8:
#         train_ind[8].append(i)
#     elif trainset[i][1] == 9:
#         train_ind[9].append(i)

# for i in range(10):
#     print('train_ind[i].len = ', len(train_ind[i]))
# torch.save(train_ind, 'cifar10_file/train_ind')


# test_ind = []
# for i in range(10):
#     class_ind = []
#     test_ind.append(class_ind)
# for i in range(10000):
#     if testset[i][1] == 0:
#         test_ind[0].append(i)
#     elif testset[i][1] == 1:
#         test_ind[1].append(i)
#     elif testset[i][1] == 2:
#         test_ind[2].append(i)
#     elif testset[i][1] == 3:
#         test_ind[3].append(i)
#     elif testset[i][1] == 4:
#         test_ind[4].append(i)
#     elif testset[i][1] == 5:
#         test_ind[5].append(i)
#     elif testset[i][1] == 6:
#         test_ind[6].append(i)
#     elif testset[i][1] == 7:
#         test_ind[7].append(i)
#     elif testset[i][1] == 8:
#         test_ind[8].append(i)
#     elif testset[i][1] == 9:
#         test_ind[9].append(i)


# for i in range(10):
#     print('test_ind[i].len = ', len(test_ind[i]))
# torch.save(test_ind, 'cifar10_file/test_ind')
# # ---------  end step 1 -------------------------------------------------