import random
import time

import numpy as np
import torch
import os
import pickle
import torchvision.transforms as T
random.seed(1)  # fix the global RNG seed so any random behavior is reproducible
a=0  # NOTE(review): `a` appears unused anywhere in this file — confirm before removing (may be imported elsewhere)
def unpickle(file):
    """Load one pickled CIFAR batch file and return its dict.

    The CIFAR archives were pickled under Python 2, so ``encoding='bytes'``
    is required; keys in the returned dict are therefore ``bytes``
    (e.g. ``b'data'``, ``b'labels'``).
    """
    with open(file, 'rb') as fo:
        # renamed from `dict` to avoid shadowing the builtin
        batch = pickle.load(fo, encoding='bytes')
    return batch


class CIFAR10(torch.utils.data.Dataset):
    """CIFAR-10 dataset read directly from the original pickled python batches.

    Images are kept as 32x32x3 HWC uint8 numpy arrays; labels are ints
    starting from 0.
    """

    def __init__(self, transform=None, cifar_dir='datasets/cifar10/cifar-10-batches-py/', data_type='train'):
        """Load all images/labels for the requested split into memory.

        Args:
            transform: optional callable applied to each image in __getitem__.
            cifar_dir: directory containing the extracted cifar-10-batches-py files.
            data_type: 'train' (data_batch_1..5) or 'test' (test_batch).

        Raises:
            ValueError: if data_type is not 'train' or 'test'.
        """
        self.labels = []
        self.images = []
        self.cifar_dir = cifar_dir
        if data_type == 'train':
            # training split is spread over five pickled batch files
            batch_names = ['data_batch_{}'.format(idx) for idx in range(1, 6)]
        elif data_type == 'test':
            batch_names = ['test_batch']
        else:
            raise ValueError("supported data_type: ['train', 'test']")
        for name in batch_names:
            self._load_batch(os.path.join(self.cifar_dir, name))
        self.N = len(self.images)
        self.transform = transform

    def _load_batch(self, data_file):
        """Append every image/label from one pickled batch file (shared by both splits)."""
        d = unpickle(data_file)
        batch_imgs = d[b'data']
        batch_labels = d[b'labels']
        for i in range(batch_imgs.shape[0]):
            # each row is a flat 3072-byte CHW vector; reshape then convert to HWC
            img = np.reshape(batch_imgs[i], (3, 32, 32)).transpose(1, 2, 0)
            self.images.append(img)
            self.labels.append(batch_labels[i])

    def __getitem__(self, index):
        index = index % self.N  # wrap out-of-range indices instead of raising
        img = self.images[index]
        if self.transform is not None:
            img = self.transform(img)
        return img, self.labels[index]  # labels start from 0

    def __len__(self):
        return self.N


class CIFAR100(torch.utils.data.Dataset):
    """CIFAR-100 dataset (fine labels) read from the pickled python archive.

    Images are kept as 32x32x3 HWC uint8 numpy arrays; labels are the 100
    fine-grained class ids, starting from 0.
    """

    def __init__(self, transform=None, cifar_dir='datasets/cifar100/cifar-100-python/', data_type='train'):
        """Load all images/fine labels for the requested split into memory.

        Args:
            transform: optional callable applied to each image in __getitem__.
            cifar_dir: directory containing the extracted cifar-100-python files.
            data_type: 'train' or 'test' (also the batch filename).

        Raises:
            ValueError: if data_type is not 'train' or 'test'.
        """
        if data_type not in ('train', 'test'):
            # raise instead of assert: asserts vanish under `python -O`,
            # and this matches CIFAR10's error handling
            raise ValueError("supported data_type: ['train', 'test']")
        self.labels = []
        self.images = []
        self.cifar_dir = cifar_dir

        data_file = os.path.join(self.cifar_dir, data_type)
        d = unpickle(data_file)
        batch_imgs = d[b'data']
        batch_labels = d[b'fine_labels']
        for i in range(batch_imgs.shape[0]):
            # each row is a flat 3072-byte CHW vector; reshape then convert to HWC
            img = np.reshape(batch_imgs[i], (3, 32, 32)).transpose(1, 2, 0)
            self.images.append(img)
            self.labels.append(batch_labels[i])
        self.N = len(self.images)
        self.transform = transform

    def __getitem__(self, index):
        index = index % self.N  # wrap out-of-range indices instead of raising
        img = self.images[index]
        if self.transform is not None:
            img = self.transform(img)
        return img, self.labels[index]  # labels start from 0

    def __len__(self):
        return self.N

# def apply_transf_batach(images, transf, max_workers=8):
#     images = np.array(images)
#     from concurrent.futures.thread import ThreadPoolExecutor
#     def process_image(image):
#         new_img = transf(image)
#         return new_img
#
#     with ThreadPoolExecutor(max_workers=max_workers) as executor:
#         # submit all image-processing tasks to the thread pool
#         processed_images = list(executor.map(process_image, images))
#
#     return torch.stack(processed_images)
#
#
# def apply_transf(images, transf):
#     images = np.array(images)
#     new_imgs = torch.tensor([])
#     for i in range(0, len(images)):
#         new_imgs = torch.cat((new_imgs, transf(images[i]).unsqueeze(0)), dim=0)
#     return new_imgs
#
# if __name__ == '__main__':
#     # import clip
#     # label_model, clip_transf = clip.load('ViT-B/32', 'cpu')
#
#     # img_loader = ImageNet(id_type='cifar100', imagenet_dir='../../datasets/unlabeled_datasets/Imagenet64/')
#     # CIFAR10(cifar_dir='D:/PycharmProjects/datasets/cifar10/cifar-10-batches-py/', data_type='test')
#     # CIFAR100(cifar_dir='D:/PycharmProjects/datasets/cifar100/cifar-100-python/', data_type='train')
#     # CIFAR100(cifar_dir='D:/PycharmProjects/datasets/cifar100/cifar-100-python/', data_type='test')
#
#     # transform_temp = T.Compose([T.ToTensor(), T.ToPILImage(),])
#     # transform_temp = T.Compose(transform_temp.transforms + clip_transf.transforms)
#     train_dataset = CIFAR10(cifar_dir='D:\\PycharmProjects\\datasets\\cifar10\\cifar-10-batches-py', transform=None,
#                             data_type='train')
#     transform_temp = T.Compose([T.ToTensor(),])
#     train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=10)
#     for i, (x, y) in enumerate(train_loader):
#         x1 = apply_transf(x, transform_temp)
#         x2 = apply_transf_batach(x, transform_temp)

