# import numpy as np
# from torchvision.datasets import CIFAR10, CIFAR100
# from torchvision.transforms import transforms
# from torch.utils.data import DataLoader, Dataset
# import torchvision


# cifar10_mean = (0.4914, 0.4822, 0.4465)
# cifar10_std = (0.2471, 0.2435, 0.2616)

# cifar100_mean = (0.5070751592371323,
#                  0.48654887331495095, 0.4409178433670343)
# cifar100_std = (0.2673342858792401, 0.2564384629170883,
#                 0.27615047132568404)

# cifar10_train_transform = transforms.Compose([
#     transforms.RandomResizedCrop(32, scale=(0.2, 1.)),
#     transforms.RandomHorizontalFlip(),
#     transforms.RandomApply([
#         transforms.ColorJitter(0.4, 0.4, 0.4, 0.1)  # not strengthened
#     ], p=0.8),
#     transforms.RandomGrayscale(p=0.2),
#     transforms.ToTensor(),
#     transforms.Normalize(cifar10_mean, cifar10_std),
#     # transforms.RandomErasing(p=0.25)
# ])
# cifar10_test_transform = transforms.Compose([
#     transforms.ToTensor(),
#     transforms.Normalize(cifar10_mean, cifar10_std)
# ])

# cifar100_train_transform = transforms.Compose([
#     transforms.RandomResizedCrop(32, scale=(0.2, 1.)),
#     transforms.RandomHorizontalFlip(),
#     transforms.RandomApply([
#         transforms.ColorJitter(0.4, 0.4, 0.4, 0.1)  # not strengthened
#     ], p=0.8),
#     transforms.RandomGrayscale(p=0.2),
#     transforms.ToTensor(),
#     transforms.Normalize(cifar100_mean, cifar100_std),
#     # transforms.RandomErasing(p=0.25)
# ])
# cifar100_test_transform = transforms.Compose([
#     transforms.ToTensor(),
#     transforms.Normalize(cifar100_mean, cifar100_std)
# ])

# class RandomMaskingGenerator:
#     def __init__(self, input_size, mask_ratio):
#         if not isinstance(input_size, tuple):
#             input_size = (input_size,) * 2

#         self.height, self.width = input_size

#         self.num_patches = self.height * self.width
#         self.num_mask = int(mask_ratio * self.num_patches)

#     def __repr__(self):
#         repr_str = "Mask: total patches {}, mask patches {}".format(
#             self.num_patches, self.num_mask
#         )
#         return repr_str

#     def __call__(self):
#         mask = np.hstack([
#             np.zeros(self.num_patches - self.num_mask),
#             np.ones(self.num_mask),
#         ])
#         np.random.shuffle(mask)
#         return mask # [196]

# class DataAugmentationForMAE(object):
#     def __init__(self,input_size=224, window_size=224//7,mask_ratio=0.75,mean=cifar10_mean,std=cifar10_std):
#         self.transform = transforms.Compose([
#             transforms.RandomResizedCrop(input_size),
#             transforms.ToTensor(),
#             transforms.Normalize(mean=mean,std=std)
#         ])
#         self.masked_position_generator = RandomMaskingGenerator(input_size=window_size, mask_ratio=mask_ratio)

#     def __call__(self, image):
#         return self.transform(image), self.masked_position_generator()

#     def __repr__(self):
#         repr = "(DataAugmentationForMAE,\n"
#         repr += "  transform = %s,\n" % str(self.transform)
#         repr += "  Masked position generator = %s,\n" % str(self.masked_position_generator)
#         repr += ")"
#         return repr


# def get_mae_loader(args):
#     if args.dataset == 'cifar10':
#         trainset = torchvision.datasets.CIFAR10(root=args.data_path, train=True, download=True,
#                                                 transform=DataAugmentationForMAE(input_size=args.input_size, window_size=args.window_size,
#                                                 mask_ratio= args.mask_ratio,mean=cifar10_mean,std=cifar10_std))
#         trainloader = DataLoader(
#             trainset, batch_size=args.batch_size, drop_last=True, shuffle=True, num_workers=args.num_workers)
#         return trainloader

#     elif args.dataset == 'cifar100':
#         trainset = torchvision.datasets.CIFAR100(root=args.data_path, train=True, download=True,
#                                                  transform=DataAugmentationForMAE(input_size=args.input_size, window_size=args.window_size,
#                                                 mask_ratio=args.mask_ratio,mean=cifar100_mean,std=cifar100_std))

#         trainloader = DataLoader(
#             trainset, batch_size=args.batch_size, drop_last=True, shuffle=True, num_workers=args.num_workers)

#         return trainloader


# if __name__=="__main__":
#     # x=np.random.random(3,224,224)
#     y=RandomMaskingGenerator(224//7,0.75)
#     a=y()
#     print(a)
#     print(a.shape)