from torch.utils.data import DataLoader
from PIL import Image
from config import cfg
from torchvision import datasets, models, transforms
from utils.log_helper import *
import logging
import random
from torch.utils.data import Dataset
logger = logging.getLogger('global')


def Data_2_Train_Val(data_root):
    """Split an ImageFolder-style directory into train and validation lists.

    Args:
        data_root: root directory laid out one sub-folder per class
            (the layout ``torchvision.datasets.ImageFolder`` expects).

    Returns:
        Tuple ``(train_datasets, val_datasets, imgs, class_to_idx, classer)``:
        ``train_datasets``/``val_datasets`` are ``(path, class_index)`` lists
        split at ``cfg.DATA.RATION``; ``imgs`` is the full shuffled sample
        list; ``class_to_idx`` maps class name to index; ``classer`` is the
        ordered list of class names.
    """
    image_datasets = datasets.ImageFolder(data_root)
    class_to_idx = image_datasets.class_to_idx
    classer = image_datasets.classes
    imgs = image_datasets.imgs
    # Shuffle before splitting so train and val draw from the same distribution.
    random.shuffle(imgs)
    # Compute the split point once instead of duplicating the expression.
    split = int(len(imgs) * cfg.DATA.RATION)
    train_datasets = imgs[:split]
    val_datasets = imgs[split:]
    return train_datasets, val_datasets, imgs, class_to_idx, classer

def read_image(img_path):
    """Open ``img_path`` with PIL and convert it to RGB, retrying transient IO errors.

    The retry loop guards against spurious ``IOError`` raised by heavy/flaky
    IO. A missing file can never succeed, so ``FileNotFoundError`` (which is
    a subclass of ``IOError`` and previously caused an infinite retry loop)
    is re-raised immediately.

    Returns:
        A PIL ``Image`` in RGB mode.

    Raises:
        FileNotFoundError: if ``img_path`` does not exist.
    """
    while True:
        try:
            return Image.open(img_path).convert('RGB')
        except FileNotFoundError:
            # Retrying cannot help when the file does not exist.
            raise
        except IOError:
            print("IOError incurred when reading '{}'. Will redo. Don't worry. Just chill.".format(
                img_path))

class ImageData(Dataset):
    """Map-style dataset over a list of ``(image_path, class_index)`` samples.

    Args:
        dataset: sequence of ``(image_path, class_index)`` tuples, e.g. one
            of the lists returned by ``Data_2_Train_Val``.
        transform: optional callable applied to the loaded PIL image.
    """

    def __init__(self, dataset, transform):
        self.dataset = dataset
        self.transform = transform

    def __getitem__(self, item):
        # `label` instead of `id` — avoids shadowing the builtin `id()`.
        image_path, label = self.dataset[item]
        img = read_image(image_path)
        if self.transform is not None:
            img = self.transform(img)
        return img, label

    def __len__(self):
        return len(self.dataset)

def Data_Transforms():
    """Return the 224x224 train/val torchvision transform pipelines.

    Both splits share resize -> tensor -> normalize; the train pipeline
    additionally applies a random horizontal flip.
    """
    train_pipeline = transforms.Compose([
        transforms.Resize((224, 224), interpolation=Image.BICUBIC),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(mean=cfg.TRAIN.MEAN, std=cfg.TRAIN.STD),
    ])
    val_pipeline = transforms.Compose([
        transforms.Resize((224, 224), interpolation=Image.BICUBIC),
        transforms.ToTensor(),
        transforms.Normalize(mean=cfg.VAL.MEAN, std=cfg.VAL.STD),
    ])
    return {'train': train_pipeline, 'val': val_pipeline}

def mnist_Transforms():
    """Return minimal MNIST transforms: ``ToTensor`` only, for train and val."""
    # Train and val pipelines are identical; build one fresh Compose per split.
    return {
        split: transforms.Compose([transforms.ToTensor()])
        for split in ('train', 'val')
    }

def Data_Loader():
    """Build train/val DataLoaders from the directory at ``cfg.DATA.ROOT``.

    Returns:
        Tuple ``(imgs, data_loader, class_to_idx, classer)`` where
        ``data_loader`` is ``{'train': DataLoader, 'val': DataLoader}``
        (train shuffles each epoch, val does not), ``imgs`` is the full
        shuffled sample list, and ``class_to_idx``/``classer`` describe
        the class labels.
    """
    logger.info("build train dataset")

    train_datasets, val_datasets, imgs, class_to_idx, classer = Data_2_Train_Val(cfg.DATA.ROOT)
    # NOTE(review): the MNIST pipeline (ToTensor only) is active here; the
    # 224x224 ImageNet-style pipeline is available via Data_Transforms().
    data_transforms = mnist_Transforms()

    train_loader = DataLoader(ImageData(train_datasets, data_transforms['train']),
                              batch_size=cfg.TRAIN.BATCH_SIZE,
                              num_workers=cfg.TRAIN.NUM_WORKERS,
                              pin_memory=True,
                              shuffle=True
                              )

    val_loader = DataLoader(ImageData(val_datasets, data_transforms['val']),
                              batch_size=cfg.TRAIN.BATCH_SIZE,
                              num_workers=cfg.TRAIN.NUM_WORKERS,
                              pin_memory=True,
                              shuffle=False)

    data_loader = {'train':train_loader,'val':val_loader}

    # Replaced the former placeholder table header ("button loaded",
    # "!@#$%^&*") with meaningful statistics output.
    logger.info("=> dataset loaded")
    logger.info("Dataset statistics:")
    logger.info("---------------------------------")
    logger.info("     subset      | # images")
    logger.info("---------------------------------")
    logger.info("    train data   |    {}".format(len(train_datasets)))
    logger.info("    val data     |    {}".format(len(val_datasets)))
    logger.info("---------------------------------")

    return imgs, data_loader, class_to_idx, classer

if __name__ == '__main__':
    # Smoke test: build the loaders directly when the module is run as a script.
    imgs, data_loader, class_to_idx, classer = Data_Loader()