import os
from glob import glob
from pathlib import Path

import cv2
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from torchvision.transforms import transforms


class CasiaDataset(Dataset):
    """CASIA face anti-spoofing dataset yielding paired RGB / MSRCR images.

    Expects RGB frames at ``<data_root>/<mode>/<class>/<name>.jpg`` where
    ``<class>`` is ``attack`` (label 1) or anything else (label 0), and a
    parallel MSRCR tree next to ``data_root`` at
    ``<data_root>/../casia_allframe_init_jpg_facelocate_MSRCR/<mode>/<class>/<name>.jpg``.
    Samples whose MSRCR counterpart is missing on disk are skipped.
    """

    def __init__(self, data_root, mode='train', transform=None):
        """
        Args:
            data_root: Root directory of the RGB image tree.
            mode: Sub-directory of ``data_root`` to index (e.g. 'train', 'test').
            transform: Optional callable applied to both the RGB and MSR
                images; when ``None`` the raw HxWx3 uint8 arrays are returned.
        """
        self.data_root = data_root
        self.mode = mode
        self.transform = transform
        self.data = []  # list of (rgb_path, msr_path, label) triples
        self._prepare_data()

    def _prepare_data(self):
        """Index all (rgb_path, msr_path, label) triples under data_root/mode."""
        image_paths = glob(os.path.join(self.data_root, '{}/*/*.jpg'.format(self.mode)))
        for image in image_paths:
            image_path = Path(image)
            parents = image_path.parents
            image_class = parents[0].name  # class folder name, e.g. 'attack'
            # The MSRCR tree lives beside data_root: for
            # data_root/mode/cls/name.jpg, parents[3] is data_root's parent.
            image_msr_path = (parents[3] / 'casia_allframe_init_jpg_facelocate_MSRCR'
                              / parents[1].name / parents[0].name / image_path.name)
            # Keep only samples that have both modalities on disk.
            if image_msr_path.exists():
                label = 1 if image_class == 'attack' else 0
                self.data.append((image_path, image_msr_path, label))

    def _load_image(self, path):
        """Read one image as RGB and apply the transform when configured."""
        image = cv2.imread(str(path))
        if image is None:
            # cv2.imread silently returns None on failure; fail loudly here
            # instead of letting cvtColor raise an opaque error later.
            raise FileNotFoundError('Could not read image: {}'.format(path))
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        # Guard against the transform=None default (the original crashed here).
        if self.transform is not None:
            image = self.transform(image)
        return image

    def __getitem__(self, index):
        """Return one sample dict: {'rgb': image, 'msr': image, 'label': 0/1}."""
        image_rgb_path, image_msr_path, label = self.data[index]
        return {'rgb': self._load_image(image_rgb_path),
                'msr': self._load_image(image_msr_path),
                'label': label}

    def __len__(self):
        """Number of indexed samples."""
        return len(self.data)


def get_casia_dataloader(model, args, pin_memory=True):
    """Build train and validation DataLoaders for the CASIA dataset.

    Args:
        model: Unused; kept for interface compatibility with other
            dataloader factories in the project.
        args: Namespace providing ``data_dir`` (dataset root) and
            ``batch_size``.
        pin_memory: Forwarded to both DataLoaders.

    Returns:
        Tuple ``(train_loader, val_loader)``.

    Raises:
        FileNotFoundError: If ``args.data_dir`` does not exist.
    """
    # Raise instead of assert so the check survives `python -O`.
    if not os.path.exists(args.data_dir):
        raise FileNotFoundError('data_dir does not exist: {}'.format(args.data_dir))
    transform = transforms.Compose([
        transforms.ToPILImage(),
        transforms.Resize((128, 128)),
        transforms.RandomHorizontalFlip(),
        transforms.RandomRotation((0, 20)),
        transforms.CenterCrop((114, 114)),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    ])
    train_dataset = CasiaDataset(data_root=args.data_dir, mode='train', transform=transform)
    # Honor the pin_memory parameter (it was previously hard-coded to True).
    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True,
                              num_workers=0, drop_last=True, pin_memory=pin_memory)
    test_dataset = CasiaDataset(data_root=args.data_dir, mode='test', transform=transform)
    # The original hard-coded a 41864-sample test split; cap it so smaller
    # datasets do not crash random_split with a negative remainder.
    n_test = min(41864, len(test_dataset))
    test_dataset, val_dataset = torch.utils.data.random_split(
        test_dataset, [n_test, len(test_dataset) - n_test])
    val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=True,
                            num_workers=1, drop_last=True, pin_memory=pin_memory)
    # NOTE(review): the original function went on to build OuluNpu loaders
    # from undefined helpers (create_train_transform, OuluNpuDataset), which
    # overwrote the CASIA loaders above and could never run (NameError).
    # That dead code has been removed.
    return train_loader, val_loader