from torch.utils.data import DataLoader, Subset
from typing import Optional, Any, Dict, Callable
import pytorch_lightning as pl
import torch
from DiffOGMP.utils.common_utils import instantiate_from_config
from DiffOGMP.data.SCANDDataset import RandomRotateWithP
from torchvision.transforms import transforms


# Default training-time image augmentation pipeline.
# RandomRotateWithP is project-local — presumably rotates by up to 30
# degrees with probability 0.3 (TODO confirm against SCANDDataset).
# Applied to the train split only, via SCANDImgDataModule.setup().
img_aug = transforms.Compose([
    RandomRotateWithP(30, 0.3),
    transforms.RandomVerticalFlip(),
    transforms.RandomHorizontalFlip()
])

class SCANDImgDataModule(pl.LightningDataModule):
    """LightningDataModule for SCAND image datasets.

    Each split (train/validation/test/predict) is described by an
    ``instantiate_from_config``-style config dict. A ``*_dataloader``
    method is attached only for the splits that were configured, so
    Lightning skips the unconfigured ones.

    Args:
        batch_size: Batch size used by every dataloader.
        num_workers: DataLoader worker processes. ``None`` (the default)
            falls back to ``batch_size * 2``; an explicit ``0`` means
            "load in the main process" and is honoured as-is.
        train: Optional config dict for the training split.
        validation: Optional config dict for the validation split.
        test: Optional config dict for the test split.
        predict: Optional config dict for the predict split.
        train_max_n_samples: If given, the training set is truncated to
            its first ``train_max_n_samples`` items via ``Subset``.
        augmentation: Transform applied to the *train* split only
            (handed to the dataset's ``set_aug``); defaults to the
            module-level ``img_aug`` pipeline. Pass ``None`` to disable.
    """

    def __init__(self,
                 batch_size: int,
                 num_workers: Optional[int] = None,
                 train: Optional[Dict[str, Any]] = None,
                 validation: Optional[Dict[str, Any]] = None,
                 test: Optional[Dict[str, Any]] = None,
                 predict: Optional[Dict[str, Any]] = None,
                 train_max_n_samples: Optional[int] = None,
                 # Fix: annotate with torch.Tensor (the type); torch.tensor
                 # is the factory function, not a type.
                 augmentation: Optional[Callable[..., torch.Tensor]] = img_aug):
        super().__init__()
        self.batch_size = batch_size
        self.dataset_configs = {}
        # Fix: compare with `is not None` so an explicit num_workers=0
        # (main-process loading) is not silently replaced by the
        # batch_size * 2 fallback — `0` is falsy but valid.
        self.num_workers = num_workers if num_workers is not None else batch_size * 2
        self.train_max_n_samples = train_max_n_samples
        self.augmentation = augmentation
        # Attach a dataloader method only for the splits that were configured.
        if train is not None:
            self.dataset_configs['train'] = train
            self.train_dataloader = self._train_dataloader
        if validation is not None:
            self.dataset_configs['validation'] = validation
            self.val_dataloader = self._val_dataloader
        if test is not None:
            self.dataset_configs['test'] = test
            self.test_dataloader = self._test_dataloader
        if predict is not None:
            self.dataset_configs['predict'] = predict
            self.predict_dataloader = self._predict_dataloader
        # NOTE: printing triggers a one-time setup() (see __str__) so the
        # summary can report dataset sizes at construction time.
        print(self)

    def prepare_data(self) -> None:
        """No-op: the data is expected to already exist on disk."""
        pass

    def setup(self, stage: Optional[str] = None) -> None:
        """Instantiate every configured dataset split and wire the
        augmentation pipeline into the training split (if any)."""
        self.datasets = {name: instantiate_from_config(cfg)
                         for name, cfg in self.dataset_configs.items()}
        if self.augmentation is not None and self.datasets.get('train') is not None:
            self.datasets['train'].set_aug(self.augmentation)

    def __str__(self) -> str:
        # Fix: only build the datasets when setup() has not run yet.
        # Previously every print of the module unconditionally re-ran
        # prepare_data()/setup(), re-instantiating every dataset split.
        if not hasattr(self, 'datasets'):
            self.prepare_data()
            self.setup()
        return ''.join(
            f'{name} has {len(dset)} images, augmentation {self.augmentation} applied\n'
            for name, dset in self.datasets.items())

    def _train_dataloader(self):
        """Shuffled training loader; optionally truncated to the first
        ``train_max_n_samples`` items. Drops the last partial batch."""
        if self.train_max_n_samples is not None:
            dataset = Subset(self.datasets['train'], list(range(self.train_max_n_samples)))
        else:
            dataset = self.datasets['train']
        return DataLoader(dataset,
                          batch_size=self.batch_size,
                          num_workers=self.num_workers,
                          shuffle=True,
                          drop_last=True)

    def _val_dataloader(self):
        """Validation loader (no shuffle); drops the last partial batch."""
        return DataLoader(self.datasets['validation'],
                          batch_size=self.batch_size,
                          num_workers=self.num_workers,
                          drop_last=True)

    def _test_dataloader(self):
        """Test loader (no shuffle, keeps the last partial batch)."""
        return DataLoader(self.datasets['test'],
                          batch_size=self.batch_size,
                          num_workers=self.num_workers)

    def _predict_dataloader(self):
        """Predict loader (no shuffle, keeps the last partial batch)."""
        return DataLoader(self.datasets['predict'],
                          batch_size=self.batch_size,
                          num_workers=self.num_workers)


class SCANDOGMDataModule(pl.LightningDataModule):
    """Placeholder DataModule for SCAND occupancy-grid-map data.

    Currently a stub: construction only initialises the Lightning base
    class; no dataset configs or dataloaders are defined yet.
    """

    def __init__(self) -> None:
        super().__init__()