from torch.utils.data import DataLoader
from omegaconf import DictConfig

from ecgcmr.signal.sig_datasets.MaskedECGDataset import MaskedECGDataset
from ecgcmr.signal.sig_datasets.DownstreamECGDataset import DownstreamECGDataset


class MaskedECGWithEvalDataModule:
    """Data module pairing a masked-ECG pretraining dataset with a downstream
    evaluation dataset.

    Each of ``train_dataloader``/``val_dataloader`` returns a dict with two
    loaders: ``'main'`` (masked-ECG objective) and ``'downstream'`` (the
    evaluation task), each with its own batch size and worker count taken
    from ``cfg.dataset`` and ``cfg.downstream_task`` respectively.
    """

    def __init__(self, cfg: "DictConfig") -> None:
        """Store the config and cache loader hyperparameters.

        Args:
            cfg: Hydra/OmegaConf config providing ``dataset.batch_size``,
                ``dataset.num_workers``, ``downstream_task.batch_size`` and
                ``downstream_task.num_workers``.
        """
        self.cfg = cfg
        self.main_batch_size = cfg.dataset.batch_size
        self.main_num_workers = cfg.dataset.num_workers

        self.downstream_batch_size = cfg.downstream_task.batch_size
        self.downstream_num_workers = cfg.downstream_task.num_workers

    def setup(self, stage: str) -> None:
        """Instantiate train/val datasets for both objectives.

        Only the ``'fit'`` stage is handled; other stages are a no-op.
        """
        if stage == 'fit':
            # NOTE(review): augmentations are enabled on the masked *val* set
            # (unlike the downstream val set) — mirrors the original code;
            # confirm this asymmetry is intentional.
            self.dataset_train = MaskedECGDataset(cfg=self.cfg, mode='train', apply_augmentations=True)
            self.dataset_val = MaskedECGDataset(cfg=self.cfg, mode='val', apply_augmentations=True)

            self.dataset_train_downstream = DownstreamECGDataset(cfg=self.cfg, mode='train', apply_augmentations=False)
            self.dataset_val_downstream = DownstreamECGDataset(cfg=self.cfg, mode='val', apply_augmentations=False)

    def _make_loader(self, dataset, batch_size: int, num_workers: int, shuffle: bool) -> DataLoader:
        """Build a pinned-memory DataLoader; single point of loader config."""
        return DataLoader(
            dataset,
            batch_size=batch_size,
            shuffle=shuffle,
            num_workers=num_workers,
            pin_memory=True,
        )

    def train_dataloader(self) -> dict:
        """Return shuffled train loaders keyed ``'main'``/``'downstream'``."""
        return {
            'main': self._make_loader(
                self.dataset_train, self.main_batch_size,
                self.main_num_workers, shuffle=True,
            ),
            'downstream': self._make_loader(
                self.dataset_train_downstream, self.downstream_batch_size,
                self.downstream_num_workers, shuffle=True,
            ),
        }

    def val_dataloader(self) -> dict:
        """Return order-preserving val loaders keyed ``'main'``/``'downstream'``."""
        return {
            'main': self._make_loader(
                self.dataset_val, self.main_batch_size,
                self.main_num_workers, shuffle=False,
            ),
            'downstream': self._make_loader(
                self.dataset_val_downstream, self.downstream_batch_size,
                self.downstream_num_workers, shuffle=False,
            ),
        }