from .ddpm import DDPM

__all__ = ["DDPM", "DataModule"]

import torch
from ..misc.utils import instantiate_from_config
import pytorch_lightning as pl


class DataModule(pl.LightningDataModule):
    """Generic Lightning data module that builds datasets lazily from configs.

    Each of ``train`` / ``validation`` / ``test`` / ``predict`` is a config
    object understood by ``instantiate_from_config``; the corresponding
    dataset is only instantiated in :meth:`setup` for the requested stage.

    Args:
        batch_size: Batch size used by every dataloader.
        num_workers: Worker-process count used by every dataloader.
        train: Dataset config for the training split (or ``None``).
        validation: Dataset config for the validation split (or ``None``).
        test: Dataset config for the test split (or ``None``).
        predict: Dataset config for the prediction split (or ``None``).
        collate_fn: Optional collate-function config; instantiated eagerly.
    """

    def __init__(
        self,
        *,
        batch_size,
        num_workers,
        train=None,
        validation=None,
        test=None,
        predict=None,
        collate_fn=None,
    ):
        super().__init__()
        self.batch_size = batch_size
        self.num_workers = num_workers
        self._train_loader = train
        self._val_loader = validation
        self._test_loader = test
        self._predict_loader = predict
        self.collate_fn = instantiate_from_config(collate_fn) if collate_fn else None
        # Stages whose dataset configs have already been instantiated.
        # Lightning may call setup() more than once (e.g. fit then validate);
        # without this guard a config would be "instantiated" twice.
        self._built_stages = set()

    def setup(self, stage):
        """Instantiate the datasets needed for *stage* (idempotent).

        Bug fix vs. the original: ``trainer.validate()`` calls
        ``setup("validate")``, which was previously ignored, leaving the
        validation dataset as a raw config.
        """
        if stage == "fit" and "fit" not in self._built_stages:
            self._train_loader = instantiate_from_config(self._train_loader)
            self._built_stages.add("fit")
            if "validate" not in self._built_stages:
                self._val_loader = instantiate_from_config(self._val_loader)
                self._built_stages.add("validate")
        elif stage == "validate" and "validate" not in self._built_stages:
            self._val_loader = instantiate_from_config(self._val_loader)
            self._built_stages.add("validate")
        elif stage == "test" and "test" not in self._built_stages:
            self._test_loader = instantiate_from_config(self._test_loader)
            self._built_stages.add("test")
        elif stage == "predict" and "predict" not in self._built_stages:
            self._predict_loader = instantiate_from_config(self._predict_loader)
            self._built_stages.add("predict")

    def train_dataloader(self):
        # drop_last must stay True: a trailing batch of size 1 would make the
        # variance of the batch statistics zero, causing a division by zero.
        return torch.utils.data.DataLoader(
            self._train_loader,
            batch_size=self.batch_size,
            shuffle=True,
            num_workers=self.num_workers,
            collate_fn=self.collate_fn,
            drop_last=True,
        )

    def val_dataloader(self):
        # drop_last=True for the same division-by-zero reason as training.
        return torch.utils.data.DataLoader(
            self._val_loader,
            batch_size=self.batch_size,
            shuffle=False,
            num_workers=self.num_workers,
            collate_fn=self.collate_fn,
            drop_last=True,
        )

    def test_dataloader(self):
        return torch.utils.data.DataLoader(
            self._test_loader,
            batch_size=self.batch_size,
            shuffle=False,
            num_workers=self.num_workers,
            collate_fn=self.collate_fn,
        )

    def predict_dataloader(self):
        return torch.utils.data.DataLoader(
            self._predict_loader,
            batch_size=self.batch_size,
            shuffle=False,
            num_workers=self.num_workers,
            collate_fn=self.collate_fn,
        )

    def transfer_batch_to_device(self, batch, device, dataloader_idx):
        """Move a (possibly custom-collated) batch onto *device*.

        With the default collate_fn, defer to Lightning's implementation.
        With a custom collate_fn the batch is assumed to be a dict whose
        values are tensors/array-likes, strings, or lists of such items
        (or lists of dicts of such items) — TODO confirm against the
        custom collate functions used with this module.
        """
        if self.collate_fn is None:
            return super().transfer_batch_to_device(batch, device, dataloader_idx)

        def _to_device(value):
            # Strings are metadata and stay on the host.
            if isinstance(value, str):
                return value
            # Bug fix: torch.as_tensor avoids the extra copy (and the
            # UserWarning) that torch.tensor() performs when the value is
            # already a tensor.
            return torch.as_tensor(value).to(device=device)

        for key, value in batch.items():
            # A non-empty list of dicts is moved element-wise, in place;
            # the original crashed with IndexError on an empty list.
            if isinstance(value, list) and value and isinstance(value[0], dict):
                for item in value:
                    for inner_key, inner_value in item.items():
                        item[inner_key] = _to_device(inner_value)
            else:
                batch[key] = _to_device(value)
        return batch
