import torch


class CreateLoader(object):
    """Factory for ``torch.utils.data.DataLoader`` instances.

    Depending on ``is_ddp`` the factory builds either a plain loader or one
    backed by a ``DistributedSampler`` for DistributedDataParallel training.
    Both paths pick up an optional ``dataset.collate`` method as the default
    ``collate_fn`` and an optional ``dataset.shuffle`` flag as the default
    ``shuffle`` value; explicit keyword arguments always win.
    """

    def __init__(self, is_ddp=False):
        # Bind the concrete builder once so create() needs no per-call branch.
        if is_ddp:
            self.__create = self.create_distributed_loader
        else:
            self.__create = self.create_loader

    def create(self, dataset, **loader_params):
        """Build a loader for *dataset*; kwargs are forwarded to DataLoader."""
        return self.__create(dataset, **loader_params)

    @staticmethod
    def create_loader(dataset, **loader_params):
        """Build a single-process DataLoader.

        Defaults ``collate_fn`` to ``dataset.collate`` and ``shuffle`` to
        ``dataset.shuffle`` when the caller did not supply them. The loader's
        sampler gets a no-op ``set_epoch`` so callers can invoke it uniformly
        whether or not they run under DDP.
        """
        assert dataset is not None, "Error: please check the dataset"

        params = loader_params.copy()
        # Bug fix: the original used hasattr(params, 'collate_fn'), which
        # tests a dict *attribute* and is always False — silently discarding
        # any caller-supplied collate_fn. Key membership is the correct test.
        if 'collate_fn' not in params:
            params['collate_fn'] = getattr(dataset, 'collate', None)
        if 'shuffle' not in params:
            params['shuffle'] = getattr(dataset, 'shuffle', False)

        loader = torch.utils.data.DataLoader(dataset, **params)
        # No-op set_epoch keeps the call-site API identical to the DDP path.
        loader.sampler.set_epoch = lambda i: i
        return loader

    @staticmethod
    def create_distributed_loader(dataset, **loader_params):
        """Build a DataLoader backed by a DistributedSampler (DDP training).

        Requires an initialized torch.distributed process group. Shuffling is
        delegated to the sampler, so DataLoader itself receives shuffle=False
        (DataLoader forbids shuffle=True together with a custom sampler).
        """
        assert dataset is not None, "Error: please check the dataset"

        params = loader_params.copy()
        if 'shuffle' not in params:
            params['shuffle'] = getattr(dataset, 'shuffle', False)
        sampler = torch.utils.data.distributed.DistributedSampler(dataset, shuffle=params['shuffle'])

        params['sampler'] = sampler
        # Same fix as create_loader: honor an explicit collate_fn kwarg.
        if 'collate_fn' not in params:
            params['collate_fn'] = getattr(dataset, 'collate', None)
        params['shuffle'] = False

        loader = torch.utils.data.DataLoader(dataset, **params)
        return loader
