import dataset
from utils import ddp_opx
from dataset.collater import collaterCOCO, collaterVCOCO

import torchvision.transforms as transforms
from torch.utils.data import DataLoader, DistributedSampler

class DataManager():
    """Builds train/val datasets and (optionally distributed) DataLoaders.

    The dataset class is resolved by name from the ``dataset`` package using
    ``cfg.DATASET.DATASET``. When torch.distributed is initialized (checked
    through ``ddp_opx``), loaders are wrapped with a ``DistributedSampler``.
    """

    def __init__(self, args, cfg):
        self.args = args
        self.cfg = cfg

        # Standard ImageNet channel statistics.
        normalize = transforms.Normalize(
            mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]
        )
        self.transform = transforms.Compose([
            transforms.ToTensor(),
            normalize,
        ])

    def _resolve_dataset_cls(self):
        # getattr() replaces the original eval('dataset.'+name): no
        # arbitrary-code execution on a config string, and a bad name fails
        # with a clear AttributeError instead of an opaque eval error.
        return getattr(dataset, self.cfg.DATASET.DATASET)

    def _init_train_dataset(self):
        # Instantiate the training split (train flag=True).
        self.train_dataset = self._resolve_dataset_cls()(
            self.cfg, self.cfg.DATASET.ROOT,
            self.cfg.DATASET.TRAIN_SET, True, self.transform
        )

    def _init_val_dataset(self):
        # Instantiate the evaluation split (train flag=False).
        self.valid_dataset = self._resolve_dataset_cls()(
            self.cfg, self.cfg.DATASET.ROOT,
            self.cfg.DATASET.TEST_SET, False, self.transform
        )

    def get_dataloader(self, mode):
        """Return a DataLoader for ``mode`` (any string containing
        'train' or 'val').

        Raises:
            TypeError: if ``mode`` matches neither 'train' nor 'val'.
        """
        if 'train' in mode:
            return self._get_train_dataloader()
        if 'val' in mode:
            return self._get_val_dataloader()
        # Bug fix: the original built a TypeError but never raised it,
        # silently returning None for unrecognized modes.
        raise TypeError(f'unsupported dataloader mode: {mode!r}')

    def _get_train_dataloader(self):
        self._init_train_dataset()

        sampler_train = DistributedSampler(
            self.train_dataset, shuffle=self.cfg.TRAIN.SHUFFLE,
            num_replicas=ddp_opx.get_world_size(),
            rank=ddp_opx.get_rank()
        ) if ddp_opx.is_dist_avail_and_initialized() else None
        return DataLoader(
            self.train_dataset, sampler=sampler_train,
            # DataLoader forbids shuffle=True alongside a sampler; in the
            # non-distributed case, honor cfg.TRAIN.SHUFFLE instead of
            # unconditionally shuffling (consistency with the sampler path).
            shuffle=(sampler_train is None and self.cfg.TRAIN.SHUFFLE),
            batch_size=self.cfg.TRAIN.BATCH_SIZE_PER_GPU,
            num_workers=self.cfg.WORKERS,
            pin_memory=self.cfg.PIN_MEMORY,
            collate_fn=self.get_collate('train')
        )

    def _get_val_dataloader(self):
        self._init_val_dataset()

        # shuffle=False: DistributedSampler shuffles by default, which would
        # make evaluation order non-deterministic. Also pass explicit
        # replica/rank info, matching the train sampler.
        sampler_val = DistributedSampler(
            self.valid_dataset, shuffle=False,
            num_replicas=ddp_opx.get_world_size(),
            rank=ddp_opx.get_rank()
        ) if ddp_opx.is_dist_avail_and_initialized() else None
        return DataLoader(
            self.valid_dataset, sampler=sampler_val,
            shuffle=False,
            batch_size=self.cfg.TEST.BATCH_SIZE_PER_GPU,
            num_workers=self.cfg.WORKERS,
            pin_memory=self.cfg.PIN_MEMORY,
            collate_fn=self.get_collate('val')
        )

    def get_collate(self, mode):
        """Return the collate function for the configured dataset.

        Raises:
            TypeError: if no collater is defined for the dataset.
        """
        # Renamed from `dataset` so the imported `dataset` module used by
        # _resolve_dataset_cls is not shadowed.
        dataset_name = self.cfg.DATASET.DATASET
        if dataset_name == 'coco':
            if mode == 'train':
                return collaterCOCO(self.cfg.DATASET.MAX_PATCH,
                                    self.cfg.DATASET.PATCH_MODE)
            # Presumably 0 disables patching at eval time — TODO confirm
            # against collaterCOCO's signature.
            return collaterCOCO(0)
        if dataset_name == 'vcoco':
            return collaterVCOCO()
        # Bug fix: exception was constructed but never raised.
        raise TypeError(f'no collate function defined for dataset {dataset_name!r}')