import torch
import torchvision
from torchvision.transforms import functional as F
from utils.coco_utils import ConvertCocoPolysToMask, _coco_remove_images_without_annotations

def collate_fn(batch):
    """Transpose a batch of (image, target) pairs into (images, targets) tuples.

    DataLoader hands us a list of per-sample pairs; detection models want
    all images and all targets grouped separately, without stacking.
    """
    grouped = zip(*batch)
    return tuple(grouped)

class ToTensor(object):
    """Paired transform: convert the image to a tensor, pass the target through untouched."""

    def __call__(self, image, target):
        return F.to_tensor(image), target

class Compose(object):
    """Chain paired (image, target) transforms, applying them in order."""

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, image, target):
        # Thread both image and target through every transform in sequence.
        for transform in self.transforms:
            image, target = transform(image, target)
        return image, target

class CocoDetection(torchvision.datasets.CocoDetection):
    """torchvision CocoDetection wrapper that packages annotations into a dict
    (with the COCO image id) and applies paired (image, target) transforms."""

    def __init__(self, img_folder, ann_file, transforms):
        super().__init__(img_folder, ann_file)
        self._transforms = transforms
        # Per-channel RGB statistics of this dataset, kept for callers that
        # want to normalize inputs.
        self.dataset_mean = [0.51424634, 0.49859697, 0.47239193]
        self.dataset_std = [0.22782212, 0.22360495, 0.224852]

    def __getitem__(self, idx):
        image, raw_annotations = super().__getitem__(idx)
        target = {"image_id": self.ids[idx], "annotations": raw_annotations}
        if self._transforms is not None:
            image, target = self._transforms(image, target)
        return image, target


def get_coco_dataset(
        img_folder="/home/disk0/hyq/JSPT/data/1262/", 
        annfile='/home/disk0/hyq/JSPT/data/person_keypoints_train.json', 
        transforms=None,
    ):
    """Build a keypoint CocoDetection dataset with the standard pipeline.

    The base pipeline converts COCO polygons to masks and turns images into
    tensors; any extra paired transform is appended after those. Images
    without usable annotations are dropped before returning.

    Args:
        img_folder: directory containing the images.
        annfile: path to the COCO keypoint annotation JSON.
        transforms: optional extra paired (image, target) transform.

    Returns:
        The filtered CocoDetection dataset.
    """
    pipeline = [ConvertCocoPolysToMask(), ToTensor()]
    if transforms is not None:
        pipeline.append(transforms)

    dataset = CocoDetection(img_folder, annfile, transforms=Compose(pipeline))
    return _coco_remove_images_without_annotations(dataset)

def get_dataloader(dataset, batch_size=12, num_workers=8, image_set="train", shuffle=False):
    """Wrap *dataset* in a DataLoader configured for training or evaluation.

    Fixes the duplicated DataLoader construction (the two branches differed
    only in batch size) and adds an opt-in `shuffle` flag — the default of
    False preserves the original behavior, and shuffling is only ever applied
    to the training loader (evaluation stays deterministic, batch size 1).

    Args:
        dataset: the dataset to iterate over.
        batch_size: batch size used when image_set == "train".
        num_workers: number of DataLoader worker processes.
        image_set: "train" for training settings; anything else uses
            batch_size=1 for evaluation.
        shuffle: if True, shuffle samples each epoch (training only).

    Returns:
        A torch.utils.data.DataLoader using the module's tuple collate_fn.
    """
    is_train = image_set == "train"
    return torch.utils.data.DataLoader(
        dataset,
        num_workers=num_workers,
        batch_size=batch_size if is_train else 1,
        shuffle=shuffle and is_train,
        collate_fn=collate_fn,
    )