import signal
import os
import torch
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
from .registry import DATASETS, PIPELINES
from utils import build, get_logger, AttrDict
from .pipelines.compose import Compose
import numpy as np

# Module-level logger shared by all data-loading helpers in this file.
logger = get_logger("paddlevideo")


def build_pipeline(cfg):
    """Construct a sample-level data pipeline from its config.

    Args:
        cfg (dict): pipeline config (a sequence of transform specs).

    Returns:
        Compose: a callable applying the configured transforms in order.
    """
    pipeline = Compose(cfg)
    return pipeline


def build_dataset(cfg_dataset, cfg_pipeline):
    """Build a dataset and attach its data pipeline.

    Args:
        cfg_dataset (dict): dataset config; mutated in place to carry the
            built pipeline (see note below) and must contain a "format" key
            selecting the dataset class from the DATASETS registry.
        cfg_pipeline (dict): pipeline config passed to build_pipeline.

    Returns:
        dataset: the constructed dataset instance.
    """
    # NOTE: ugly but intentional — the composed pipeline is injected into the
    # dataset config so the registry-built dataset receives it as an argument.
    cfg_dataset.pipeline = build_pipeline(cfg_pipeline)
    return build(cfg_dataset, DATASETS, key="format")


def build_batch_pipeline(cfg):
    """Build a batch-level (collate-time) pipeline from the PIPELINES registry."""
    return build(cfg, PIPELINES)


class mix_collate_fn():
    """Collate function that applies a batch-level pipeline (e.g. mixup or
    cutmix) and then transposes the batch into per-field stacked tensors.

    Input batch:  [[img, label, ...], [img, label, ...], ...]
    Output:       [stacked_imgs, stacked_labels, ...] as torch tensors.
    """

    def __init__(self, collate_fn_cfg):
        # collate_fn_cfg (dict): registry config for the batch pipeline.
        self.pipeline = build_batch_pipeline(collate_fn_cfg)

    def __call__(self, batch):
        batch = self.pipeline(batch)
        # zip(*batch) transposes [[a0, b0, ...], [a1, b1, ...], ...] into
        # [(a0, a1, ...), (b0, b1, ...), ...] — one group per field.  This
        # replaces the previous hand-rolled transpose, whose
        # `len(slots) < len(items)` bookkeeping only worked for uniform rows.
        return [
            torch.as_tensor(np.stack(group, axis=0))
            for group in zip(*batch)
        ]


def build_dataloader(dataset,
                     batch_size,
                     num_workers,
                     shuffle=True,
                     drop_last=True,
                     collate_fn_cfg=None,
                     ddp_sample=False,
                     **kwargs):
    """Build a PyTorch DataLoader around *dataset*.

    Args:
        dataset (torch.utils.data.Dataset): dataset to iterate over.
        batch_size (int): batch size on a single device.
        num_workers (int): number of worker processes used for loading.
        shuffle (bool): whether to reshuffle the data every epoch.
        drop_last (bool): whether to drop the last incomplete batch.
        collate_fn_cfg (dict | None): when given, a mix_collate_fn built
            from this config recollates each batch (e.g. mixup/cutmix).
        ddp_sample (bool): when True, shard the dataset across distributed
            ranks with a DistributedSampler.
        **kwargs: forwarded verbatim to torch.utils.data.DataLoader.

    Returns:
        torch.utils.data.DataLoader: the configured loader.
    """
    sampler = None
    if ddp_sample:
        sampler = DistributedSampler(dataset=dataset,
                                     shuffle=shuffle,
                                     drop_last=drop_last)

    # NOTE(shipping): with mix operators enabled (mixup, cutmix, ...) a batch
    # like [[img, label, ...], [img, label, ...], ...] is recollated into
    # [[img, img, ...], [label, label, ...], ...] — a transpose.
    collate = mix_collate_fn(collate_fn_cfg) if collate_fn_cfg is not None else None

    # DataLoader forbids shuffle=True together with an explicit sampler.
    return DataLoader(dataset,
                      batch_size=batch_size,
                      shuffle=shuffle if sampler is None else False,
                      drop_last=drop_last,
                      sampler=sampler,
                      num_workers=num_workers,
                      collate_fn=collate,
                      **kwargs)


def build_data_provider(cfg_dataset: AttrDict, cfg_pipeline: AttrDict, flag: str):
    """Build the dataset and dataloader for one data split.

    Args:
        cfg_dataset (AttrDict): dataset section of the root config; must have
            an entry under *flag*, plus optional batch_size / num_workers /
            Mix / shuffle_valid / test_* keys.
        cfg_pipeline (AttrDict): pipeline section of the root config.
        flag (str): one of 'train', 'valid' or 'test'.

    Returns:
        tuple: (dataset, data_loader) for the requested split.

    Raises:
        ValueError: if *flag* is not one of the supported splits.
    """
    dataset = build_dataset(cfg_dataset.get(flag), cfg_pipeline.get(flag))
    batch_size = cfg_dataset.get('batch_size', 8)
    num_workers = cfg_dataset.get('num_workers', 0)
    if flag == 'train':
        # Shard across ranks only when a distributed process group is up;
        # guard with is_available() for torch builds without distributed.
        ddp_flag = (torch.distributed.is_available()
                    and torch.distributed.is_initialized())
        dataloader_setting = dict(batch_size=batch_size,
                                  num_workers=num_workers,
                                  collate_fn_cfg=cfg_dataset.get('Mix', None),
                                  ddp_sample=ddp_flag)
    elif flag == 'valid':
        valid_batch_size = cfg_dataset.get('valid_batch_size', batch_size)
        dataloader_setting = dict(
            batch_size=valid_batch_size,
            num_workers=num_workers,
            drop_last=False,
            # NOTE: attention lstm needs shuffled validation data.
            shuffle=cfg_dataset.get('shuffle_valid', False)
        )
    elif flag == 'test':
        test_batch_size = cfg_dataset.get('test_batch_size', batch_size)
        test_num_workers = cfg_dataset.get('test_num_workers', num_workers)
        dataloader_setting = dict(batch_size=test_batch_size,
                                  num_workers=test_num_workers,
                                  drop_last=False,
                                  shuffle=False)
    else:
        # Include the offending value so misconfigurations are debuggable.
        raise ValueError(
            "The flag is unknown, please check it: {!r}".format(flag))

    data_loader = build_dataloader(dataset, **dataloader_setting)
    return dataset, data_loader


def term_mp(sig_num, frame):
    """Signal handler: kill every process in this process group.

    Ensures dataloader worker processes die together with the main process
    instead of being left orphaned.
    """
    pid = os.getpid()
    pgid = os.getpgid(pid)
    logger.info("main proc {} exit, kill process group {}".format(pid, pgid))
    os.killpg(pgid, signal.SIGKILL)


# Register the group-kill handler so Ctrl-C / SIGTERM tears down the whole
# process group (including dataloader workers) rather than leaving orphans.
signal.signal(signal.SIGINT, term_mp)
signal.signal(signal.SIGTERM, term_mp)
