# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

import torch
import numpy as np

try:
    from collections.abc import Iterable
except ImportError:
    from collections import Iterable


def pad_1d_unsqueeze(x, padlen):
    """Shift a 1D tensor by +1 (reserving 0 as the pad id), pad/trim it to
    `padlen`, and prepend a batch dimension.

    Returns a tensor of shape (1, padlen).
    """
    shifted = x + 1  # 0 is reserved as the padding id
    length = shifted.size(0)
    if length >= padlen:
        # Trimming (or a no-op slice when length == padlen).
        return shifted[:padlen].unsqueeze(0)
    padded = shifted.new_zeros([padlen], dtype=shifted.dtype)
    padded[:length] = shifted
    return padded.unsqueeze(0)


def pad_token_1d_unsqueeze(x, padlen):
    """Pad/trim a 1D token-id tensor to `padlen` and prepend a batch dim.

    Unlike pad_1d_unsqueeze, values are NOT shifted and padding uses 1
    (presumably the tokenizer's pad id — confirm against the vocab).
    Returns a tensor of shape (1, padlen).
    """
    length = x.size(0)
    if length >= padlen:
        return x[:padlen].unsqueeze(0)
    padded = x.new_ones([padlen], dtype=x.dtype)
    padded[:length] = x
    return padded.unsqueeze(0)


def pad_2d_unsqueeze(x, padlen):
    """Pad/trim the first dimension of a 2D tensor to `padlen` and prepend a
    batch dimension; padding rows are filled with 1.

    Returns a tensor of shape (1, padlen, x.size(1)).
    """
    rows, cols = x.size()
    if rows >= padlen:
        return x[:padlen, :].unsqueeze(0)
    padded = x.new_ones([padlen, cols], dtype=x.dtype)
    padded[:rows, :] = x
    return padded.unsqueeze(0)


def pad_attn_bias_unsqueeze(x, padlen):
    """Pad/trim a square attention-bias matrix to (padlen, padlen) and
    prepend a batch dimension.

    Padded rows may still attend to real nodes (bias 0 toward columns < n),
    while padded columns are masked for everyone (bias -inf), so no token
    attends to padding. Returns a tensor of shape (1, padlen, padlen).
    """
    n = x.size(0)
    if n >= padlen:
        return x[:padlen, :padlen].unsqueeze(0)
    out = x.new_zeros([padlen, padlen], dtype=x.dtype).fill_(float("-inf"))
    out[:n, :n] = x
    out[n:, :n] = 0  # padding rows see the real tokens
    return out.unsqueeze(0)


def pad_edge_type_unsqueeze(x, padlen):
    """Pad/trim the first two (node) dimensions of an edge-type tensor to
    `padlen`, keeping the trailing feature dimension, and prepend a batch dim.

    Padding entries are 0. Returns a tensor of shape
    (1, padlen, padlen, x.size(-1)).
    """
    n = x.size(0)
    if n >= padlen:
        return x[:padlen, :padlen, :].unsqueeze(0)
    out = x.new_zeros([padlen, padlen, x.size(-1)], dtype=x.dtype)
    out[:n, :n, :] = x
    return out.unsqueeze(0)


def pad_spatial_pos_unsqueeze(x, padlen):
    """Shift a square spatial-position matrix by +1 (reserving 0 as the pad
    id), pad/trim it to (padlen, padlen), and prepend a batch dimension.

    Returns a tensor of shape (1, padlen, padlen).
    """
    shifted = x + 1  # 0 is reserved as the padding id
    n = shifted.size(0)
    if n >= padlen:
        return shifted[:padlen, :padlen].unsqueeze(0)
    out = shifted.new_zeros([padlen, padlen], dtype=shifted.dtype)
    out[:n, :n] = shifted
    return out.unsqueeze(0)


def pad_3d_unsqueeze(x, padlen1, padlen2, padlen3):
    """Shift a 4D tensor by +1 (reserving 0 as the pad id), pad or trim its
    first three dimensions to the given lengths, and prepend a batch dim.

    The last (feature) dimension is kept as-is. Each of the first three
    dimensions is independently padded with 0 or truncated. Returns a tensor
    of shape (1, padlen1, padlen2, padlen3, x.size(3)).
    """
    shifted = x + 1  # 0 is reserved as the padding id
    d1, d2, d3, feat = shifted.size()
    out = shifted.new_zeros([padlen1, padlen2, padlen3, feat], dtype=shifted.dtype)
    k1, k2, k3 = min(d1, padlen1), min(d2, padlen2), min(d3, padlen3)
    out[:k1, :k2, :k3, :] = shifted[:k1, :k2, :k3, :]
    return out.unsqueeze(0)


def collator(items, max_node=512, seq_length=-1, max_node_num=-1, multi_hop_max_dist=20, spatial_pos_max=20, num_classes=-1):
    """Collate a list of graph+sequence items into one padded batch.

    Each item is expected to carry graph tensors (attn_bias, attn_edge_type,
    spatial_pos, in/out degree, node features x, edge_input, y, label) and
    token tensors (source/target ids and masks) — presumably produced by the
    dataset class elsewhere in this project.

    Args:
        items: iterable of dataset items; `None` entries are skipped.
        max_node: drop items whose graph has more than this many nodes.
        seq_length: pad length for token tensors; -1 means "infer from batch".
        max_node_num: pad length for graph tensors; -1 means "infer from batch".
        multi_hop_max_dist: cap on the multi-hop distance of edge features.
        spatial_pos_max: node pairs at least this far apart get -inf attn bias.
        num_classes: when > 0, additionally drop items with out-of-range labels.

    Returns:
        A dict with a padded graph "batch" plus token tensors, or None if no
        item survives filtering.
    """
    # Filter out missing items and oversized graphs; optionally filter labels.
    if num_classes > 0:
        items = [item for item in items if item is not None and item.x.size(0) <= max_node and int(item.label) < num_classes]
    else:
        items = [item for item in items if item is not None and item.x.size(0) <= max_node]
    if len(items) == 0:
        return None
    items = [
        (
            item.idx,
            item.attn_bias,
            item.attn_edge_type,
            item.spatial_pos,
            item.in_degree,
            item.out_degree,
            item.x,
            # Cap the multi-hop edge features at multi_hop_max_dist hops.
            item.edge_input[:, :, :multi_hop_max_dist, :],
            item.y,
            item.label,
            item.source_ids,
            item.source_mask,
            item.target_ids,
            item.target_mask,
            item.target_sequence
        )
        for item in items
    ]
    (
        idxs,
        attn_biases,
        attn_edge_types,
        spatial_poses,
        in_degrees,
        out_degrees,
        xs,
        edge_inputs,
        ys,
        label,
        source_ids,
        source_masks,
        target_ids,
        target_masks,
        target
    ) = zip(*items)

    # Mask attention between node pairs that are spatial_pos_max or more hops
    # apart. [1:, 1:] skips row/col 0 (the extra token added by the +1 in the
    # attn_bias padding below); assigning through the view mutates the
    # underlying per-item tensor in place.
    for idx, _ in enumerate(attn_biases):
        attn_biases[idx][1:, 1:][spatial_poses[idx] >= spatial_pos_max] = float("-inf")
    if max_node_num == -1:
        max_node_num = max(i.size(0) for i in xs)
    if seq_length == -1:
        # Bug fix: previously a -1 seq_length reached pad_token_1d_unsqueeze,
        # where `xlen > -1` is always true and `x[:-1]` silently dropped the
        # last token of every sequence. Infer the pad length from the batch
        # instead, mirroring the max_node_num sentinel handling above.
        seq_length = max(i.size(0) for i in source_ids + source_masks + target_ids + target_masks)
    max_dist = max(i.size(-2) for i in edge_inputs)
    label = torch.cat(label)
    y = torch.cat([pad_1d_unsqueeze(i, max_node_num) for i in ys])
    x = torch.cat([pad_2d_unsqueeze(i, max_node_num) for i in xs])
    edge_input = torch.cat(
        [pad_3d_unsqueeze(i, max_node_num, max_node_num, max_dist) for i in edge_inputs]
    )
    # +1 leaves room for the graph-level virtual token row/col in attn_bias.
    attn_bias = torch.cat(
        [pad_attn_bias_unsqueeze(i, max_node_num + 1) for i in attn_biases]
    )
    attn_edge_type = torch.cat(
        [pad_edge_type_unsqueeze(i, max_node_num) for i in attn_edge_types]
    )
    spatial_pos = torch.cat(
        [pad_spatial_pos_unsqueeze(i, max_node_num) for i in spatial_poses]
    )
    in_degree = torch.cat([pad_1d_unsqueeze(i, max_node_num) for i in in_degrees])
    source_ids = torch.cat([pad_token_1d_unsqueeze(i, seq_length) for i in source_ids])
    source_masks = torch.cat([pad_token_1d_unsqueeze(i, seq_length) for i in source_masks])
    target_ids = torch.cat([pad_token_1d_unsqueeze(i, seq_length) for i in target_ids])
    target_masks = torch.cat([pad_token_1d_unsqueeze(i, seq_length) for i in target_masks])
    # Wrap each reference sequence in a singleton list (one reference per item).
    target = [[i] for i in target]

    return dict(
        batch={
            "idx": torch.LongTensor(idxs),
            "attn_bias": attn_bias,
            "attn_edge_type": attn_edge_type,
            "spatial_pos": spatial_pos,
            "in_degree": in_degree,
            "out_degree": in_degree,  # for undirected graph
            "x": x,
            "edge_input": edge_input,
            "y": y,
            "label": label
        },
        source_ids=source_ids,
        source_mask=source_masks,
        target_ids=target_ids,
        target_mask=target_masks,
        target=target
    )


if __name__ == "__main__":
    # Smoke check: a (5, 3, 3, 1) tensor padded/trimmed to (7, 2, 5) should
    # come back with a leading batch dim, i.e. torch.Size([1, 7, 2, 5, 1]).
    demo = torch.randn(5, 3, 3, 1)
    print(pad_3d_unsqueeze(demo, 7, 2, 5).size())
