import paddle as pp


def build_relative_position(query_size, key_size):
    """
    Build relative position according to the query and key

    We assume the absolute position of query :math:`P_q` is range from (0, query_size) and the absolute position of key
    :math:`P_k` is range from (0, key_size), The relative positions from query to key is :math:`R_{q \\rightarrow k} =
    P_q - P_k`

    Args:
        query_size (int): the length of query
        key_size (int): the length of key

    Return:
        :obj:`paddle.Tensor`: An int64 tensor with shape [1, query_size, key_size]

    """
    # Relative positions are integer offsets (later used as gather indices),
    # so build them as int64 — the "LongTensor" the docstring promises —
    # instead of float64.
    q_ids = pp.arange(query_size, dtype=pp.int64)
    k_ids = pp.arange(key_size, dtype=pp.int64)
    # [query_size, 1] minus a [query_size, key_size] grid of key positions.
    rel_pos_ids = q_ids[:, None] - k_ids.reshape((1, -1)).tile((query_size, 1))
    rel_pos_ids = rel_pos_ids[:query_size, :]
    # Leading batch dimension -> [1, query_size, key_size].
    rel_pos_ids = rel_pos_ids.unsqueeze(0)
    return rel_pos_ids


@pp.jit.to_static
def c2p_dynamic_expand(c2p_pos, query_layer, relative_pos):
    """Broadcast content->position indices to the attention-score shape.

    Expands ``c2p_pos`` to ``[query_layer.shape[0], query_layer.shape[1],
    query_layer.shape[2], relative_pos.shape[-1]]``.
    # assumes query_layer is at least 3-D and relative_pos at least 1-D — TODO confirm against callers
    """
    return c2p_pos.expand([query_layer.shape[0], query_layer.shape[1], query_layer.shape[2], relative_pos.shape[-1]])


@pp.jit.to_static
def p2c_dynamic_expand(c2p_pos, query_layer, key_layer):
    """Broadcast position->content indices to the attention-score shape.

    Expands ``c2p_pos`` to ``[query_layer.shape[0], query_layer.shape[1],
    key_layer.shape[-2], key_layer.shape[-2]]`` — the last dim is the key
    length twice, giving a square [key_len, key_len] tail.
    """
    return c2p_pos.expand([query_layer.shape[0], query_layer.shape[1], key_layer.shape[-2], key_layer.shape[-2]])


@pp.jit.to_static
def pos_dynamic_expand(pos_index, p2c_att, key_layer):
    """Broadcast position indices to the p2c attention-score shape.

    Target shape: the first two dims of ``p2c_att`` followed by
    ``[pos_index.shape[-2], key_layer.shape[-2]]``.
    """
    # paddle's Tensor.shape is a Python list, so concatenating it with a
    # tuple (as the torch original did with torch.Size) raises TypeError;
    # concatenate with a list instead.
    return pos_index.expand(p2c_att.shape[:2] + [pos_index.shape[-2], key_layer.shape[-2]])


def swap_dim(x: pp.Tensor, dim0: int, dim1: int):
    """Swap two dimensions of ``x`` — the paddle analogue of ``torch.transpose(x, dim0, dim1)``.

    Negative dimension indices are accepted and count from the end.
    """
    n = x.dim()
    a, b = dim0 % n, dim1 % n  # normalize negative axes
    order = [b if i == a else a if i == b else i for i in range(n)]
    return x.transpose(order)


def gather(x: pp.Tensor, dim: int, index: pp.Tensor):
    """Emulate ``torch.gather(x, dim, index)`` on top of ``paddle.gather_nd``.

    For every element of ``index`` a full N-D coordinate into ``x`` is built:
    along ``dim`` the coordinate is taken from ``index`` itself; along every
    other axis it is the element's own position, broadcast to ``index``'s
    shape. The gathered values are reshaped back to ``index``'s shape.
    """
    out_shape = index.shape
    flat_index = index.flatten()
    if dim < 0:
        dim = len(x.shape) + dim
    coords = []
    for axis, size in enumerate(x.shape):
        if axis == dim:
            # This axis is indexed by `index` directly.
            coords.append(flat_index)
            continue
        # Positions 0..size-1 along `axis`, shaped for broadcasting,
        # then expanded to the output shape and flattened.
        bcast_shape = [1] * len(x.shape)
        bcast_shape[axis] = size
        axis_positions = pp.arange(size, dtype=index.dtype).reshape(bcast_shape)
        coords.append(pp.expand(axis_positions, out_shape).flatten())
    # Stack per-axis coordinates into [num_elements, ndim] int64 indices.
    nd_coords = pp.transpose(pp.stack(coords), [1, 0]).astype("int64")
    return pp.gather_nd(x, nd_coords).reshape(out_shape)




