import mindspore
from mindspore import nn, ops, Tensor
from mindspore.common.initializer import XavierUniform
import numpy as np

from rotary_embedding_mindspore import RotaryEmbedding, apply_rotary_emb

# helpers

def exists(val):
    """Return True when *val* is anything other than None."""
    if val is None:
        return False
    return True

def default(val, d):
    """Return *val* unless it is None, in which case fall back to *d*."""
    if val is None:
        return d
    return val

# normalizations

class PreNorm(nn.Cell):
    """Wrap a cell so its input is LayerNorm-ed first (pre-norm transformer style)."""

    def __init__(self, dim, fn):
        super().__init__()
        self.norm = nn.LayerNorm((dim,))
        self.fn = fn

    def construct(self, x, *args, **kwargs):
        # normalize, then delegate to the wrapped cell untouched
        return self.fn(self.norm(x), *args, **kwargs)

# gated residual

class Residual(nn.Cell):
    """Plain (ungated) additive residual connection."""

    def construct(self, x, res):
        summed = res + x
        return summed

class GatedResidual(nn.Cell):
    """Residual merge with a learned scalar gate.

    A sigmoid gate g in (0, 1) is predicted from [x, res, x - res];
    the output is the convex combination g * x + (1 - g) * res.
    """

    def __init__(self, dim):
        super().__init__()
        # single-logit projection over the concatenated features, no bias
        self.proj = nn.SequentialCell([
            nn.Dense(dim * 3, 1, has_bias=False),
            nn.Sigmoid()
        ])

    def construct(self, x, res):
        features = ops.concat((x, res, x - res), axis=-1)
        g = self.proj(features)
        return g * x + (1 - g) * res

# attention

class Attention(nn.Cell):
    """Multi-head graph attention where edge features bias keys and values.

    Nodes attend to all nodes; per-pair edge features (b, i, j, edge_dim) are
    projected and added to the key/value of each (i, j) pair before the dot
    product, following the reference graph-transformer formulation.

    Args:
        dim: node feature dimension.
        pos_emb: optional rotary embedding module applied to q/k.
        dim_head: per-head dimension.
        heads: number of attention heads.
        edge_dim: edge feature dimension (defaults to ``dim``).
    """

    def __init__(self, dim, pos_emb=None, dim_head=64, heads=8, edge_dim=None):
        super().__init__()
        edge_dim = default(edge_dim, dim)

        inner_dim = dim_head * heads
        self.heads = heads
        self.scale = dim_head ** -0.5  # 1/sqrt(d) attention scaling

        self.pos_emb = pos_emb

        self.to_q = nn.Dense(dim, inner_dim)
        self.to_kv = nn.Dense(dim, inner_dim * 2)
        self.edges_to_kv = nn.Dense(edge_dim, inner_dim)

        self.to_out = nn.Dense(inner_dim, dim)

    def construct(self, nodes, edges, mask=None):
        # nodes: (b, n, dim); edges: (b, n, n, edge_dim); mask: (b, n) bool
        b, n, _ = nodes.shape
        h = self.heads

        q = self.to_q(nodes)
        kv = self.to_kv(nodes)
        # BUGFIX: split the fused projection into two equal halves (k and v).
        # ops.split(kv, 2, axis=-1) would yield chunks of SIZE 2 instead.
        k, v = ops.chunk(kv, 2, axis=-1)

        e_kv = self.edges_to_kv(edges)

        # 'b n (h d) -> b h n d': reshape to (b, n, h, d) FIRST, then move the
        # head axis. BUGFIX: a direct reshape to (b, h, n, d) scrambles tokens
        # across heads.
        q = ops.transpose(ops.reshape(q, (b, n, h, -1)), (0, 2, 1, 3))
        k = ops.transpose(ops.reshape(k, (b, n, h, -1)), (0, 2, 1, 3))
        v = ops.transpose(ops.reshape(v, (b, n, h, -1)), (0, 2, 1, 3))
        # 'b i j (h d) -> b h i j d' for the per-pair edge contributions
        e_kv = ops.transpose(ops.reshape(e_kv, (b, n, n, h, -1)), (0, 3, 1, 2, 4))

        if exists(self.pos_emb):
            freqs = self.pos_emb(Tensor(np.arange(n), mindspore.float32))
            # (1, n, d) broadcasts over the (b, h, n, d) q/k tensors
            freqs = ops.reshape(freqs, (1, -1, freqs.shape[-1]))
            q = apply_rotary_emb(freqs, q)
            k = apply_rotary_emb(freqs, k)

        ek, ev = e_kv, e_kv

        # BUGFIX: broadcast node keys/values over the query axis, then add the
        # per-pair edge terms: (b, h, 1, j, d) + (b, h, i, j, d) -> (b, h, i, j, d)
        k = ops.expand_dims(k, 2) + ek
        v = ops.expand_dims(v, 2) + ev

        # per-pair keys: sim[b,h,i,j] = q[b,h,i] . k[b,h,i,j]
        sim = ops.einsum('b h i d, b h i j d -> b h i j', q, k) * self.scale

        if exists(mask):
            # mask key positions: (b, n) -> (b, 1, 1, n)
            mask = ops.reshape(mask, (mask.shape[0], 1, 1, -1))
            # derive the fill value from the dtype without a device->host sync
            max_neg_value = -np.finfo(mindspore.dtype_to_nptype(sim.dtype)).max
            sim = ops.masked_fill(sim, ~mask, max_neg_value)

        attn = ops.softmax(sim, axis=-1)
        out = ops.einsum('b h i j, b h i j d -> b h i d', attn, v)
        # 'b h n d -> b n (h d)'
        out = ops.reshape(ops.transpose(out, (0, 2, 1, 3)), (b, n, -1))
        return self.to_out(out)

# optional feedforward

def FeedForward(dim, ff_mult=4):
    """Build a two-layer MLP: expand by ``ff_mult``, GELU, project back to ``dim``."""
    hidden = dim * ff_mult
    return nn.SequentialCell([
        nn.Dense(dim, hidden),
        nn.GELU(),
        nn.Dense(hidden, dim)
    ])

# classes

class GraphTransformer(nn.Cell):
    """Stack of pre-norm graph attention layers (optionally interleaved with
    feedforward layers), each merged back via a learned gated residual.

    Edge features and/or an adjacency matrix are embedded and fed into every
    attention layer. ``construct`` returns ``(nodes, edges)``; edges pass
    through unchanged apart from optional normalization.
    """

    def __init__(self, dim, depth, dim_head=64, edge_dim=None, heads=8, gated_residual=True, with_feedforwards=False, norm_edges=False, rel_pos_emb=False, accept_adjacency_matrix=False):
        # NOTE(review): `gated_residual` is accepted but never read below —
        # GatedResidual is used unconditionally. Confirm intended behavior.
        super().__init__()
        self.layers = nn.CellList([])
        edge_dim = default(edge_dim, dim)
        # Optionally LayerNorm incoming edge features; Identity is a no-op.
        self.norm_edges = nn.LayerNorm((edge_dim,)) if norm_edges else nn.Identity()

        # Embedding table mapping a 0/1 adjacency entry to an edge_dim vector.
        self.adj_emb = nn.Embedding(2, edge_dim) if accept_adjacency_matrix else None

        # One rotary embedding instance shared by all attention layers (or None).
        pos_emb = RotaryEmbedding(dim_head) if rel_pos_emb else None

        for _ in range(depth):
            # Each layer: [attention sub-block, optional feedforward sub-block].
            # NOTE(review): appending None into an nn.CellList may not be
            # supported on every MindSpore version — verify.
            self.layers.append(nn.CellList([
                nn.CellList([
                    PreNorm(dim, Attention(dim, pos_emb=pos_emb, edge_dim=edge_dim, dim_head=dim_head, heads=heads)),
                    GatedResidual(dim)
                ]),
                nn.CellList([
                    PreNorm(dim, FeedForward(dim)),
                    GatedResidual(dim)
                ]) if with_feedforwards else None
            ]))

    def construct(self, nodes, edges=None, adj_mat=None, mask=None):
        # nodes: (batch, seq, dim); adj_mat (if given): (batch, seq, seq).
        # edges is presumably (batch, seq, seq, edge_dim) so it can combine
        # with the adjacency embedding below — confirm against callers.
        batch, seq, _ = nodes.shape

        if exists(edges):
            edges = self.norm_edges(edges)

        if exists(adj_mat):
            assert adj_mat.shape == (batch, seq, seq)
            assert exists(self.adj_emb), 'accept_adjacency_matrix must be set to True'
            # Per-entry embedding lookup: (batch, seq, seq) -> (batch, seq, seq, edge_dim)
            adj_mat = self.adj_emb(adj_mat.astype(mindspore.int32))

        # Sum both edge sources. NOTE(review): if both are None this is the
        # int 0, which is then passed to attention as `edges` — confirm that
        # callers always supply at least one of edges / adj_mat.
        all_edges = default(edges, 0) + default(adj_mat, 0)

        for attn_block, ff_block in self.layers:
            attn, attn_residual = attn_block
            # Pre-norm attention, then a learned gated merge with the input.
            nodes = attn_residual(attn(nodes, all_edges, mask=mask), nodes)

            if exists(ff_block):
                ff, ff_residual = ff_block
                nodes = ff_residual(ff(nodes), nodes)

        return nodes, edges

