"""
Basic attention layers
"""

from typing import Union, Tuple

import mindspore as ms
import mindspore.numpy as msnp
from mindspore import Tensor
from mindspore.nn import Cell
from mindspore.ops import functional as F

from mindsponge.function import get_integer, get_arguments
from mindsponge.function import concat_last_dim, squeeze_penulti

from .interaction import Interaction, _interaction_register
from ..base import PositionalEmbedding
from ..base import MultiheadAttention
from ..base import Pondering, ACTWeight
from ..base import FeedForward


@_interaction_register('attention_base')
class AttentionBase(Interaction):
    r"""Interaction layer that refines node representations with
    multi-head self-attention over the edge representations.

    The node vector receives a residual attention update; the edge vector
    is passed through unchanged.

    Args:
        dim_feature (int): Dimension of the node/edge features. Must be
            divisible by ``n_heads``.
        n_heads (int): Number of attention heads. Default: 8.
        activation (Union[str, Cell]): Activation function forwarded to the
            ``Interaction`` base class. Default: 'silu'.
    """

    def __init__(self,
                 dim_feature: int,
                 n_heads: int = 8,
                 activation: Union[str, Cell] = 'silu',
                 **kwargs,
                 ):

        super().__init__(
            dim_node_rep=dim_feature,
            dim_edge_rep=dim_feature,
            dim_edge_emb=dim_feature,
            dim_node_emb=dim_feature,
            activation=activation,
            )
        # Must run before any new locals are created: records the
        # constructor arguments for serialization/reconstruction.
        self._kwargs = get_arguments(locals(), kwargs)

        self.dim_feature = get_integer(dim_feature)
        self.n_heads = get_integer(n_heads)

        # Each head attends over an equal slice of the feature dimension,
        # so the feature size has to split evenly across heads.
        if self.dim_feature % self.n_heads:
            raise ValueError('The dimension of feature should be divisible by the number of heads!')

        self.positional_embedding = PositionalEmbedding(self.dim_feature)
        self.multihead_attention = MultiheadAttention(
            self.dim_feature, self.n_heads, dim_tensor=4)

    def construct(self,
                    node_vec: Tensor,
                    edge_vec: Tensor,
                    edge_mask: Tensor,
                    edge_cutoff: Tensor,
                    time_signal: Tensor = 0,
                    **kwargs
                    ) -> Tuple[Tensor, Tensor]:
        r"""Run one attention-based interaction step.

        Returns:
            Tuple[Tensor, Tensor], the updated ``node_vec`` and the
            unchanged ``edge_vec``.
        """
        # Project node/edge features (plus the optional time signal)
        # into query, key and value tensors.
        query, key, value = self.positional_embedding(
            node_vec, edge_vec, time_signal)

        # Attend across edges; mask and cutoff limit which neighbours
        # contribute. NOTE(review): the output appears to carry a singleton
        # penultimate axis that is squeezed away below — confirm shapes.
        node_update = self.multihead_attention(
            query, key, value, edge_mask, cutoff=edge_cutoff)
        node_update = F.squeeze(node_update, -2)

        # Residual update of the node representation.
        node_vec = F.add(node_vec, node_update)

        return node_vec, edge_vec