# Copyright 2021-2023 @ Shenzhen Bay Laboratory &
#                       Peking University &
#                       Huawei Technologies Co., Ltd
#
# This code is a part of Cybertron package.
#
# The Cybertron is open-source software based on the AI-framework:
# MindSpore (https://www.mindspore.cn/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Interaction layers for SchNet model.
"""

import torch
from torch import nn, Tensor
from typing import Union, Tuple, Optional

from ..utils import get_integer, get_arguments
from .interaction import Interaction, _interaction_register
from ..layer import Dense, MLP
from ..base import Aggregate
from ..filter import DenseFilter

@_interaction_register('schnet')
class SchNetInteraction(Interaction):
    """
    Interaction layer of SchNet.

    Implements the continuous-filter convolution of SchNet: node features
    are projected to the filter width, modulated element-wise by a filter
    generated from the edge embeddings (optionally scaled by a cutoff
    envelope), aggregated over the neighbour axis, projected back to the
    feature dimension, and added to the input as a residual update.

    Args:
        dim_feature (int): Dimension of node features.
        dim_edge_emb (int): Dimension of edge embeddings.
        dim_filter (int, optional): Dimension of the filter network.
            Defaults to dim_feature if not specified.
        activation (str, optional): Activation function name. Defaults to 'ssp'.
        normalize_filter (bool, optional): Whether to normalize (average
            instead of sum) the aggregated filter output. Defaults to False.
    """
    def __init__(self, 
                 dim_feature: int, 
                 dim_edge_emb: int, 
                 dim_filter: Optional[int] = None, 
                 activation: str = 'ssp', 
                 normalize_filter: bool = False,
                 **kwargs
                 ):
        super().__init__(
            dim_node_rep=dim_feature,
            dim_edge_rep=dim_feature,
            dim_node_emb=dim_feature,
            dim_edge_emb=dim_edge_emb,
            activation=activation,
        )
        self._kwargs = get_arguments(locals(), kwargs)

        # Default the filter width to the feature dimension
        # (dim_edge_rep == dim_feature, see super().__init__ above).
        # Branch on None *before* calling get_integer(): the previous code
        # evaluated get_integer(None) first and discarded the result, which
        # fails on the default path if get_integer() does not accept None.
        if dim_filter is None:
            self.dim_filter = self.dim_edge_rep
        else:
            self.dim_filter = get_integer(dim_filter)

        # Filter generator: (..., K) -> (..., W)
        self.filter_net = DenseFilter(dim_in=self.dim_edge_emb,
                                      dim_out=self.dim_filter,
                                      activation=activation)
        # In-projection of node features: (..., F) -> (..., W)
        self.atomwise_bc = Dense(self.dim_node_emb, self.dim_filter)
        # Out-projection back to feature space: (..., W) -> (..., F)
        self.atomwise_ac = MLP(self.dim_filter,
                               self.dim_node_rep,
                               [self.dim_node_rep],
                               activation=activation,
                               use_last_activation=False)
        
        # Sum (or mean, if normalize_filter) over the neighbour axis.
        self.agg = Aggregate(axis=-2, mean=normalize_filter)

    def _print_info(self,
                    num_retraction: int = 6,
                    num_gap: int = 3,
                    char: str = '-'
                    ) -> None:
        # Pretty-print the layer configuration with a leading rule of `char`.
        ret = char * num_retraction
        gap = char * num_gap
        print(ret + gap + ' Feature dimension: ' + str(self.dim_node_rep))
        print(ret + gap + ' Activation function: ' + str(self.activation))

    def forward(self, 
                node_vec: Tensor, 
                node_emb: Tensor, 
                node_mask: Tensor, 
                edge_vec: Tensor, 
                edge_emb: Tensor, 
                edge_mask: Optional[Tensor] = None, 
                edge_cutoff: Optional[Tensor] = None, 
                neigh_idx: Optional[Tensor] = None) -> Tuple[Tensor, Tensor]:
        """
        Apply one SchNet continuous-filter convolution step.

        Args:
            node_vec: Node representation tensor. Presumably (B, A, F) — see
                the shape comments below.
            node_emb: Node embedding tensor (unused here; kept for the
                common Interaction interface).
            node_mask: Node mask tensor (unused here; kept for the
                common Interaction interface).
            edge_vec: Edge representation tensor, (B, A, A/N, K).
            edge_emb: Edge embedding tensor (unused here; kept for the
                common Interaction interface).
            edge_mask: Optional edge mask tensor, passed to the aggregator.
            edge_cutoff: Optional edge cutoff tensor; when given, the
                generated filters are scaled by it.
            neigh_idx: Optional neighbor index tensor; when given, node
                features are gathered per neighbour instead of broadcast.
            
        Returns:
            node_new: Updated node representations.
            edge_vec: Unchanged edge representations.
        """
        # (B, A, W) <- (B, A, F): project node features to filter width
        x_i = self.atomwise_bc(node_vec)
        # (B, A, A/N, W) <- (B, A, A/N, K): generate filters from edges
        g_ij = self.filter_net(edge_vec)

        # edge_cutoff is Optional per the signature: only modulate by the
        # cutoff envelope when one is supplied (the previous code called
        # .unsqueeze() on it unconditionally and crashed on None).
        if edge_cutoff is None:
            w_ij = g_ij
        else:
            # (B, A, A/N, W) * (B, A, A/N, 1)
            w_ij = g_ij * edge_cutoff.unsqueeze(-1)

        if neigh_idx is None:
            # Full adjacency: broadcast node features over the neighbour axis.
            # (B, 1, A, W) <- (B, A, W)
            x_ij = x_i.unsqueeze(-3)
        else:
            # Gather each atom's neighbour features:
            # input (B, 1, A, W), indices (B, A, N, 1) -> (B, A, N, W)
            x_ij = torch.take_along_dim(x_i.unsqueeze(1), neigh_idx[..., None], 2)

        # (B, 1, A, W) * (B, A, A/N, W)
        y = x_ij * w_ij
        # (B, A, W) <- (B, A, A/N, W): aggregate over neighbours
        y = self.agg(y, edge_mask)
        # (B, A, F) <- (B, A, W): project back to feature space
        v = self.atomwise_ac(y)
        # Residual update: (B, A, F) + (B, A, F)
        node_new = node_vec + v

        return node_new, edge_vec