# Copyright 2021-2023 @ Shenzhen Bay Laboratory &
#                       Peking University &
#                       Huawei Technologies Co., Ltd
#
# This code is a part of Cybertron package.
#
# The Cybertron is open-source software based on the AI-framework:
# MindSpore (https://www.mindspore.cn/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
GNN-based deep molecular model (DMM) implementation.
"""

import torch
from torch import nn, Tensor
from typing import Union, List, Optional, Tuple

from ..utils import get_integer, get_tensor, GLOBAL_DEVICE
from ..interaction import Interaction
from ..activation import get_activation
from ..configure import get_embedding_config

# Define the _model_register function
_MODEL_BY_KEY = {}


def _model_register(*aliases):
    def alias_reg(cls):
        name = cls.__name__.lower()
        if name not in _MODEL_BY_KEY:
            _MODEL_BY_KEY[name] = cls

        for alias in aliases:
            if alias not in _MODEL_BY_KEY:
                _MODEL_BY_KEY[alias] = cls

        return cls
    
    return alias_reg


# Base class for GNN-based deep molecular models; concrete models subclass it
# and register themselves via the `_model_register` decorator.
class MolecularGNN(nn.Module):
    """
    Basic class for graph neural network (GNN) based deep molecular model

    Reference:

        Zhang, J.; Lei, Y.-K.; Zhang, Z.; Chang, J.; Li, M.; Han, X.; Yang, L.; Yang, Y. I.; Gao, Y. Q.
        A Perspective on Deep Learning for Molecular Modeling and Simulations [J].
        The Journal of Physical Chemistry A, 2020, 124(34): 6745-6763.

    Args:
        dim_node_rep (int): Dimension of node representation vectors.
        dim_edge_rep (int): Dimension of edge representation vectors.
        interaction (Union[Interaction, List[Interaction], None], optional):
            Interaction module or list of interaction modules. Defaults to None.
        n_interaction (int, optional): Number of interaction layers. Defaults to 3.
        coupled_interaction (bool, optional): Whether to use coupled interactions.
            Defaults to False.
        activation (str, optional): Activation function name. Defaults to 'silu'.
        dim_node_emb (int, optional): Dimension of node embedding vectors.
            Defaults to None.
        dim_edge_emb (int, optional): Dimension of edge embedding vectors.
            Defaults to None.
    """
    def __init__(self,
                 dim_node_rep: int,
                 dim_edge_rep: int,
                 interaction: Optional[Union[Interaction, List[Interaction]]] = None,
                 n_interaction: int = 3,
                 coupled_interaction: bool = False,
                 activation: str = 'silu',
                 dim_node_emb: Optional[int] = None,
                 dim_edge_emb: Optional[int] = None,
                 **kwargs
                 ):
        super().__init__()
        self._kwargs = kwargs

        self.dim_node_rep = get_integer(dim_node_rep)
        self.dim_edge_rep = get_integer(dim_edge_rep)
        self.dim_node_emb = get_integer(dim_node_emb)
        self.dim_edge_emb = get_integer(dim_edge_emb)
        self.n_interaction = get_integer(n_interaction)
        self.coupled_interaction = coupled_interaction
        self.activation = get_activation(activation)

        self.device = GLOBAL_DEVICE()

        # Normalize `interaction` to an nn.ModuleList. When None, subclasses
        # are expected to build the layers later via `set_dimension`.
        self.interaction = None
        if interaction is not None:
            if isinstance(interaction, Interaction):
                # NOTE: repeating the same instance shares its parameters
                # across all layers (coupled-style behavior).
                interaction = [interaction] * self.n_interaction
            if isinstance(interaction, list):
                interaction = nn.ModuleList(interaction)
            if isinstance(interaction, nn.ModuleList):
                # The supplied list overrides the requested layer count.
                self.n_interaction = len(interaction)
                self.interaction = interaction
            else:
                raise TypeError(f'Unsupported type: {interaction}')

        self.default_embedding = self.get_default_embedding('default')

    def get_default_embedding(self, configure: str = 'default') -> dict:
        """Return the named embedding configuration, patched with this model's
        embedding dimensions and activation function.

        Args:
            configure (str): Name of the embedding configuration to load.

        Returns:
            dict: The patched embedding configuration.
        """
        default_embedding = get_embedding_config(configure)
        default_embedding['dim_node'] = self.dim_node_emb
        default_embedding['dim_edge'] = self.dim_edge_emb
        default_embedding['activation'] = self.activation
        return default_embedding

    def set_dimension(self, dim_node_emb: int, dim_edge_emb: int):
        """Set (or validate) the node/edge embedding dimensions, then build the
        interaction layers if they have not been created yet.

        Args:
            dim_node_emb (int): Node embedding dimension from the embedding cell.
            dim_edge_emb (int): Edge embedding dimension from the embedding cell.

        Returns:
            MolecularGNN: self, to allow call chaining.

        Raises:
            ValueError: If a dimension was already set to a different value.
        """
        if self.dim_node_emb is None:
            # Use get_integer here as well, matching the edge branch below.
            self.dim_node_emb = get_integer(dim_node_emb)
        elif self.dim_node_emb != dim_node_emb:
            raise ValueError(
                f'The dimension of node embedding of Embedding Cell ({dim_node_emb}) cannot match that of Model Cell ({self.dim_node_emb}).')

        if self.dim_edge_emb is None:
            self.dim_edge_emb = get_integer(dim_edge_emb)
        elif self.dim_edge_emb != dim_edge_emb:
            raise ValueError(
                f'The dimension of edge embedding of Embedding Cell ({dim_edge_emb}) cannot match that of Model Cell ({self.dim_edge_emb}).')

        if self.interaction is None:
            self.build_interaction()

        return self

    def build_interaction(self):
        """Build the interaction layers.

        Base implementation is a no-op; subclasses override it to populate
        ``self.interaction``.
        """
        return self

    def broadcast_to_interactions(self, value, name: str) -> Tensor:
        """Broadcast a scalar (or per-layer) hyperparameter across the
        interaction layers.

        Args:
            value: Scalar or 1-D sequence of per-layer values.
            name (str): Parameter name, used in error messages.

        Returns:
            Tensor: The value as-is when ``coupled_interaction`` is True,
                otherwise broadcast to shape ``(n_interaction,)``.

        Raises:
            ValueError: If the number of elements is incompatible with
                ``n_interaction``.
        """
        tensor = get_tensor(value)
        # BUGFIX: compare the element count (an int), not `Tensor.size()`,
        # which returns a `torch.Size` tuple and never equals a plain int
        # (and `torch.Size > int` raises TypeError).
        size = tensor.numel()
        if self.coupled_interaction:
            if size > 1:
                raise ValueError(
                    f'The size of "{name}" must be 1 when "coupled_interaction" is "True"')
        else:
            if size not in (self.n_interaction, 1):
                raise ValueError(f'The size of "{name}" ({size}) must be equal to "n_interaction" ({self.n_interaction})!')
            tensor = tensor.broadcast_to((self.n_interaction,))
        return tensor

    def _print_info(self,
                    num_retraction: int = 3,
                    num_gap: int = 3,
                    char: str = '-'
                    ) -> None:
        """Print a human-readable summary of the model configuration.

        Assumes ``self.interaction`` has already been built (non-None).

        Args:
            num_retraction (int): Width of the leading indent, in `char`s.
            num_gap (int): Width of each nesting step, in `char`s.
            char (str): Character used to draw the indentation.
        """
        ret = char * num_retraction
        gap = char * num_gap
        print(ret + f' Deep molecular model: {self._get_name()}')
        print('-' * 80)
        print(ret + gap + f' Dimension of node representation vector: {self.dim_node_rep}')
        print(ret + gap + f' Dimension of edge representation vector: {self.dim_edge_rep}')
        print(ret + gap + f' Dimension of node embedding vector: {self.dim_node_emb}')
        print(ret + gap + f' Dimension of edge embedding vector: {self.dim_edge_emb}')
        if self.coupled_interaction:
            # Coupled layers share one module; describe it once.
            print(ret + gap + f' Using coupled interaction with {self.n_interaction} layers:')
            print(ret + gap + gap + ' ' + self.interaction[0]._get_name())
            self.interaction[0]._print_info(num_retraction=num_retraction + num_gap, num_gap=num_gap, char=char)
        else:
            print(ret + gap + f' Using {self.n_interaction} independent interaction layers:')
            for i, inter in enumerate(self.interaction):
                print(ret + gap + ' ' + str(i) + '. ' + inter._get_name())
                inter._print_info(num_retraction=num_retraction + num_gap, num_gap=num_gap, char=char)
        print('-' * 80)

    def forward(self,
                node_emb: Tensor,
                node_mask: Optional[Tensor] = None,
                edge_emb: Optional[Tensor] = None,
                edge_mask: Optional[Tensor] = None,
                edge_cutoff: Optional[Tensor] = None,
                neigh_idx: Optional[Tensor] = None
                ) -> Tuple[Tensor, Tensor]:
        """Run the interaction layers over the input embeddings.

        Each layer receives both the current representations and the original
        embeddings (for residual-style updates inside the layer).

        Args:
            node_emb: Node embedding tensor.
            node_mask: Optional node mask tensor.
            edge_emb: Optional edge embedding tensor.
            edge_mask: Optional edge mask tensor.
            edge_cutoff: Optional edge cutoff tensor.
            neigh_idx: Optional neighbor index tensor.

        Returns:
            node_vec: Node representation tensor.
            edge_vec: Edge representation tensor.
        """
        node_vec = node_emb
        edge_vec = edge_emb

        # Iterate the ModuleList directly instead of indexing by range(len()).
        for interaction in self.interaction:
            node_vec, edge_vec = interaction(
                node_vec=node_vec,
                node_emb=node_emb,
                node_mask=node_mask,
                edge_vec=edge_vec,
                edge_emb=edge_emb,
                edge_mask=edge_mask,
                edge_cutoff=edge_cutoff,
                neigh_idx=neigh_idx,
            )

        return node_vec, edge_vec

