# Copyright 2021-2023 @ Shenzhen Bay Laboratory &
#                       Peking University &
#                       Huawei Technologies Co., Ltd
#
# This code is a part of Cybertron package.
#
# The Cybertron is open-source software based on the AI-framework:
# MindSpore (https://www.mindspore.cn/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Molecular Conformation Transformer (MolCT) implementation.
"""

import torch
from torch import nn, Tensor
from typing import Union, List, Optional, Tuple

from ..utils import get_integer, get_tensor, get_arguments
from .model import MolecularGNN, _model_register
from ..interaction import Interaction, NeuralInteractionUnit
from ..filter import ResFilter

# Define the MolCT class

@_model_register('molct')
class MolCT(MolecularGNN):
    """
    Molecular Configuration Transformer (MolCT) Model

    Reference:

        Zhang, J.; Zhou, Y.; Lei, Y.-K.; Yang, Y. I.; Gao, Y. Q.,
        Molecular CT: unifying geometry and representation learning for molecules at different scales [J/OL].
        arXiv preprint, 2020: arXiv:2012.11816 [2020-12-22]. https://arxiv.org/abs/2012.11816


    Args:
        dim_feature (int, optional): Dimension of feature vectors. Defaults to 128.
        dim_edge_emb (int, optional): Dimension of edge embedding vectors. Defaults to None.
        interaction (Union[Interaction, List[Interaction], None], optional):
            Interaction module or list of interaction modules. Defaults to None.
        n_interaction (int, optional): Number of interaction layers. Defaults to 3.
        activation (str, optional): Activation function name. Defaults to 'silu'.
        open_act (bool, optional): Whether to use adaptive computation time. Defaults to False.
        n_heads (int, optional): Number of attention heads. Defaults to 8.
        max_cycles (int, optional): Maximum number of computation cycles. Defaults to 10.
        coupled_interaction (bool, optional): Whether to use coupled interactions.
            Defaults to False.
        fixed_cycles (bool, optional): Whether to use fixed number of cycles.
            Defaults to False.
        use_feed_forward (bool, optional): Whether to use feed-forward network.
            Defaults to False.
        act_threshold (float, optional): Threshold for halting probability.
            Defaults to 0.9.
    """
    def __init__(self,
                 dim_feature: int = 128,
                 dim_edge_emb: Optional[int] = None,
                 interaction: Optional[Union[Interaction, List[Interaction]]] = None,
                 n_interaction: int = 3,
                 activation: str = 'silu',
                 open_act: bool = False,
                 n_heads: int = 8,
                 max_cycles: int = 10,
                 coupled_interaction: bool = False,
                 fixed_cycles: bool = False,
                 use_feed_forward: bool = False,
                 act_threshold: float = 0.9,
                 **kwargs
                 ):
        # MolCT uses the same width for node and edge representations.
        super().__init__(
            dim_node_rep=dim_feature,
            dim_edge_rep=dim_feature,
            n_interaction=n_interaction,
            interaction=interaction,
            activation=activation,
            coupled_interaction=coupled_interaction,
            dim_node_emb=dim_feature,
            dim_edge_emb=dim_edge_emb,
        )

        self._kwargs = get_arguments(locals(), kwargs)

        # Adaptive computation time (ACT): when disabled, each interaction
        # unit runs exactly one cycle.
        self.open_act = open_act
        self.max_cycles = max_cycles if self.open_act else 1

        self.n_heads = n_heads
        self.use_feed_forward = use_feed_forward
        self.fixed_cycles = fixed_cycles
        # Stored as a tensor so it lives on the model's device.
        self.act_threshold = get_tensor(act_threshold, dtype=torch.float32, device=self.device)

        self.dim_feature = dim_feature

        # The edge filter and the interaction stack can only be built once the
        # edge embedding dimension is known; otherwise construction is
        # deferred until `set_dimension` is called.
        self.filter_net = None
        if self.dim_edge_emb is not None:
            self.filter_net = ResFilter(self.dim_edge_emb, self.dim_feature, self.activation)
            if self.interaction is None:
                self.build_interaction()

        self.default_embedding = self.get_default_embedding('molct')

    def build_interaction(self):
        """Build the stack of neural interaction units.

        Raises:
            ValueError: If the edge embedding dimension has not been set yet.
        """
        if self.dim_edge_emb is None:
            # NOTE: the setter for this dimension is `set_dimension` below
            # (the message previously pointed at a non-existent method name).
            raise ValueError('Cannot build interaction without `dim_edge_emb`. '
                             'Please use `set_dimension` at first.')

        # Shared configuration for every interaction unit.
        interaction_kwargs = {
            'dim_feature': self.dim_feature,
            'n_heads': self.n_heads,
            'max_cycles': self.max_cycles,
            'activation': self.activation,
            'fixed_cycles': self.fixed_cycles,
            'use_feed_forward': self.use_feed_forward,
            'act_threshold': self.act_threshold,
        }

        if self.coupled_interaction:
            # Coupled: the SAME unit object is repeated, so all interaction
            # steps share one set of weights (weight tying).
            shared_unit = NeuralInteractionUnit(**interaction_kwargs)
            self.interaction = nn.ModuleList([shared_unit] * self.n_interaction)
        else:
            # Decoupled: each layer gets its own independently-initialized unit.
            self.interaction = nn.ModuleList([
                NeuralInteractionUnit(**interaction_kwargs)
                for _ in range(self.n_interaction)])

    def set_dimension(self, dim_node_emb: int, dim_edge_emb: int) -> None:
        """Check and set the dimensions of the embedding vectors, lazily
        building the edge filter once `dim_edge_emb` becomes known."""
        super().set_dimension(dim_node_emb, dim_edge_emb)
        if self.filter_net is None:
            self.filter_net = ResFilter(self.dim_edge_emb, self.dim_feature, self.activation)

    def forward(self,
                node_emb: Tensor,
                node_mask: Optional[Tensor] = None,
                edge_emb: Optional[Tensor] = None,
                edge_mask: Optional[Tensor] = None,
                edge_cutoff: Optional[Tensor] = None,
                neigh_idx: Optional[Tensor] = None
                ) -> Tuple[Tensor, Tensor]:
        """Run the interaction stack over node and edge embeddings.

        Args:
            node_emb: Node embedding tensor.
            node_mask: Optional node mask tensor.
            edge_emb: Optional edge embedding tensor.
            edge_mask: Optional edge mask tensor.
            edge_cutoff: Optional edge cutoff tensor.
            neigh_idx: Optional neighbor index tensor.

        Returns:
            node_vec: Node embedding tensor.
            edge_vec: Edge embedding tensor.
        """
        if neigh_idx is None:
            # Dense (all-pairs) mode: OR the outer product of the node mask
            # into the edge mask so self-pairs of valid atoms stay enabled.
            # NOTE(review): node_mask and edge_mask are dereferenced
            # unconditionally here, so this mode requires both — confirm
            # against callers.
            # (B, A, A)
            diagonal = torch.logical_and(node_mask.unsqueeze(-1), node_mask.unsqueeze(-2))
            # (B, A, A)
            edge_mask = torch.logical_or(edge_mask, diagonal)
        else:
            # Neighbour-list mode: force the first neighbour slot to be valid
            # (presumably the self-index slot — verify against the
            # neighbour-list construction).
            condition = torch.zeros_like(edge_mask)
            condition[:, :, 0] = True
            edge_mask = torch.logical_or(edge_mask, condition)

        node_vec = node_emb
        edge_vec = self.filter_net(edge_emb)
        # Iteratively refine both representations through the stack.
        for unit in self.interaction:
            node_vec, edge_vec = unit(
                node_vec=node_vec,
                node_emb=node_emb,
                node_mask=node_mask,
                edge_vec=edge_vec,
                edge_emb=edge_emb,
                edge_mask=edge_mask,
                edge_cutoff=edge_cutoff,
                neigh_idx=neigh_idx,
            )

        return node_vec, edge_vec