# Copyright 2021-2023 @ Shenzhen Bay Laboratory &
#                       Peking University &
#                       Huawei Technologies Co., Ltd
#
# This code is a part of Cybertron package.
#
# The Cybertron is open-source software based on the AI-framework:
# MindSpore (https://www.mindspore.cn/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Embedding
"""

from typing import Union, Tuple

import mindspore as ms
import mindspore.numpy as msnp
from mindspore import nn
from mindspore import Tensor
from mindspore.nn import Cell
from mindspore.ops import functional as F
from mindspore.ops import operations as P
from mindspore.common.initializer import Initializer, Normal


from mindsponge.function import GLOBAL_UNITS, Length, get_length
from mindsponge.function import get_integer, get_ms_array, get_arguments, get_initializer

from .graph import GraphEmbedding, _embedding_register
from ..cutoff import Cutoff, get_cutoff
from ..rbf import get_rbf
from ..filter import Filter, get_filter
from ..interaction import AttentionBase


@_embedding_register('molecule')
class MolEmbedding(GraphEmbedding):
    r"""Embedding for molecule.

    Encodes a molecular graph into initial node (atom) and edge
    (distance and/or bond) representation vectors.

    Args:
        dim_node (int): Dimension of node embedding vector.

        dim_edge (int): Dimension of edge embedding vector.
            Default: ``None`` (use ``dim_node``).

        emb_dis (bool): Whether to embed the inter-atomic distances.
            Default: ``True``.

        emb_bond (bool): Whether to embed the bonds. Default: ``False``.

        cutoff (Length): Cut-off radius. Default: ``Length(1, 'nm')``.

        cutoff_fn (Union[Cutoff, str]): Cut-off function. Default: ``'smooth'``.

        rbf_fn (Union[int, str]): Radial basis function (RBF).
            Default: ``'log_gaussian'``.

        num_basis (Union[int, None]): Number of radial basis functions.
            Default: ``None``.

        atom_filter (Union[Filter, str, None]): Filter applied to the atom
            embedding. Default: ``None``.

        dis_filter (Union[Filter, str, None]): Filter applied to the distance
            (RBF) embedding. Default: ``None``.

        bond_filter (Union[Filter, str]): Filter applied to the bond
            embedding. Default: ``'residual'``.

        dis_self (Length): Effective "self distance" placed on the diagonal
            of the distance matrix. Default: ``Length(0.05, 'nm')``.

        use_sub_cutoff (bool): Whether to shrink the cut-off radius according
            to the distance to the first (central) atom. Default: ``False``.

        cutoff_buffer (Union[Length, float, Tensor]): Buffer added to the
            cut-off when ``use_sub_cutoff`` is enabled.
            Default: ``Length(0.2, 'nm')``.

        num_atom_types (int): Maximum number of atom types. Default: 64.

        num_bond_types (int): Maximum number of bond types. Default: 16.

        initializer (Union[Initializer, str]): Initializer of the embedding
            tables. Default: ``Normal(1.0)``.

        activation (Union[Cell, str]): Activation function. Default: ``'silu'``.

        length_unit (str): Length unit. Default: Global length unit.
    """
    def __init__(self,
                 dim_node: int,
                 dim_edge: Union[int, None] = None,
                 emb_dis: bool = True,
                 emb_bond: bool = False,
                 cutoff: Length = Length(1, 'nm'),
                 cutoff_fn: Union[Cutoff, str] = 'smooth',
                 rbf_fn: Union[int, str] = 'log_gaussian',
                 num_basis: Union[int, None] = None,
                 atom_filter: Union[Filter, str, None] = None,
                 dis_filter: Union[Filter, str, None] = None,
                 bond_filter: Union[Filter, str] = 'residual',
                 dis_self: Length = Length(0.05, 'nm'),
                 use_sub_cutoff: bool = False,
                 cutoff_buffer: Union[Length, float, Tensor] = Length(0.2, 'nm'),
                 num_atom_types: int = 64,
                 num_bond_types: int = 16,
                 initializer: Union[Initializer, str] = Normal(1.0),
                 activation: Union[Cell, str] = 'silu',
                 length_unit: str = GLOBAL_UNITS.length_unit,
                 **kwargs,
                 ):

        super().__init__(
            dim_node=dim_node,
            dim_edge=dim_node if dim_edge is None else dim_edge,
            emb_dis=emb_dis,
            emb_bond=emb_bond,
            activation=activation,
            length_unit=length_unit,
        )
        self._kwargs = get_arguments(locals(), kwargs)
        self.initializer = get_initializer(initializer)

        # Atom (node) embedding: a one-hot lookup table over atom types,
        # optionally followed by a filter network.
        self.num_atom_types = get_integer(num_atom_types)
        self.atom_embedding = nn.Embedding(vocab_size=self.num_atom_types,
                                           embedding_size=self.dim_node,
                                           use_one_hot=True,
                                           embedding_table=self.initializer)
        self.atom_filter = get_filter(atom_filter, self.dim_node,
                                      self.dim_node, activation)

        # Distance (edge) embedding network. All attributes are pre-set so
        # they always exist, even when `emb_dis` is False (the original code
        # left e.g. `self.cutoff` undefined in that case, risking
        # AttributeError in `print_info`).
        self.rbf_fn = None
        self.dis_filter = None
        self.cutoff = None
        self.cutoff_fn = None
        self.use_sub_cutoff = False
        self.cutoff_buffer = None
        dis_self = get_length(dis_self, self.units)
        # () -> (1,): distance assigned to the diagonal (atom with itself)
        self.dis_self = get_ms_array(dis_self, ms.float32).reshape((-1,))
        if self.emb_dis:
            self.cutoff = get_length(cutoff, self.units)
            self.cutoff_fn = get_cutoff(cutoff_fn, self.cutoff)
            if self.cutoff_fn is not None:
                self.cutoff = self.cutoff_fn.cutoff  # float to Tensor
            if self.cutoff is not None:
                self.cutoff = get_ms_array(self.cutoff, ms.float32)  # () -> (1,)

            # NOTE(review): `use_sub_cutoff` shrinks the cut-off radius by the
            # distance to the first (central) atom -- see construct(). The
            # intended semantics should be confirmed with the original authors.
            self.use_sub_cutoff = use_sub_cutoff
            self.cutoff_buffer = get_length(cutoff_buffer, self.units)

            num_basis = get_integer(num_basis)
            self.rbf_fn = get_rbf(rbf_fn, r_max=self.cutoff, num_basis=num_basis,
                                  length_unit=self.units.length_unit)

            self.dis_filter = get_filter(cls_name=dis_filter,
                                         dim_in=self.num_basis,
                                         dim_out=self._dim_edge,
                                         activation=activation)

        # Bond embedding network.
        self.bond_embedding = None
        self.bond_filter = None
        self.interaction = None
        self.num_bond_types = get_integer(num_bond_types)
        if self.emb_bond:
            self.bond_embedding = nn.Embedding(
                self.num_bond_types, self._dim_edge,
                use_one_hot=True, embedding_table=self.initializer)

            # When both distance and bond are embedded, the bond information
            # is fused into the node vectors by an attention interaction, so
            # the bond filter projects to the node dimension instead. (The
            # original code built a discarded `_dim_edge`-sized filter first
            # and then overwrote it; build it only once here.)
            bond_dim_out = self._dim_edge
            if self.emb_dis:
                self.interaction = AttentionBase(dim_feature=self.dim_node)
                bond_dim_out = self.dim_node
            self.bond_filter = get_filter(cls_name=bond_filter,
                                          dim_in=self._dim_edge,
                                          dim_out=bond_dim_out,
                                          activation=activation)

    @property
    def num_basis(self) -> int:
        """Number of radial basis functions (1 when no RBF is used)."""
        if self.rbf_fn is None:
            return 1
        return self.rbf_fn.num_basis

    @property
    def dim_edge(self) -> int:
        """Dimension of the edge embedding vector actually produced."""
        # Without a distance filter, the raw RBF expansion is the edge vector.
        if self.emb_dis and self.dis_filter is None:
            return self.num_basis
        return self._dim_edge

    def print_info(self, num_retraction: int = 3, num_gap: int = 3, char: str = ' '):
        """Print the information of the molecular embedding."""
        ret = char * num_retraction
        gap = char * num_gap
        print(ret+f' Graph Embedding: {self.cls_name}')
        print('-'*80)
        print(ret+gap+f' Length unit: {self.units.length_unit}')
        print(ret+gap+f' Atom embedding size: {self.num_atom_types}')
        print(ret+gap+f' Embedding distance: {self.emb_dis}')
        print(ret+gap+f' Embedding Bond: {self.emb_bond}')
        if self.emb_dis:
            print(ret+gap+f' Cutoff distance: {self.cutoff} {self.units.length_unit}')
            # The cutoff/RBF functions are optional; guard against None
            # (`get_cutoff` may return None, see __init__).
            if self.cutoff_fn is not None:
                print(ret+gap+f' Cutoff function: {self.cutoff_fn.cls_name}')
            if self.rbf_fn is not None:
                print(ret+gap+f' Radial basis functions: {self.rbf_fn.cls_name}')
                self.rbf_fn.print_info(num_retraction=num_retraction + num_gap,
                                       num_gap=num_gap, char=char)
        if self.emb_bond:
            print(ret+gap+f' Bond embedding size: {self.num_bond_types}')
        print(ret+gap+f' Dimension of node embedding vector: {self.dim_node}')
        print(ret+gap+f' Dimension of edge embedding vector: {self.dim_edge}')
        print('-'*80)

    def get_rbf(self, distances: Tensor) -> Tensor:
        """Expand distances with the radial basis functions.

        Args:
            distances (Tensor): Distances of shape (B, A, N).

        Returns:
            Tensor of shape (B, A, N, K): RBF expansion, or the raw distance
            with a trailing axis of size 1 when no RBF is configured.
        """
        if self.rbf_fn is None:
            # No RBF: (B, A, N) -> (B, A, N, 1)
            return F.expand_dims(distances, -1)
        # (B, A, N) -> (B, A, N, K)
        return self.rbf_fn(distances)

    def construct(self,
                  atom_type: Tensor,
                  distance: Tensor,
                  bond_type: Tensor,
                  atom_mask: Tensor = None,
                  distance_mask: Tensor = None,
                  bond_mask: Tensor = None,
                  ) -> Tuple[Tensor, Tensor, Tensor, Tensor, Tensor]:
        """Compute the node and edge embeddings of a molecular graph.

        Args:
            atom_type (Tensor): Atom type indices, shape (B, A) -- TODO confirm.
            distance (Tensor): Inter-atomic distances, shape (B, A, A).
            bond_type (Tensor): Bond type indices, shape (B, A, A).
            atom_mask (Tensor): Mask of valid atoms. Default: None.
            distance_mask (Tensor): Mask of valid distances. Default: None.
            bond_mask (Tensor): Mask of valid bonds. Default: None.

        Returns:
            Tuple of (node_emb, node_mask, edge_emb, edge_mask, edge_cutoff).
        """
        #pylint: disable=unused-argument

        if self.emb_dis:
            batch_size = distance.shape[0]
            num_atoms = distance.shape[-2]
        else:
            batch_size = bond_type.shape[0]
            num_atoms = bond_type.shape[-2]

        # Node embedding: (B, A) -> (B, A, F)
        node_emb = self.atom_embedding(atom_type)
        if self.atom_filter is not None:
            node_emb = self.atom_filter(node_emb)

        node_mask = atom_mask  # None if no input mask was given
        if batch_size > 1 and atom_type.shape[0] != batch_size:
            # Atom types are shared across the batch: broadcast them.
            node_emb = F.broadcast_to(node_emb, (batch_size,) + node_emb.shape[1:])
            if atom_mask is not None:
                node_mask = F.broadcast_to(atom_mask, (batch_size,) + atom_mask.shape[1:])

        # Distance embedding
        dis_emb = None
        dis_cutoff = None
        if self.emb_dis:
            # Replace the diagonal with the "self distance": (B, A, A)
            distance = msnp.where(F.eye(num_atoms, num_atoms, ms.bool_),
                                  self.dis_self, distance)

            # (B, A, A) -> (B, A, A, K)
            dis_emb = self.get_rbf(distance)
            if self.dis_filter is not None:
                # (B, A, A, K) -> (B, A, A, F)
                dis_emb = self.dis_filter(dis_emb)

            _dis_mask = None
            # (B, A, A)
            if self.cutoff_fn is None:
                # No cutoff: all distances have the same weight.
                dis_cutoff = F.ones_like(distance)
            else:
                if self.use_sub_cutoff:
                    # Shrink the cutoff by the distance to the central
                    # (first) atom: (B, 1, A)
                    center_dis = F.expand_dims(distance[..., 0, :], -2)
                    cutoff = self.cutoff + self.cutoff_buffer - center_dis
                    cutoff = F.maximum(0, F.minimum(cutoff, self.cutoff))
                    dis_cutoff, _dis_mask = self.cutoff_fn(distance, distance_mask, cutoff)
                else:
                    dis_cutoff, _dis_mask = self.cutoff_fn(distance, distance_mask)

            # Merge the mask returned by the cutoff function (if any) into
            # the input mask. BUGFIX: the original code did not check
            # `_dis_mask` for None, which failed when no cutoff function was
            # configured.
            if _dis_mask is not None:
                if distance_mask is None:
                    distance_mask = _dis_mask
                else:
                    distance_mask = F.logical_and(distance_mask, _dis_mask)

        # Bond embedding
        bond_emb = None
        bond_cutoff = None
        if self.emb_bond:
            bond_emb = self.bond_embedding(bond_type)

            # BUGFIX: derive the bond mask from the node mask only when the
            # latter exists (originally crashed when neither mask was given).
            if bond_mask is None and node_mask is not None:
                # (B, A, 1) & (B, 1, A) -> (B, A, A)
                bond_mask = F.logical_and(P.ExpandDims()(node_mask, -1),
                                          P.ExpandDims()(node_mask, -2))

            if bond_mask is not None:
                # (B, A, A, F_b) * (B, A, A, 1) -> (B, A, A, F_b)
                bond_emb = bond_emb * F.expand_dims(bond_mask, -1)

            if self.bond_filter is not None:
                bond_emb = self.bond_filter(bond_emb)

        # Select / combine the edge representation.
        edge_emb = None
        edge_mask = None
        edge_cutoff = None
        if not self.emb_dis:
            # Bond embedding only.
            edge_emb = bond_emb
            edge_mask = bond_mask
            edge_cutoff = bond_cutoff
        elif not self.emb_bond:
            # Distance embedding only.
            edge_emb = dis_emb
            edge_mask = distance_mask
            edge_cutoff = dis_cutoff
        else:
            # Both distance and bond embedding.
            if self.interaction is not None:
                # Fuse the bond information into the node vectors through
                # the attention interaction; distances remain the edges.
                for _num in range(3):
                    node_emb, _ = self.interaction(node_vec=node_emb,
                                                   edge_vec=bond_emb,
                                                   edge_mask=bond_mask,
                                                   edge_cutoff=bond_cutoff,
                                                   )
                edge_emb = dis_emb
            else:
                edge_emb = P.Add()(dis_emb, bond_emb)
            edge_cutoff = dis_cutoff

            # BUGFIX: either mask may be None; combine only what exists.
            if distance_mask is None:
                edge_mask = bond_mask
            elif bond_mask is None:
                edge_mask = distance_mask
            else:
                edge_mask = F.logical_and(distance_mask, bond_mask)

        # Exclude the self-connection (diagonal) from the edge mask; this is
        # the uniform mask output, users can define a unique mask type in the
        # model. BUGFIX: `edge_mask` can still be None (no masks given and no
        # cutoff function); fall back to the pure off-diagonal mask.
        eye_mask = F.eye(num_atoms, num_atoms, ms.bool_)
        if edge_mask is None:
            edge_mask = F.logical_not(eye_mask)
        else:
            edge_mask = F.logical_and(edge_mask, F.logical_not(eye_mask))

        return node_emb, node_mask, edge_emb, edge_mask, edge_cutoff
