# Copyright 2021-2023 @ Shenzhen Bay Laboratory &
#                       Peking University &
#                       Huawei Technologies Co., Ltd
#
# This code is a part of MindSPONGE:
# MindSpore Simulation Package tOwards Next Generation molecular modelling.
#
# MindSPONGE is open-source software based on the AI-framework:
# MindSpore (https://www.mindspore.cn/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
LINCS Constraint algorithm
"""

from typing import Union, Tuple, Dict
import numpy as np

import torch
import torch.nn.functional as F
from torch import Tensor, nn

from . import Constraint
from ...system import Molecule
from ...potential import PotentialCell
from ...function.operations import GetShiftGrad
from ...function import get_arguments, get_tensor


class EinsumWrapper(nn.Module):
    """Implement a particular Einsum operation.

    Only the handful of equations needed by the LINCS constraint solver are
    supported; each one is expanded by hand into broadcast/contract primitives
    (for backends where a general einsum is unavailable or slow).

    Args:

        equation (str):  an equation representing the operation. Must be one of:
                         'ijk,ilkm->iljm', 'ijkl,imkl->ijm', 'ijkl,ikl->ij',
                         'ijk,ik->ij', 'ijkl,ij->ikl', 'ijk,ikl->ijl',
                         'ijk,ijl->ikl'.

    Raises:

        NotImplementedError: if the equation is not one of the supported ones
                             (raised at call time, not at construction).

    Supported Platforms:

        ``Ascend`` ``GPU``

    """
    def __init__(self, equation: str):
        super().__init__()
        self.equation = equation

    def forward(self, xy: Tuple[Tensor, Tensor]) -> Tensor:
        """Calculation for Einsum operation.

        Args:
            xy (Tuple[Tensor, Tensor]): the two operand tensors, with shapes
                matching the index pattern of `self.equation`.

        Returns:
            Tensor, the contraction result.
        """

        result = None
        if self.equation == 'ijk,ilkm->iljm':
            ijk, ilkm = xy
            # (i,l,j,k) <- (i,j,k): insert the `l` axis and broadcast.
            iljk = ijk.unsqueeze(1).expand(ijk.shape[:1] + ilkm.shape[1:2] + ijk.shape[1:])
            # Batched matrix product over the two leading dims:
            # (i,l,j,m) = (i,l,j,k) @ (i,l,k,m).
            # NOTE: torch.bmm only accepts 3-D tensors, so matmul must be used here.
            iljm = torch.matmul(iljk, ilkm)
            result = iljm
        elif self.equation == 'ijkl,imkl->ijm':
            ijkl, imkl = xy
            # Broadcast both operands to (i,j,m,k,l), then contract over (k,l).
            ijmkl1 = ijkl.unsqueeze(2).expand(ijkl.shape[:2] + imkl.shape[1:2] + ijkl.shape[2:])
            ijmkl2 = imkl.unsqueeze(1).expand(imkl.shape[:1] + ijkl.shape[1:2] + imkl.shape[1:])
            ijm = torch.sum(ijmkl1 * ijmkl2, dim=[-1, -2])
            result = ijm
        elif self.equation == 'ijkl,ikl->ij':
            ijkl, ikl = xy
            # (i,j,k,l) <- (i,k,l): insert the `j` axis, then contract over (k,l).
            ijkl2 = ikl.unsqueeze(1).expand(ikl.shape[:1] + ijkl.shape[1:2] + ikl.shape[1:])
            ij = torch.sum(ijkl * ijkl2, dim=[-1, -2])
            result = ij
        elif self.equation == 'ijk,ik->ij':
            ijk, ik = xy
            # (i,j,k) <- (i,k): insert the `j` axis, then contract over k.
            ijk2 = ik.unsqueeze(1).expand(ik.shape[:1] + ijk.shape[1:2] + ik.shape[1:])
            ij = torch.sum(ijk * ijk2, dim=-1)
            result = ij
        elif self.equation == 'ijkl,ij->ikl':
            ijkl, ij = xy
            # (i,j,k,l) <- (i,j): append two axes, then contract over j.
            ijkl2 = ij.reshape(ij.shape + (1, 1)).expand(ij.shape + ijkl.shape[-2:])
            ikl = torch.sum(ijkl * ijkl2, dim=1)
            result = ikl
        elif self.equation == 'ijk,ikl->ijl':
            ijk, ikl = xy
            # Plain 3-D batched matrix multiplication.
            ijl = torch.bmm(ijk, ikl)
            result = ijl
        elif self.equation == 'ijk,ijl->ikl':
            ijk, ijl = xy
            # Outer product over (k,l) per (i,j), then contract over j.
            ijkl1 = ijk.unsqueeze(-1).expand(*ijk.shape, ijl.shape[-1])
            ijkl2 = ijl.unsqueeze(-2).expand(*ijl.shape[:-1], ijk.shape[-1], ijl.shape[-1])
            result = (ijkl1 * ijkl2).sum(dim=1)
        else:
            raise NotImplementedError("This equation is not implemented")
        return result


class Lincs(Constraint):
    """A LINCS (LINear Constraint Solver) constraint module, which is a subclass of `Constraint`.

    Reference:

        Hess, B.; Bekker, H.; Berendsen, H. J. C.; Fraaije, J. G. E. M.
        LINCS: A Linear Constraint Solver for Molecular Simulations.

    Args:

        system (Molecule):          Simulation system.

        bonds (Union[Tensor, str]): Bonds to be constraint.
                                    Tensor of shape (K, 2). Data type is int.
                                    Alternative: "h-bonds" or "all-bonds".

        potential (PotentialCell):  Potential Cell. Default: None

    Supported Platforms:

        ``Ascend`` ``GPU``

    """

    def __init__(self,
                 system: Molecule,
                 bonds: Union[Tensor, str] = 'h-bonds',
                 potential: PotentialCell = None,
                 **kwargs
                 ):

        super().__init__(
            system=system,
            bonds=bonds,
            potential=potential,
        )
        print('[MindSPONGE] The lincs constraint is used for the molecule system.')

        self._kwargs = get_arguments(locals(), kwargs)

        if isinstance(bonds, str):
            if bonds.lower() == 'h-bonds':
                if system.remaining_index is None:
                    # Select only the bonds that involve hydrogen atoms.
                    # NOTE: `index_select` is the torch equivalent of MindSpore's
                    # `ops.gather(x, idx, axis)`; `torch.gather(x, idx, dim=...)`
                    # is invalid (gather takes `dim` as its second argument).
                    self.bonds = torch.index_select(system.bonds, -2, system.h_bonds)
                else:
                    # Keep only the h-bonds that are also in the remaining index.
                    take_index = torch.nonzero(
                        (system.remaining_index == system.h_bonds[..., None]).sum(-2)).reshape(-1)
                    self.bonds = torch.index_select(
                        system.bonds, -2, system.remaining_index[take_index])
            elif bonds.lower() == 'all-bonds':
                if system.remaining_index is None:
                    self.bonds = system.bonds
                else:
                    self.bonds = torch.index_select(system.bonds, -2, system.remaining_index)
            else:
                raise ValueError(f'"bonds" must be "h-bonds" or "all-bonds" but got: {bonds}')
        else:
            try:
                self.bonds = get_tensor(bonds)
            except TypeError as te:
                raise TypeError(f'The type of "bonds" must be Tensor or str, but got: {type(bonds)}') from te

        # Normalize `bonds` to rank 2 (K, 2); a leading batch axis of size 1 is squeezed.
        if self.bonds.ndim != 2:
            if self.bonds.ndim != 3:
                raise ValueError(f'The rank of "bonds" must be 2 or 3 but got: {self.bonds.ndim}')

            if self.bonds.shape[0] != 1:
                raise ValueError(f'For constraint, the batch size of "bonds" must be 1 '
                                 f'but got: {self.bonds.shape[0]}')
            self.bonds = self.bonds[0]

        if self.bonds.shape[-1] != 2:
            raise ValueError(f'The last dimension of "bonds" must be 2 '
                             f'but got: {self.bonds.shape[-1]}')

        self.num_constraints = self.bonds.shape[-2]

        #pylint: disable=invalid-name
        # Sorted unique indices of the atoms that take part in at least one constraint.
        flatten_bonds = self.bonds.reshape(-1).numpy()
        remaining_atoms = get_tensor(np.sort(np.unique(flatten_bonds)), dtype=torch.int32)

        self.remaining_atoms = remaining_atoms
        # (1,R,3): index used to scatter the corrected coordinates back into (B,A,D).
        self.bs_index = torch.broadcast_to(remaining_atoms[None, ..., None],
                                           (1, remaining_atoms.shape[0], 3))
        mapping_atoms = torch.arange(remaining_atoms.shape[-1])

        # Re-label the atoms in `bonds` with their compressed indices 0..R-1.
        mapping = dict(zip(remaining_atoms.numpy(), mapping_atoms.numpy()))
        self.bonds = get_tensor(np.vectorize(mapping.get)(self.bonds.numpy()), dtype=torch.int32)

        self.num_atoms = remaining_atoms.shape[0]
        # (R,R) identity, used to build the diagonal inverse-mass matrix.
        iinvM = torch.eye(self.num_atoms)
        # Keep only the inverse masses of the constrained atoms.
        self.inv_mass = torch.index_select(self.inv_mass, -1, remaining_atoms)

        # (B,R,R) = (1,R,R) * (B,1,R): diagonal inverse-mass matrix per walker.
        self.Mii = torch.broadcast_to(
            iinvM, (1,) + iinvM.shape) * self.inv_mass[:, None, :]

        self.BMatrix = GetShiftGrad(
            num_atoms=self.num_atoms,
            bonds=self.bonds,
            num_walkers=self.num_walker,
            dimension=self.dimension,
            use_pbc=self.use_pbc
        )
        # (B,C,R,D): target shape for broadcasting coordinates over constraints.
        # NOTE: the original code stored `torch.broadcast_to(shape)`, which is not
        # callable; the shape is kept instead and applied with `expand` in forward.
        self.broadcast_shape = (self.num_walker,
                                self.bonds.shape[-2], self.num_atoms, self.dimension)

        self.inv = torch.inverse
        self.squeeze = torch.squeeze
        self.einsum0 = EinsumWrapper('ijk,ilkm->iljm')
        self.einsum1 = EinsumWrapper('ijkl,imkl->ijm')
        self.einsum2 = EinsumWrapper('ijkl,ikl->ij')
        self.einsum3 = EinsumWrapper('ijk,ik->ij')
        self.einsum4 = EinsumWrapper('ijkl,ij->ikl')
        self.einsum5 = EinsumWrapper('ijk,ikl->ijl')
        self.einsum6 = EinsumWrapper('ijk,ijl->ikl')

        # (B,C,R)
        shape = (self.num_walker, self.num_constraints, self.num_atoms)

        # One-hot masks selecting, for every constraint, its first/second atom.
        # (1,C,1)
        bond0 = self.bonds[..., 0].reshape(1, -1, 1).numpy()
        # (B,C,R): 1 at the position of the first atom of each constraint.
        mask0 = np.zeros(shape)
        np.put_along_axis(mask0, bond0, 1, axis=-1)
        # (B,C,R,1)
        self.mask0 = get_tensor(mask0, dtype=torch.int32).unsqueeze(-1)

        # (1,C,1)
        bond1 = self.bonds[..., 1].reshape(1, -1, 1).numpy()
        # (B,C,R): 1 at the position of the second atom of each constraint.
        mask1 = np.zeros(shape)
        np.put_along_axis(mask1, bond1, 1, axis=-1)
        # (B,C,R,1)
        self.mask1 = get_tensor(mask1, dtype=torch.int32).unsqueeze(-1)

    def forward(self,
                coordinate: Tensor,
                velocity: Tensor,
                force: Tensor,
                energy: Tensor,
                virial: Tensor = None,
                pbc_box: Tensor = None,
                step: int = 0,
                **kwargs
                ) -> Dict[str, Tensor]:
        """Project the updated coordinates back onto the constraint manifold and
        propagate the correction to velocity, force and (optionally) virial.

        Args:
            coordinate (Tensor): (B,A,D) coordinates after the unconstrained step.
            velocity (Tensor):   (B,A,D) velocities.
            force (Tensor):      (B,A,D) forces.
            energy (Tensor):     potential energy (passed through unchanged).
            virial (Tensor):     virial; updated only when a PBC box is present.
            pbc_box (Tensor):    periodic boundary box. Default: None.
            step (int):          simulation step (unused here). Default: 0.

        Returns:
            Dict[str, Tensor], the updated simulation variables.
        """
        #pylint: disable=invalid-name

        # (B,A,D): keep the unconstrained coordinates for the velocity/force updates.
        last_crd = coordinate.clone()

        # (B,R,D): coordinates of the constrained atoms before and after the step.
        # NOTE: the original code used a bare `remaining_atoms` (NameError) inside an
        # invalid `torch.gather` call; `index_select` along the atom axis is intended.
        coordinate_old = torch.index_select(self._coordinate.clone(), -2, self.remaining_atoms)
        coordinate_new = torch.index_select(coordinate.clone(), -2, self.remaining_atoms)

        # (B,C,R,D): gradient of the constraint directions (the LINCS "B matrix").
        BMatrix = self.BMatrix(coordinate_new, coordinate_old, pbc_box)

        # ijk,ilkm->iljm
        # (B,C,R,D)<-(B,R,R),(B,C,R,D)
        tmp0 = self.einsum0((self.Mii, BMatrix))

        # ijkl,imkl->ijm
        # (B,C,C)<-(B,C,R,D),(B,C,R,D)
        tmp1 = self.einsum1((BMatrix, tmp0))

        # (B,C,C): inverse of the constraint coupling matrix B M^-1 B^T.
        tmp2 = self.inv(tmp1)

        # (B,C,R,D) <- (B,R,D): broadcast old positions over the constraint axis.
        pos_old = coordinate_old.unsqueeze(-3).expand(self.broadcast_shape)

        # (B,C,D) <- (B,C,R,D) = (B,C,R,1) * (B,C,R,D)
        pos_old_0 = torch.sum(self.mask0 * pos_old, dim=-2)
        pos_old_1 = torch.sum(self.mask1 * pos_old, dim=-2)

        # (B,C): reference (constrained) bond lengths from before the step.
        di = self.get_distance(pos_old_0, pos_old_1, pbc_box)

        # ijkl,ikl->ij
        # (B,C)<-(B,C,R,D),(B,R,D): constraint violation after the step.
        tmp3 = self.einsum2((BMatrix, coordinate_new)) - di

        # ijk,ik->ij
        # (B,C)<-(B,C,C),(B,C): Lagrange multipliers.
        tmp4 = self.einsum3((tmp2, tmp3))

        # ijkl,ij->ikl
        # (B,R,D)<-(B,C,R,D),(B,C)
        tmp5 = self.einsum4((BMatrix, tmp4))

        # ijk,ikl->ijl
        # (B,R,D)<-(B,R,R),(B,R,D): mass-weighted correction displacement.
        dr = -self.einsum5((self.Mii, tmp5))

        # (B,R,D)
        update_crd = torch.index_select(coordinate.clone(), -2, self.remaining_atoms)
        # (B,A,D): write the corrected coordinates back into the full tensor.
        coordinate = self.scatter_update(coordinate, self.bs_index, update_crd + dr, dim=-2)

        # (B,A,D): propagate the coordinate correction to the velocities.
        velocity += (coordinate - last_crd) / self.time_step

        # Constraint force = m * dR / dt^2
        # (B,A,D)<-(B,A,1),(B,A,D)
        constraint_force = self._atom_mass * (coordinate - last_crd) / (self.time_step ** 2)
        force += constraint_force

        if self._pbc_box is not None:
            # (B,D)<-(B,A,D)<-(B,A,D),(B,A,D)
            virial += -0.5 * (last_crd * constraint_force).sum(-2)

        return {'coordinate': coordinate,
                'velocity': velocity,
                'force': force,
                'energy': energy,
                'virial': virial,
                'pbc_box': pbc_box,
                }
