# Copyright 2021-2023 @ Shenzhen Bay Laboratory &
#                       Peking University &
#                       Huawei Technologies Co., Ltd
#
# This code is a part of MindSPONGE:
# MindSpore Simulation Package tOwards Next Generation molecular modelling.
#
# MindSPONGE is open-source software based on the AI-framework:
# MindSpore (https://www.mindspore.cn/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Functions for read and write hyperparameters in checkpoint file
"""

import numpy as np
import torch
from torch import Tensor
from torch import nn
from torch.nn import ModuleList
from ..function.functions import get_integer


# Public API: helpers for encoding hyperparameters as tensors, storing them
# in checkpoint files, and restoring them into nn.Module classes.
__all__ = [
    'str_to_tensor',
    'tensor_to_str',
    'get_class_parameters',
    'get_hyper_parameter',
    'get_hyper_string',
    'set_class_parameters',
    'set_hyper_parameter',
    'set_class_into_hyper_param',
    'load_hyperparam',
    'load_hyper_param_into_class',
]


def str_to_tensor(string: str) -> Tensor:
    """
    encode string to Tensor[int]

    Args:
        string (Union[str, list, tuple]):   The input string. A list or tuple
            of strings is joined with single spaces before encoding.

    Returns:
        Tensor[int]. The UTF-8 byte values of the string as int8.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    if isinstance(string, (list, tuple)):
        string = ' '.join(string)
    # np.fromstring is deprecated (and removed for binary mode in modern
    # NumPy); np.frombuffer is the supported replacement. The copy() makes
    # the array writable, so torch.from_numpy does not warn about wrapping
    # a read-only buffer. UTF-8 matches the default decode() used by
    # tensor_to_str for the round trip.
    byte_values = np.frombuffer(string.encode(), dtype=np.int8).copy()
    return torch.from_numpy(byte_values)


def tensor_to_str(tensor: Tensor) -> str:
    """
    decode a Tensor[int] of byte values back to a string

    Args:
        tensor (Tensor[int]):   The input tensor.

    Returns:
        str, the decoded string; a list of strings if the decoded text
        contains whitespace-separated words.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    # Move to host memory, reinterpret the values as raw bytes and decode.
    raw_bytes = tensor.detach().cpu().numpy().tobytes()
    words = raw_bytes.decode().split()
    # A single word is returned bare; multiple words come back as a list
    # (the inverse of the ' '.join done by str_to_tensor).
    return words[0] if len(words) == 1 else words


def get_class_parameters(hyper_param: dict, prefix: str, num_class: int = 1) -> dict:
    """
    get hyperparameter from nn.Module class.

    Args:
        hyper_param (dict): A dict of hyperparameters.
        prefix (str):       Only parameters starting with the prefix will be loaded.
        num_class (int):    The number of the class. Default: 1

    Returns:
        hyperparameters, dict (or a list of dicts when num_class > 1).

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    def _extract_single(params: dict, pre: str) -> dict:
        # Keys are stored as '<prefix>.<name>'; strip prefix plus the dot.
        strip_len = len(pre) + 1
        collected = {}
        for key, value in params.items():
            # Match keys equal to the prefix, or followed by a '.', or any
            # key when the prefix itself already ends with a '.'.
            if key.startswith(pre) \
                    and (key == pre or key[len(pre)] == "." or (pre and pre[-1] == ".")):
                collected[key[strip_len:]] = value
        if 'name' in collected:
            # 'name' entries are stored as encoded tensors; decode them.
            collected['name'] = get_hyper_string(collected, 'name')
            if len(collected) == 1:
                collected = collected.get('name')

        return collected if collected else None

    if num_class == 1:
        return _extract_single(hyper_param, prefix)

    # One sub-dict per class instance, addressed as '<prefix>.<index>'.
    return [_extract_single(hyper_param, prefix + '.' + str(i))
            for i in range(num_class)]


def get_hyper_parameter(hyper_param: dict, prefix: str):
    """
    get hyperparameter.

    Args:
        hyper_param (dict): A dict of hyperparameters.
        prefix (str):       Only parameters starting with the prefix will be loaded.

    Returns:
        hyper_param[prefix], Tensor. None if the key is absent.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    if prefix in hyper_param:
        # as_tensor avoids the copy-construct UserWarning (and a needless
        # copy) when the stored value is already a Tensor or ndarray.
        return torch.as_tensor(hyper_param[prefix])
    return None


def get_hyper_string(hyper_param: dict, prefix: str):
    """
    get string type hyperparameter.

    Args:
        hyper_param (dict): A dict of hyperparameters.
        prefix (str):       Only parameters starting with the prefix will be loaded.

    Returns:
        str. String type hyperparameter. None if the key is absent.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    if prefix not in hyper_param:
        return None
    value = hyper_param[prefix]
    # Strings may be stored either directly or encoded as int tensors.
    if isinstance(value, str):
        return value
    return tensor_to_str(value)


def set_hyper_parameter(hyper_param: dict, prefix: str, param=None):
    """
    put param into hyper_param.

    Args:
        hyper_param (dict):         A dict of hyperparameters.
        prefix (str):               Only parameters starting with the prefix will be loaded.
        param (Union[str, Tensor]): Parameters need to be put into the hyperparameter dict.
                                    None removes the entry (if present). Default: None

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    # NOTE: the original signature annotated `param: None` without a default,
    # contradicting its own docstring; a real default of None is now provided
    # (backward-compatible: existing 3-argument calls are unaffected).
    if param is None:
        # pop with a default removes the key without a prior membership test.
        hyper_param.pop(prefix, None)
    elif isinstance(param, str):
        # Strings are stored encoded as int tensors.
        hyper_param[prefix] = str_to_tensor(param)
    else:
        hyper_param[prefix] = param


def set_class_parameters(hyper_param: dict, prefix: str, module: nn.Module):
    """
    put hyperparameters into nn.Module class.

    Args:
        hyper_param (dict): A dict of hyperparameters, keyed '<prefix>.<param_name>'.
        prefix (str):       Only parameters starting with the prefix will be loaded.
        module (nn.Module): A neural network module (a ModuleList is handled
                            per-entry with '<prefix>.<index>' keys).

    Raises:
        TypeError: If `module` (or a ModuleList entry) is not an nn.Module.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    def _set_class_parameters(hyper_param: dict, prefix: str, module: nn.Module):
        """set parameters for single class"""
        if not isinstance(module, nn.Module):
            raise TypeError(f'module must be nn.Module but got: {type(module)}')

        for name, param in module.named_parameters():
            param_name = prefix + '.' + name if prefix else name
            if param_name in hyper_param:
                # as_tensor avoids the copy-construct warning when the stored
                # value is already a Tensor; copy_ converts dtype/device.
                param.data.copy_(torch.as_tensor(hyper_param[param_name]))

    if isinstance(module, ModuleList):
        for i, m in enumerate(module):
            _set_class_parameters(hyper_param, prefix + '.' + str(i), m)
    else:
        _set_class_parameters(hyper_param, prefix, module)


def load_hyper_param_into_class(cls_dict: dict, hyper_param: dict, types: dict, prefix: str = ''):
    """
    load hyperparameters into class.

    Args:
        cls_dict (dict):       A dict of classes; non-nn.Module entries are skipped.
        hyper_param (dict):    A dict of hyperparameters.
        types (dict):          A dict of types; only entries whose qualified
                               name appears here are loaded.
        prefix (str):          Only parameters starting with the prefix will be loaded. Default: ''

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    for name, module in cls_dict.items():
        if isinstance(module, nn.Module):
            qualified = prefix + '.' + name if prefix else name
            if qualified in types:
                set_class_parameters(hyper_param, qualified, module)


def set_class_into_hyper_param(hyper_param: dict, types: dict, cls: nn.Module, prefix: str = ''):
    """
    set class into hyperparameters.

    Args:
        hyper_param (dict):    A dict of hyperparameters (filled in place).
        types (dict):          A dict of types; only parameters whose qualified
                               name appears here are stored.
        cls (nn.Module):       A neural network module.
        prefix (str):          Only parameters starting with the prefix will be loaded. Default: ''

    Raises:
        TypeError: If `cls` is not an nn.Module.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    if not isinstance(cls, nn.Module):
        raise TypeError(f'cls must be nn.Module but got: {type(cls)}')

    for pname, tensor in cls.named_parameters():
        qualified = prefix + '.' + pname if prefix else pname
        if qualified in types:
            # Stored as a host-side numpy array, detached from autograd.
            hyper_param[qualified] = tensor.detach().cpu().numpy()


def load_hyperparam(ckpt_file_name: str, prefix: str = 'hyperparam'):
    """
    load hyperparameters from checkpoint file.

    Args:
        ckpt_file_name (str): Checkpoint file name.
        prefix (str):         Only parameters starting with the prefix will be loaded. Default: 'hyperparam'

    Returns:
        dict, hyperparameters.

    Supported Platforms:
        ``CPU`` ``GPU``
    """
    # map_location='cpu' lets checkpoints saved on a GPU be read on a
    # CPU-only host; hyperparameters do not need to live on an accelerator.
    checkpoint = torch.load(ckpt_file_name, map_location='cpu')
    return {name: param for name, param in checkpoint.items()
            if name.startswith(prefix)}
