"""
change torch pth to mindspore ckpt
"""
from mindspore import log as logger
from mindspore.common.tensor import Tensor
from mindspore.common.initializer import initializer
from mindspore import save_checkpoint
from mindspore import Parameter

import torch


def torch_to_ms(model, tf_reader, save_name):
    """
    Copy weights from a TensorFlow checkpoint into the MindSpore model and
    save the result as a MindSpore checkpoint.

    Args:
        model: MindSpore network whose parameters will be overwritten.
        tf_reader: TensorFlow CheckpointReader opened on the source checkpoint.
        save_name: output checkpoint name; ".ckpt" is appended when missing.
    """

    print("start load")
    ms_param_dict = model.parameters_dict()
    # Counts MindSpore parameters that were NOT matched to a TF tensor.
    left_cnt = len(ms_param_dict)

    for ms_key in ms_param_dict:
        ms_key_tmp = ms_key.split('.')
        # process "backbone": maps to "RGB/inception_i3d/..." on the TF side
        if ms_key_tmp[0] == "backbone":
            del ms_key_tmp[0]
            ms_key_tmp.insert(0, "inception_i3d")
            ms_key_tmp.insert(0, "RGB")
            # process "bn": BatchNorm parameter naming differs between frameworks
            if "bn" in ms_key_tmp:
                if ms_key_tmp[-1] == "beta":
                    ms_key_tmp[-1] = "bias"
                elif ms_key_tmp[-1] == "gamma":
                    ms_key_tmp[-1] = "weight"
                # the extra "bn2d" scope does not exist in the TF graph
                if "bn2d" in ms_key_tmp:
                    ms_key_tmp.remove("bn2d")

            update_tf_to_ms(tf_reader, ms_param_dict, "/".join(ms_key_tmp), ms_key)
            left_cnt -= 1
        # process "head": the prefix simply disappears on the TF side
        elif ms_key_tmp[0] == "head":
            del ms_key_tmp[0]
            update_tf_to_ms(tf_reader, ms_param_dict, "/".join(ms_key_tmp), ms_key)
            left_cnt -= 1

    # Avoid "name.ckpt.ckpt" when the caller already supplied the extension.
    if not save_name.endswith(".ckpt"):
        save_name = "{}.ckpt".format(save_name)
    save_checkpoint(model, save_name)
    print("finish load, {} parameters unloaded.".format(left_cnt))


def update_tf_to_ms(torch_param_dict, ms_param_dict, torch_key, ms_key):
    """Copy one tensor from the source checkpoint into the matching MindSpore parameter.

    Args:
        torch_param_dict: weight source. Despite the name, callers in this file
            pass a TensorFlow CheckpointReader; a plain mapping of torch tensors
            is still accepted for backward compatibility.
        ms_param_dict: dict of MindSpore parameters, keyed by ms_key.
        torch_key: name of the tensor in the source checkpoint.
        ms_key: name of the destination MindSpore parameter.
    """

    if hasattr(torch_param_dict, "get_tensor"):
        # TF CheckpointReader is not subscriptable: get_tensor returns a
        # numpy array directly (the old dict-indexing + .cpu() call crashed).
        value = torch_param_dict.get_tensor(torch_key)
    else:
        # Legacy torch path: a dict of torch tensors.
        value = torch_param_dict[torch_key].cpu().numpy()
    value = Parameter(Tensor(value), name=ms_key)
    _update_param(ms_param_dict[ms_key], value)


def _update_param(param, new_param):
    """Copy new_param's data into param, validating dtype/shape compatibility.

    Raises:
        RuntimeError: when the dtype, shape, or data kind of the two
            parameters cannot be reconciled.
    """

    old_is_tensor = isinstance(param.data, Tensor)
    new_is_tensor = isinstance(new_param.data, Tensor)

    if old_is_tensor and new_is_tensor:
        # Tensor -> Tensor: dtypes must match exactly.
        if param.data.dtype != new_param.data.dtype:
            logger.error("Failed to combine the net and the parameters for param %s.", param.name)
            msg = ("Net parameters {} type({}) different from parameter_dict's({})"
                   .format(param.name, param.data.dtype, new_param.data.dtype))
            raise RuntimeError(msg)

        if param.data.shape != new_param.data.shape:
            # A trailing-ones mismatch (GE 4-D tensors) may still be fixable.
            if not _special_process_par(param, new_param):
                logger.error("Failed to combine the net and the parameters for param %s.", param.name)
                msg = ("Net parameters {} shape({}) different from parameter_dict's({})"
                       .format(param.name, param.data.shape, new_param.data.shape))
                raise RuntimeError(msg)
        else:
            param.set_data(new_param.data)

    elif old_is_tensor:
        # Tensor net parameter fed from a scalar: only legal for 1-element params.
        if param.data.shape not in ((1,), ()):
            logger.error("Failed to combine the net and the parameters for param %s.", param.name)
            msg = ("Net parameters {} shape({}) is not (1,), inconsistent with parameter_dict's(scalar)."
                   .format(param.name, param.data.shape))
            raise RuntimeError(msg)
        param.set_data(initializer(new_param.data, param.data.shape, param.data.dtype))

    elif new_is_tensor:
        # Scalar net parameter cannot take a Tensor value.
        logger.error("Failed to combine the net and the parameters for param %s.", param.name)
        msg = ("Net parameters {} type({}) different from parameter_dict's({})"
               .format(param.name, type(param.data), type(new_param.data)))
        raise RuntimeError(msg)

    else:
        # Scalar -> scalar: coerce to the net parameter's Python type.
        param.set_data(type(param.data)(new_param.data))


def _special_process_par(par, new_par):
    """
    Processes the special condition.

    Like (12,2048,1,1)->(12,2048), this case is caused by GE 4 dimensions tensor.
    """
    par_shape_len = len(par.data.shape)
    new_par_shape_len = len(new_par.data.shape)
    delta_len = new_par_shape_len - par_shape_len
    delta_i = 0
    for delta_i in range(delta_len):
        if new_par.data.shape[par_shape_len + delta_i] != 1:
            break
    if delta_i == delta_len - 1:
        new_val = new_par.data.asnumpy()
        new_val = new_val.reshape(par.data.shape)
        par.set_data(Tensor(new_val, par.data.dtype))
        return True
    return False


# main: build the MindSpore I3D model, load the TF weights, save a ckpt.
from mindspore import context, save_checkpoint
from mindvideo.common.utils.config import Config
from mindvideo.models import build_classifier
import numpy as np
import tensorflow as tf

# Build the MindSpore I3D classifier from its yaml config.
config = Config("mindvideo/config/models/I3d_rgb.yaml")
context.set_context(**config.context)
# set network
ms_i3d = build_classifier(config.model)

# Open the pretrained TensorFlow checkpoint.
tf_reader = tf.train.load_checkpoint('tf_ckpt/rgb_scratch/model.ckpt')

# torch_to_ms appends ".ckpt" itself, so pass the bare name (the original
# passed "...rgb.ckpt" and produced a "...rgb.ckpt.ckpt" file).
torch_to_ms(ms_i3d, tf_reader, "ms_fromTF_i3d_imagenet_rgb")

# Sanity check: read one known tensor from the TF checkpoint.
print(tf_reader.get_tensor("RGB/inception_i3d/Mixed_5c/Branch_2/Conv3d_0b_3x3/batch_norm/moving_mean").shape)
