from mindspore.train.serialization import load_checkpoint, load_param_into_net, save_checkpoint
from mindspore import Tensor

import torch

from models.ChangeFormer2.ChangeFormer import ChangeFormerV6


import os


def show_params(ckpt_file_path, frame="torch", key=True, value=False):
    """Print the contents (parameter names and/or values) of a checkpoint file.

    Args:
        ckpt_file_path (str): path to the checkpoint file to inspect.
        frame (str): framework that produced the checkpoint,
            either ``"torch"`` or ``"mindspore"``.
        key (bool): if True, print parameter names.
        value (bool): if True, print parameter values.

    Raises:
        ValueError: if `frame` is neither ``"torch"`` nor ``"mindspore"``.
    """
    if frame == "torch":
        params = torch.load(ckpt_file_path, map_location=torch.device('cpu'))
    elif frame == "mindspore":
        params = load_checkpoint(ckpt_file_path)
    else:
        # Bug fix: the message previously blamed `params`, but the offending
        # argument is `frame`.
        raise ValueError("Attribute `frame` must be in [`torch`, `mindspore`]! ")
    if key and value:
        for k, v in params.items():
            print(k, v)
    elif key:
        # Reaching here means `value` is False, so the extra check is redundant.
        for k in params.keys():
            print(k)
    elif value:
        for v in params.values():
            print(v)


def compare_model_names(torch_params_dict, mindspore_params_dict):
    """Print torch and mindspore parameter names side by side.

    Entries containing ``num_batches_tracked`` are skipped on the torch
    side, since BatchNorm bookkeeping counters have no mindspore
    counterpart. Neither input dict is modified.
    """
    # Filter out the bookkeeping entries instead of copy-then-pop.
    torch_names = [name for name in torch_params_dict
                   if "num_batches_tracked" not in name]
    mindspore_names = list(mindspore_params_dict.keys())

    for torch_name, mindspore_name in zip(torch_names, mindspore_names):
        print(torch_name)
        print(mindspore_name)
        print("=============================")


def from_torch_to_mindspore(net, ckpt_file_path, save_path):
    """Convert a torch checkpoint file into a mindspore checkpoint.

    Parameter names are rewritten to the mindspore convention first
    (BatchNorm weight/bias -> gamma/beta, running stats -> moving_*),
    then each tensor is converted via numpy and the result is saved
    with ``save_checkpoint``. Processed names are also appended to the
    debug dump files ``demo_pt-1.txt`` (original) and ``demo_ckpt-2.txt``
    (renamed).

    Args:
        net: target mindspore network. NOTE(review): currently unused by the
            body; kept so existing callers keep working.
        ckpt_file_path (str): path to the source torch checkpoint; its
            ``model_G_state_dict`` entry is converted.
        save_path (str): destination mindspore checkpoint, must end in `.ckpt`.

    Raises:
        FileNotFoundError: if `ckpt_file_path` does not exist.
        ValueError: if `save_path` does not end with `.ckpt`.
    """
    if not os.path.isfile(ckpt_file_path):
        # Bug fix: the original raised FileExistsError for a *missing* file;
        # FileNotFoundError is the correct exception (both are OSError
        # subclasses, so callers catching OSError are unaffected).
        raise FileNotFoundError("The file `{}` does not exist! ".format(ckpt_file_path))
    if not save_path.endswith(".ckpt"):
        # Bug fix: the substring test `".ckpt" not in save_path` accepted
        # paths such as `x.ckpt.bak`; endswith matches the stated contract.
        raise ValueError("Attribute `save_path` should be a checkpoint file with the end of `.ckpt`!")

    params = torch.load(ckpt_file_path, map_location=torch.device('cpu'))['model_G_state_dict']

    torch_params = list(params.items())
    num_params = len(torch_params)
    print("params num: ", num_params)

    params_list = []
    # Open the debug dumps once instead of re-opening them for every parameter
    # (the original also called .close() redundantly after each `with` block).
    with open("demo_pt-1.txt", 'a+') as f_src, open("demo_ckpt-2.txt", 'a+') as f_dst:
        for i in range(num_params):
            key, value = torch_params[i]
            f_src.write(key + '\n')

            # BatchNorm detection: a `weight`/`bias` followed shortly by a
            # `running_mean` entry belongs to a BN layer, whose learnable
            # parameters are named gamma/beta in mindspore.
            if "weight" in key and i + 2 < num_params:
                if "running_mean" in torch_params[i + 2][0]:
                    key = key.replace("weight", "gamma")
            if "bias" in key and i + 1 < num_params:
                if "running_mean" in torch_params[i + 1][0]:
                    key = key.replace("bias", "beta")
            if "running_var" in key:
                key = key.replace("running_var", "moving_variance")
            if "running_mean" in key:
                key = key.replace("running_mean", "moving_mean")
            # Normalization layers (layer/group norm) also use gamma/beta.
            if "norm" in key and "weight" in key:
                key = key.replace("weight", "gamma")
            if "norm" in key and "bias" in key:
                key = key.replace("bias", "beta")
            if "num_batches_tracked" in key:
                continue        # bookkeeping counter, no mindspore counterpart
            if "incre" in key:  # `incre` is a name of params in hrnet for classification.
                break
            print(key)
            f_dst.write(key + '\n')
            params_list.append({"name": key, "data": Tensor(value.numpy())})
    save_checkpoint(params_list, save_path)
    print(" transform successfully....")


if __name__ == "__main__":
    net = ChangeFormerV6()
    m_params = net.parameters_dict()

    # Dump every parameter name of the mindspore network for manual comparison
    # against the torch-side dumps produced by `from_torch_to_mindspore`.
    with open("demo_net-3.txt", 'a+') as fn:
        for m in m_params.keys():
            print(m)
            fn.write(m + '\n')
    # NOTE: the redundant `fn.close()` was removed — the `with` statement
    # already closed the file on block exit. The block of commented-out
    # debugging snippets (show_params / compare_model_names / reload checks)
    # was removed as dead code; those helpers remain available above.

    from_torch_to_mindspore(net, "/data1/haojj/ChangeFormer/checkpoints/ChangeFormer-Official-LEVIR-CD/best_ckpt.pt",
                            "/data1/haojj/MindSpore_Project/ChangeFormerV6_3/ckpt_test/pt_2_ckpt/torch_cf_result.ckpt")