import torch
import mindspore as ms
from mindspore import Tensor

def torch_to_mindspore(torch_ckpt_path, mindspore_ckpt_path):
    # 1. 读取 Torch ckpt
    ckpt = torch.load(torch_ckpt_path, map_location="cpu")
    if "model" in ckpt:  # 只取模型部分
        ckpt = ckpt["model"]

    ms_params = []

    for name, param in ckpt.items():
        # 去掉 "module." 前缀
        if name.startswith("module."):
            name = name[len("module."):]

        # BatchNorm 参数映射
        if "bn" in name:
            if name.endswith("weight"):
                name = name.replace("weight", "gamma")
            elif name.endswith("bias"):
                name = name.replace("bias", "beta")
            elif name.endswith("running_mean"):
                name = name.replace("running_mean", "moving_mean")
            elif name.endswith("running_var"):
                name = name.replace("running_var", "moving_variance")
            elif name.endswith("num_batches_tracked"):
                # mindspore 不需要这个参数，直接跳过
                continue

        # 特殊情况：conv7.1 / conv9.1 / conv11.1 这些BN需要加 bn2d 前缀
        if "conv7.1" in name or "conv9.1" in name or "conv11.1" in name:
            name = name.replace("conv7.1.", "conv7.1.bn2d.")
            name = name.replace("conv9.1.", "conv9.1.bn2d.")
            name = name.replace("conv11.1.", "conv11.1.bn2d.")

        # 转换为 MindSpore Tensor
        ms_params.append({
            "name": name,
            "data": Tensor(param.detach().cpu().numpy())
        })

    # 2. 保存为 MindSpore ckpt
    ms.save_checkpoint(ms_params, mindspore_ckpt_path)
    print(f"✅ 转换完成: {mindspore_ckpt_path}, 共 {len(ms_params)} 个参数")

if __name__ == "__main__":
    # Convert the default checkpoint when invoked as a script.
    src_path = "model_000014.ckpt"
    dst_path = "model_000014_ms.ckpt"
    torch_to_mindspore(src_path, dst_path)
