#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
修复Swin Transformer模型格式以匹配应用程序的期望
"""

import logging
import os
import shutil

import torch

# Module-wide logging: timestamped, level-tagged messages via the root handler.
_LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT)
logger = logging.getLogger(__name__)

def fix_model_format(model_path="models/swin3d_microexpr.pth", fixed_model_path=None):
    """Normalize a Swin3D checkpoint to the flat state-dict layout the app expects.

    If the file at ``model_path`` is a dict that wraps its parameters under a
    ``'model_state_dict'`` key, the inner state dict is saved on its own, the
    original file is backed up once to ``<model_path>.backup``, and the
    flattened file replaces the original in place.

    Args:
        model_path: Path of the checkpoint to inspect and fix. Defaults to
            the application's Swin3D micro-expression model.
        fixed_model_path: Where to write the flattened checkpoint before the
            swap. Defaults to ``<model_path stem>_fixed<extension>``.

    Returns:
        True if the checkpoint was fixed or was already in the expected
        format; False on any error (missing file, load/save failure).
    """
    log = logging.getLogger(__name__)

    if fixed_model_path is None:
        root, ext = os.path.splitext(model_path)
        fixed_model_path = f"{root}_fixed{ext}"

    if not os.path.exists(model_path):
        log.error(f"模型文件不存在: {model_path}")
        return False

    try:
        log.info(f"加载模型: {model_path}")
        # Force CPU so the fix also works on machines without a GPU.
        checkpoint = torch.load(model_path, map_location='cpu')

        log.info("检查模型格式")

        # Already a flat state dict (or some other object): nothing to do.
        if not (isinstance(checkpoint, dict) and 'model_state_dict' in checkpoint):
            log.info("模型格式正常，无需修复")
            return True

        log.info("检测到嵌套的model_state_dict，将直接使用内部参数")
        # Save only the inner state dict, dropping the wrapper dict.
        torch.save(checkpoint['model_state_dict'], fixed_model_path)
        log.info(f"保存修复后的模型: {fixed_model_path}")

        # Back up the original once; repeat runs keep the first backup.
        backup_path = model_path + ".backup"
        if not os.path.exists(backup_path):
            log.info(f"备份原始模型到: {backup_path}")
            shutil.copy(model_path, backup_path)

        # os.replace is atomic when both paths are on the same filesystem.
        log.info("替换原始模型文件")
        os.replace(fixed_model_path, model_path)

        log.info("模型修复完成")
        return True

    except Exception as e:
        # logger.exception keeps the traceback for post-mortem debugging.
        log.exception(f"修复模型时出错: {str(e)}")
        return False

if __name__ == "__main__":
    fix_model_format() 