#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
高级Swin Transformer模型修复脚本
可以处理多种模型结构差异和格式问题
"""

import os
import torch
import logging
import shutil
import argparse
from collections import OrderedDict

# Module-wide logging: timestamped INFO-level output; all functions below log through `logger`.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

def inspect_model(checkpoint):
    """Inspect a loaded checkpoint and log its structure.

    Recognizes three layouts: a dict wrapping the parameters under
    'model_state_dict' or 'state_dict', or a raw state dict whose values
    are tensors.

    Args:
        checkpoint: Object returned by ``torch.load``.

    Returns:
        bool: True if a usable parameter dict was identified, False otherwise.
        (Bug fix: the original fell through and returned None for a dict
        containing neither a wrapper key nor tensor values.)
    """
    logger.info("检查模型结构:")

    if not isinstance(checkpoint, dict):
        logger.warning(f"无法识别的模型类型: {type(checkpoint)}")
        return False

    logger.info(f"模型是字典类型，包含以下键: {list(checkpoint.keys())}")

    # Common wrapper keys, checked in the same priority order as the loader.
    for wrapper_key in ('model_state_dict', 'state_dict'):
        if wrapper_key in checkpoint:
            state_dict = checkpoint[wrapper_key]
            logger.info(f"找到{wrapper_key}，包含 {len(state_dict)} 个参数")
            # Show a few parameter keys as a sample.
            keys = list(state_dict.keys())
            logger.info(f"参数键示例: {keys[:5]}")
            return True

    # No wrapper key: the dict may itself be a raw state dict (tensor values).
    if any(isinstance(value, torch.Tensor) for value in checkpoint.values()):
        logger.info(f"模型可能直接是state_dict，包含 {len(checkpoint)} 个参数")
        keys = list(checkpoint.keys())
        logger.info(f"参数键示例: {keys[:5]}")
        return True

    # Dict with no recognizable parameters (e.g. metadata only, or empty).
    return False

def fix_state_dict(state_dict):
    """Normalize checkpoint parameter keys.

    Strips a ``backbone.`` prefix (mmaction-style checkpoints) or a
    ``module.`` prefix (left behind by ``DataParallel``), then renames
    ``cls_head.fc_cls`` to ``head`` to match the target model. Values are
    passed through untouched; insertion order is preserved.

    Args:
        state_dict: Mapping of parameter name -> tensor.

    Returns:
        OrderedDict: Same tensors under normalized keys.
    """
    remapped = OrderedDict()

    names = state_dict.keys()
    strip_backbone = any(name.startswith('backbone.') for name in names)
    strip_module = any(name.startswith('module.') for name in names)

    for name, tensor in state_dict.items():
        target = name

        if strip_backbone and target.startswith('backbone.'):
            target = target[len('backbone.'):]
            logger.info(f"移除backbone前缀: {name} -> {target}")
        elif strip_module and target.startswith('module.'):
            target = target[len('module.'):]
            logger.info(f"移除module前缀: {name} -> {target}")

        # Classification-head naming difference (cls_head.fc_cls -> head).
        if 'cls_head.fc_cls' in target:
            target = target.replace('cls_head.fc_cls', 'head')
            logger.info(f"重命名cls_head: {name} -> {target}")

        remapped[target] = tensor

    return remapped

def _extract_state_dict(checkpoint):
    """Return the parameter dict contained in *checkpoint*, or None.

    Mirrors the layouts handled by ``inspect_model``: a wrapper dict with
    'model_state_dict' / 'state_dict', or a raw state dict (tensor values).
    """
    if not isinstance(checkpoint, dict):
        logger.error("模型不是字典格式")
        return None

    if 'model_state_dict' in checkpoint:
        logger.info("使用model_state_dict中的参数")
        return checkpoint['model_state_dict']

    if 'state_dict' in checkpoint:
        logger.info("使用state_dict中的参数")
        return checkpoint['state_dict']

    # Heuristic: a raw state dict has tensor values; sample the first few keys.
    sample = list(checkpoint.keys())[:5]
    if sample and all(isinstance(checkpoint[k], torch.Tensor) for k in sample):
        logger.info("模型直接是state_dict格式")
        return checkpoint

    logger.error("未找到可用的参数字典")
    return None


def fix_model_format(model_path=None, output_path=None, backup=True):
    """Load a checkpoint, normalize its state dict, and save the result.

    Args:
        model_path: Checkpoint to fix (default: models/swin3d_microexpr.pth).
        output_path: Destination path; defaults to overwriting *model_path*.
        backup: When overwriting in place, keep a ``.backup`` copy of the
            original file (only created once).

    Returns:
        bool: True on success, False on any failure (missing file,
        unrecognized format, or I/O error).
    """
    if model_path is None:
        model_path = "models/swin3d_microexpr.pth"
    if output_path is None:
        output_path = model_path

    if not os.path.exists(model_path):
        logger.error(f"模型文件不存在: {model_path}")
        return False

    try:
        logger.info(f"加载模型: {model_path}")
        # Force CPU so the script works on machines without a GPU.
        # NOTE(review): torch.load unpickles arbitrary objects -- only run
        # this on trusted checkpoints (weights_only=True is safer where the
        # installed torch version supports it).
        checkpoint = torch.load(model_path, map_location='cpu')

        # Log the structure for diagnostics (return value not needed here;
        # _extract_state_dict performs the authoritative check).
        inspect_model(checkpoint)

        state_dict = _extract_state_dict(checkpoint)
        if state_dict is None:
            return False

        fixed_state_dict = fix_state_dict(state_dict)

        # Back up the original before an in-place overwrite.
        if backup and model_path == output_path:
            backup_path = model_path + ".backup"
            if not os.path.exists(backup_path):
                logger.info(f"备份原始模型到: {backup_path}")
                shutil.copy(model_path, backup_path)

        # Write to a temporary sibling path, then atomically move it into
        # place. Bug fix: the original only performed the move when
        # overwriting in place, so with a distinct --output the result was
        # left at "<output>.fixed" and the requested file was never created.
        fixed_model_path = output_path + ".fixed"
        logger.info(f"保存修复后的模型到: {fixed_model_path}")
        torch.save(fixed_state_dict, fixed_model_path)
        os.replace(fixed_model_path, output_path)

        logger.info("模型修复完成")
        return True

    except Exception as e:
        logger.error(f"修复模型时出错: {str(e)}")
        import traceback
        logger.error(traceback.format_exc())
        return False

if __name__ == "__main__":
    # Command-line entry point: parse arguments and run the repair.
    parser = argparse.ArgumentParser(description='修复Swin Transformer模型格式')
    parser.add_argument('--input', default="models/swin3d_microexpr.pth", help='输入模型文件路径')
    parser.add_argument('--output', default=None, help='输出模型文件路径（默认覆盖输入文件）')
    parser.add_argument('--no-backup', action='store_true', help='不创建备份文件')
    args = parser.parse_args()

    # No --output means fix the input file in place.
    destination = args.output if args.output else args.input
    fix_model_format(
        model_path=args.input,
        output_path=destination,
        backup=not args.no_backup,
    )