#!/usr/bin/env python3
"""
本地模型缓存管理器
确保 FunASR 只使用本地已下载的模型，不进行网络下载
"""

import os
import logging
from pathlib import Path
from typing import Dict, Optional
import json

logger = logging.getLogger(__name__)

class LocalModelCache:
    """Local model cache manager.

    Maps FunASR / ModelScope model identifiers onto subdirectories of a
    local models directory so that inference never hits the network.
    """

    def __init__(self, models_dir: Optional[Path] = None):
        """
        Initialize the local model cache manager.

        Args:
            models_dir: Root directory holding downloaded models; defaults
                to a "models" directory next to this source file.
        """
        self.models_dir = models_dir or Path(__file__).parent / "models"

        # Canonical short names -> local directory names. This is the
        # required model set checked by verify_all_models_available().
        self.model_mappings = {
            "paraformer-zh": "paraformer-zh",
            "fsmn-vad": "fsmn-vad",
            "ct-punc": "ct-punc",
            "cam++": "cam++"
        }

        # Every accepted FunASR/ModelScope identifier (full hub id, bare
        # repo name, or short alias) -> local directory name.
        self.funasr_model_mappings = {
            "iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch": "paraformer-zh",
            "speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch": "paraformer-zh",
            "paraformer-zh": "paraformer-zh",

            "iic/speech_fsmn_vad_zh-cn-16k-common-pytorch": "fsmn-vad",
            "speech_fsmn_vad_zh-cn-16k-common-pytorch": "fsmn-vad",
            "fsmn-vad": "fsmn-vad",

            "iic/punc_ct-transformer_zh-cn-common-vocab272727-pytorch": "ct-punc",
            "punc_ct-transformer_zh-cn-common-vocab272727-pytorch": "ct-punc",
            "ct-punc": "ct-punc",

            "iic/speech_campplus_sv_zh-cn_16k-common": "cam++",
            "speech_campplus_sv_zh-cn_16k-common": "cam++",
            "cam++": "cam++"
        }

    def _is_usable_model_dir(self, path: Path) -> bool:
        """Return True if *path* is an existing, non-empty directory.

        Uses is_dir() instead of exists() so a stray plain file at the
        model path cannot raise NotADirectoryError from iterdir().
        """
        return path.is_dir() and any(path.iterdir())

    def setup_offline_environment(self):
        """Set environment variables that force fully offline operation."""
        logger.info("设置离线环境...")

        # Disable online downloads for ModelScope / HuggingFace.
        os.environ['MODELSCOPE_CACHE'] = str(self.models_dir.parent / ".modelscope_cache")
        os.environ['HF_DATASETS_OFFLINE'] = '1'
        os.environ['TRANSFORMERS_OFFLINE'] = '1'

        # Point FunASR's cache lookup at the local models directory.
        os.environ['FUNASR_CACHE_DIR'] = str(self.models_dir)

        logger.info(f"模型缓存目录: {self.models_dir}")
        logger.info("离线环境配置完成")

    def get_local_model_path(self, model_name: str) -> Optional[Path]:
        """
        Resolve a model name to its local directory.

        Args:
            model_name: Model name in any supported form (hub id, bare
                repo name, or short alias).

        Returns:
            Path to the local model directory, or None when the model is
            not present (or its directory is empty).
        """
        # Unknown names fall through unchanged so explicit paths still work.
        local_name = self.funasr_model_mappings.get(model_name, model_name)
        local_path = self.models_dir / local_name

        if self._is_usable_model_dir(local_path):
            return local_path

        return None

    def verify_all_models_available(self) -> Dict[str, bool]:
        """
        Check that every required model is present locally.

        Returns:
            Mapping of model name -> availability flag.
        """
        results = {}

        for model_name, local_name in self.model_mappings.items():
            local_path = self.models_dir / local_name
            is_available = self._is_usable_model_dir(local_path)
            results[model_name] = is_available

            if is_available:
                logger.info(f"✅ 模型 {model_name} 可用: {local_path}")
            else:
                logger.warning(f"❌ 模型 {model_name} 不可用: {local_path}")

        return results

    def create_model_config(self) -> Dict:
        """
        Build the offline model configuration.

        Returns:
            Dict with the offline flag, the models directory, and a
            FunASR-name -> local-path entry for every usable model.
        """
        config = {
            "offline_mode": True,
            "models_dir": str(self.models_dir),
            "model_paths": {}
        }

        for funasr_name, local_name in self.funasr_model_mappings.items():
            local_path = self.models_dir / local_name
            # Consistent with get_local_model_path(): only record
            # directories that actually contain model files (the previous
            # bare exists() check also accepted empty directories).
            if self._is_usable_model_dir(local_path):
                config["model_paths"][funasr_name] = str(local_path)

        return config

    def save_model_config(self, config_file: Optional[Path] = None):
        """
        Write the model configuration to a JSON file.

        Args:
            config_file: Target path; defaults to
                ``<models_dir>/local_model_config.json``.
        """
        if config_file is None:
            config_file = self.models_dir / "local_model_config.json"

        config = self.create_model_config()

        with open(config_file, 'w', encoding='utf-8') as f:
            json.dump(config, f, indent=2, ensure_ascii=False)

        logger.info(f"模型配置已保存到: {config_file}")

    def patch_funasr_automodel(self):
        """
        Monkey-patch ``funasr.AutoModel`` so model names resolve against
        this cache and online updates are disabled.
        """
        try:
            from funasr import AutoModel

            # Keep the pristine __init__ so repeated patching stays idempotent.
            if not hasattr(AutoModel, '_original_init'):
                AutoModel._original_init = AutoModel.__init__

            # Capture this manager in the closure: inside patched_init,
            # `self` is the AutoModel instance, and the previous code's
            # reliance on the global `cache_manager` silently ignored the
            # instance this method was actually called on.
            manager = self

            def patched_init(self, model=None, **kwargs):
                """Patched AutoModel.__init__ that prefers local models."""
                if model:
                    local_path = manager.get_local_model_path(model)
                    if local_path:
                        logger.info(f"使用本地模型: {model} -> {local_path}")
                        model = str(local_path)
                    else:
                        logger.warning(f"本地模型不存在: {model}")

                # Force offline behaviour regardless of caller kwargs.
                kwargs['disable_update'] = True
                kwargs['disable_log'] = True

                return AutoModel._original_init(self, model=model, **kwargs)

            AutoModel.__init__ = patched_init
            logger.info("✅ FunASR AutoModel 已修补为离线模式")

        except ImportError:
            logger.warning("⚠️ FunASR 未安装，跳过修补")
        except Exception as e:
            logger.error(f"❌ 修补 FunASR 失败: {e}")

# Module-level singleton used by setup_offline_models() and get_model_path().
cache_manager = LocalModelCache()

def setup_offline_models(models_dir: Optional[Path] = None) -> bool:
    """
    Configure the offline model environment.

    Rebinds the module-level ``cache_manager`` when a directory is given,
    forces offline environment variables, verifies the required models,
    persists the config, and patches FunASR.

    Args:
        models_dir: Optional models root; replaces the global manager.

    Returns:
        True only when every required model is available locally.
    """
    global cache_manager

    if models_dir:
        cache_manager = LocalModelCache(models_dir)

    try:
        cache_manager.setup_offline_environment()

        # Availability summary for the required model set.
        status = cache_manager.verify_all_models_available()
        total_count = len(status)
        available_count = sum(1 for ok in status.values() if ok)
        all_ready = available_count == total_count

        if all_ready:
            logger.info(f"✅ 所有 {total_count} 个模型都可用")
        else:
            missing_models = [name for name, ok in status.items() if not ok]
            logger.warning(f"⚠️ 只有 {available_count}/{total_count} 个模型可用")
            logger.warning(f"缺失模型: {missing_models}")
            logger.info("请运行: python download_models.py --download")

        # Persist the resolved paths and patch FunASR regardless of gaps,
        # so whatever is present can still be used offline.
        cache_manager.save_model_config()
        cache_manager.patch_funasr_automodel()

        return all_ready

    except Exception as e:
        logger.error(f"❌ 设置离线模型环境失败: {e}")
        return False

def get_model_path(model_name: str) -> Optional[str]:
    """
    Look up a model path for external callers.

    Args:
        model_name: Model name in any supported form.

    Returns:
        The local model path as a string, or None when not found.
    """
    local = cache_manager.get_local_model_path(model_name)
    if local is None:
        return None
    return str(local)

if __name__ == "__main__":
    # Manual test entry point for the cache manager.
    import argparse

    parser = argparse.ArgumentParser(description="本地模型缓存管理器")
    parser.add_argument("--models-dir", type=Path, help="模型目录路径")
    parser.add_argument("--verify", action="store_true", help="验证模型可用性")
    parser.add_argument("--config", action="store_true", help="生成配置文件")

    args = parser.parse_args()

    if args.models_dir:
        cache_manager = LocalModelCache(args.models_dir)

    # Verification runs when requested explicitly, or by default when no
    # other action was selected. (`not any([args.config])` simplified to
    # `not args.config` — same truth table, clearer intent.)
    if args.verify or not args.config:
        cache_manager.verify_all_models_available()

    if args.config:
        cache_manager.save_model_config()
        print("配置文件已生成")