#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# 本地模型管理器 - 优先使用本地部署模型

import os
import json
import logging
import shutil
from pathlib import Path
from typing import Dict, Optional, Any
import yaml

class LocalModelManager:
    """Manage locally deployed models so they are preferred over re-downloads.

    A directory under ``local_models_dir`` counts as a deployed model when it
    contains a ``configuration.json`` file. Short aliases (``"whisper"``,
    ``"vad"``, ...) are mapped to full model-hub identifiers before lookup.
    """

    def __init__(self, local_models_dir: str = None):
        """
        Initialize the local model manager.

        Args:
            local_models_dir: Directory where local models are stored.
                Defaults to the ``models`` folder under the project root
                (three directory levels above this file).
        """
        if local_models_dir is None:
            # Default to <project_root>/models.
            project_root = Path(__file__).parent.parent.parent
            self.local_models_dir = project_root / "models"
        else:
            self.local_models_dir = Path(local_models_dir)

        # parents=True: also create missing intermediate directories —
        # plain exist_ok=True raised FileNotFoundError when the parent
        # of the models folder did not exist yet.
        self.local_models_dir.mkdir(parents=True, exist_ok=True)
        self.logger = logging.getLogger(__name__)

        # Model families this manager knows about (informational only;
        # not currently used for validation).
        self.supported_model_types = {
            'paraformer', 'whisper', 'conformer', 'transformer',
            'fsmn_vad', 'fsmn_kws', 'sense_voice', 'emotion2vec',
            'paraformer_streaming', 'ct_transformer', 'campplus'
        }

        # Short alias -> full model-hub identifier.
        self.model_aliases = {
            'paraformer': 'iic/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch',
            'paraformer-vad-punc': 'iic/speech_paraformer-large-vad-punc_asr_nat-zh-cn-16k-common-vocab8404-pytorch',
            'whisper': 'openai/whisper-large-v3',
            'whisper-tiny': 'openai/whisper-tiny',
            'whisper-base': 'openai/whisper-base',
            'whisper-small': 'openai/whisper-small',
            'whisper-medium': 'openai/whisper-medium',
            'whisper-large': 'openai/whisper-large-v3',
            'vad': 'iic/speech_fsmn_vad_zh-cn-16k-common-pytorch',
            'punc': 'iic/punc_ct-transformer_zh-cn-common-vocab272727-pytorch',
            'spk': 'iic/speech_campplus_sv_zh-cn_16k-common',
        }

    def _resolve_name(self, model_name: str) -> str:
        """Map a known alias to its full model id; pass unknown names through."""
        return self.model_aliases.get(model_name, model_name)

    def _local_dir_for(self, actual_name: str) -> Path:
        """Filesystem-safe local directory for a (possibly namespaced) model id."""
        return self.local_models_dir / actual_name.replace('/', '_').replace(':', '_')

    @staticmethod
    def _is_model_dir(path: Path) -> bool:
        """A directory is a deployed model iff it contains configuration.json."""
        return path.exists() and (path / "configuration.json").exists()

    @staticmethod
    def _dir_size(path: Path) -> int:
        """Total size in bytes of all regular files under *path* (recursive)."""
        return sum(f.stat().st_size for f in path.rglob('*') if f.is_file())

    def get_model_path(self, model_name: str) -> Optional[Path]:
        """
        Get the local path of a model.

        Args:
            model_name: Model name or alias.

        Returns:
            Path to the local model directory, or None if not deployed locally.
        """
        actual_name = self._resolve_name(model_name)

        # Try the flattened full id first, e.g. "iic/x" -> "iic_x".
        local_path = self._local_dir_for(actual_name)
        if self._is_model_dir(local_path):
            return local_path

        # Fall back to the bare repository name, e.g. "iic/x" -> "x".
        simple_path = self.local_models_dir / actual_name.split('/')[-1]
        if self._is_model_dir(simple_path):
            return simple_path

        return None

    def is_model_available(self, model_name: str) -> bool:
        """Return True if the model is deployed locally."""
        return self.get_model_path(model_name) is not None

    def list_local_models(self) -> Dict[str, str]:
        """List all locally available models as {directory name: model type}."""
        models: Dict[str, str] = {}

        if not self.local_models_dir.exists():
            return models

        for model_dir in self.local_models_dir.iterdir():
            if not model_dir.is_dir():
                continue
            config_path = model_dir / "configuration.json"
            if not config_path.exists():
                continue
            try:
                with open(config_path, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                # Fall back to the directory name when the config carries
                # no model.type field (or config['model'] is not a dict —
                # that raises and is logged below, matching prior behavior).
                model_name = config.get('model', {}).get('type', str(model_dir.name))
                models[str(model_dir.name)] = model_name
            except Exception as e:
                self.logger.warning(f"读取模型配置失败: {model_dir}, 错误: {e}")

        return models

    def setup_model(self, model_name: str, model_source_dir: str) -> bool:
        """
        Deploy a model into the local models directory.

        Args:
            model_name: Model name or alias.
            model_source_dir: Directory to copy the model files from.

        Returns:
            True on success, False on any failure (logged, not raised).
        """
        try:
            if not os.path.exists(model_source_dir):
                self.logger.error(f"模型源目录不存在: {model_source_dir}")
                return False

            local_path = self._local_dir_for(self._resolve_name(model_name))
            local_path.mkdir(parents=True, exist_ok=True)

            # Copy every top-level entry; existing subdirectories are
            # replaced wholesale, plain files are overwritten.
            for item in os.listdir(model_source_dir):
                src_path = os.path.join(model_source_dir, item)
                dst_path = local_path / item

                if os.path.isdir(src_path):
                    if dst_path.exists():
                        shutil.rmtree(dst_path)
                    shutil.copytree(src_path, dst_path)
                else:
                    shutil.copy2(src_path, dst_path)

            self.logger.info(f"模型 {model_name} 已成功部署到本地: {local_path}")
            return True

        except Exception as e:
            self.logger.error(f"设置模型失败: {e}")
            return False

    def create_model_config(self, model_name: str, **kwargs) -> Dict[str, Any]:
        """
        Build a model config dict, preferring a local deployment when present.

        Args:
            model_name: Model name or alias.
            **kwargs: Extra configuration entries merged into the result.

        Returns:
            Config dict; "use_local_model" tells the caller which case applied.
        """
        local_path = self.get_model_path(model_name)

        if local_path is not None:
            self.logger.info(f"使用本地模型: {local_path}")
            return {
                "model": str(local_path),
                "model_path": str(local_path),
                "use_local_model": True,
                **kwargs
            }

        # Not deployed locally: hand the (unresolved) name to the model hub,
        # which may itself serve a cached copy.
        self.logger.info(f"使用模型库模型: {model_name} (如已缓存则使用本地版本)")
        return {
            "model": model_name,
            "use_local_model": False,
            **kwargs
        }

    def get_model_info(self, model_name: str) -> Optional[Dict[str, Any]]:
        """Return name/path/size/files (and config, if readable) for a local model."""
        local_path = self.get_model_path(model_name)
        if local_path is None:
            return None

        info = {
            "name": model_name,
            "path": str(local_path),
            "size": self._dir_size(local_path),
            "files": [str(f.relative_to(local_path)) for f in local_path.rglob('*') if f.is_file()]
        }

        # Attach the parsed configuration when available; unreadable
        # config is logged but does not fail the call.
        config_path = local_path / "configuration.json"
        if config_path.exists():
            try:
                with open(config_path, 'r', encoding='utf-8') as f:
                    info["config"] = json.load(f)
            except Exception as e:
                self.logger.warning(f"读取配置失败: {e}")

        return info

    def get_model_size(self, model_name: str) -> Optional[int]:
        """Return the model's on-disk size in bytes, or None if not local."""
        local_path = self.get_model_path(model_name)
        if local_path is None:
            return None

        return self._dir_size(local_path)

    def remove_model(self, model_name: str) -> bool:
        """Delete a locally deployed model; returns False if absent or on error."""
        local_path = self.get_model_path(model_name)
        if local_path is None:
            return False

        try:
            shutil.rmtree(local_path)
            self.logger.info(f"已删除本地模型: {model_name}")
            return True
        except Exception as e:
            self.logger.error(f"删除模型失败: {e}")
            return False

# Process-wide singleton instance, created lazily on first access.
_local_model_manager = None

def get_local_model_manager() -> LocalModelManager:
    """Return the shared LocalModelManager, constructing it on first use."""
    global _local_model_manager
    manager = _local_model_manager
    if manager is None:
        manager = LocalModelManager()
        _local_model_manager = manager
    return manager