from typing import Dict, Type, Optional, Any

from .BaseLLM import BaseLLM
from .DashScopeLLM import DashScopeLLM
from .DeepSeekChat import DeepSeekChat
from .DeepSeekThink import DeepSeekThink


class LLMFactory:
    """
    LLM factory.

    Combines the strategy and factory patterns to manage the different
    LLM implementations behind a single registration/creation API.
    """

    # Registered LLM strategies, keyed by public model name.
    _strategies: Dict[str, Type[BaseLLM]] = {
        'deepseek-chat': DeepSeekChat,
        'deepseek-reasoner': DeepSeekThink,  # alias
        # DashScope models
        'qwen-turbo': DashScopeLLM,
        'qwen-plus': DashScopeLLM,
        'qwen-max': DashScopeLLM,
        'qwq-plus': DashScopeLLM,
    }

    @classmethod
    def register_strategy(cls, name: str, strategy_class: Type[BaseLLM]) -> None:
        """
        Register a new LLM strategy (overwrites an existing entry).

        Args:
            name: Strategy (model) name.
            strategy_class: Strategy class implementing BaseLLM.
        """
        cls._strategies[name] = strategy_class

    @classmethod
    def unregister_strategy(cls, name: str) -> None:
        """
        Unregister an LLM strategy; a no-op if the name is unknown.

        Args:
            name: Strategy (model) name.
        """
        # pop() with a default replaces the membership-test/delete two-step.
        cls._strategies.pop(name, None)

    @classmethod
    def get_available_strategies(cls) -> Dict[str, Type[BaseLLM]]:
        """
        Get all available strategies.

        Returns:
            Dict[str, Type[BaseLLM]]: a copy of the strategy registry.
        """
        return cls._strategies.copy()

    @classmethod
    def create_llm(cls, model_name: str, **kwargs) -> BaseLLM:
        """
        Create an LLM instance for the given model name.

        Args:
            model_name: Model name (must be registered).
            **kwargs: Configuration forwarded to the strategy constructor.

        Returns:
            BaseLLM: the created LLM instance.

        Raises:
            ValueError: when the model name is not registered.
        """
        # EAFP: a single lookup instead of `in` check plus subscript.
        try:
            strategy_class = cls._strategies[model_name]
        except KeyError:
            available = ', '.join(cls._strategies.keys())
            raise ValueError(f"不支持的模型: {model_name}. 可用模型: {available}") from None

        # DashScope models share one strategy class, which needs the concrete
        # model name passed through explicitly.
        if strategy_class is DashScopeLLM:
            kwargs['model_name'] = model_name
        return strategy_class(**kwargs)

    @classmethod
    def create_chat_llm(cls, **kwargs) -> DeepSeekChat:
        """
        Create a chat LLM instance ('deepseek-chat').

        Args:
            **kwargs: Configuration parameters.

        Returns:
            DeepSeekChat: chat LLM instance.
        """
        return cls.create_llm('deepseek-chat', **kwargs)

    @classmethod
    def create_think_llm(cls, **kwargs) -> DeepSeekThink:
        """
        Create a reasoning LLM instance ('deepseek-reasoner').

        Args:
            **kwargs: Configuration parameters.

        Returns:
            DeepSeekThink: reasoning LLM instance.
        """
        return cls.create_llm('deepseek-reasoner', **kwargs)

    @classmethod
    def create_dashscope_llm(cls, model_name: str = 'qwen-turbo', **kwargs) -> DashScopeLLM:
        """
        Create a DashScope LLM instance.

        Args:
            model_name: Model name, defaults to 'qwen-turbo'.
            **kwargs: Configuration parameters.

        Returns:
            DashScopeLLM: DashScope LLM instance.
        """
        return cls.create_llm(model_name, **kwargs)

    @classmethod
    def get_model_info(cls, model_name: str) -> Optional[Dict[str, Any]]:
        """
        Get a model's info dict, or None when the model is not registered.

        NOTE(review): builds a throwaway instance just to query its info --
        assumes every strategy constructor works without arguments; confirm.

        Args:
            model_name: Model name.

        Returns:
            Optional[Dict[str, Any]]: model info, or None if unknown.
        """
        if model_name not in cls._strategies:
            return None
        return cls.create_llm(model_name).get_model_info()

    @classmethod
    def list_models(cls) -> Dict[str, Dict[str, Any]]:
        """
        List every registered model and its info.

        Returns:
            Dict[str, Dict[str, Any]]: info dicts keyed by model name.
        """
        return {name: cls.get_model_info(name) for name in cls._strategies}

    @classmethod
    def get_models_by_provider(cls, provider: str) -> Dict[str, Dict[str, Any]]:
        """
        Get model info filtered by provider (case-insensitive substring match).

        Args:
            provider: Provider name (e.g. 'DeepSeek', 'DashScope').

        Returns:
            Dict[str, Dict[str, Any]]: info dicts for that provider's models.
        """
        wanted = provider.lower()
        return {
            name: info
            for name, info in cls.list_models().items()
            if info and wanted in info.get('provider', '').lower()
        }


class LLMManager:
    """
    LLM manager.

    Manages the lifecycle of LLM instances: per-configuration caching plus
    per-model default configs.
    """

    def __init__(self):
        # cache key -> live LLM instance
        self._instances: Dict[str, "BaseLLM"] = {}
        # model name -> default constructor kwargs
        self._default_configs: Dict[str, Dict[str, Any]] = {}

    @staticmethod
    def _cache_key(model_name: str, kwargs: Dict[str, Any]) -> str:
        """
        Build a deterministic cache key from model name + config kwargs.

        Sorting the items makes the key independent of kwarg order, and
        repr() works for unhashable values (lists, dicts) that would make
        the previous hash(frozenset(...)) approach raise TypeError; it also
        avoids silent hash collisions between different configs.
        """
        return f"{model_name}_{sorted(kwargs.items())!r}"

    def set_default_config(self, model_name: str, config: Dict[str, Any]):
        """
        Set the default configuration for a model.

        Args:
            model_name: Model name.
            config: Default configuration kwargs.
        """
        self._default_configs[model_name] = config

    def get_llm(self, model_name: str, use_cache: bool = True, **kwargs) -> "BaseLLM":
        """
        Get an LLM instance, creating (and optionally caching) it.

        Args:
            model_name: Model name.
            use_cache: Whether to reuse/populate the instance cache.
            **kwargs: Config overrides, merged over the model's defaults.

        Returns:
            BaseLLM: the (possibly cached) LLM instance.
        """
        cache_key = self._cache_key(model_name, kwargs)

        if use_cache and cache_key in self._instances:
            return self._instances[cache_key]

        # Explicit kwargs win over the registered defaults.
        final_config = self._default_configs.get(model_name, {}).copy()
        final_config.update(kwargs)

        llm = LLMFactory.create_llm(model_name, **final_config)

        if use_cache:
            self._instances[cache_key] = llm
        return llm

    def clear_cache(self, model_name: Optional[str] = None):
        """
        Clear cached instances.

        Args:
            model_name: Model to clear; None clears everything.
        """
        if model_name is None:
            self._instances.clear()
            return
        # Match on "<model>_" so a model name that is a prefix of another
        # (e.g. 'qwen' vs 'qwen-max') cannot clear the other's entries.
        prefix = f"{model_name}_"
        for key in [k for k in self._instances if k.startswith(prefix)]:
            del self._instances[key]

    def get_cached_instances(self) -> Dict[str, "BaseLLM"]:
        """
        Get all cached instances.

        Returns:
            Dict[str, BaseLLM]: a shallow copy of the cache mapping.
        """
        return self._instances.copy()

    def reset_instance(self, model_name: str, **kwargs):
        """
        Reset a specific cached instance, if present; no-op otherwise.

        Args:
            model_name: Model name.
            **kwargs: The config kwargs identifying the cached instance.
        """
        cache_key = self._cache_key(model_name, kwargs)
        if cache_key in self._instances:
            # Delegates to the instance's own reset -- assumed to exist on
            # BaseLLM implementations; confirm against BaseLLM's interface.
            self._instances[cache_key].reset_instance()


# Global LLM manager instance (module-level singleton shared by importers).
llm_manager = LLMManager()
