"""LLM管理器"""

import os
from typing import Dict, List, Any, Optional, Generator
from .providers import BailianProvider, BailianQwQProvider


class LLMManager:
    """LLM manager — unified registry and dispatch for LLM providers.

    Providers are stored in ``self.providers`` keyed by a short public
    name; all chat entry points resolve the provider by name and delegate
    to it, raising ``ValueError`` for unknown names.
    """

    def __init__(self):
        # Built-in providers registered by default; more can be added via
        # add_provider().
        self.providers = {
            "bailian": BailianProvider(),
            "bailian_qwq": BailianQwQProvider(),
        }

    def get_provider(self, name: str):
        """Return the provider registered under *name*, or None if unknown."""
        return self.providers.get(name)

    def list_providers(self) -> List[str]:
        """Return the names of all registered providers."""
        return list(self.providers.keys())

    def _require_provider(self, name: str):
        """Return the provider for *name*, raising ValueError if unregistered.

        Centralizes the lookup-and-raise previously duplicated across the
        chat entry points. The error message is kept verbatim for any
        callers that match on its text.
        """
        provider = self.get_provider(name)
        if not provider:
            raise ValueError(f"未知的LLM提供商: {name}")
        return provider

    def chat_completion(self, provider_name: str, messages: List[Dict[str, str]], **kwargs) -> Any:
        """Run a (non-streaming) chat completion on the named provider.

        Args:
            provider_name: Key of a registered provider.
            messages: Chat history as role/content dicts.
            **kwargs: Passed through to the provider unchanged.

        Raises:
            ValueError: If *provider_name* is not registered.
        """
        return self._require_provider(provider_name).chat_completion(messages, **kwargs)

    def stream_chat(self, provider_name: str, messages: List[Dict[str, str]], **kwargs) -> Any:
        """Run a streaming chat on the named provider.

        Raises:
            ValueError: If *provider_name* is not registered.
        """
        return self._require_provider(provider_name).stream_chat(messages, **kwargs)

    def stream_thinking_chat(self, provider_name: str, messages: List[Dict[str, str]], **kwargs) -> Generator[Dict[str, Any], None, None]:
        """Run a streaming deep-thinking chat on the named provider.

        Raises:
            ValueError: If *provider_name* is not registered, or the provider
                does not implement ``stream_thinking_chat``.
        """
        provider = self._require_provider(provider_name)
        # Guard clause: not every provider supports the thinking stream.
        if not hasattr(provider, 'stream_thinking_chat'):
            raise ValueError(f"提供商 {provider_name} 不支持深度思考功能")
        return provider.stream_thinking_chat(messages, **kwargs)

    def add_provider(self, name: str, provider):
        """Register (or replace) a provider under *name*."""
        self.providers[name] = provider


# Global LLM manager instance — module-level singleton shared by all importers.
llm_manager = LLMManager()
