"""
ConnLLM高级API
提供更简洁的API接口
"""
from typing import Dict, List, Any, Optional, Generator, Union, Callable

from .core.provider import LLMProvider, ProviderFactory
from .core.config import ConfigManager
from .core_v2.provider import BaseProviderV2
from .providers import OpenRouterProviderV2

# Global configuration manager shared by all ConnLLM instances in this process.
_config_manager = ConfigManager()

class ConnLLM:
	"""High-level ConnLLM facade that unifies V1 (legacy) and V2 providers."""

	def __init__(self, provider: Union[str, LLMProvider, BaseProviderV2] = "openrouter", **config_overrides):
		"""
		Build a ConnLLM instance around a provider.

		Args:
			provider: Either a provider name (resolved through the global
				config manager) or an already-constructed provider instance.
			**config_overrides: Configuration overrides applied when
				*provider* is given by name.
		"""
		if not isinstance(provider, str):
			# Caller supplied a ready-made provider instance; use it as-is.
			self.provider = provider
		else:
			config_overrides["provider"] = provider
			self.provider = self._create_provider("default", config_overrides)

		# Expose the chat interface bound to the resolved provider.
		self.chat = ChatAPI(self.provider)

	def _create_provider(self, config_name: str = "default", overrides: Optional[Dict[str, Any]] = None) -> Union[LLMProvider, BaseProviderV2]:
		"""
		Resolve a provider instance from a named configuration.

		Args:
			config_name: Name of the configuration profile to load.
			overrides: Optional mapping of configuration keys to override.

		Returns:
			A V2 provider when the config selects "openrouter", otherwise a
			legacy provider built by the factory.
		"""
		config = _config_manager.get_config(config_name, overrides)
		# NOTE(review): this default ("anthropic") differs from __init__'s
		# default ("openrouter") — presumably intentional for config-driven
		# callers; confirm before unifying.
		provider_type = config.get("provider", "anthropic")

		if provider_type == "openrouter":
			# OpenRouter is served by the newer V2 architecture.
			return OpenRouterProviderV2(config)

		# Everything else goes through the legacy provider factory.
		return ProviderFactory.create(config)

	def execute_capability(self, capability_id: str, method_name: str, *args, **kwargs) -> Any:
		"""
		Invoke a capability method on the underlying provider.

		Args:
			capability_id: Identifier of the capability to use.
			method_name: Name of the method to invoke on that capability.
			*args: Positional arguments forwarded to the capability method.
			**kwargs: Keyword arguments forwarded to the capability method.

		Returns:
			Whatever the capability method returns.

		Raises:
			NotImplementedError: If the provider exposes no capability system.
		"""
		is_v2 = isinstance(self.provider, BaseProviderV2)
		if is_v2 and hasattr(self.provider, "capability_manager"):
			manager = self.provider.capability_manager
			return manager.execute(capability_id, method_name, *args, **kwargs)

		raise NotImplementedError(f"提供商 {type(self.provider).__name__} 不支持能力系统")


class ChatAPI:
	"""Chat-oriented wrapper that normalizes V1 and V2 provider responses."""

	def __init__(self, provider: Union[LLMProvider, BaseProviderV2]):
		"""
		Bind the chat API to a provider.

		Args:
			provider: Provider instance used for all completions.
		"""
		self.provider = provider

	def complete(self, messages: List[Dict[str, Any]]) -> str:
		"""
		Run a (non-streaming) completion and return its text.

		Args:
			messages: Chat messages to send to the provider.

		Returns:
			The completion text.
		"""
		if not isinstance(self.provider, BaseProviderV2):
			# Legacy providers are assumed to return text directly.
			return self.provider.complete(messages)

		response = self.provider.complete(messages)

		# Prefer the provider's own extraction hook when available.
		if hasattr(self.provider, "_extract_text_from_response"):
			return self.provider._extract_text_from_response(response)

		# Fall back to the OpenAI-style response layout.
		if isinstance(response, dict) and "choices" in response:
			try:
				return response["choices"][0]["message"]["content"]
			except (KeyError, IndexError):
				return str(response)

		# Last resort: stringify whatever came back.
		return str(response)

	def complete_stream(self, messages: List[Dict[str, Any]]) -> Generator[str, None, None]:
		"""
		Run a streaming completion.

		Args:
			messages: Chat messages to send to the provider.

		Returns:
			A generator yielding response chunks.
		"""
		v2_with_caps = (
			isinstance(self.provider, BaseProviderV2)
			and hasattr(self.provider, "model_capability_manager")
		)
		if v2_with_caps:
			caps = self.provider.model_capability_manager
			model = self.provider.config_manager.get_model()
			# Route through the streaming capability when the model supports it.
			if caps.model_supports("streaming", model):
				return caps.get("streaming").stream_response(messages)

		# Fall back to the provider's own streaming implementation.
		return self.provider.complete_stream(messages)