"""
OpenRouter流式输出功能实现
支持通过OpenRouter访问各种模型的流式文本生成
"""
from typing import Dict, Any, List, Optional, Generator
import json
import logging

from ...capabilities.streaming import StreamingCapability

logger = logging.getLogger("llm.openrouter.stream")

class OpenRouterStreaming(StreamingCapability):
	"""Streaming output implementation for OpenRouter.

	Posts to OpenRouter's OpenAI-compatible chat-completions endpoint with
	``stream=True`` and yields text deltas as they arrive over SSE.
	"""

	def _initialize(self) -> None:
		"""Initialize the streaming capability."""
		# OpenRouter supports streaming for most hosted models, so default to True.
		self.supported = True
		logger.debug("初始化OpenRouter流式输出功能")

	def is_supported(self) -> bool:
		"""
		Check whether streaming output is supported.

		Returns:
			True when streaming output is available.
		"""
		# OpenRouter supports streaming output for the majority of its models.
		return self.supported

	def stream_response(self, messages: List[Dict[str, Any]], **kwargs) -> Generator[str, None, None]:
		"""
		Generate a streaming response.

		Args:
			messages: Chat messages in OpenAI-compatible format.
			**kwargs: Extra provider-specific parameters merged into the request.

		Yields:
			Text fragments (deltas) as they arrive. On failure the error is
			logged and a single error-marker string is yielded instead of raising.
		"""
		logger.debug("开始OpenRouter流式生成")

		response = None
		try:
			# Build the request payload; stream=True forces SSE output.
			params = {
				"model": self.provider.model,
				"messages": messages,
				"temperature": self.provider.temperature,
				"max_tokens": self.provider.max_tokens,
				"stream": True,
				**kwargs
			}

			# Let the provider drop/adjust arguments it does not accept.
			_, params = self.provider.filter_chat_args(**params)

			headers = {
				"Authorization": f"Bearer {self.provider.api_key}",
				"HTTP-Referer": self.provider.app_url,
				"X-Title": self.provider.app_name,
				"Content-Type": "application/json"
			}

			# Imported lazily so the module loads even without requests installed.
			import requests

			response = self.provider._call_with_retry(
				requests.post,
				"https://openrouter.ai/api/v1/chat/completions",
				headers=headers,
				json=params,
				stream=True,
				timeout=self.provider.timeout
			)

			if response.status_code != 200:
				raise Exception(f"API请求失败: {response.status_code} {response.text}")

			for line in response.iter_lines():
				# iter_lines yields bytes; only SSE data lines are relevant.
				if not line or not line.startswith(b"data: "):
					continue
				payload = line[6:]  # strip the "data: " SSE prefix
				if payload.strip() == b"[DONE]":
					break
				try:
					chunk = json.loads(payload)
				except json.JSONDecodeError:
					# Ignore keep-alive / malformed fragments.
					continue

				# Text content delta.
				delta = self.process_chunk(chunk)
				if delta:
					yield delta

				# Optional reasoning ("thinking") stream.
				thinking = self.detect_thinking(chunk)
				if thinking:
					logger.debug(f"[思考过程] {thinking}")

		except Exception as e:
			logger.error(f"流式生成过程中出错: {str(e)}")
			yield f"\n[错误: {str(e)}]"
		finally:
			# BUGFIX: close the streamed HTTP response so the underlying
			# connection is released (it was previously leaked on every path).
			if response is not None:
				try:
					response.close()
				except Exception:
					pass

		logger.debug("OpenRouter流式生成结束")

	def process_chunk(self, chunk: Dict[str, Any]) -> Optional[str]:
		"""
		Process a single response chunk.

		Args:
			chunk: Parsed JSON chunk (OpenAI-compatible format).

		Returns:
			The delta text, or None when the chunk carries no content.
		"""
		try:
			# OpenRouter uses the OpenAI-compatible format.
			choices = chunk.get("choices") or []
			if choices:
				# "content" may be present but null (e.g. role-only chunks);
				# returning None makes the caller skip it either way.
				return choices[0].get("delta", {}).get("content")
		except Exception as e:
			logger.error(f"处理流式数据块时出错: {str(e)}")

		return None

	def detect_finish_reason(self, chunk: Dict[str, Any]) -> Optional[str]:
		"""
		Detect the finish reason in a response chunk.

		Args:
			chunk: Parsed JSON chunk.

		Returns:
			The finish reason, or None when the chunk has none.
		"""
		try:
			choices = chunk.get("choices") or []
			if choices:
				return choices[0].get("finish_reason")
		except Exception as e:
			logger.error(f"检测完成原因时出错: {str(e)}")

		return None

	def detect_thinking(self, chunk: Dict[str, Any]) -> Optional[str]:
		"""
		Detect reasoning ("thinking") text in a response chunk.

		Handles models that surface reasoning under ``delta.model_extra``
		(e.g. Claude 3.7 thinking) — NOTE(review): field name assumed from
		the original code; confirm against current OpenRouter responses.

		Args:
			chunk: Parsed JSON chunk.

		Returns:
			Reasoning text, or None when the chunk has none.
		"""
		try:
			choices = chunk.get("choices") or []
			if choices:
				delta = choices[0].get("delta", {})
				if "model_extra" in delta:
					return delta["model_extra"].get("reasoning")
		except Exception as e:
			logger.error(f"检测思考过程时出错: {str(e)}")

		return None