"""
LLM客户端实现
基于火山引擎API的大语言模型客户端，支持流式和非流式对话
"""

import httpx
import asyncio
import logging
import json
import os
from typing import AsyncGenerator, Dict, Any, List, Optional, Union

from .errors import get_error_hint, LLMClientError
from config.config_manager import AppConfig

logger = logging.getLogger(__name__)


class LLMClient:
    """Volcengine LLM client.

    Thin async wrapper around the Volcengine chat-completions endpoint,
    providing a streaming generator (`stream_chat`) and a one-shot
    request (`chat`).
    """

    def __init__(self, config: "AppConfig"):
        """Read endpoint, credentials and sampling parameters from *config*.

        Args:
            config: Application configuration carrying the ``llm`` section
                (Volcengine endpoint/key, model name, sampling parameters,
                request timeout).
        """
        self.config = config
        volc = config.llm.volcengine
        # Normalize the base URL so a trailing slash in config is harmless.
        self.api_url = volc.api_url.rstrip('/') + '/api/v3/chat/completions'
        self.api_key = volc.api_key
        self.model = config.llm.model.name
        self.parameters = self._build_parameters()
        self.timeout = config.llm.timeout

        if not self.api_key:
            logger.warning("未设置火山引擎API密钥，请设置环境变量 VOLCENGINE_LLM_API_KEY")

    def _build_parameters(self) -> Dict[str, Any]:
        """Build the default request parameters from config.

        Returns:
            Sampling parameters merged into every request payload. Note this
            includes a ``stream`` default from config; ``_build_payload``
            overrides it with the explicit per-call argument.
        """
        params = self.config.llm.parameters

        result = {
            "temperature": params.temperature,
            "top_p": params.top_p,
            "max_tokens": params.max_tokens,
            "stream": params.stream,
        }

        # Only send the thinking-mode switch when it differs from "auto",
        # letting the server apply its own default otherwise.
        if params.thinking.type != "auto":
            result["thinking"] = {"type": params.thinking.type}

        # Penalties of 0.0 are the API defaults; omit them to keep payloads lean.
        if params.frequency_penalty != 0.0:
            result["frequency_penalty"] = params.frequency_penalty

        if params.presence_penalty != 0.0:
            result["presence_penalty"] = params.presence_penalty

        return result

    def _build_headers(self) -> Dict[str, str]:
        """Return the auth/content headers shared by every request."""
        return {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

    def _build_payload(
        self,
        messages: List[Dict[str, Any]],
        tools: Optional[List[Dict[str, Any]]],
        stream: bool,
        extra: Dict[str, Any],
    ) -> Dict[str, Any]:
        """Assemble the request body shared by `chat` and `stream_chat`.

        BUGFIX: ``stream`` is merged *after* ``self.parameters`` so the
        explicit per-call argument wins; previously the ``stream`` default
        carried inside ``self.parameters`` silently overrode it (e.g.
        ``chat(stream=False)`` could still send ``stream: true``).
        """
        payload = {
            "model": self.model,
            "messages": messages,
            **self.parameters,
            "stream": stream,  # explicit argument beats the config default
            **extra,           # per-call kwargs beat everything else
        }
        # Tool-calling support: only attach when tools were supplied.
        if tools:
            payload["tools"] = tools
        return payload

    @staticmethod
    def _extract_error_data(status_code: int, error_text: Optional[str]) -> Dict[str, Any]:
        """Normalize an HTTP error body into ``{"code": ..., "message": ...}``.

        Understands the Volcengine standard shape ``{"error": {...}}`` and
        falls back to the raw text, or to a placeholder message when the
        body could not be read at all (*error_text* is None).
        """
        error_data: Dict[str, Any] = {
            "code": f"HTTP_{status_code}",
            "message": "<Failed to read error body>",
        }
        if error_text is None:
            return error_data
        try:
            error_json = json.loads(error_text)
        except json.JSONDecodeError:
            error_data["message"] = error_text
            return error_data
        if isinstance(error_json, dict) and isinstance(error_json.get("error"), dict):
            error_data = error_json["error"]
            # Guarantee both keys exist for downstream hint lookup.
            error_data.setdefault("code", f"HTTP_{status_code}")
            error_data.setdefault("message", error_text)
        else:
            error_data["message"] = error_text
        return error_data

    @staticmethod
    def _chunk_from_event(obj: Dict[str, Any]) -> Dict[str, Any]:
        """Convert one parsed SSE event into the chunk dict yielded to callers.

        Copies only non-empty delta fields (content / reasoning_content /
        tool_calls), the finish reason when set, and usage stats when present.
        """
        choice = obj["choices"][0]
        delta = choice.get("delta", {})
        chunk: Dict[str, Any] = {}
        for key in ("content", "reasoning_content", "tool_calls"):
            if delta.get(key):
                chunk[key] = delta[key]
        if choice.get("finish_reason"):
            chunk["finish_reason"] = choice["finish_reason"]
        if "usage" in obj:
            chunk["usage"] = obj["usage"]
        return chunk

    async def stream_chat(
        self,
        messages: List[Dict[str, Any]],
        tools: Optional[List[Dict[str, Any]]] = None,
        stream: bool = True,
        **kwargs
    ) -> AsyncGenerator[Dict[str, Any], None]:
        """Streaming chat: yield one dict per SSE chunk.

        Yields chunks with any of ``content`` / ``reasoning_content`` /
        ``tool_calls`` / ``finish_reason`` / ``usage`` keys. On failure
        yields ``{"error": ..., "hint": ...}`` instead of raising, so the
        consumer loop never has to catch.
        """
        headers = self._build_headers()
        payload = self._build_payload(messages, tools, stream, kwargs)

        # Debug logging of the key request parameters.
        logger.info(f"LLM API请求参数 - model: {self.model}")
        logger.info(f"LLM API请求参数 - thinking: {payload.get('thinking', 'NOT_SET')}")
        logger.debug(f"LLM API请求参数 - 完整payload: {json.dumps(payload, ensure_ascii=False, indent=2)}")

        try:
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                async with client.stream("POST", self.api_url, headers=headers, json=payload) as response:
                    if response.status_code != 200:
                        logger.error(f"请求失败，状态码: {response.status_code}")
                        error_text: Optional[str] = None
                        try:
                            error_text = (await response.aread()).decode()
                            logger.error(f"服务器响应: {error_text}")
                        except Exception as read_err:
                            logger.error(f"读取错误响应体失败: {read_err}")
                        error_data = self._extract_error_data(response.status_code, error_text)
                        yield {"error": error_data, "hint": get_error_hint(error_data)}
                        return

                    # 200 OK: consume the SSE stream line by line.
                    async for line in response.aiter_lines():
                        if not line or not line.startswith("data: "):
                            continue
                        data = line[len("data: "):].strip()
                        if data == "[DONE]":
                            break

                        try:
                            obj = json.loads(data)

                            # In-stream error events are forwarded, not fatal.
                            if obj.get("error") and isinstance(obj["error"], dict):
                                error_data = obj["error"]
                                logger.error(f"流式传输中收到错误: {error_data}")
                                yield {"error": error_data, "hint": get_error_hint(error_data)}
                                continue

                            if "choices" in obj and len(obj["choices"]) > 0:
                                yield self._chunk_from_event(obj)

                        except json.JSONDecodeError as e:
                            # Malformed lines are logged and skipped.
                            logger.warning(f"解析流式 JSON 失败: {e}, data: {data}")
                        except Exception as e:
                            logger.error(f"处理流式块时发生错误: {e}")
                            error_data = {"code": "StreamProcessingError", "message": str(e)}
                            yield {"error": error_data, "hint": get_error_hint(error_data)}

        except httpx.RequestError as e:
            logger.error(f"请求过程中发生网络错误: {e}")
            error_data = {"code": "NetworkError", "message": str(e)}
            yield {"error": error_data, "hint": get_error_hint(error_data)}
        except Exception as e:
            logger.error(f"请求过程中发生未知错误: {e}")
            error_data = {"code": "UnknownError", "message": str(e)}
            yield {"error": error_data, "hint": get_error_hint(error_data)}

    async def chat(
        self,
        messages: List[Dict[str, Any]],
        tools: Optional[List[Dict[str, Any]]] = None,
        stream: bool = False,
        **kwargs
    ) -> Dict[str, Any]:
        """Non-streaming chat: return the full response JSON.

        Raises:
            LLMClientError: with a human-readable hint and the normalized
                error payload on any HTTP, network, or unexpected failure.
        """
        headers = self._build_headers()
        payload = self._build_payload(messages, tools, stream, kwargs)

        try:
            async with httpx.AsyncClient(timeout=self.timeout) as client:
                resp = await client.post(self.api_url, headers=headers, json=payload)

                if resp.status_code != 200:
                    logger.error(f"请求失败，状态码: {resp.status_code}")
                    error_text: Optional[str] = None
                    try:
                        error_text = resp.text
                        logger.error(f"服务器响应: {error_text}")
                    except Exception as read_err:
                        logger.error(f"读取或解析错误响应体失败: {read_err}")
                    error_data = self._extract_error_data(resp.status_code, error_text)
                    # Suppress the httpx context; the hint is the user-facing part.
                    raise LLMClientError(get_error_hint(error_data), error_data) from None

                return resp.json()

        except LLMClientError:
            raise  # already normalized above; re-raise untouched
        except httpx.RequestError as e:
            logger.error(f"请求过程中发生网络错误: {e}")
            error_data = {"code": "NetworkError", "message": str(e)}
            raise LLMClientError(get_error_hint(error_data), error_data) from e
        except Exception as e:
            logger.error(f"请求过程中发生未知错误: {e}")
            error_data = {"code": "UnknownError", "message": str(e)}
            raise LLMClientError(get_error_hint(error_data), error_data) from e
