from typing import List, Dict, Any, Optional, AsyncGenerator, Generator
import time
from openai import OpenAI, AsyncOpenAI
from app.core.config import settings

class LLM:
    """
    Thin wrapper around the OpenAI sync/async SDK clients.

    Exposes blocking, async, and streaming chat-completion calls (with
    optional tool-calling) and normalizes every result — including
    errors — into plain dicts so callers never touch SDK model objects.
    """

    def __init__(self, api_key: str = settings.OPENAI_API_KEY):
        """
        Initialize the sync and async OpenAI clients.

        Args:
            api_key: Key for the OpenAI-compatible endpoint; defaults to
                the value from application settings (evaluated at class
                definition time, as with any Python default argument).
        """
        self.client = OpenAI(
            api_key=api_key,
            base_url=settings.OPENAI_BASE_URL
        )
        self.async_client = AsyncOpenAI(
            api_key=api_key,
            base_url=settings.OPENAI_BASE_URL
        )
        self.default_model = settings.DEFAULT_MODEL
        self.default_temperature = 0.7

    def _build_params(
        self,
        messages: List[Dict[str, str]],
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        tools: Optional[List[Dict]] = None,
        tool_choice: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Build the keyword arguments for a chat.completions.create call.

        Optional fields are included only when supplied, so the API's own
        defaults apply otherwise.
        """
        params: Dict[str, Any] = {
            "model": model or self.default_model,
            "messages": messages,
            "temperature": temperature if temperature is not None else self.default_temperature,
        }

        # Explicit None check: a caller-supplied max_tokens=0 must not be
        # silently dropped by a truthiness test.
        if max_tokens is not None:
            params["max_tokens"] = max_tokens
        if tools:
            params["tools"] = tools
        if tool_choice:
            params["tool_choice"] = tool_choice

        return params

    def chat(
        self,
        messages: List[Dict[str, str]],
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        tools: Optional[List[Dict]] = None,
        tool_choice: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Synchronous (non-streaming) chat completion.

        Returns:
            A normalized response dict, or an error dict
            (``{"error": ..., "status": "error"}``) on any failure.
        """
        try:
            params = self._build_params(
                messages, model, temperature, max_tokens, tools, tool_choice
            )
            response = self.client.chat.completions.create(**params)

            return self._format_response(response)
        except Exception as e:
            # Deliberate best-effort contract: callers receive an error
            # dict instead of an exception.
            return self._format_error(e)

    async def achat(
        self,
        messages: List[Dict[str, str]],
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        tools: Optional[List[Dict]] = None,
        tool_choice: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Asynchronous (non-streaming) chat completion.

        Returns:
            A normalized response dict, or an error dict on any failure.
        """
        try:
            params = self._build_params(
                messages, model, temperature, max_tokens, tools, tool_choice
            )
            response = await self.async_client.chat.completions.create(**params)

            return self._format_response(response)
        except Exception as e:
            return self._format_error(e)

    def stream_chat(
        self,
        messages: List[Dict[str, str]],
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        tools: Optional[List[Dict]] = None,
        tool_choice: Optional[str] = None
    ) -> Generator[Dict[str, Any], None, None]:
        """
        Synchronous streaming chat completion.

        Yields:
            One normalized chunk dict per streamed delta; on failure,
            yields a single error dict and stops.
        """
        try:
            params = self._build_params(
                messages, model, temperature, max_tokens, tools, tool_choice
            )
            params["stream"] = True

            for chunk in self.client.chat.completions.create(**params):
                yield self._format_stream_chunk(chunk)
        except Exception as e:
            yield self._format_error(e)

    async def astream_chat(
        self,
        messages: List[Dict[str, str]],
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        tools: Optional[List[Dict]] = None,
        tool_choice: Optional[str] = None
    ) -> AsyncGenerator[Dict[str, Any], None]:
        """
        Asynchronous streaming chat completion.

        Yields:
            One normalized chunk dict per streamed delta; on failure,
            yields a single error dict and stops.
        """
        try:
            params = self._build_params(
                messages, model, temperature, max_tokens, tools, tool_choice
            )
            params["stream"] = True

            # BUGFIX: AsyncOpenAI's create() returns a coroutine that must
            # be awaited to obtain the AsyncStream before iterating;
            # `async for` over the raw coroutine raises TypeError.
            stream = await self.async_client.chat.completions.create(**params)

            async for chunk in stream:
                yield self._format_stream_chunk(chunk)
        except Exception as e:
            yield self._format_error(e)

    @staticmethod
    def _format_response(response: Any) -> Dict[str, Any]:
        """
        Normalize a non-streaming SDK response into a plain dict.

        Only the first choice is surfaced; ``tool_calls`` is passed
        through as returned by the SDK.
        """
        choice = response.choices[0]
        # Some OpenAI-compatible backends return usage=None; fall back to
        # zeros instead of raising AttributeError.
        usage = getattr(response, "usage", None)
        return {
            "id": response.id,
            "object": "chat.completion",
            "created": int(time.time()),
            "model": response.model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": choice.message.content,
                        "tool_calls": choice.message.tool_calls
                    },
                    "finish_reason": choice.finish_reason
                }
            ],
            "usage": {
                "prompt_tokens": usage.prompt_tokens if usage else 0,
                "completion_tokens": usage.completion_tokens if usage else 0,
                "total_tokens": usage.total_tokens if usage else 0
            }
        }

    @staticmethod
    def _format_stream_chunk(chunk: Any) -> Dict[str, Any]:
        """
        Normalize one streaming SDK chunk into a plain dict.
        """
        choice = chunk.choices[0]
        return {
            "id": chunk.id,
            "object": "chat.completion.chunk",
            "created": int(time.time()),
            "model": chunk.model,
            "choices": [
                {
                    "index": 0,
                    "delta": {
                        "content": choice.delta.content,
                        "role": choice.delta.role,
                        "tool_calls": choice.delta.tool_calls
                    },
                    "finish_reason": choice.finish_reason
                }
            ]
        }

    @staticmethod
    def _format_error(error: Exception) -> Dict[str, Any]:
        """
        Normalize any exception into the error-dict shape callers expect.
        """
        return {
            "error": str(error),
            "status": "error"
        }

# Module-level singleton shared across the app.
# NOTE: constructs both OpenAI clients at import time.
llm = LLM() 