# -*- coding: utf-8 -*-
"""
@Time : 2025/9/23  22:17
@Author : Shang
@File : llm_service.py
@IDE : PyCharm
"""
import os
from typing import List, Dict, Any, Optional

import httpx


class LLMService:
    """
    Thin async wrapper around an OpenAI-compatible chat-completions API.

    Expected environment variables (consumed by :meth:`from_env`):
    ``LLM_BASE_URL``, ``LLM_API_KEY``, ``LLM_MODEL``.
    """

    def __init__(self, base_url: str, api_key: str, model: str, timeout_sec: float = 20.0):
        # Normalize away a trailing slash so the stored base URL is predictable.
        self.base_url = base_url.rstrip("/")
        self.api_key = api_key
        self.model = model
        self.timeout = timeout_sec
        # Lazily created on first request; released by aclose()/__aexit__.
        self._client: Optional["httpx.AsyncClient"] = None

    @classmethod
    def from_env(cls, timeout_sec: float = 20.0) -> "LLMService":
        """Build a service from LLM_BASE_URL / LLM_API_KEY / LLM_MODEL.

        Raises:
            KeyError: if any of the three environment variables is unset.
        """
        return cls(
            base_url=os.environ["LLM_BASE_URL"],
            api_key=os.environ["LLM_API_KEY"],
            model=os.environ["LLM_MODEL"],
            timeout_sec=timeout_sec,
        )

    async def _get_client(self) -> "httpx.AsyncClient":
        """Return the shared AsyncClient, creating it on first use."""
        # NOTE(review): not guarded against concurrent first calls — two
        # overlapping requests could each build a client and leak one.
        # Confirm single-task usage or add an asyncio.Lock if needed.
        if self._client is None:
            headers = {
                "Authorization": f"Bearer {self.api_key}",
                "Content-Type": "application/json",
            }
            self._client = httpx.AsyncClient(base_url=self.base_url, headers=headers, timeout=self.timeout)
        return self._client

    @staticmethod
    def _extract_content(data: Dict[str, Any]) -> str:
        """Pull the assistant text out of a chat/completions response dict.

        Supports both the chat shape (``choices[0].message.content``) and the
        legacy completions shape (``choices[0].text``). Returns ``""`` when the
        response has no choices or no content — the original code raised an
        uncaught IndexError on ``"choices": []``.
        """
        choices = data.get("choices") or []
        if not choices:
            return ""
        first = choices[0]
        message = first.get("message")
        if isinstance(message, dict) and message.get("content") is not None:
            return message["content"]
        # Legacy/completions-style fallback; "" when content is absent or null.
        return first.get("text") or ""

    async def chat(self, messages: List[Dict[str, str]]) -> str:
        """POST *messages* to the chat.completions endpoint and return the reply text.

        Args:
            messages: OpenAI-style ``[{"role": ..., "content": ...}, ...]``.

        Raises:
            httpx.HTTPStatusError: on a non-2xx response.
        """
        client = await self._get_client()
        payload = {
            "model": self.model,
            "messages": messages,
            "temperature": 0.7,
        }
        # Common OpenAI-style path.
        # NOTE(review): httpx appends this to base_url's path, so a base_url
        # that already ends in "/v1" produces "/v1/v1/chat/completions" —
        # confirm the expected LLM_BASE_URL format.
        url = "/v1/chat/completions"
        resp = await client.post(url, json=payload)
        resp.raise_for_status()
        return self._extract_content(resp.json())

    async def aclose(self) -> None:
        """Close the underlying HTTP client, if one was ever created."""
        if self._client is not None:
            await self._client.aclose()
            self._client = None

    async def __aenter__(self) -> "LLMService":
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        await self.aclose()