from __future__ import annotations

import json
from dataclasses import dataclass
from typing import Dict, Generator, Iterable, List, Optional, Union

import requests


@dataclass
class ChatResult:
    """Container for a completed (non-streaming) chat call.

    Attributes:
        text: Content of the first choice's message; empty string when the
            server returned no choices or no content.
        raw: The complete decoded JSON body exactly as the server sent it.
    """

    text: str
    raw: Dict


class LLMClient:
    """Client for OpenAI-compatible chat completions APIs.

    Args:
        base_url: Base address of your gateway, e.g. "http://host:1341".
            A full endpoint ending in "/chat/completions" is also accepted.
        api_key: Optional token for Authorization header as Bearer <api_key>.
        timeout: Request timeout in seconds.
        default_model: Default model name if not provided per-call.
        default_headers: Extra headers to send with each request.
    """

    def __init__(
        self,
        base_url: str,
        api_key: Optional[str] = None,
        timeout: int = 120,
        default_model: Optional[str] = None,
        default_headers: Optional[Dict[str, str]] = None,
    ) -> None:
        self.base_url = base_url.rstrip("/")
        self.api_key = api_key
        self.timeout = timeout
        self.default_model = default_model
        # Copy the dict so later mutation by the caller cannot silently
        # change the headers this client sends.
        self.default_headers = dict(default_headers) if default_headers else {}

    # ----------------------------- Public API ----------------------------- #
    def chat(
        self,
        system_prompt: Optional[str] = None,
        user_prompt: Optional[str] = None,
        messages: Optional[List[Dict[str, str]]] = None,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        top_p: Optional[float] = None,
        max_tokens: Optional[int] = None,
        presence_penalty: Optional[float] = None,
        frequency_penalty: Optional[float] = None,
        seed: Optional[int] = None,
        stop: Optional[Union[str, Iterable[str]]] = None,
        stream: bool = False,
        extra_params: Optional[Dict] = None,
    ) -> Union[ChatResult, Generator[str, None, None]]:
        """Send a chat completion request.

        Provide either `messages` or `system_prompt`/`user_prompt`.

        Returns:
            A generator yielding text deltas if `stream=True`, otherwise a
            ChatResult holding the first choice's text and the raw JSON body.

        Raises:
            ValueError: If neither `messages` nor any prompt was provided.
            requests.HTTPError: On a 4xx/5xx response. The message embeds the
                server-supplied error detail and `.response` carries the
                failed Response object.
        """

        resolved_messages = self._build_messages(messages, system_prompt, user_prompt)
        payload: Dict = {
            "model": model or self.default_model,
            "messages": resolved_messages,
            "stream": stream,
        }

        # Only include sampling parameters the caller actually set so the
        # server's own defaults apply otherwise.
        if temperature is not None:
            payload["temperature"] = float(temperature)
        if top_p is not None:
            payload["top_p"] = float(top_p)
        if max_tokens is not None:
            payload["max_tokens"] = int(max_tokens)
        if presence_penalty is not None:
            payload["presence_penalty"] = float(presence_penalty)
        if frequency_penalty is not None:
            payload["frequency_penalty"] = float(frequency_penalty)
        if seed is not None:
            payload["seed"] = int(seed)
        if stop is not None:
            # Materialize non-string iterables (sets, generators, tuples) into
            # a list so the payload stays JSON-serializable.
            payload["stop"] = stop if isinstance(stop, str) else list(stop)
        if extra_params:
            payload.update(extra_params)

        url = self._resolve_url()
        headers = self._build_headers()

        response = requests.post(url, json=payload, headers=headers, timeout=self.timeout, stream=stream)
        try:
            response.raise_for_status()
        except requests.HTTPError as http_err:
            # Surface the server-side error body to make 4xx/5xx debuggable.
            try:
                detail = response.json()
            except Exception:
                detail = {"text": response.text}
            # Re-raise with the detail embedded; keep `response` attached so
            # callers can still inspect `err.response`.
            raise requests.HTTPError(
                f"HTTP {response.status_code}: {detail}", response=response
            ) from http_err

        if stream:
            return self._iter_stream(response)

        data = response.json()
        text = self._extract_text(data)
        return ChatResult(text=text, raw=data)

    # --------------------------- Helper methods --------------------------- #
    def _resolve_url(self) -> str:
        """Return the chat-completions endpoint derived from `base_url`.

        Accepts either a host-only base (e.g. https://api.example.com) or a
        full endpoint (e.g. https://api.example.com/v1/chat/completions).
        """
        if self.base_url.endswith(("/v1/chat/completions", "/chat/completions")):
            return self.base_url
        return f"{self.base_url}/v1/chat/completions"

    def _build_messages(
        self,
        messages: Optional[List[Dict[str, str]]],
        system_prompt: Optional[str],
        user_prompt: Optional[str],
    ) -> List[Dict[str, str]]:
        """Return the message list, building one from the prompts if needed.

        An explicit `messages` argument wins and is returned unmodified.

        Raises:
            ValueError: If no message content was provided at all.
        """
        if messages is not None:
            return messages

        result: List[Dict[str, str]] = []
        if system_prompt:
            result.append({"role": "system", "content": system_prompt})
        if user_prompt:
            result.append({"role": "user", "content": user_prompt})
        if not result:
            raise ValueError("Either `messages` or at least one of `system_prompt`/`user_prompt` must be provided.")
        return result

    def _build_headers(self) -> Dict[str, str]:
        """Assemble request headers for one call.

        `default_headers` may override Accept/Content-Type, but the
        Authorization header derived from `api_key` always wins.
        """
        headers: Dict[str, str] = {
            "Accept": "application/json",
            "Content-Type": "application/json",
        }
        headers.update(self.default_headers)
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"
        return headers

    def _iter_stream(self, response: requests.Response) -> Generator[str, None, None]:
        """Parse a Server-Sent Events stream and yield text deltas.

        Compatible with OpenAI-style `data: {json}` event lines; the optional
        space after the `data:` field name is tolerated per the SSE spec.
        The response is closed when iteration finishes, is abandoned, or the
        `[DONE]` sentinel arrives, so the connection is always released.
        """

        try:
            for line in response.iter_lines(decode_unicode=True):
                if not line or not line.startswith("data:"):
                    continue
                chunk = line[len("data:"):].strip()
                if chunk == "[DONE]":
                    break
                try:
                    obj = json.loads(chunk)
                except json.JSONDecodeError:
                    # Skip malformed lines rather than aborting the stream.
                    continue
                choices = obj.get("choices") or []
                if choices:
                    delta = choices[0].get("delta") or {}
                    content = delta.get("content")
                    if content:
                        yield content
        finally:
            # Release the underlying connection even on early exit.
            response.close()

    def _extract_text(self, data: Dict) -> str:
        """Return the first choice's message content, or "" if absent."""
        choices = data.get("choices") or []
        if not choices:
            return ""
        message = choices[0].get("message") or {}
        return message.get("content") or ""


def quick_chat(
    base_url: str,
    model: str,
    system_prompt: Optional[str] = None,
    user_prompt: Optional[str] = None,
    api_key: Optional[str] = None,
    **kwargs,
) -> ChatResult:
    """Fire a single chat completion without keeping a client around.

    A throwaway LLMClient is created per call; any extra keyword arguments
    are forwarded to `LLMClient.chat` (temperature, top_p, stream, ...).

    Example:
        quick_chat(
            base_url="http://localhost:1341",
            model="my-model",
            system_prompt="You are a helpful assistant.",
            user_prompt="Summarize this report in one paragraph.",
            temperature=0.5,
            top_p=0.95,
        )
    """

    one_shot = LLMClient(base_url=base_url, api_key=api_key, default_model=model)
    return one_shot.chat(  # type: ignore[arg-type]
        system_prompt=system_prompt,
        user_prompt=user_prompt,
        **kwargs,
    )


# Explicit public API of this module (star-import and documentation surface).
__all__ = ["LLMClient", "ChatResult", "quick_chat"]


