import os
from typing import Dict, Iterable, List, Optional, Any


# Module-level cached client instance; created lazily by get_client().
_client = None


def _resolve_base_url(explicit: Optional[str] = None) -> str:
    if explicit:
        return explicit.strip()
    env_url = os.getenv("LMSTUDIO_BASE_URL")
    if env_url:
        return env_url.strip()
    return "http://localhost:1234/v1"


def get_client(api_key: Optional[str] = None, base_url: Optional[str] = None):
    """Return a module-cached OpenAI client configured for LM Studio.

    Bug fix: previously, after the first call created the cache, later
    calls returned the cached client even when the caller passed an
    explicit ``api_key`` or ``base_url`` — the arguments were silently
    ignored. Now explicit arguments force a rebuild; argument-free calls
    still reuse the cached instance.

    Args:
        api_key: Explicit API key; falls back to the LMSTUDIO_API_KEY
            environment variable, then the conventional "lm-studio" dummy key.
        base_url: Explicit server base URL; see _resolve_base_url for fallbacks.
    """
    global _client
    # Reuse the cache only when the caller did not request a specific config.
    if _client is not None and api_key is None and base_url is None:
        return _client
    # Deferred import so the module can be loaded without `openai` installed.
    from openai import OpenAI  # type: ignore

    key = (api_key or os.getenv("LMSTUDIO_API_KEY") or "lm-studio").strip()
    _client = OpenAI(api_key=key, base_url=_resolve_base_url(base_url))
    return _client


def chat(
    messages: List[Dict[str, str]],
    model: str = "meta-llama-3.1-8b-instruct",
    stream: bool = False,
    **kwargs,
) -> str | Iterable[str]:
    """Run a chat completion against the configured server.

    Args:
        messages: OpenAI-style message dicts ({"role": ..., "content": ...}).
        model: Model identifier as exposed by the server.
        stream: When True, return a lazy iterator of text fragments instead
            of the complete response string.
        **kwargs: Forwarded verbatim to ``chat.completions.create``.

    Returns:
        The assistant message content (non-streaming) — may be None if the
        server returns no content — or an iterator of text deltas (streaming).
    """
    client = get_client()
    if not stream:
        completion = client.chat.completions.create(
            model=model,
            messages=messages,
            **kwargs,
        )
        return completion.choices[0].message.content
    chunks = client.chat.completions.create(
        model=model,
        messages=messages,
        stream=True,
        **kwargs,
    )

    def _iter() -> Iterable[str]:
        for c in chunks:
            # Bug fix: some OpenAI-compatible servers emit chunks with an
            # empty ``choices`` list (e.g. the trailing usage-only chunk when
            # stream_options={"include_usage": True}); the previous unguarded
            # ``c.choices[0]`` raised IndexError on those.
            if not c.choices:
                continue
            choice = c.choices[0]
            delta = getattr(choice, "delta", None)
            if delta is not None:
                content = getattr(delta, "content", None)
                if content:
                    yield content
                continue
            # Fallback for backends that put streamed text on ``message``
            # instead of ``delta``.
            msg = getattr(choice, "message", None)
            if msg is not None:
                content = getattr(msg, "content", None)
                if content:
                    yield content

    return _iter()


def chat_with_usage(
    messages: List[Dict[str, str]],
    model: str = "meta-llama-3.1-8b-instruct",
    **kwargs,
) -> Dict[str, Any]:
    """Run a non-streaming chat completion and report token usage.

    Returns:
        A dict with "content" (the assistant reply text) and "usage"
        (prompt/completion/total token counts; zeros when the server
        omits usage information).
    """
    response = get_client().chat.completions.create(
        model=model,
        messages=messages,
        **kwargs,
    )
    usage_obj = getattr(response, "usage", None)
    usage: Dict[str, int] = {}
    for field in ("prompt_tokens", "completion_tokens", "total_tokens"):
        usage[field] = getattr(usage_obj, field, 0) if usage_obj is not None else 0
    return {"content": response.choices[0].message.content, "usage": usage}


def simple_chat(
    prompt: str,
    system: Optional[str] = None,
    model: str = "meta-llama-3.1-8b-instruct",
    stream: bool = False,
    **kwargs,
) -> str | Iterable[str]:
    """Convenience wrapper: build a one-shot conversation and call chat().

    An optional system prompt is prepended ahead of the user message.
    """
    conversation: List[Dict[str, str]] = (
        [{"role": "system", "content": system}] if system else []
    )
    conversation.append({"role": "user", "content": prompt})
    return chat(messages=conversation, model=model, stream=stream, **kwargs)


def simple_chat_with_usage(
    prompt: str,
    system: Optional[str] = None,
    model: str = "meta-llama-3.1-8b-instruct",
    **kwargs,
) -> Dict[str, Any]:
    """Like simple_chat(), but also returns token-usage counts."""
    prefix: List[Dict[str, str]] = (
        [{"role": "system", "content": system}] if system else []
    )
    return chat_with_usage(
        messages=prefix + [{"role": "user", "content": prompt}],
        model=model,
        **kwargs,
    )


# Explicit public API surface of this module.
__all__ = ["get_client", "chat", "simple_chat", "chat_with_usage", "simple_chat_with_usage"]