"""
通义千问（Qwen）OpenAI 兼容接口封装

功能：
- 统一创建 OpenAI 兼容客户端（阿里云百炼 DashScope）
- 提供非流式与流式对话接口，便于在项目中复用
- 支持通过环境变量或本地硬编码的方式配置 API Key 与地域 Base URL

使用建议：
- 优先使用环境变量配置密钥（DASHSCOPE_API_KEY），避免将密钥写入仓库
- 若确需本地硬编码，请仅在本机修改 HARDCODED_API_KEY，并避免提交到版本库
"""
import os
from typing import Dict, Iterable, List, Optional, Any


_client = None
# 本地硬编码密钥（仅供本机调试使用）。强烈建议使用环境变量代替：DASHSCOPE_API_KEY
# 注意：请避免将真实密钥提交到版本库或日志中。
HARDCODED_API_KEY = "sk-17ba9a091cb849f6bc800817fd91e0c9"


def _resolve_base_url(explicit: Optional[str] = None) -> str:
    """Resolve the DashScope base URL to use.

    Precedence: explicit argument > DASHSCOPE_BASE_URL environment variable >
    regional default derived from DASHSCOPE_REGION.
    - Beijing region default:   https://dashscope.aliyuncs.com/compatible-mode/v1
    - Singapore region default: https://dashscope-intl.aliyuncs.com/compatible-mode/v1
    """
    # First non-empty candidate wins; values are stripped of stray whitespace.
    for candidate in (explicit, os.getenv("DASHSCOPE_BASE_URL")):
        if candidate:
            return candidate.strip()
    intl_aliases = {"intl", "international", "sg", "singapore"}
    if os.getenv("DASHSCOPE_REGION", "").lower().strip() in intl_aliases:
        return "https://dashscope-intl.aliyuncs.com/compatible-mode/v1"
    return "https://dashscope.aliyuncs.com/compatible-mode/v1"


def get_client(api_key: Optional[str] = None, base_url: Optional[str] = None):
    """Get (or lazily create) the OpenAI-compatible client singleton.

    Key resolution priority: explicit ``api_key`` argument > environment
    variable ``DASHSCOPE_API_KEY`` > ``HARDCODED_API_KEY`` fallback.  The
    environment variable deliberately outranks the hard-coded fallback so
    deployments can configure the key without editing this file (this matches
    the module-level guidance to prefer environment variables).

    NOTE: the client is cached after the first successful call; subsequent
    calls return the cached instance and ignore their arguments.

    Raises:
        RuntimeError: if no API key can be resolved from any source.
    """
    global _client
    if _client is not None:
        return _client
    # Resolve and validate the key BEFORE importing the SDK, so a missing key
    # fails fast with a clear RuntimeError rather than an unrelated
    # ImportError when the `openai` package is not installed.
    key = (api_key or os.getenv("DASHSCOPE_API_KEY") or HARDCODED_API_KEY or "").strip()
    if not key:
        raise RuntimeError("Missing DASHSCOPE_API_KEY")
    from openai import OpenAI  # type: ignore
    _client = OpenAI(api_key=key, base_url=_resolve_base_url(base_url))
    return _client


def chat(
    messages: List[Dict[str, str]],
    model: str = "qwen-plus",
    stream: bool = False,
    **kwargs,
) -> str | Iterable[str]:
    """Chat wrapper supporting both non-streaming and streaming modes.

    Args:
        messages: OpenAI-style message dicts (``role``/``content``).
        model: Model name, e.g. ``qwen-plus``.
        stream: If True, return an iterator of text fragments instead of a
            single string (useful for real-time output).
        **kwargs: Extra parameters forwarded verbatim to
            ``chat.completions.create`` (temperature, stream_options, ...).

    Returns:
        The full response text (non-streaming), or an iterator yielding text
        fragments as they arrive (streaming).
    """
    client = get_client()
    if not stream:
        completion = client.chat.completions.create(
            model=model,
            messages=messages,
            **kwargs,
        )
        return completion.choices[0].message.content
    # Streaming mode: read incremental deltas chunk by chunk.
    chunks = client.chat.completions.create(
        model=model,
        messages=messages,
        stream=True,
        **kwargs,
    )
    def _iter() -> Iterable[str]:
        for c in chunks:
            # Some chunks carry an empty `choices` list (notably the trailing
            # usage chunk when stream_options={"include_usage": True} is
            # passed); skip them instead of raising IndexError.
            if not c.choices:
                continue
            choice = c.choices[0]
            delta = getattr(choice, "delta", None)
            if delta is not None:
                content = getattr(delta, "content", None)
                if content:
                    yield content
                continue
            # Fallback for providers that put full messages in stream chunks.
            msg = getattr(choice, "message", None)
            if msg is not None:
                content = getattr(msg, "content", None)
                if content:
                    yield content
    return _iter()


def chat_with_usage(
    messages: List[Dict[str, str]],
    model: str = "qwen-plus",
    **kwargs,
) -> Dict[str, Any]:
    """Non-streaming chat call that also reports token usage.

    Returns a dict with ``content`` (the response text) and ``usage``
    (``prompt_tokens`` / ``completion_tokens`` / ``total_tokens``, each
    defaulting to 0 when the provider omits usage information).
    """
    completion = get_client().chat.completions.create(
        model=model,
        messages=messages,
        **kwargs,
    )
    usage_obj = getattr(completion, "usage", None)

    def _tokens(field: str) -> int:
        # Missing usage object or missing field both degrade to 0.
        return getattr(usage_obj, field, 0) if usage_obj is not None else 0

    return {
        "content": completion.choices[0].message.content,
        "usage": {
            "prompt_tokens": _tokens("prompt_tokens"),
            "completion_tokens": _tokens("completion_tokens"),
            "total_tokens": _tokens("total_tokens"),
        },
    }


def simple_chat(
    prompt: str,
    system: Optional[str] = None,
    model: str = "qwen-plus",
    stream: bool = False,
    **kwargs,
) -> str | Iterable[str]:
    """Convenience wrapper: optional system prompt plus a single user turn."""
    conversation: List[Dict[str, str]] = (
        [{"role": "system", "content": system}] if system else []
    )
    conversation.append({"role": "user", "content": prompt})
    return chat(messages=conversation, model=model, stream=stream, **kwargs)


def simple_chat_with_usage(
    prompt: str,
    system: Optional[str] = None,
    model: str = "qwen-plus",
    **kwargs,
) -> Dict[str, Any]:
    """Single-turn convenience wrapper around chat_with_usage()."""
    conversation: List[Dict[str, str]] = (
        [{"role": "system", "content": system}] if system else []
    )
    conversation.append({"role": "user", "content": prompt})
    return chat_with_usage(messages=conversation, model=model, **kwargs)


# Public API of this module; helpers prefixed with "_" are internal.
__all__ = ["get_client", "chat", "simple_chat", "chat_with_usage", "simple_chat_with_usage"]