# deepseek_client.py
# 使用 OpenAI 官方 Python SDK 的兼容模式调用 DeepSeek（function calling）
from __future__ import annotations
import os, json, re
from dotenv import load_dotenv

# 加载项目根目录下的 .env（若存在）
load_dotenv()
from typing import Any, Dict, List
from pydantic import BaseModel

from openai import OpenAI
from registry import ToolRegistry
# 注意：DeepSeek 兼容 OpenAI SDK 协议，需设置 base_url 与 api_key

DEEPSEEK_BASE_URL = os.getenv("DEEPSEEK_BASE_URL", "https://api.deepseek.com/v1")
DEEPSEEK_MODEL = os.getenv("DEEPSEEK_MODEL", "deepseek-chat")
DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY", "")

# 当没有 API Key 时，启用 MOCK 模式以便离线演示
MOCK_MODE = not bool(DEEPSEEK_API_KEY.strip())

# 懒创建：只有 LIVE 模式才实例化客户端
# Lazily-created singleton: only instantiated when running in LIVE mode.
_client = None
def _client_live() -> OpenAI:
    """Return the process-wide DeepSeek client, constructing it on first use."""
    global _client
    if _client is not None:
        return _client
    _client = OpenAI(base_url=DEEPSEEK_BASE_URL, api_key=DEEPSEEK_API_KEY)
    return _client

def pydantic_to_json_schema(model: type[BaseModel]) -> Dict[str, Any]:
    """Render a Pydantic model class as its JSON Schema dict (for tool parameters)."""
    schema = model.model_json_schema()
    return schema

def build_tools_payload(registry: ToolRegistry) -> List[Dict[str, Any]]:
    """Translate every registered tool into the OpenAI function-calling payload shape."""
    return [
        {
            "type": "function",
            "function": {
                "name": tool.name,
                # Fall back to the tool name when no description was provided.
                "description": tool.desc or tool.name,
                "parameters": pydantic_to_json_schema(tool.params_model),
            },
        }
        for tool in registry.list()
    ]

# --- MOCK 模式的启发式工具调用（离线演示） ---
def _mock_tool_calls(user_text: str) -> List[Dict[str, Any]]:
    calls: List[Dict[str, Any]] = []
    m_query = re.findall(r'[“\"]([^”\"]+)[”\"]', user_text)
    query = None
    for a, b in m_query:
        query = a or b
        if query:
            break
    if query:
        calls.append({"name": "search", "arguments": {"query": query, "top_k": 5}})
    elif "向量数据库" in user_text:
        calls.append({"name": "search", "arguments": {"query": "向量数据库", "top_k": 5}})

    add_match = re.search(r'"?(-?\d+(?:\.\d+)?)"?\s*\+\s*"?(-?\d+(?:\.\d+)?)"?', user_text)
    if add_match:
        a, b = add_match.groups()
        calls.append({"name": "add", "arguments": {"a": float(a), "b": b}})

    if not calls:
        calls.append({"name": "search", "arguments": {"query": "RAG", "top_k": 3}})
    return calls

def deepseek_chat(messages: List[Dict[str, Any]], tools: List[Dict[str, Any]]) -> Dict[str, Any]:
    """First round: ask the model to emit tool_calls (heuristic mocks when offline)."""
    if MOCK_MODE:
        # Locate the most recent user turn; its text drives the mock heuristics.
        user_text = ""
        for msg in reversed(messages):
            if msg["role"] == "user":
                user_text = msg["content"]
                break
        mock_calls = [
            {
                "id": f"call_{idx + 1}",
                "type": "function",
                "function": {
                    "name": call["name"],
                    "arguments": json.dumps(call["arguments"], ensure_ascii=False),
                },
            }
            for idx, call in enumerate(_mock_tool_calls(user_text))
        ]
        return {
            "mock": True,
            "choices": [{
                "message": {
                    "role": "assistant",
                    "content": None,
                    "tool_calls": mock_calls,
                }
            }],
        }

    resp = _client_live().chat.completions.create(
        model=DEEPSEEK_MODEL,
        messages=messages,
        tools=tools,
        tool_choice="auto",
        temperature=0.1,
    )
    # Normalize into a plain dict shaped like the raw OpenAI HTTP response,
    # so callers can treat LIVE and MOCK results uniformly.
    message = resp.choices[0].message
    tool_calls = [
        {
            "id": tc.id,
            "type": tc.type,
            "function": {"name": tc.function.name, "arguments": tc.function.arguments},
        }
        for tc in (message.tool_calls or [])
    ]
    return {"choices": [{"message": {
        "role": message.role,
        "content": message.content,
        "tool_calls": tool_calls,
    }}]}

def deepseek_chat_no_tools(messages: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Second round: no tools attached; the model answers from the tool receipts."""
    if MOCK_MODE:
        # Summarize each tool receipt as "name -> content", one line apiece.
        lines = [
            f"{m.get('name', 'tool')} -> {m.get('content')}"
            for m in messages
            if m.get("role") == "tool"
        ]
        final = "本次调用完成。\n" + "\n".join(lines) if lines else "无工具调用。"
        return {"mock": True, "choices": [{"message": {"role": "assistant", "content": final}}]}

    resp = _client_live().chat.completions.create(
        model=DEEPSEEK_MODEL,
        messages=messages,
        temperature=0.2,
    )
    message = resp.choices[0].message
    return {"choices": [{"message": {
        "role": message.role,
        "content": message.content,
    }}]}
