# host.py
import asyncio
import json
import os
from dotenv import load_dotenv, find_dotenv

from fastmcp import Client
from fastmcp.client.transports import StreamableHttpTransport
from openai import AsyncOpenAI

# ---- Minimal runtime configuration ----
# Load variables from a .env file if one exists (no-op otherwise).
_ = load_dotenv(find_dotenv())
# MCP server endpoint (streamable-HTTP transport).
MCP_URL = os.getenv("MCP_URL", "http://127.0.0.1:8000/mcp")
# DeepSeek credentials and model; the API is OpenAI-compatible, so the
# AsyncOpenAI client is pointed at DeepSeek's base URL below.
DEEPSEEK_API_KEY  = os.getenv("DEEPSEEK_API_KEY")     
DEEPSEEK_MODEL    = os.getenv("DEEPSEEK_MODEL", "deepseek-chat")
DEEPSEEK_BASE_URL = os.getenv("DEEPSEEK_BASE_URL", "https://api.deepseek.com")

# Shared clients: one LLM client and one MCP transport for the whole process.
llm = AsyncOpenAI(api_key=DEEPSEEK_API_KEY, base_url=DEEPSEEK_BASE_URL)
transport = StreamableHttpTransport(url=MCP_URL)

# System prompt: the model must compute only via server tools (add/sub/mul/div),
# never in its own reply.
SYSTEM = "你只能通过服务器工具(add/sub/mul/div)完成计算，禁止在回复中自行计算。"

# ---- 1) Dynamically discover server tools -> convert to the LLM tools schema ----
async def to_openai_tools():
    """Fetch the MCP server's live tool list and convert each entry into an
    OpenAI-style ``tools`` item (``{"type": "function", ...}``)."""
    async with Client(transport) as client:
        discovered = await client.list_tools()  # current tool inventory from the server

    def _plain_schema(schema):
        # Normalize the input schema to a plain dict: pydantic v2 exposes
        # model_dump(), v1 exposes dict(); anything else passes through as-is.
        if hasattr(schema, "model_dump"):
            return schema.model_dump()
        if hasattr(schema, "dict"):
            return schema.dict()
        return schema

    return [
        {
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description or "",
                "parameters": _plain_schema(
                    getattr(tool, "inputSchema", {"type": "object", "properties": {}})
                ),
            },
        }
        for tool in discovered
    ]

# ---- 2) Call a tool over MCP (all real computation happens on the server) ----
async def call_mcp(name, args):
    """Invoke tool *name* with *args* on the MCP server.

    Returns the structured ``data`` payload when present, otherwise the
    first text content item, otherwise an empty string.
    """
    async with Client(transport) as client:
        result = await client.call_tool(name, args)

    data = getattr(result, "data", None)
    if data is not None:
        return data

    content = getattr(result, "content", None)
    if content and getattr(content[0], "text", None):
        return content[0].text

    return ""

# ---- 3) Minimal chat loop: LLM plans -> MCP tools -> results fed back -> until no tool calls ----
async def chat_once(user_text: str) -> str:
    """Run one user turn: the model plans tool calls, we execute them over
    MCP, feed results back, and loop until the model answers in plain text.

    Args:
        user_text: The user's question for this turn.

    Returns:
        The model's final text reply ("" when the model sent no content).
    """
    tools = await to_openai_tools()
    messages = [
        # Use the module-level SYSTEM prompt; the previous inline duplicate
        # had already drifted out of sync with it.
        {"role": "system", "content": SYSTEM},
        {"role": "user", "content": user_text},
    ]

    while True:
        # 1) Let the model decide whether to call a tool.
        resp = await llm.chat.completions.create(
            model=DEEPSEEK_MODEL,
            messages=messages,
            tools=tools,
            tool_choice="auto",
            temperature=0,
        )
        msg = resp.choices[0].message

        # 2) No tool calls -> return the model's text directly.
        if not getattr(msg, "tool_calls", None):
            return msg.content or ""

        # 3) First append the assistant message that *contains* the tool_calls
        #    to the history (crucial: omitting it triggers a 400 error on the
        #    next request).
        tool_calls_payload = [
            {
                "id": tc.id,
                "type": "function",
                "function": {
                    "name": tc.function.name,
                    "arguments": tc.function.arguments or "{}",
                },
            }
            for tc in msg.tool_calls
        ]
        messages.append({
            "role": "assistant",
            "content": None,        # None/empty string keeps it distinct from tool results
            "tool_calls": tool_calls_payload,
        })

        # 4) Execute every tool call and append each result as role=tool
        #    (tool_call_id must match the ids recorded above).
        async with Client(transport) as c:
            for tc in msg.tool_calls:
                name = tc.function.name
                args = json.loads(tc.function.arguments or "{}")
                result = await c.call_tool(name, args)

                # Prefer the structured data payload; fall back to first text item.
                if getattr(result, "data", None) is not None:
                    # ensure_ascii=False keeps non-ASCII tool output readable
                    # to the model instead of \uXXXX escapes.
                    content = json.dumps(result.data, ensure_ascii=False)
                elif getattr(result, "content", None):
                    items = result.content
                    content = items[0].text if items and getattr(items[0], "text", None) else ""
                else:
                    content = ""

                messages.append({
                    "role": "tool",
                    "tool_call_id": tc.id,  # must exactly match an assistant.tool_calls id
                    "content": content,
                })
        # Next round: the model continues from the tool results (and may call tools again).


# ---- 4) Tiny REPL ----
async def main():
    """Read questions from stdin and print the model's answer for each,
    until EOF or Ctrl+C."""
    print("MCP 计算器（HTTP/动态工具）已就绪：仅支持 + - * /。Ctrl+C 退出。")
    while True:
        try:
            question = input("> ").strip()
        except (EOFError, KeyboardInterrupt):
            print("\nbye")
            break
        # Skip empty lines; otherwise run one full tool-calling turn.
        if question:
            print(await chat_once(question))

if __name__ == "__main__":
    # Script entry point: start the event loop and run the REPL.
    asyncio.run(main())
