import asyncio
import json
import os
from typing import List, Dict

from fastmcp import Client
from openai import OpenAI


class UserClient:
    """Interactive chat client that answers user questions with the help of
    MCP-provided tools, via an OpenAI-compatible chat-completion API."""

    def __init__(self, script="server.py", model="glm-4-flash"):
        """
        Args:
            script: Path to the MCP server script to connect to.
            model: Chat-completion model name.
        """
        self.model = model
        self.mcp_client = Client(script)
        # SECURITY: prefer the ZHIPUAI_API_KEY environment variable. The
        # hard-coded fallback is kept only for backward compatibility and
        # should be rotated and removed — never commit real keys to source.
        self.openai_client = OpenAI(
            api_key=os.environ.get(
                "ZHIPUAI_API_KEY",
                "ded4eb99623939b8937218eb25510b68.v1KX8IfeCDoHZodN",
            ),
            base_url="https://open.bigmodel.cn/api/paas/v4/",
        )
        # Conversation history; starts with the system prompt.
        self.messages = [
            {"role": "system", "content": "你是一个AI助手，你需要借助工具，回答用户问题"}
        ]
        # OpenAI-format tool specs, populated lazily on the first chat() call.
        self.tools = []

    async def prepare_tools(self):
        """Fetch tool definitions from the MCP server and convert them to the
        OpenAI function-calling schema.

        Must be awaited inside the ``mcp_client`` async context.
        """
        mcp_tools = await self.mcp_client.list_tools()
        return [
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    # OpenAI's function-calling format expects the JSON
                    # schema under "parameters" (the former duplicate
                    # "input_schema" key was non-standard and redundant).
                    "parameters": tool.inputSchema,
                },
            }
            for tool in mcp_tools
        ]

    async def chat(self, messages: List[Dict]):
        """Send `messages` to the model; if the model requests tool calls,
        execute ALL of them via MCP, append the results to the history in
        OpenAI protocol form, and recurse until a plain assistant message
        is produced.

        Args:
            messages: Conversation history (mutated in place).

        Returns:
            The final assistant message object from the completion API.
        """
        async with self.mcp_client:
            if not self.tools:
                self.tools = await self.prepare_tools()

            response = self.openai_client.chat.completions.create(
                model=self.model,
                messages=messages,
                tools=self.tools,
            )
            choice = response.choices[0]
            if choice.finish_reason != "tool_calls":
                return choice.message

            # Record the assistant turn that requested the tool calls, so
            # the follow-up request forms a valid OpenAI conversation
            # (tool results must answer an assistant "tool_calls" message).
            messages.append({
                "role": "assistant",
                "content": choice.message.content or "",
                "tool_calls": [
                    {
                        "id": tc.id,
                        "type": "function",
                        "function": {
                            "name": tc.function.name,
                            "arguments": tc.function.arguments,
                        },
                    }
                    for tc in choice.message.tool_calls
                ],
            })

            # BUG FIX: the original returned from inside this loop, so only
            # the first requested tool was ever executed, and its result was
            # appended with role "assistant" instead of role "tool".
            for tool_call in choice.message.tool_calls:
                result = await self.mcp_client.call_tool(
                    tool_call.function.name,
                    arguments=json.loads(tool_call.function.arguments),
                )
                messages.append({
                    "role": "tool",
                    "tool_call_id": tool_call.id,
                    "content": result.content[0].text,
                })

            # Let the model incorporate the tool results into an answer.
            return await self.chat(messages)

    async def loop(self):
        """Read user questions from stdin until "exit"/"quit", printing the
        model's answer for each one and keeping the full history."""
        async with self.mcp_client:
            while True:
                question = input("User: ")
                if question.lower() in ["exit", "quit"]:
                    print("Exiting chat.")
                    break
                self.messages.append({"role": "user", "content": question})
                response_message = await self.chat(self.messages)
                # BUG FIX: store the assistant's reply so later turns see it.
                self.messages.append(
                    {"role": "assistant", "content": response_message.content})
                print("AI:", response_message.content)


async def main():
    """Entry point: construct the client and run its interactive loop."""
    client = UserClient()
    await client.loop()

# Run the async entry point only when executed as a script (not on import).
if __name__ == "__main__":
    asyncio.run(main())
