import json
import sys

from mcp import ClientSession, StdioServerParameters
from openai import OpenAI
from mcp.client.sse import sse_client
import asyncio


class MCPClient:
    """Minimal MCP client: connects to an MCP server over SSE and answers
    user prompts with a DeepSeek chat model, invoking MCP tools on demand.
    """

    def __init__(self):
        # Populated by connect_to_server(). Initialized to None so that
        # cleanup() is a safe no-op when the connection was never
        # established or failed partway through.
        self.session = None
        self._stream_context = None
        self._session_context = None
        # DeepSeek exposes an OpenAI-compatible chat-completions endpoint.
        self.openai = OpenAI(
            base_url="https://api.deepseek.com",
            api_key="xxx"  # NOTE(review): hardcoded credential — load from env instead
        )
        self.model = "deepseek-chat"

    async def connect_to_server(self, server_url: str):
        """Open the SSE stream, start the MCP session, and list server tools.

        The async context managers are entered manually (not via `async with`)
        so the connection outlives this call; cleanup() unwinds them.
        """
        self._stream_context = sse_client(server_url)
        stream = await self._stream_context.__aenter__()

        self._session_context = ClientSession(*stream)
        self.session = await self._session_context.__aenter__()

        await self.session.initialize()
        # The original message said "stdio", but this transport is SSE.
        print("Initialized SSE client!")

        response = await self.session.list_tools()
        tools = response.tools
        print(f"Connected to server with tools {tools}")

    async def cleanup(self):
        """Unwind the session/stream contexts opened by connect_to_server().

        Safe to call at any time, including after a failed or skipped connect.
        """
        if self._session_context is not None:
            await self._session_context.__aexit__(None, None, None)
        if self._stream_context is not None:
            await self._stream_context.__aexit__(None, None, None)

    async def process_request(self, content: str) -> str:
        """Send one user message to the model, execute any requested MCP tool
        calls, and return the final assistant text.
        """
        messages = [
            {
                "role": "user",
                "content": content
            }
        ]

        # Advertise the server's tools in OpenAI function-calling format.
        # Only "type" and "function" are valid top-level keys here; the
        # original also sent a stray top-level "name".
        response = await self.session.list_tools()
        can_be_used_tools = [
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": tool.inputSchema,
                },
            }
            for tool in response.tools
        ]

        res = self.openai.chat.completions.create(
            model=self.model,
            messages=messages,
            max_tokens=512,
            tools=can_be_used_tools
        )
        first_response = res.choices[0]
        final_text = []

        if first_response.finish_reason == "tool_calls":
            model_message = first_response.message
            # Echo the assistant turn back as plain dicts: the SDK's
            # tool-call objects are not guaranteed to serialize on resend.
            messages.append({
                "role": "assistant",
                "content": model_message.content or "",
                "tool_calls": [
                    {
                        "id": tc.id,
                        "type": "function",
                        "function": {
                            "name": tc.function.name,
                            "arguments": tc.function.arguments,
                        },
                    }
                    for tc in model_message.tool_calls
                ],
            })

            # Execute EVERY requested tool call. The original ran only the
            # first one, leaving later tool_call_ids without a "tool" reply,
            # which the chat-completions API rejects on the follow-up call.
            for tool_call in model_message.tool_calls:
                tool_name = tool_call.function.name
                tool_args = json.loads(tool_call.function.arguments)
                result = await self.session.call_tool(tool_name, tool_args)

                messages.append({
                    "role": "tool",
                    "content": result.content[0].text,
                    "tool_call_id": tool_call.id
                })
                final_text.append(f"Calling tool {tool_name} with args {tool_args}")

            # Second round: let the model turn the tool results into prose.
            res2 = self.openai.chat.completions.create(
                model=self.model,
                messages=messages,
                max_tokens=512,
                tools=can_be_used_tools
            )
            final_text.append(res2.choices[0].message.content)

        elif first_response.finish_reason == "stop":
            # No tool needed — return the model's direct answer.
            final_text.append(first_response.message.content)

        return "\n".join(final_text)

    async def chat_loop(self):
        """Interactive REPL: read a line, answer it; type 'quit' to exit."""
        print("MCP client started!")
        print("Please input your content:")  # fixed typo: "Pleaset"
        while True:
            try:
                # NOTE(review): input() blocks the event loop; acceptable for
                # a single-session CLI, consider asyncio.to_thread otherwise.
                content = input("\nYour content: ").strip()
                if content == "quit":
                    break
                res = await self.process_request(content)
                print("\n" + res)
            except Exception as e:
                # Top-level REPL boundary: report the error, keep looping.
                print(f"Error {e}")


async def main():
    """CLI entry point: connect to the MCP server URL given on argv and chat.

    Exits with status 1 when no server URL is supplied. The finally block
    guarantees the client's contexts are unwound even if connect or the
    chat loop raises.
    """
    # Validate arguments BEFORE constructing the client, so a usage error
    # doesn't needlessly build the OpenAI client first (the original did).
    if len(sys.argv) < 2:
        print("Please input the server url")
        sys.exit(1)
    client = MCPClient()
    try:
        await client.connect_to_server(server_url=sys.argv[1])
        await client.chat_loop()
    finally:
        await client.cleanup()


if __name__ == "__main__":
    # Script entry point: drive the async client under a fresh event loop.
    asyncio.run(main())
