import asyncio
from typing import Optional
from contextlib import AsyncExitStack

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

from zhipuai import ZhipuAI
import os
from dotenv import load_dotenv
import json
from config import SYSTEM_PROMPT
import time

load_dotenv()  # load environment variables from .env


# Shared module-level text buffers bridging this chat loop and an external
# UI layer: print1() appends everything it prints to out_buf, and input1()
# busy-polls in_buf until some external writer fills it with the next query.
out_buf = ""
in_buf = ""
# NOTE(review): debug_mode is never read in this chunk — presumably consumed
# elsewhere; confirm before removing.
debug_mode = True


def clean_out_buf():
    """Discard any accumulated output by resetting the shared out_buf."""
    global out_buf
    out_buf = ""


def print1(s):
    """Echo *s* to stdout and also record it (newline-terminated) in out_buf."""
    global out_buf
    print(s)
    out_buf = out_buf + s + '\n'


def input1(s):
    """Show prompt *s*, then block until the shared in_buf is non-empty.

    Busy-polls in_buf every 0.2 s; once filled, consumes its contents,
    clears the buffer, and returns the captured text.
    """
    global in_buf
    print1(s)
    while not in_buf:
        time.sleep(0.2)
    captured, in_buf = in_buf, ""
    return captured


class MCPClient:
    """Bridge between a ZhipuAI chat model and an MCP tool server.

    The server is launched as a subprocess over a stdio transport; the
    model is given the server's tools in GLM function-calling format and
    queries loop through tool calls until the model answers directly.
    """

    def __init__(self):
        # Initialize session and client objects
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        self.zhipu_client = ZhipuAI(api_key=os.getenv("ZHIPUAI_API_KEY", ""))
        self.model_name = "glm-4-plus"
        # Safety cap on model <-> tool round trips per query, to avoid
        # unbounded tool-call loops.
        self.max_loop = 7

    async def connect_to_server(self, server_script_path: str):
        """Connect to an MCP server

        Args:
            server_script_path: Path to the server script (.py or .js)

        Raises:
            ValueError: if the path is neither a .py nor a .js file.
        """
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not (is_python or is_js):
            raise ValueError("Server script must be a .py or .js file")

        command = "python" if is_python else "node"
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None
        )

        # Both the transport and the session are registered on the exit
        # stack so cleanup() tears them down in reverse order.
        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))

        await self.session.initialize()

        # List available tools
        response = await self.session.list_tools()
        tools = response.tools
        print1(f"\nConnected to server with tools: {[tool.name for tool in tools]}")

    async def single_process(self, messages: list, glm_tools: list, final_text: list) -> bool:
        """Run one model turn against *messages*.

        Returns:
            True when the reply contains no tool calls (final answer was
            appended to *final_text*); False when tool calls were executed
            and their results appended to *messages*, so the caller should
            loop for another turn.
        """
        response = self.zhipu_client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            tools=glm_tools
        )

        reply = response.choices[0].message
        if not reply.tool_calls:
            # No tools requested: this is the final answer.
            final_text.append(reply.content)
            return True  # end

        # Record the assistant turn ONCE, then execute EVERY requested tool
        # call.  (Previously the assistant message was appended inside the
        # loop and the function returned after the first call, silently
        # dropping any additional tool calls in the same response.)
        messages.append({
            "role": "assistant",
            "content": str(reply.content)
        })
        for tool_call in reply.tool_calls:
            tool_name = tool_call.function.name
            tool_args = json.loads(tool_call.function.arguments)
            print1(f"Calling tool {tool_name} with args {tool_args}")
            # Execute tool call
            result = await self.session.call_tool(tool_name, tool_args)
            print1(f"result is {result}")
            # NOTE(review): tool results are usually sent back with role
            # "tool"; kept as "user" (with tool_call_id) to preserve the
            # existing, working exchange format — confirm against the
            # ZhipuAI API before changing.
            messages.append({
                "role": "user",
                "content": str([text_content.text for text_content in result.content]),
                "tool_call_id": tool_call.id
            })
        return False

    async def process_query(self, query: str) -> str:
        """Process a query using ZhipuAI and available tools"""
        messages = [
            {
                "role": "system",
                "content": SYSTEM_PROMPT
            },
            {
                "role": "user",
                "content": query + "如果要读取表格的话，不要只根据前几行做出回答"
            }
        ]

        # Advertise the server's MCP tools to the model in GLM/OpenAI
        # function-calling format.
        response = await self.session.list_tools()
        glm_tools = [{
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                "parameters": tool.inputSchema
            }
        } for tool in response.tools]

        # Loop model turns until the model answers without tool calls,
        # bounded by max_loop.
        final_text = []
        for _ in range(self.max_loop):
            done = await self.single_process(messages, glm_tools, final_text)
            if done:
                break

        return "\n".join(final_text)

    async def chat_loop(self):
        """Run an interactive chat loop until the user types 'quit'."""
        print1("\nMCP Client Started!")
        print1("Type your queries or 'quit' to exit.")

        while True:
            try:
                query = input1("\nQuery: ").strip()
                # Drop the prompt/echo text so out_buf only carries the
                # response for this query.
                clean_out_buf()

                if query.lower() == 'quit':
                    break

                response = await self.process_query(query)
                print1("\n" + response)

            # Broad catch is deliberate at this top-level loop boundary:
            # report the error and keep the session alive.
            except Exception as e:
                print1(f"\nError: {str(e)}")

    async def cleanup(self):
        """Clean up resources"""
        # Closes the ClientSession and the stdio transport (reverse order).
        await self.exit_stack.aclose()


async def main():
    """Build a client, connect it to the demo file-reading server, and chat.

    The server path is hard-coded; an argv-based variant existed previously.
    Cleanup always runs, even if connecting or chatting fails.
    """
    client = MCPClient()
    try:
        await client.connect_to_server('../file_read_demo1/file_read.py')
        await client.chat_loop()
    finally:
        await client.cleanup()

if __name__ == "__main__":
    # NOTE(review): sys is unused here — it served the argv-based entry
    # that is now commented out inside main(); confirm before removing.
    import sys
    asyncio.run(main())
