import argparse
import asyncio
import os
import json
import sys
import aiohttp
from typing import Optional
from contextlib import AsyncExitStack

from click import argument
from openai import AsyncOpenAI
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# AI client — core implementation: handles communication with the AI model
# and manages message history and tool calls.
# Load environment variables from the .env.dev file at import time.
load_dotenv('.env.dev')

class MCPClient:
    """MCP chat client.

    Connects to an MCP server over stdio, exposes its tools to an
    Ollama-compatible chat API via function calling, and streams the
    model's replies (plain text or tool results) back to the caller.
    """

    def __init__(self):
        """Initialize the MCP client from .env.dev configuration.

        Raises:
            ValueError: if OPENAI_API_KEY is not set in the environment.
        """
        # Load environment variables (idempotent; also done at module level).
        load_dotenv('.env.dev')

        self.openai_api_key = os.getenv("OPENAI_API_KEY")  # API key
        self.base_url = os.getenv("OPENAI_API_URL")        # API base URL
        self.model = os.getenv("OPENAI_API_MODEL")         # model name

        # Validate BEFORE slicing the key for the debug print: the original
        # sliced a possibly-None key first, raising TypeError and masking
        # the intended ValueError below.
        if not self.openai_api_key:
            raise ValueError("❌ 未找到 OpenAI API Key，请在 .env.dev 文件中设置 OPENAI_API_KEY")

        # Debug info (only the first 8 characters of the key are shown).
        print(f"Debug - API Key: {self.openai_api_key[:8]}...")
        print(f"Debug - Base URL: {self.base_url}")
        print(f"Debug - Model: {self.model}")

        # NOTE(review): this AsyncOpenAI client is currently unused —
        # process_query talks to the API via aiohttp directly.
        self.client = AsyncOpenAI(api_key=self.openai_api_key, base_url=self.base_url)
        self.session: Optional[ClientSession] = None
        # Single exit stack (the original created two; the first was discarded).
        self.exit_stack = AsyncExitStack()
        self.messages = []  # chat history for the current query

    def clear_messages(self):
        """Clear the message history."""
        self.messages = []
        print("Debug - Messages history cleared")

    async def connect_to_server(self, server_script_path: str):
        """Connect to an MCP server script (.py or .js) and list its tools.

        Args:
            server_script_path: path to the server entry script.

        Raises:
            ValueError: if the script is neither a .py nor a .js file.
        """
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not (is_python or is_js):
            raise ValueError("服务器脚本必须是 .py 或 .js 文件")

        # The project root must be on PYTHONPATH so the server process can
        # import sibling modules.
        project_root = os.path.abspath(os.getcwd())
        # Run .py servers with the current interpreter, .js servers with node.
        command = sys.executable if is_python else "node"

        # Forward our own --env flag to the spawned server process.
        parser = argparse.ArgumentParser(description='命令行参数')
        parser.add_argument('--env', type=str, default='', help='运行环境')
        args, unknown = parser.parse_known_args()

        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path, f'--env={args.env}'],
            env={"PYTHONPATH": project_root}
        )

        # Launch the MCP server and establish the stdio transport + session.
        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))

        await self.session.initialize()

        # List the tools exposed by the MCP server.
        response = await self.session.list_tools()
        tools = response.tools
        print("\n已连接到服务器，支持以下工具:", [tool.name for tool in tools])

    async def process_query(self, query: str):
        """Process a query with the LLM, invoking MCP tools via function calling.

        Plain replies are streamed back as text chunks; tool-call replies
        trigger the MCP tool and stream the tool result. All output is
        yielded incrementally for real-time display.

        Args:
            query: the user's input text.

        Yields:
            str: display chunks (assistant text, tool results, or errors).
        """
        # Each query starts from a clean history (stateless per request).
        self.clear_messages()

        # System prompt distinguishing plain chat from tool usage.
        self.messages.append({
            "role": "system",
            "content": "你是一个智能助手。当需要查询数据库或使用工具时，请明确说明。对于简单的数学问题或一般性问题，直接回答即可。"
        })
        self.messages.append({"role": "user", "content": query})

        # Fetch the tool list ONCE (the original called list_tools twice
        # per query — once for the debug print, once for the payload).
        tools_response = await self.session.list_tools()
        print(f"Debug - Messages: {self.messages}")
        print(f"Debug - Available tools: {[tool.name for tool in tools_response.tools]}")

        available_tools = [{
            "type": "function",
            "function": {
                "name": tool.name,
                "description": tool.description,
                # The Ollama/OpenAI function-calling format expects the JSON
                # schema under "parameters"; the original sent "input_schema",
                # which the API ignores, so tools were never offered.
                "parameters": tool.inputSchema
            }
        } for tool in tools_response.tools]

        # Ollama chat API (streaming NDJSON).
        print(f"Debug - Sending request to API with tools: {available_tools}")
        async with aiohttp.ClientSession() as http_session:
            try:
                async with http_session.post(
                    f"{self.base_url}/api/chat",
                    json={
                        "model": self.model,
                        "messages": self.messages,
                        "stream": True,
                        "tools": available_tools
                    },
                    # Bare-number timeouts are deprecated in aiohttp 3.x;
                    # use an explicit 30-second total timeout.
                    timeout=aiohttp.ClientTimeout(total=30)
                ) as response:
                    if response.status != 200:
                        error_text = await response.text()
                        print(f"Debug - API Error: {error_text}")
                        yield "抱歉，处理您的请求时遇到问题，请稍后重试。"
                        return

                    full_content = ''  # accumulates the complete streamed reply
                    print("Debug - Starting to receive response")
                    yield f'🤖AI：'

                    async for line in response.content:
                        if not line:
                            continue
                        try:
                            data = json.loads(line)
                        except json.JSONDecodeError as e:
                            print(f"Debug - JSON decode error: {e}")
                            continue
                        print(f"Debug - Received data: {data}")
                        if 'message' not in data:
                            continue
                        message = data['message']
                        if 'tool_calls' in message:
                            # Tool-call path: run each requested MCP tool.
                            yield "\n正在查询相关信息..."
                            for tool_call in message['tool_calls']:
                                function = tool_call['function']
                                print(f"Debug - Tool call: {function}")
                                result = await self.session.call_tool(
                                    function['name'],
                                    function['arguments']
                                )
                                print(f"Debug - Tool result: {result}")
                                yield f"\n查询结果：\n{result}"
                        else:
                            # Plain-text path: stream each content chunk.
                            chunk = message.get('content', '')
                            if chunk:
                                # ACCUMULATE so the history records the full
                                # reply; the original overwrote `content` each
                                # iteration and stored only the last chunk.
                                full_content += chunk
                                yield chunk

                    if full_content:
                        print(f"Debug - Final content: {full_content}")
                        self.messages.append({
                            "role": "assistant",
                            "content": full_content
                        })
            except asyncio.TimeoutError:
                print("Debug - Request timeout after 30 seconds")
                yield "请求超时，请稍后重试"
            except Exception as e:
                print(f"Debug - Unexpected error: {str(e)}")
                yield f"发生错误: {str(e)}"

    # Call path: ai_websocket.py » client.put_query(user_msg) »
    # mcp_client.py (put_query) » process_query » AI model.
    async def put_query(self, query: str):
        """Stream a query's response: print each chunk AND yield it upward."""
        print(f"\n🤖 OpenAI: ", end="", flush=True)
        response = self.process_query(query)  # send user input to the API
        async for value in response:
            print(value, end="", flush=True)
            yield value

    async def chat_loop(self):
        """Run the interactive chat loop until the user types 'quit'."""
        print("\n🤖 MCP 客户端已启动！输入 'quit' 退出")

        while True:
            try:
                # NOTE(review): input() blocks the event loop; acceptable for
                # a single-user CLI tool.
                query = input("\n你: ").strip()
                if query.lower() == 'quit':
                    break

                # Delegate to put_query so the streaming/printing logic lives
                # in one place (the original duplicated it here verbatim).
                async for _ in self.put_query(query):
                    pass

            except Exception as e:
                print(f"\n⚠️ 发生错误: {str(e)}")

    async def cleanup(self):
        """Release all resources held by the exit stack (transport, session)."""
        await self.exit_stack.aclose()


async def main(server_script_path: str):
    """Create a client, connect it to the given MCP server script, and run
    the interactive chat loop, always releasing resources on exit."""
    mcp_client = MCPClient()
    try:
        await mcp_client.connect_to_server(server_script_path)
        await mcp_client.chat_loop()
    finally:
        await mcp_client.cleanup()


if __name__ == "__main__":

    asyncio.run(main('mcp_server.py'))