# Pure-MCP multi-server client
import asyncio
import os
import json
import logging
import shutil
from contextlib import AsyncExitStack
from typing import Optional, Any, Dict, List

import httpx
from dotenv import load_dotenv
from openai import OpenAI
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Configure log format
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s"
)

# Environment configuration
class Configuration:
    """Load settings from .env and servers_config.json.

    Attributes:
        api_key:  DeepSeek API key (required; read from DEEPSEEK_API_KEY).
        base_url: Optional API base URL (read from BASE_URL).
        model:    Model name (read from MODEL, defaults to "deepseek-chat").

    Raises:
        ValueError: If DEEPSEEK_API_KEY is missing or blank.
    """
    def __init__(self) -> None:
        load_dotenv()
        # getenv may return None when the variable is unset; guard before
        # .strip() so we reach the explicit ValueError below instead of
        # crashing with AttributeError.
        self.api_key: str = (os.getenv("DEEPSEEK_API_KEY") or "").strip()
        self.base_url: str | None = os.getenv("BASE_URL")
        self.model: str = os.getenv("MODEL") or "deepseek-chat"
        if not self.api_key:
            raise ValueError("未找到DEEPSEEK_API_KEY，请在.env文件中配置")

    @staticmethod
    def load_config(file_path: str) -> Dict[str, Any]:
        """Parse a JSON config file and return its contents as a dict.

        Raises:
            FileNotFoundError: If the path does not exist.
            json.JSONDecodeError: If the file is not valid JSON.
        """
        with open(file_path, "r", encoding="utf-8") as f:
            return json.load(f)

class Tool:
    """Lightweight wrapper around an MCP tool definition (name, description, JSON schema)."""

    def __init__(self, name: str, description: str, input_schema: Dict[str, Any]) -> None:
        self.name: str = name
        self.description: str = description
        self.input_schema: Dict[str, Any] = input_schema

    def format_for_llm(self) -> str:
        """Render a plain-text description of this tool for an LLM prompt."""
        required = set(self.input_schema.get("required", []))
        arg_lines = []
        for param, info in self.input_schema.get("properties", {}).items():
            marker = " (required)" if param in required else ""
            arg_lines.append(f"- {param}: {info.get('description', '')}{marker}")
        return f"""
    Tool: {self.name}
    Description: {self.description}
    Arguments: {chr(10).join(arg_lines)}
    """

class Server:
    """Manage a single MCP server connection and its tool calls."""
    def __init__(self, name: str, config: Dict[str, Any]) -> None:
        self.name: str = name
        self.config: Dict[str, Any] = config
        self.session: Optional[ClientSession] = None
        self.exit_stack: AsyncExitStack = AsyncExitStack()
        # Serializes cleanup so concurrent callers never close the stack twice.
        self._cleanup_lock = asyncio.Lock()

    async def initialize(self) -> None:
        """Spawn the MCP server process over stdio and initialize a ClientSession.

        Raises:
            ValueError: If the config has no usable "command".
        """
        # Use .get() so a missing "command" key reaches the explicit
        # ValueError below instead of raising a bare KeyError.
        command = self.config.get("command")
        if command is None:
            raise ValueError("command不能为空")

        server_params = StdioServerParameters(
            command=command,
            args=self.config["args"],
            # Overlay the configured env on top of the current process env.
            env={**os.environ, **self.config["env"]} if self.config.get("env") else None
        )

        try:
            stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
            read_stream, write_stream = stdio_transport
            session = await self.exit_stack.enter_async_context(
                ClientSession(read_stream, write_stream)
            )
            await session.initialize()
            self.session = session
        except Exception as e:
            logging.error(f"Server初始化错误 {self.name}: {e}")
            await self.cleanup()
            raise

    async def list_tools(self) -> List[Any]:
        """Return the server's available tools wrapped as Tool objects.

        Raises:
            RuntimeError: If initialize() was never (successfully) called.
        """
        if not self.session:
            raise RuntimeError(f"Server {self.name} 没有初始化")
        tools_response = await self.session.list_tools()
        tools = []
        # The response iterates as (key, value) pairs; the "tools" entry
        # holds the actual tool definitions.
        for item in tools_response:
            if isinstance(item, tuple) and item[0] == "tools":
                for tool in item[1]:
                    tools.append(Tool(name=tool.name, description=tool.description, input_schema=tool.inputSchema))
        return tools

    async def execute_tool(self, tool_name: str, arguments: Dict[str, Any], retries: int = 2, delay: int = 5) -> Any:
        """Call a tool on this server, retrying on failure.

        Args:
            tool_name: Tool name as reported by the server.
            arguments: Arguments matching the tool's input schema.
            retries:   Maximum number of attempts; must be >= 1.
            delay:     Seconds to wait between attempts.

        Raises:
            RuntimeError: If the server is not initialized.
            ValueError:   If retries < 1 (the original code silently
                          returned None in that case).
            Exception:    The last tool error once retries are exhausted.
        """
        if not self.session:
            raise RuntimeError(f"execute_tool()函数报错：Server {self.name} 没有初始化")
        if retries < 1:
            raise ValueError("retries must be >= 1")
        attempt = 0
        while attempt < retries:
            try:
                logging.info(f"Execution {tool_name} on server {self.name}")
                result = await self.session.call_tool(tool_name, arguments)
                return result
            except Exception as e:
                attempt += 1
                logging.warning(f"execute_tool()函数报错：Error executing tool : {e}")
                if attempt < retries:
                    logging.info(f"execute_tool()函数报错：Retrying in {delay}")
                    await asyncio.sleep(delay)
                else:
                    logging.error("execute_tool()函数报错：Max retries reached. Failed")
                    raise

    async def cleanup(self) -> None:
        """Release this server's resources; safe to call more than once."""
        async with self._cleanup_lock:
            try:
                await self.exit_stack.aclose()
                self.session = None
            except Exception as e:
                # Include the actual error — the original log line dropped it.
                logging.error(f"cleanup()函数报错：Error during cleanup of server {self.name}: {e}")

class LLMClient:
    """Thin wrapper around an OpenAI-compatible chat-completions endpoint."""

    def __init__(self, api_key: str, base_url: Optional[str], model: str) -> None:
        self.client = OpenAI(api_key=api_key, base_url=base_url)
        self.model = model

    def get_response(self, messages: List[Dict[str, Any]], tools: Optional[List[Dict[str, Any]]] = None):
        """
        Send messages to the LLM API, optionally with tool definitions
        (function-calling format), and return the raw response object.
        """
        try:
            return self.client.chat.completions.create(
                model=self.model,
                messages=messages,
                tools=tools,
            )
        except Exception as e:
            logging.error(f"get_response()函数错误：Error during LLM call: {e}")
            raise

class MultiServerMCPClient:
    """Orchestrate several MCP servers behind a single function-calling LLM chat."""
    def __init__(self) -> None:
        self.exit_stack = AsyncExitStack()
        config = Configuration()
        self.openai_api_key = config.api_key
        self.base_url = config.base_url
        self.model = config.model

        self.client = LLMClient(api_key=self.openai_api_key, base_url=self.base_url, model=self.model)
        # server name -> connected Server instance
        self.servers: Dict[str, Server] = {}
        # server name -> that server's Tool objects
        self.tools_by_server: Dict[str, List[Any]] = {}
        # Aggregated tool schemas in OpenAI function-calling format
        self.all_tools: List[Dict[str, Any]] = []

    async def connect_to_servers(self, servers_config: Dict[str, Any]) -> None:
        """Start every server listed in the config and collect their tools.

        Tool names are prefixed ``serverName_toolName`` so the LLM's tool
        calls can later be routed back to the owning server.
        """
        mcp_servers = servers_config.get("mcpServers", {})
        for server_name, srv_config in mcp_servers.items():
            server = Server(server_name, srv_config)
            await server.initialize()
            self.servers[server_name] = server
            tools = await server.list_tools()
            self.tools_by_server[server_name] = tools
            for tool in tools:
                # Unified rename: serverName_toolName
                function_name = f"{server_name}_{tool.name}"
                self.all_tools.append({
                    "type": "function",
                    "function": {
                        "name": function_name,
                        "description": tool.description,
                        "input_schema": tool.input_schema
                    }
                })

        # Convert the internal "input_schema" layout to OpenAI "parameters".
        self.all_tools = await self.transform_json(self.all_tools)
        logging.info("已连接到下列服务器：")
        for name in self.servers:
            srv_cfg = mcp_servers[name]
            logging.info(f" - {name}: command={srv_cfg['command']}, args={srv_cfg['args']}")
        logging.info("汇总的工具：")
        for t in self.all_tools:
            logging.info(f" - {t['function']['name']}")

    async def transform_json(self, json_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Rewrite tool entries from the internal "input_schema" layout to the
        OpenAI function-calling "parameters" layout.

        Malformed entries (wrong type, missing name/description) are dropped.
        """
        result = []
        for item in json_data:
            if not isinstance(item, dict) or "type" not in item:
                continue
            if item.get("type") != "function":
                continue

            old_func = item["function"]
            if not isinstance(old_func, dict):
                continue
            if "name" not in old_func or "description" not in old_func:
                continue

            new_func = {
                "name": old_func["name"],
                "description": old_func["description"],
                "parameters": {}
            }
            if "input_schema" in old_func and isinstance(old_func["input_schema"], dict):
                old_schema = old_func["input_schema"]
                new_func["parameters"]["type"] = old_schema.get("type", "object")
                new_func["parameters"]["properties"] = old_schema.get("properties", {})
                new_func["parameters"]["required"] = old_schema.get("required", [])
            new_item = {
                "type": item["type"],
                "function": new_func
            }
            result.append(new_item)
        return result

    async def chat_base(self, messages: List[Dict[str, Any]]) -> Any:
        """Run one LLM turn, resolving tool calls until a non-tool reply arrives."""
        response = self.client.get_response(messages=messages, tools=self.all_tools)
        if response.choices[0].finish_reason == "tool_calls":
            while True:
                messages = await self.create_function_response_messages(messages, response)
                response = self.client.get_response(messages=messages, tools=self.all_tools)
                if response.choices[0].finish_reason != "tool_calls":
                    break
        return response

    async def create_function_response_messages(self, messages: List[Dict[str, Any]], response) -> Any:
        """Execute every tool call in `response` and append the results as
        role="tool" messages; returns the extended message list."""
        function_call_messages = response.choices[0].message.tool_calls
        messages.append(response.choices[0].message.model_dump())
        for function_call_message in function_call_messages:
            tool_name = function_call_message.function.name
            tool_args = json.loads(function_call_message.function.arguments)
            # Invoke the MCP tool on the owning server.
            function_response = await self._call_mcp_tool(tool_name, tool_args)
            messages.append({
                "role": "tool",
                "content": function_response,
                "tool_call_id": function_call_message.id
            })
        return messages

    async def process_query(self, query: str) -> str:
        """Single-shot query: ask the LLM, honor at most one tool call, reply."""
        messages = [{"role": "user", "content": query}]
        response = self.client.get_response(messages, tools=self.all_tools)
        content = response.choices[0]
        logging.info(content)

        if content.finish_reason == "tool_calls":
            # Only the first tool call is executed in this simplified path.
            tool_call = content.message.tool_calls[0]
            tool_name = tool_call.function.name
            tool_args = json.loads(tool_call.function.arguments)
            logging.info(f"\n调用工具：{tool_name}，参数：{tool_args}\n")
            result = await self._call_mcp_tool(tool_name, tool_args)
            messages.append(content.message.model_dump())
            messages.append({
                "role": "tool",
                "content": result,
                "tool_call_id": tool_call.id,
            })
            response = self.client.get_response(messages, tools=self.all_tools)
            return response.choices[0].message.content
        return content.message.content

    async def _call_mcp_tool(self, tool_full_name: str, tool_args: Dict[str, Any]) -> Any:
        """Route a ``serverName_toolName`` call to the right server and
        normalize the tool result to a string."""
        # Resolve by prefix-matching the registered server names instead of
        # splitting on the first "_", so server names containing underscores
        # still route correctly.
        server_name = tool_name = None
        for name in self.servers:
            prefix = f"{name}_"
            if tool_full_name.startswith(prefix):
                server_name, tool_name = name, tool_full_name[len(prefix):]
                break
        if server_name is None:
            # Fall back to the original split to produce the same error texts.
            parts = tool_full_name.split("_", 1)
            if len(parts) != 2:
                return f"无效的工具名称：{tool_full_name}"
            server_name, tool_name = parts
            if server_name not in self.servers:
                return f"找不到服务器：{server_name}"
        server = self.servers[server_name]
        resp = await server.execute_tool(tool_name, tool_args)

        content = resp.content
        if isinstance(content, list):
            # MCP TextContent items carry a .text attribute.
            texts = [c.text for c in content if hasattr(c, "text")]
            return "\n".join(texts)
        elif isinstance(content, dict):
            # Plain dict payloads are serialized as JSON. (The original also
            # tested list here, which was unreachable after the branch above.)
            return json.dumps(content, ensure_ascii=False)
        elif content is None:
            return "工具执行无输出"
        else:
            return str(content)

    async def chat_loop(self) -> None:
        """Interactive chat loop; type "quit" to exit."""
        logging.info("多服务器MCP+function calling 客户端已启动，输入quit退出")
        messages: List[Dict[str, Any]] = []
        try:
            while True:
                query = input("\nQuery: ").strip()
                if query.lower() == 'quit':
                    break
                try:
                    messages.append({"role": "user", "content": query})
                    messages = messages[-20:]  # keep the latest 20 context messages
                    response = await self.chat_base(messages)
                    messages.append(response.choices[0].message.model_dump())
                    result = response.choices[0].message.content
                    print(f"\nAI: {result}")
                except Exception as e:
                    print(f"\nError in chat_loop(): {str(e)}")
        except Exception as e:
            logging.error(f"chat_loop exception: {e}")
        finally:
            await self.cleanup()

    async def cleanup(self) -> None:
        """Clean up every connected server, then this client's own resources.

        The original only closed self.exit_stack (which holds nothing) and
        leaked each Server's stdio transport; Server.cleanup() is idempotent,
        so calling it here is safe even if it already ran.
        """
        for server in self.servers.values():
            await server.cleanup()
        await self.exit_stack.aclose()

async def main():
    """Entry point: load config, connect to all servers, run the chat loop."""
    config = Configuration()
    servers_config = config.load_config("servers_config2.json")
    client = MultiServerMCPClient()
    try:
        await client.connect_to_servers(servers_config)
        await client.chat_loop()  # chat_loop's finally also calls cleanup
    except KeyboardInterrupt:
        logging.info("Received KeyboardInterrupt, shutting down...")
    except Exception as e:
        logging.error(f"Unexpected error: {e}")
        raise
    finally:
        # The original leaked resources when connect_to_servers raised,
        # because chat_loop (whose finally does the cleanup) never ran.
        # A second aclose() on an already-exhausted stack is a no-op.
        await client.cleanup()

if __name__ == "__main__":
    asyncio.run(main())