"""8.8 通用mcp client集成各个组件开发-1"""
import asyncio
import json
import os

from openai import OpenAI
from openai.types.chat.chat_completion import Choice

from connect_factory import ServerManager



class MCPClient:
    """Generic MCP client bridging a DeepSeek chat model and MCP tool servers.

    The client sends a user question to the model together with the tool
    schemas discovered by ``ServerManager``. When the model requests tool
    calls, they are executed through the manager and the results are fed
    back to the model until it produces a final text answer.
    """

    def __init__(self, manager: ServerManager):
        # Placeholder for an MCP session; kept for interface compatibility.
        self.session = None
        # Prefer the environment variable; fall back to the legacy inline key
        # so existing deployments keep working.
        # NOTE(review): the hard-coded key is a credential leak — rotate it
        # and drop the fallback once DEEPSEEK_API_KEY is set everywhere.
        self.deepseek = OpenAI(
            api_key=os.environ.get(
                "DEEPSEEK_API_KEY", "sk-15af4e21f828460683b16ce9e78b2346"
            ),
            base_url="https://api.deepseek.com",
        )
        self.manager = manager

    async def large_model_strategy(self, choice_result: Choice,
                                   messages: list[dict],
                                   tools: list[dict]):
        """Resolve one model turn.

        Returns the final text when the model is done; otherwise executes
        the requested tool calls via the server manager, appends their
        results to the history, re-queries the model, and recurses.

        :param choice_result: first choice of the chat-completion response
        :param messages: running conversation history (mutated in place)
        :param tools: function-calling tool schemas re-sent on each request
        :return: the model's final text content
        """
        if choice_result.finish_reason == "stop":
            # Final answer produced — no (more) tool calls needed.
            return choice_result.message.content
        elif choice_result.finish_reason == "tool_calls":
            # Record the assistant turn that requested the tools so the
            # follow-up request has a coherent history.
            messages.append(choice_result.message.model_dump())
            for tool_call in choice_result.message.tool_calls:
                print("工具名称：", tool_call.function.name, ",参数：", tool_call.function.arguments)
                arguments = json.loads(tool_call.function.arguments)
                # Dispatch to whichever MCP server exposes this tool.
                tool_result = await self.manager.execute_call(tool_call.function.name, arguments)
                messages.append({
                    "role": "tool",
                    "content": tool_result,
                    "tool_call_id": tool_call.id
                })
                print("==== 工具调用结果tool_result：", tool_result)

            # Ask the model again, now that the tool results are in history.
            # NOTE(review): this is a blocking HTTP call inside a coroutine;
            # consider asyncio.to_thread() if the loop must stay responsive.
            deepseek_response = self.deepseek.chat.completions.create(
                model="deepseek-chat",
                messages=messages,
                tools=tools,
            )
            return await self.large_model_strategy(
                deepseek_response.choices[0], messages, tools
            )
        else:
            # Defensive fallback: the original returned None implicitly for
            # other finish reasons (e.g. "length"); surface whatever partial
            # content exists instead of silently dropping it.
            return choice_result.message.content

    async def execute(self, question: str):
        """Answer *question* with the model plus the discovered MCP tools.

        :param question: the user's natural-language question
        """
        # 1. Tools previously discovered from the connected MCP servers.
        mcp_functions = self.manager.mcp_functions
        # 2. Convert them into the OpenAI function-calling schema.
        function_tools = [
            {
                "type": "function",
                "function": {
                    "name": mcp_function.name,
                    "description": mcp_function.description,
                    "parameters": mcp_function.input_schema,
                },
            }
            for mcp_function in mcp_functions.values()
        ]
        # 3. Seed the conversation with the user's question.
        messages = [
            {
                "role": "user",
                "content": question
            }
        ]
        # 4. First model call: the model decides whether tools are needed.
        deepseek_response = self.deepseek.chat.completions.create(
            model="deepseek-chat",
            messages=messages,
            tools=function_tools
        )
        # Log the raw model decision for debugging.
        print("==== deepseek 响应持结果：", deepseek_response)
        choice_result = deepseek_response.choices[0]
        result = await self.large_model_strategy(choice_result, messages, function_tools)
        print("==== 大模型最终推理的结果：", result)



async def main():
    """Interactive loop: read questions from stdin and answer via MCP.

    Type ``exit`` to quit. Server connections are always released on exit.
    """
    # Establish connections to all configured MCP servers up front.
    manager = ServerManager()
    await manager.initialize()
    client = MCPClient(manager)
    try:
        while True:
            # Example question: 10加20之和再减去3等于多少
            try:
                question = input("请输入问题：")
            except EOFError:
                # stdin closed (e.g. piped input exhausted) — exit cleanly
                # instead of crashing through the generic handler below.
                break
            if question == "exit":
                break
            await client.execute(question)
    except Exception as e:
        # Broad catch is deliberate for this demo: report and shut down.
        print(f"连接失败: {e}")
        return
    finally:
        # Always release server connections, even on error paths.
        await manager.aclose()

if __name__ == "__main__":
    # asyncio.run() creates, runs, AND closes the event loop; the original
    # new_event_loop() was never closed, leaking the loop on exit.
    asyncio.run(main())