import asyncio

from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.constants import START, END
from langgraph.graph import MessagesState, StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

from agent.env_utils import ZHIPU_API_KEY
from agent.my_llm import llm

# Zhipu AI web-search MCP server (SSE transport).
# NOTE(review): the API key is passed as a URL query parameter — confirm this
# is the auth scheme the endpoint expects and avoid logging the full URL.
zhipuai_mcp_server_config = dict(
    url='https://open.bigmodel.cn/api/mcp/web_search/sse?Authorization=' + ZHIPU_API_KEY,
    transport='sse',
)

# 12306 (train ticket) MCP server hosted on ModelScope (SSE transport).
my12306_mcp_server_config = dict(
    url='https://mcp.api-inference.modelscope.net/32eeac735e9b46/sse',
    transport='sse',
)

# Chart-generation MCP server hosted on ModelScope (SSE transport).
chart_mcp_server_config = dict(
    url='https://mcp.api-inference.modelscope.net/75cba3ce5a964d/sse',
    transport='sse',
)


# Map each logical server name to its connection config, then aggregate all
# of them behind a single multi-server MCP client.
_mcp_servers = {
    'zhipuai_mcp': zhipuai_mcp_server_config,
    '12306_mcp': my12306_mcp_server_config,
    'chart_mcp': chart_mcp_server_config,
}
mcp_client = MultiServerMCPClient(_mcp_servers)


class State(MessagesState):
    """Graph state; inherits the ``messages`` channel from ``MessagesState``."""


async def create_graph():
    """Build and compile the agent graph backed by MCP-provided tools.

    Fetches the tool list from the configured MCP servers, binds it to the
    LLM, and wires a standard chatbot <-> tools loop:
    START -> chatbot -> (tools -> chatbot)* -> END.

    Returns:
        The compiled LangGraph graph, ready to be invoked.
    """
    # Retrieve the tools exposed by the MCP servers.
    mcp_tools = await mcp_client.get_tools()

    # Give the model access to the MCP tool list.
    model = llm.bind_tools(mcp_tools)

    async def chatbot(state: State):
        # Node function: invoke the model on the conversation so far and
        # return the reply as a state update (merged by the default
        # add_messages reducer on the ``messages`` channel).
        reply = await model.ainvoke(state['messages'])
        return {'messages': [reply]}

    workflow = StateGraph(State)
    workflow.add_node('chatbot', chatbot)
    workflow.add_node('tools', ToolNode(mcp_tools))

    workflow.add_edge(START, 'chatbot')
    # tools_condition routes to 'tools' when the model requested a tool
    # call, otherwise to END.
    workflow.add_conditional_edges('chatbot', tools_condition)
    workflow.add_edge('tools', 'chatbot')

    return workflow.compile()


# Build the compiled graph eagerly at import time; asyncio.run spins up a
# temporary event loop just to fetch the MCP tools and compile the graph.
# NOTE(review): this raises RuntimeError if the module is imported while an
# event loop is already running (e.g. from async code or a notebook) — worth
# confirming that all importers are synchronous contexts.
agent = asyncio.run(create_graph())
