import os
from langchain_openai import ChatOpenAI
from langgraph.graph import StateGraph, START
from typing import TypedDict, Annotated
from langgraph.graph.message import add_messages
from langchain_core.messages import BaseMessage
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.checkpoint.memory import MemorySaver

from rag.mcp_client import MCPClient, convert_mcp_tools_to_openai_format


class State(TypedDict):
    """Graph state: the running chat transcript shared by all nodes.

    Attributes:
        messages: The message history. The ``add_messages`` reducer merges
            each node's returned message(s) into the existing list (append /
            update-by-id) instead of overwriting it.
    """

    # add_messages is a LangGraph reducer: node outputs are merged, not replaced.
    messages: Annotated[list[BaseMessage], add_messages]


async def create_graph(
    workspace_dir: str = "C:\\Users\\zengd\\Desktop\\rag",
    model_name: str = "qwen/qwen3-235b-a22b:free",
):
    """Build and compile a tool-calling chat graph backed by an MCP filesystem server.

    The graph has two nodes: ``chat`` (the LLM with MCP tools bound) and
    ``tools`` (executes tool calls the model emits). Execution loops
    chat -> tools -> chat until the model stops requesting tools. The compiled
    graph uses an in-memory checkpointer and interrupts before the ``tools``
    node, so a caller can inspect/approve tool use before it runs.

    Args:
        workspace_dir: Directory the MCP filesystem server is allowed to access.
        model_name: OpenRouter model identifier for the chat model.

    Returns:
        A compiled LangGraph; invoke it with a ``configurable.thread_id`` config.
    """
    graph_builder = StateGraph(State)

    # Start the filesystem MCP server via npx, scoped to workspace_dir.
    mcp_client = MCPClient()
    await mcp_client.connect_to_server(
        "npx",
        [
            "-y",
            "@modelcontextprotocol/server-filesystem",
            workspace_dir,
        ],
    )
    tools = convert_mcp_tools_to_openai_format(await mcp_client.list_tools())

    # Node that executes any tool calls found in the latest AI message.
    tool_node = ToolNode(tools=tools)
    graph_builder.add_node("tools", tool_node)

    # Chat model routed through OpenRouter; credentials come from env vars.
    chat_model = ChatOpenAI(
        model=model_name,
        base_url=os.getenv("OPENROUTER_BASE_URL"),
        api_key=os.getenv("OPENROUTER_API_KEY"),
    )
    call_model_with_tools = chat_model.bind_tools(tools)

    def chat_node(state: State):
        # Wrap the single reply in a list to match State's list[BaseMessage]
        # annotation; add_messages appends it to the running history.
        return {"messages": [call_model_with_tools.invoke(state["messages"])]}

    graph_builder.add_node("chat", chat_node)

    graph_builder.add_edge(START, "chat")
    # tools_condition routes to "tools" when the last message carries tool
    # calls, otherwise to END.
    graph_builder.add_conditional_edges(
        "chat",
        tools_condition,
    )
    graph_builder.add_edge("tools", "chat")

    # interrupt_before pauses each run before a tool executes (human-in-the-loop).
    return graph_builder.compile(checkpointer=MemorySaver(), interrupt_before=["tools"])


if __name__ == "__main__":
    import asyncio

    from dotenv import load_dotenv
    from langchain_core.messages import HumanMessage

    # Load base config first, then let .env.dev override it.
    load_dotenv(".env")
    load_dotenv(".env.dev", override=True)

    async def _main():
        """Build the graph and run one turn inside a single event loop.

        Creating the graph and invoking it must happen on the SAME loop:
        the MCP client session opened in create_graph() is bound to the loop
        it was created on, so calling graph.invoke() after asyncio.run()
        has already closed that loop would break any tool execution.
        """
        graph = await create_graph()
        config = {"configurable": {"thread_id": "thread-1"}}
        return await graph.ainvoke(
            {"messages": [HumanMessage(content="你好")]}, config
        )

    print(asyncio.run(_main()))
