from langchain.agents.middleware import wrap_tool_call
from langchain.tools.tool_node import ToolCallRequest
from langchain.messages import ToolMessage
from langchain_core.messages import AIMessage, SystemMessage, HumanMessage
from langgraph.types import Command
from typing import Callable

from langchain.agents import create_agent
from langchain.tools import tool

from langchain_ollama import ChatOllama

from langchain_mcp_adapters.client import MultiServerMCPClient  
from langchain.agents import create_agent

import asyncio

@wrap_tool_call
def monitor_tool(
    request: ToolCallRequest,
    handler: Callable[[ToolCallRequest], ToolMessage | Command],
) -> ToolMessage | Command:
    """Middleware that logs every tool invocation and its outcome.

    Prints the tool name and arguments before delegating to ``handler``;
    logs the result on success, or the exception (then re-raises) on failure.
    """
    call = request.tool_call
    print(f"[monitor_tool] tool name: {call['name']}")
    print(f"[monitor_tool] arguments: {call['args']}")
    try:
        outcome = handler(request)
    except Exception as err:
        # Surface the failure in the log, but let the agent's normal
        # error handling deal with it.
        print(f"Tool failed: {err}")
        raise
    else:
        print(f"[monitor_tool] Tool result: {outcome}")
        return outcome

@tool("web_search")  # Custom name
def search(query: str) -> str:
    """Search the web for information (stub: returns a canned result).

    Args:
        query: The search query string.

    Returns:
        A fake result string that echoes the query.
    """
    # Bug fix: the original f-string contained no placeholder, so the
    # ``query`` argument was silently ignored. Interpolate it so the
    # caller can see which query produced the (stubbed) result.
    return f"Results for '{query}': Trump will be here on Friday. He will give a speech at 3 PM."

@tool("get_weather")
def get_weather(location: str) -> str:
    """Get weather information for a location.

    Stub implementation: always reports the same sunny conditions,
    echoing the requested location.
    """
    report = f"Weather in {location}: Sunny, 72°F"
    return report

async def run_agent():
    """Build an MCP-backed agent and run a single weather query against it.

    Connects to a remote MCP weather server over SSE, loads its tools,
    wires them into a local Ollama-backed agent with logging middleware,
    then sends one question and prints every message of the conversation.
    """
    # Remote MCP server exposing weather tools via SSE transport.
    mcp_client = MultiServerMCPClient(
        {
            "weather": {
                "transport": "sse",
                "url": "https://u-wanghh2000-mcp-weather-tool-s8.space.opencsg.com/sse",
            }
        }
    )
    mcp_tools = await mcp_client.get_tools()

    # Local model served by Ollama.
    chat_model = ChatOllama(
        model="qwen3:8b",
        temperature=0.5,
    )

    weather_agent = create_agent(
        model=chat_model,
        tools=mcp_tools,
        middleware=[monitor_tool],
    )

    question = HumanMessage("西安今天天气怎么样?")

    response = await weather_agent.ainvoke(
        input={"messages": [question]},
        context={"user_role": "expert"},
    )

    # Separator, then dump the full message history of the run.
    print("#" * 30)
    for message in response["messages"]:
        print(message)

if __name__ == "__main__":
    # Guard the entry point so importing this module does not immediately
    # kick off a network-bound agent run.
    asyncio.run(run_agent())
