from langchain_ollama import ChatOllama
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.prebuilt import create_react_agent
from langgraph.types import interrupt, Command


# An example of a sensitive tool that requires human review / approval
# before it runs: it pauses the graph via `interrupt` and waits for input.
def book_hotel(hotel_name: str):
    """Book a hotel"""
    # Pause the graph and surface the proposed call to a human reviewer.
    # Execution resumes when the caller sends a Command(resume=...) payload,
    # which becomes the return value of interrupt().
    decision = interrupt(
        f"Trying to call `book_hotel` with args {{'hotel_name': {hotel_name}}}. "
        "Please approve or suggest edits."
    )
    kind = decision["type"]
    if kind == "edit":
        # Reviewer supplied replacement arguments — use them instead.
        hotel_name = decision["args"]["hotel_name"]
    elif kind != "accept":
        # Anything other than accept/edit is a protocol error.
        raise ValueError(f"Unknown response type: {kind}")
    return f"Successfully booked a stay at {hotel_name}."


def stream_and_print(agent, payload, config):
    """Stream *payload* through *agent* with *config*, printing each chunk.

    Shared by the initial run and the resume run below (the original code
    duplicated this loop verbatim).
    """
    for chunk in agent.stream(payload, config):
        print(chunk)
        print("\n")


if __name__ == '__main__':
    # Build the agent: a ReAct loop whose tool call can pause via
    # `interrupt`; the checkpointer persists the paused state so the
    # second stream call can resume it.
    checkpointer = InMemorySaver()
    model = ChatOllama(model="qwen3:30b", base_url="http://192.168.97.217:11434")
    agent = create_react_agent(
        model=model,
        tools=[book_hotel],
        checkpointer=checkpointer)

    # thread_id ties both stream calls to the same checkpointed run.
    config = {
        "configurable": {
            "thread_id": "1"
        }
    }

    # First run: executes until book_hotel() calls interrupt(), then pauses.
    stream_and_print(
        agent,
        {"messages": [{"role": "user", "content": "book a stay at McKittrick hotel"}]},
        config,
    )

    # Second run: resume the paused tool call with the human decision.
    # To edit the arguments instead of accepting, resume with e.g.:
    #   Command(resume={"type": "edit", "args": {"hotel_name": "McKittrick Hotel"}})
    stream_and_print(agent, Command(resume={"type": "accept"}), config)