from langchain_core.messages import HumanMessage
from langgraph.graph import StateGraph
from langgraph.graph import END
from langgraph.graph import MessagesState

from langgraph.prebuilt import ToolNode
from langgraph.checkpoint.memory import MemorySaver

from model import get_model
from tools import get_weather, get_goods, save_goods

# Tools the agent may call; ToolNode executes whatever tool calls the
# model's last AI message requested and returns ToolMessages.
tools = [get_weather, get_goods, save_goods]
tool_node = ToolNode(tools)

# Bind the tool schemas to the chat model so it can emit tool calls.
model = get_model()
model = model.bind_tools(tools)


def agent_node(state: MessagesState):
    """Run the tool-bound chat model over the conversation so far.

    Returns a partial state update; MessagesState merges the new AI
    message onto the end of the running message list.
    """
    history = state["messages"]
    ai_message = model.invoke(history)
    return {"messages": [ai_message]}


def should_continue(state: MessagesState):
    """Route after the agent: run tools if the last AI message
    requested any tool calls, otherwise finish the graph run.
    """
    last_message = state["messages"][-1]
    return "tools" if last_message.tool_calls else END


# Assemble the graph: the agent node calls the model; when the model
# requests tools, control passes to the tool node and then back to the
# agent for another turn (classic ReAct loop).
workflow = StateGraph(MessagesState)
workflow.add_node("agent", agent_node)
workflow.add_node("tools", tool_node)
workflow.set_entry_point("agent")

# After tools run, always return to the agent with the tool results.
workflow.add_edge("tools", "agent")
# should_continue returns either "tools" (execute tools) or END (stop).
workflow.add_conditional_edges(
    "agent",
    should_continue
)

# MemorySaver keeps per-thread conversation state in process memory, so
# successive invoke() calls with the same thread_id share chat history.
app = workflow.compile(checkpointer=MemorySaver())

# Interactive REPL: each turn feeds the user message into the graph; the
# MemorySaver checkpointer (keyed by thread_id) carries history across
# turns. Type "q" (or press Ctrl-D / Ctrl-C) to exit.
while True:
    try:
        input_message = input("You:").strip()
    except (EOFError, KeyboardInterrupt):
        # Exit cleanly on Ctrl-D / Ctrl-C instead of a traceback.
        break
    if input_message == "q":
        break
    if not input_message:
        # Don't invoke the model on blank input.
        continue
    final_state = app.invoke(
        {"messages": [HumanMessage(content=input_message)]},
        config={"configurable": {"thread_id": 42}},
    )
    result = final_state["messages"][-1].content
    print("Bot:", result)
