from langgraph.graph import MessagesState
from typing import Any, Dict
from langgraph.graph import StateGraph
from langchain_ollama import ChatOllama
from langchain_core.messages import AIMessage
from langchain_core.tools import tool
from langgraph.prebuilt import ToolNode
from langgraph.graph import START, END

# Chat model backing the whole graph: a locally served Ollama qwen3:8b.
# reasoning=False asks ChatOllama to suppress the model's "thinking" output
# so replies come back directly — TODO confirm against the installed
# langchain-ollama version's parameter docs.
# Alternate LAN host kept for reference:
# llm = ChatOllama(model="qwen3:8b", temperature=0.5, reasoning=False, base_url="http://192.168.0.101:11434/")
llm = ChatOllama(model="qwen3:8b", temperature=0.5, reasoning=False, base_url="http://127.0.0.1:11434/")

@tool
def get_weather(city: str) -> str:
    """Get weather for a given city."""
    # NOTE: the docstring above doubles as the tool description the LLM sees,
    # so it is kept verbatim.
    print(f"Getting weather for {city}")
    weather_report = f"It's always raining in {city}!"
    return weather_report

# The set of tools the model may call.
tools = [get_weather]

# Prebuilt graph node that executes whatever tool calls appear on the
# latest AI message and appends the resulting ToolMessages to state.
tool_node = ToolNode(tools)

# LLM handle that advertises the tool schemas, enabling the model to
# emit structured tool calls in its responses.
llm_with_tools = llm.bind_tools(tools)

def call_llm(state: MessagesState) -> Dict[str, Any]:
    """Invoke the tool-aware LLM on the conversation so far.

    Args:
        state: Graph state carrying the accumulated ``messages`` list.

    Returns:
        A partial state update containing the model's response as a
        single-element message list; the response may include tool calls
        for the tool node to execute.
    """
    response = llm_with_tools.invoke(state["messages"])
    return {"messages": [response]}

def should_continue(state: MessagesState) -> str:
    """Route after the LLM node: run tools if the model requested any.

    Args:
        state: Graph state carrying the accumulated ``messages`` list.

    Returns:
        ``"tools"`` when the last message carries tool calls, otherwise
        ``END`` to finish the run.
    """
    last_message = state["messages"][-1]
    # Only AIMessage exposes ``tool_calls``; getattr keeps routing safe
    # should a different message type ever end up last.
    if getattr(last_message, "tool_calls", None):
        return "tools"
    return END

# Assemble the chatbot graph: the LLM node loops through the tool node
# until the model stops requesting tool calls, then the run ends.
_builder = StateGraph(MessagesState)
_builder.add_node(call_llm)
_builder.add_node("tools", tool_node)
_builder.add_edge(START, "call_llm")
_builder.add_conditional_edges("call_llm", should_continue, ["tools", END])
_builder.add_edge("tools", "call_llm")
chatbotGraph = _builder.compile(name="cbg")