# agent.py
from typing import TypedDict, Annotated, Sequence
import operator
from langchain_core.messages import BaseMessage, HumanMessage, ToolMessage
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.tools import tool
from langgraph.graph import StateGraph, END
from langgraph.prebuilt import ToolNode

# ----------------------------
# 1. Define tools (simulated weather lookup)
# ----------------------------
@tool
def get_weather(location: str) -> str:
    """获取指定城市的天气（模拟）"""
    # NOTE: the docstring above doubles as the tool description that @tool
    # sends to the LLM, so it is intentionally kept verbatim (not translated).
    # Canned response — a stand-in for a real weather API call.
    report = f"{location} 的天气是晴朗，25°C。"
    return report

# Tool registry plus the prebuilt LangGraph node that executes any tool
# calls emitted by the LLM and wraps results as ToolMessages.
tools = [get_weather]
tool_node = ToolNode(tools)

# ----------------------------
# 2. Define the graph state (State)
# ----------------------------
class AgentState(TypedDict):
    """Shared state flowing through the graph: the conversation history."""

    # The operator.add reducer tells LangGraph to APPEND each node's returned
    # messages to the existing sequence rather than overwrite it.
    messages: Annotated[Sequence[BaseMessage], operator.add]

# ----------------------------
# 3. Define the LLM and bind the tools
# ----------------------------
# Qwen (Tongyi) chat model; low temperature keeps routing decisions stable.
# bind_tools exposes the tool schemas so the model can emit tool calls.
llm = ChatTongyi(model="qwen-plus", temperature=0.1)
llm_with_tools = llm.bind_tools(tools)

# ----------------------------
# 4. Define the node functions
# ----------------------------
def call_model(state: AgentState):
    """Agent node: run the tool-aware LLM over the conversation history.

    Returns a partial state update; the single-element list is merged into
    state["messages"] by the operator.add reducer (append, not replace).
    """
    reply = llm_with_tools.invoke(state["messages"])
    return {"messages": [reply]}

def should_continue(state: AgentState):
    """Route after the agent node: execute tools or finish.

    Returns "tools" when the last message carries tool calls, otherwise END
    to terminate the graph.
    """
    last_message = state["messages"][-1]
    # getattr guard: only AIMessage reliably carries .tool_calls; direct
    # attribute access would raise AttributeError for other message types.
    if getattr(last_message, "tool_calls", None):
        return "tools"
    return END

# ----------------------------
# 5. Build the graph
#    agent --(tool_calls?)--> tools --> agent --> ... --> END
# ----------------------------
workflow = StateGraph(AgentState)

workflow.add_node("agent", call_model)
workflow.add_node("tools", tool_node)

# Every run starts with an LLM turn.
workflow.set_entry_point("agent")
# After the agent speaks, should_continue picks the next step; the mapping
# translates its return value ("tools" or END) into the destination node.
workflow.add_conditional_edges(
    "agent",
    should_continue,
    {
        "tools": "tools",
        END: END,
    }
)
# Tool results always flow back to the agent for another LLM turn.
workflow.add_edge("tools", "agent")

# Compile into a runnable app.
graph = workflow.compile()