import os

import dotenv
from langchain_core.runnables import RunnableConfig
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI

# Load environment variables (DS_BASE / DS_API_KEY) from a local .env file.
dotenv.load_dotenv()

# In-memory checkpointer: stores conversation state keyed by thread_id so
# consecutive invocations on the same thread share chat history.
checkpointer = InMemorySaver()


# Tool definition
def get_weather_tool(city: str) -> str:
    """Return a canned weather report for the given city (demo tool)."""
    return "{}今天天气晴朗,温度25°".format(city)


# 1. Build the chat model (DeepSeek via the OpenAI-compatible API).
# NOTE: do NOT call .bind_tools() here — create_react_agent binds the tools
# to the model itself, and passing a model with tools already bound is
# unsupported (recent langgraph versions reject pre-bound models).
llm = ChatOpenAI(
    model="deepseek-chat",
    base_url=os.getenv("DS_BASE"),   # OpenAI-compatible endpoint
    api_key=os.getenv("DS_API_KEY"),
    temperature=0,                   # deterministic replies for the demo
)

# 2. Assemble the ReAct agent: the model decides when to call the weather
# tool, and the checkpointer provides per-thread conversation memory.
agent = create_react_agent(
    tools=[get_weather_tool],
    model=llm,
    checkpointer=checkpointer,
)

# Per-conversation configuration: a fixed thread_id keys the checkpointer,
# so both invocations below run on the same chat history.
config: RunnableConfig = {"configurable": {"thread_id": "1"}}

# First turn: ask about Xi'an's weather (triggers the tool call).
first_turn = {"role": "user", "content": "西安天气怎么样?"}
xa_resp = agent.invoke({"messages": [first_turn]}, config=config)
print(xa_resp)

# Second turn on the same thread: the elliptical "苏州呢?" relies on the
# checkpointed history to be understood as a weather question.
follow_up = {"role": "user", "content": "苏州呢?"}
sz_resp = agent.invoke({"messages": [follow_up]}, config=config)
print(sz_resp)
