# First, initialize the model we want to use.
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
from langchain_openai import ChatOpenAI

# Load the project-configured Zhipu "flash" chat model with temperature 0
# (temperature controls randomness of generation; 0 means deterministic output).
from config.model_config import get_chat_openai_zhipu_flash_250414
model = get_chat_openai_zhipu_flash_250414(temperature=0)

# For this tutorial we use a custom tool that returns predefined weather
# values for two cities (New York and San Francisco).
from typing import Literal
from langchain_core.tools import tool

# Tool function: returns a canned weather report for the given city.
# NOTE: the docstring below is the tool description the LLM sees when deciding
# whether to call this tool — it is runtime behavior, so it is left unchanged.
@tool
def get_weather(city: Literal["nyc", "sf"]):
    """Use this to get weather information."""
    if city == "nyc":
        return "It might be cloudy in nyc"
    if city == "sf":
        return "It's always sunny in sf"
    # Invalid argument values conventionally raise ValueError; AssertionError
    # is reserved for internal logic bugs, not bad caller input.
    raise ValueError("Unknown city")


# Collect the tools the agent is allowed to call.
tools = [get_weather]

# create_react_agent builds a prebuilt ReAct-style agent graph from a model
# and a tool list.
from langgraph.prebuilt import create_react_agent
from langgraph.checkpoint.memory import MemorySaver

# In-memory checkpointer: conversation state is persisted per thread_id
# (see the `config` used when streaming below) for the process lifetime.
memory = MemorySaver()
# Build the ReAct agent with checkpointing enabled.
graph = create_react_agent(model, tools=tools, checkpointer=memory)



# Helper to print streamed token chunks as they arrive.
def print_stream(stream):
    """Print message chunks from a LangGraph stream incrementally.

    Expects an iterable of ``(message, metadata)`` pairs, as produced by
    ``graph.stream(..., stream_mode="messages")``. Each chunk's text content
    is written without a trailing newline and flushed immediately so the
    output appears token by token.
    """
    for message, _meta in stream:
        # Debug `print(type(message))` removed: it interleaved class names
        # with the streamed text on every chunk.
        print(message.content, end="", flush=True)


# Run the app with an input that requires a tool call.
inputs = {"messages": [("user", "what is the weather in sf")]}
# thread_id keys the MemorySaver checkpointer so this conversation can be
# resumed / inspected later under the same id.
config = {"configurable": {"thread_id": "42"}}
for event in graph.stream(inputs,
                        config=config, stream_mode="values"):
    # stream_mode="values" yields the full state after each step;
    # the newest message is the last element of the messages list.
    message = event['messages'][-1]
    if isinstance(message, AIMessage) and message.tool_calls:
        # Progress messages fixed: they previously said "exchange rates",
        # a copy-paste leftover from a currency example — this agent looks
        # up weather.
        print('Looking up the weather..')
    elif isinstance(message, ToolMessage):
        print('Processing the weather data..')
    elif isinstance(message, AIMessage):
        print(message)

# Inspect the checkpointed state persisted for this thread_id.
current_state = graph.get_state(config)
print(current_state)
# NOTE(review): 'structured_response' is only populated when the agent is
# created with a response_format; none was passed above, so this is
# presumably always None here — confirm if structured output is needed.
structured_response = current_state.values.get('structured_response')
print(structured_response)
print('\n')
# Try a question that does not require a tool:
# inputs = {"messages": [("user", "who built you?")]}
# print_stream(graph.stream(inputs, stream_mode="values"))
