from langchain.tools import tool, ToolRuntime
from langchain.agents import create_agent
from model.Ark import ArkModel
from langgraph.checkpoint.memory import InMemorySaver

def base_stream() -> None:
    """Build a minimal streaming LangChain agent with one weather tool.

    Creates the Ark chat model, registers a single ``get_weather`` tool, and
    wires both into an agent backed by an in-memory checkpointer. The actual
    streaming invocations are kept below as commented-out examples showing
    the two stream modes ("updates" vs "messages").
    """
    # ArkModel is a project-local wrapper; `.model` exposes the underlying
    # chat model instance expected by create_agent.
    model = ArkModel().model

    # NOTE(review): the docstring below is runtime behavior — langchain's
    # @tool uses it as the tool description shown to the LLM (it reads:
    # "get the weather info for an address; add: the address name").
    # Left untranslated on purpose so the LLM-facing text is unchanged.
    @tool
    def get_weather(add: str, runtime:ToolRuntime):
        """ 获取一个地址的天气信息

            :Args
                add: 地址名称
        """
        # Stub implementation: always returns "晴天" (sunny) regardless of
        # the address. `runtime` is injected by langchain and unused here.
        return '晴天'

    agent = create_agent(
        model=model,
        tools=[get_weather],
        # In-memory checkpointer: conversation state is keyed by thread_id
        # (see the commented examples below) and is lost on process exit.
        checkpointer=InMemorySaver()
    )

    ## Example 1 — "updates" mode: emits one chunk per graph node/step.
    # for chunk in agent.stream({ "messages": [{ 'role': 'user', 'content': '今天深圳的天气怎么样?' }] },
    #                           { 'configurable': { 'thread_id': '123123' } },
    #                           stream_mode="updates"
    #                           ):
    #     for step, data in chunk.items():
    #         print(f"step: {step}")
    #         print(f"content: {data['messages'][-1].content_blocks}")

    ## Example 2 — "messages" mode: true token-level streaming; the whole
    ## flow streams its response incrementally.
    # for token, metadata in agent.stream({ "messages": [{ 'role': 'user', 'content': '今天深圳的天气怎么样?' }] },
    #                           { 'configurable': { 'thread_id': '123123' } },
    #                           stream_mode="messages"
    #                           ):
    #     print(f"node: {metadata['langgraph_node']}", token.content_blocks, flush=True)

def run() -> None:
    """Entry point for this demo module — delegates to base_stream()."""
    base_stream()