'''
Custom graph inference built on langgraph.

'''

import keys  # project-local module; presumably loads API keys/credentials — TODO confirm
import traceback
# from typing import Annotated
from typing import Any, Literal, Union, Annotated, Callable
from typing_extensions import TypedDict
from langgraph.graph import StateGraph
from langgraph.graph.message import add_messages
from langchain_core.messages import ToolMessage

class CustomState(TypedDict):  # the graph's shared state: a dict with one key
    """Graph state; `add_messages` makes updates append to the list instead of replacing it."""
    messages: Annotated[list, add_messages]
    
from langchain_core.language_models.chat_models import BaseChatModel
# Instantiate the project's tools once at import time and expose them by name,
# so get_custom_graph_infer can select a subset via `toolnames`.
from lc_tools import get_tool_TavilySearchResults, get_tool_open_pycharm, get_lc_rag_tool
tool_TavilySearchResults = get_tool_TavilySearchResults()
tool_open_pycharm = get_tool_open_pycharm()
lc_rag_tool = get_lc_rag_tool()
# name -> tool instance registry used by get_custom_graph_infer(toolnames=...)
tools_dict = {
    'lc_rag_tool': lc_rag_tool,
    'tool_TavilySearchResults': tool_TavilySearchResults,
    'tool_open_pycharm':tool_open_pycharm
}
def get_custom_graph_infer(model_path: Union[str, Callable], is_stream=False,
                           is_custom_llm=True,
                           toolnames=('tool_TavilySearchResults',)):
    """Build a two-node langgraph chat graph (chatbot + tool executor) and return its runner.

    Graph shape: START -> chatbot -> (tools -> chatbot)* -> END. The chatbot
    node calls the LLM with the selected tool schemas bound; a conditional edge
    routes to the tool node whenever the last AI message requested tool calls.

    Args:
        model_path: local model path (or callable) forwarded to the custom LLM
            wrapper; ignored when ``is_custom_llm`` is False.
        is_stream: when True return ``graph.stream``, otherwise ``graph.invoke``.
        is_custom_llm: True -> local ``Custom_Langchain_ChatLLM``;
            False -> remote ``ChatTongyi`` (streaming).
        toolnames: iterable of keys into the module-level ``tools_dict``
            selecting which tools the LLM may call. NOTE: the original default
            was a mutable list; an equivalent tuple avoids the shared
            mutable-default pitfall.

    Returns:
        ``graph.stream`` or ``graph.invoke`` of the compiled graph,
        checkpointed with an in-memory saver (thread_id via config).

    Raises:
        KeyError: if a name in ``toolnames`` is not present in ``tools_dict``.
    """
    from langgraph.graph import START, END
    from langgraph.checkpoint.memory import MemorySaver
    from langchain_core.messages import ToolMessage

    # --- LLM client ------------------------------------------------------
    if is_custom_llm:
        from lc_infer import Custom_Langchain_ChatLLM
        llm = Custom_Langchain_ChatLLM(mode_name_or_path=model_path)  # e.g. qwen-14b
    else:
        from langchain_community.chat_models.tongyi import ChatTongyi
        llm = ChatTongyi(streaming=True)  # remote service

    # Select the requested tools from the module-level registry.
    tools = [tools_dict[name] for name in (toolnames or ())]

    # --- "T" node: the chatbot; one node handles every LLM turn ----------
    llm_with_tools = llm.bind_tools(tools)  # tool schemas are passed to the llm

    def chatbot(state: CustomState):
        message = llm_with_tools.invoke(state["messages"])
        # The simple tool node below is written for at most one call per turn.
        assert len(message.tool_calls) <= 1
        return {"messages": [message]}

    # --- "A" node: tool executor -----------------------------------------
    class BasicToolNode:
        """A node that runs the tools requested in the last AIMessage's tool_calls."""

        def __init__(self, tools: list) -> None:
            self.tools_by_name = {tool.name: tool for tool in tools}

        def __call__(self, inputs: dict):
            messages = inputs.get("messages", [])
            if not messages:
                raise ValueError("No message found in input")
            message = messages[-1]
            outputs = []
            for tool_call in message.tool_calls:
                tool_result = self.tools_by_name[tool_call["name"]].invoke(
                    tool_call["args"]
                )
                outputs.append(
                    ToolMessage(
                        content=tool_result,
                        name=tool_call["name"],
                        tool_call_id=tool_call["id"],
                    )
                )
            return {"messages": outputs}

    tool_node = BasicToolNode(tools=tools)  # wrap the tools as a graph node

    # Conditional edge: after the chatbot, run the tools if the last message
    # requested tool calls, otherwise finish.
    def route_tools(state: CustomState):
        """
        Use in the conditional_edge to route to the ToolNode if the last message
        has tool calls. Otherwise, route to the end.
        """
        if isinstance(state, list):
            ai_message = state[-1]
        elif messages := state.get("messages", []):
            ai_message = messages[-1]
        else:
            raise ValueError(f"No messages found in input state to tool_edge: {state}")
        if hasattr(ai_message, "tool_calls") and len(ai_message.tool_calls) > 0:
            return "tools"
        return END

    # --- build the graph --------------------------------------------------
    graph_builder = StateGraph(CustomState)
    graph_builder.add_node("chatbot", chatbot)  # T
    graph_builder.set_entry_point("chatbot")
    graph_builder.set_finish_point("chatbot")

    graph_builder.add_node("tools", tool_node)  # A
    graph_builder.add_conditional_edges(
        "chatbot",  # source node
        route_tools,
        {"tools": "tools", END: END},  # target nodes
    )

    # Any time a tool is called, we return to the chatbot to decide the next step.
    graph_builder.add_edge("tools", "chatbot")
    graph_builder.add_edge(START, "chatbot")

    # MemorySaver keeps per-thread_id state across calls (could be SqliteSaver).
    memory = MemorySaver()
    graph = graph_builder.compile(checkpointer=memory)
    return graph.stream if is_stream else graph.invoke


def test_graph():
    """Manual smoke test: stream one user query through the graph and print chunks."""
    qwen_path = r'/home/ps/zhangxiancai/llm_deploy/bigfiles/models/Qwen2.5-14B-Instruct'
    run = get_custom_graph_infer(qwen_path, is_stream=True, is_custom_llm=False)
    # thread_id lets the checkpointer carry state across repeated graph runs:
    # each node's inputs/outputs are appended onto the next invocation's input.
    exe_id = {"configurable": {"thread_id": "1"}}
    res = run({"messages": [{"role": "user", "content": '上海新闻'}]}, exe_id, stream_mode='messages')

    if isinstance(res, str):
        print(res)
        return

    # stream_mode='messages' yields (AIMessageChunk | ToolMessage, metadata) pairs.
    for chunk, _metadata in res:
        if isinstance(chunk, ToolMessage):  # skip raw tool outputs
            continue
        if chunk.content:  # str
            print(chunk.content + ' xxxxx ')

if __name__ == '__main__':
    test_graph()  # manual smoke test; requires the remote LLM service to be reachable