from src.module.TongyiModel import TongyiModel

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, END, MessagesState, StateGraph
from langgraph.prebuilt import create_react_agent, ToolNode
from langchain.tools import tool

class Tools:
    # Demo tools registered via LangChain's @tool decorator. Each tool's
    # docstring is surfaced to the LLM as the tool description, so it must
    # describe what the tool does and its arguments.

    @staticmethod
    @tool
    def search_weather(address: str):
        """Look up the weather for an address.

        Args:
            address: the address to query
        """
        # Hard-coded demo data: Shenzhen is sunny, everywhere else cloudy.
        if address == '深圳':
            return '晴朗'
        return '多云'

    @staticmethod
    @tool
    def search_road(address: str):
        """Look up the traffic conditions for an address.

        Args:
            address: the address to query
        """
        # Hard-coded demo data: Shenzhen is clear, everywhere else congested.
        if address == '深圳':
            return '通畅'
        return '拥堵'

class LangGraphAgent:
    """Minimal LangGraph tool-calling agent.

    Wires an LLM node ('chatbot') to a ToolNode ('tools') and loops:
    the model runs, and if its last message contains tool calls the graph
    executes the tools and feeds the results back to the model; otherwise
    the graph terminates.
    """

    def __init__(self):
        # @staticmethod tools are accessible on the class; no need to
        # instantiate Tools (the original built two throwaway instances).
        tools = [Tools.search_weather, Tools.search_road]
        self.tools_node = ToolNode(tools)
        # Bind the tool schemas to the model so it can emit tool calls.
        # (The original assigned self.model twice, constructing TongyiModel
        # twice; the first, unbound model was dead.)
        self.model = TongyiModel().model.bind_tools(tools)

    @staticmethod
    def conditional(state: MessagesState):
        """Route to the 'tools' node when the last message requests tool
        calls; otherwise end the graph."""
        last_message = state['messages'][-1]
        return 'tools' if last_message.tool_calls else END

    # Model node: invoke the tool-bound LLM on the accumulated messages.
    def llm_invoke(self, state: MessagesState):
        response = self.model.invoke(state['messages'])

        print('----------------->', response)
        return { 'messages': response }

    def start(self):
        """Build, compile and run the chatbot/tools graph once."""
        graph = StateGraph(MessagesState)
        graph.add_node('chatbot', self.llm_invoke)  # model node
        graph.add_node('tools', self.tools_node)    # tool node

        graph.add_edge(START, 'chatbot')
        # The conditional router already returns END when the model emits no
        # tool calls; the original additionally called
        # add_edge('chatbot', END), which made 'chatbot' route to END
        # unconditionally in parallel with the conditional edge on every
        # turn. Only the conditional edge is kept.
        graph.add_conditional_edges('chatbot', self.conditional)
        graph.add_edge('tools', 'chatbot')

        saver = MemorySaver()
        llm = graph.compile(checkpointer=saver)
        response = llm.invoke(
            { 'messages': '我目前在深圳，我今天出去转转，有什么需要注意的吗?' },
            config={ 'configurable': { 'thread_id': 123 } },
        )
        print(response)

