from typing import Annotated, List, Optional

from typing_extensions import TypedDict

from langchain_core.messages import RemoveMessage, SystemMessage
from langgraph.graph import StateGraph, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition

# from agent.llm.deepseek import model
from agent.llm.qwen import model
from agent.prompt.system import SYSTEM_PROMPT_TEMPLATE
from agent.tools.calculate import calculate
from agent.tools.weather import get_weather


# State definition
class State(TypedDict):
    """Conversation state: the running message history.

    `add_messages` is the reducer LangGraph uses when merging a node's
    returned update into this field — it appends new messages to the
    existing list instead of overwriting it.
    """
    messages: Annotated[list, add_messages]


class LangGraphChatbot:
    """LangGraph-based chatbot.

    Features:
    - Multi-turn conversation
    - Tool calling (weather lookup, calculator, ...)
    - Optional state persistence (via a checkpointer)
    """

    def __init__(self, tools: Optional[List] = None, checkpointer=None):
        """Initialize the chatbot.

        Args:
            tools: Tool list; defaults to [calculate, get_weather].
                NOTE: an explicitly passed empty list also falls back to the
                defaults (falsy check) — kept for backward compatibility.
            checkpointer: Optional checkpoint store used to persist
                conversation state across calls (enables history APIs).
        """
        self.llm = model
        self.prompt_template = SYSTEM_PROMPT_TEMPLATE

        # Register tools and bind them to the model so it can emit tool calls.
        self.tools = tools or [calculate, get_weather]
        self.llm_with_tools = self.llm.bind_tools(self.tools)

        # Pre-build the system message once; it is constant for the bot's lifetime.
        self._system_message = self._build_system_message()

        # Build and compile the conversation graph.
        self.checkpointer = checkpointer
        self.graph = self._build_graph()

    def _build_system_message(self) -> SystemMessage:
        """Render the system prompt, embedding one name/description line per tool."""
        tools_info = "\n".join(
            f"- {tool.name}: {tool.description}" for tool in self.tools
        )
        content = self.prompt_template.format(tools_info=tools_info)
        return SystemMessage(content=content)

    def _chatbot_node(self, state: State) -> dict:
        """Graph node: prepend the system prompt and invoke the tool-bound LLM."""
        # Full message list = system prompt + accumulated history.
        messages = [self._system_message] + state["messages"]
        response = self.llm_with_tools.invoke(messages)
        # The add_messages reducer appends this to the conversation history.
        return {"messages": [response]}

    def _build_graph(self):
        """Build and compile the LangGraph state graph.

        Topology: chatbot -> (tools_condition) -> tools -> chatbot; entry
        point is "chatbot", END is reached when no tool call is requested.
        (No return annotation: `.compile()` returns a compiled graph, not a
        `StateGraph` — the original annotation was inaccurate.)
        """
        builder = StateGraph(State)

        builder.add_node("chatbot", self._chatbot_node)
        builder.add_node("tools", ToolNode(tools=self.tools))

        # tools_condition routes to "tools" when the last AI message contains
        # tool calls, otherwise to END.
        builder.add_conditional_edges(
            "chatbot",
            tools_condition,
        )

        # After tool execution, hand results back to the chatbot node.
        builder.add_edge("tools", "chatbot")
        builder.set_entry_point("chatbot")

        return builder.compile(checkpointer=self.checkpointer)

    def chat(self, message: str, thread_id: str = "default") -> str:
        """Send a message and return the assistant's final response text.

        Args:
            message: User message.
            thread_id: Session identifier used to separate conversations.

        Returns:
            Content of the last message produced by the graph, or "" if the
            stream produced no events.
        """
        config = {"configurable": {"thread_id": thread_id}}

        # Fix: initialize so an empty stream cannot raise NameError below.
        last_message = None
        for event in self.graph.stream(
            {"messages": [("user", message)]},
            config,
            stream_mode="values",
        ):
            last_message = event["messages"][-1]

        return last_message.content if last_message is not None else ""

    def stream_chat(self, message: str, thread_id: str = "default"):
        """Stream the conversation (generator).

        Args:
            message: User message.
            thread_id: Session identifier.

        Yields:
            The latest message after each state update.
        """
        config = {"configurable": {"thread_id": thread_id}}

        for event in self.graph.stream(
            {"messages": [("user", message)]},
            config,
            stream_mode="values",
        ):
            yield event["messages"][-1]

    def get_history(self, thread_id: str = "default") -> List:
        """Return the message history for a thread.

        Args:
            thread_id: Session identifier.

        Returns:
            Message history list; [] when no checkpointer is configured.
        """
        if not self.checkpointer:
            return []

        config = {"configurable": {"thread_id": thread_id}}
        state = self.graph.get_state(config)
        return state.values.get("messages", [])

    def clear_history(self, thread_id: str = "default"):
        """Clear the conversation history for a thread.

        Fix: the `add_messages` reducer *appends* updates, so writing an
        empty list (the original code) was a silent no-op. Each stored
        message must be removed explicitly with a `RemoveMessage`.

        Args:
            thread_id: Session identifier.
        """
        if not self.checkpointer:
            return

        config = {"configurable": {"thread_id": thread_id}}
        messages = self.graph.get_state(config).values.get("messages", [])
        if messages:
            self.graph.update_state(
                config,
                {"messages": [RemoveMessage(id=m.id) for m in messages]},
            )


