import os
import sys

# SECURITY: never hard-code API keys in source control -- the keys previously
# embedded here are compromised and must be rotated. Supply credentials via
# the environment instead (shell export, .env loader, CI secret, ...).
for _key in ("DEEPSEEK_API_KEY", "TAVILY_API_KEY"):
    if not os.environ.get(_key):
        # Warn early and loudly; the first API call would otherwise fail
        # with a much less obvious authentication error.
        print(f"Warning: environment variable {_key} is not set; "
              "API calls will fail.", file=sys.stderr)

from typing import Annotated
from PIL import Image as PILImage
import io
import json

from langchain_deepseek import ChatDeepSeek
from langchain_tavily import TavilySearch
from langchain_core.messages import BaseMessage
from typing_extensions import TypedDict

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.graph import END, START


# The first step in defining a graph is defining its State. The State covers
# the graph's schema and the reducer functions that handle state updates.
# Here, State is a TypedDict with one key: messages. The add_messages reducer
# appends new messages to the list instead of overwriting it; keys without a
# reducer annotation would overwrite their previous value.
class State(TypedDict):
    """Graph state: a single ``messages`` channel shared by all nodes."""
    # Messages have the type "list". The `add_messages` reducer in the
    # annotation defines how this key is updated: node outputs are APPENDED
    # to the existing list rather than replacing it.
    messages: Annotated[list, add_messages]


def chatbot(state: State):
    """Run one LLM turn over the accumulated message history.

    Returns a state update whose single new message is appended to
    ``messages`` by the ``add_messages`` reducer.
    """
    reply = llm_with_tools.invoke(state["messages"])
    return {"messages": [reply]}


def route_tools(
    state: State,
):
    """Conditional-edge router for the chatbot node.

    Routes to the "tools" node when the newest message carries tool calls,
    otherwise ends the graph run. Accepts either a raw message list or a
    State mapping with a "messages" key.
    """
    if isinstance(state, list):
        newest = state[-1]
    else:
        history = state.get("messages", [])
        if not history:
            raise ValueError(f"No messages found in input state to tool_edge: {state}")
        newest = history[-1]
    # Only AI messages that actually request tool execution go to ToolNode.
    if hasattr(newest, "tool_calls") and len(newest.tool_calls) > 0:
        return "tools"
    return END


def format_search_results(content):
    """Pretty-print a Tavily search-result payload for the terminal.

    Args:
        content: Message content that may be a JSON object string with a
            top-level "results" list of {title, url, content} dicts.

    Returns:
        True when the payload was recognized and printed, False otherwise
        (including any malformed payload -- this is deliberately best-effort).
    """
    try:
        data = json.loads(content)
        # json.loads may yield a list/str/number; only a dict with a
        # "results" key is treated as a search payload.
        if isinstance(data, dict) and "results" in data:
            print("Assistant:🔍 搜索到以下相关信息：")
            for i, result in enumerate(data["results"], 1):
                print(f"  {i}. {result.get('title', '无标题')}")
                print(f"     链接: {result.get('url', '无链接')}")
                print(f"     内容: {result.get('content', '无内容')[:200]}...")
                print()
            return True
    # Narrowed from a bare `except:` (which also swallowed KeyboardInterrupt
    # and SystemExit): non-JSON content, non-string input, or malformed
    # result entries all mean "not a search payload".
    except (json.JSONDecodeError, TypeError, AttributeError):
        pass
    return False


def stream_graph_updates(user_input: str):
    """Send one user turn through the graph and print a single final answer.

    Streams the graph events for a fixed conversation thread, pretty-prints
    any search-result JSON encountered along the way, and shows only the
    last non-search assistant message as the final response.
    """
    config = {"configurable": {"thread_id": "1"}}
    final_response = None

    # Walk every streamed event; guard clauses skip anything without usable text.
    for event in graph.stream({"messages": [("user", user_input)]}, config=config):
        for value in event.values():
            if "messages" not in value or not value["messages"]:
                continue
            newest = value["messages"][-1]
            raw = getattr(newest, "content", None)
            if not raw:
                continue
            content = raw.strip()
            if not content:
                continue
            # Search-tool output arrives as a JSON object string.
            if content.startswith('{') and '"results"' in content:
                format_search_results(content)
            else:
                final_response = content

    # Emit only the last substantive answer.
    if final_response and final_response.strip():
        print("Assistant:", final_response)
    else:
        print("Assistant: 抱歉，我无法提供相关信息。")


def simple_chatbot():
    """Interactive REPL: read user turns from stdin until quit/exit/q.

    Falls back to a single canned demo question when interactive input is
    unavailable (EOF) or interrupted (Ctrl-C).
    """
    while True:
        try:
            # Keep the try body minimal: previously a bare `except:` wrapped
            # the whole loop body, so ANY runtime error (e.g. an API failure
            # inside stream_graph_updates) silently triggered the demo
            # fallback. Only input() failures belong here.
            user_input = input("User: ")
        except (EOFError, KeyboardInterrupt):
            # Fallback when input() is not available (non-interactive run).
            user_input = "What do you know about LangGraph?"
            print("User: " + user_input)
            stream_graph_updates(user_input)
            break

        if user_input.lower() in ["quit", "exit", "q"]:
            print("Goodbye!")
            break

        stream_graph_updates(user_input)
    

if __name__ == "__main__":
    # --- Model & tools ---------------------------------------------------
    search_tool = TavilySearch(max_results=2)
    tools = [search_tool]
    llm = ChatDeepSeek(model="deepseek-chat", temperature=1.0)
    llm_with_tools = llm.bind_tools(tools)  # read globally by chatbot()

    # --- Graph wiring ----------------------------------------------------
    graph_builder = StateGraph(State)
    graph_builder.add_node("chatbot", chatbot)
    graph_builder.add_node("tools", ToolNode(tools=[search_tool]))
    # After each chatbot turn, route_tools decides: run tools or finish.
    graph_builder.add_conditional_edges(
        "chatbot",
        route_tools,
    )
    graph_builder.add_edge("tools", "chatbot")
    graph_builder.add_edge(START, "chatbot")

    # In-memory checkpointer gives the conversation per-thread memory.
    memory = MemorySaver()
    graph = graph_builder.compile(
        checkpointer=memory
    )

    # To visualize the compiled graph, uncomment:
    # png_data = graph.get_graph().draw_mermaid_png()
    # img = PILImage.open(io.BytesIO(png_data))
    # img.show()
    # img.save("chatbot_graph.png")

    simple_chatbot()