import asyncio
import os
import uuid
from typing import Annotated, TypedDict

from langchain_core.runnables import RunnableConfig
from langchain_openai import ChatOpenAI

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, MessagesState, StateGraph
from langgraph.graph.message import add_messages
from langgraph.store.base import BaseStore
from langgraph.store.memory import InMemoryStore



# Long-term memory store (shared across threads) and short-term
# checkpointer (per-thread conversation state).
in_memory_store = InMemoryStore()
memory = MemorySaver()

# SECURITY FIX: the API key was hard-coded in source. Read credentials from
# the environment; the literal fallbacks preserve the original behavior when
# the variables are unset — rotate this leaked key and delete the fallback.
api_key = os.getenv("OPENAI_API_KEY", "sk-6S0PtpNia71gjcfwSsDPsJ9mGqsVPr2XRQzAx1dHbJS7RW4t")
api_base = os.getenv("OPENAI_API_BASE", "https://chatapi.littlewheat.com/v1")

llm = ChatOpenAI(model="gpt-4o", api_key=api_key, base_url=api_base)

# Graph state schema: `messages` accumulates through the add_messages
# reducer (new messages are appended/merged instead of replacing the list).
class State(TypedDict):
    messages:Annotated[list,add_messages]

# Conversation node: pulls this user's long-term memories into the system
# prompt, calls the model, and persists both the question and the answer.
def call_mode(state:MessagesState,config:RunnableConfig,*,store:BaseStore):
    """Answer the latest user message with cross-thread memory support.

    Args:
        state: Graph state carrying the running message list.
        config: Run config. ``user_id`` is read from ``configurable`` first,
            falling back to the top level for callers that put it there.
        store: Long-term key/value store injected by the compiled graph.

    Returns:
        dict: ``{"messages": response}``, merged into state by add_messages.
    """
    # BUG FIX: callers in this file pass "user_id" at the TOP level of the
    # config, so config["configurable"]["user_id"] raised KeyError. Check
    # "configurable" first, then fall back to the top-level key.
    configurable = config.get("configurable", {})
    user_id = configurable.get("user_id") or config.get("user_id")

    # Memories are namespaced per user so users never see each other's data.
    namespace = ("memories", user_id)

    # Retrieve everything stored for this user and flatten it for the prompt.
    memories = store.search(namespace)
    info = "\n".join([d.value["data"] for d in memories])

    # Persist the incoming user message as a memory (typo fixed: last_message).
    last_message = state["messages"][-1]
    store.put(namespace, str(uuid.uuid4()), {"data": last_message.content})

    system_msg = f"Answer the user's question in context: {info}"

    # BUG FIX: role-style message dicts use the "role" key; "type" is not the
    # accepted key for this dict shape.
    response = llm.invoke([{"role": "system", "content": system_msg}] + state["messages"])

    # Persist the assistant's reply as a memory as well.
    store.put(namespace, str(uuid.uuid4()), {"data": response.content})

    return {"messages": response}

# Assemble the one-node graph (START -> call_mode -> END) and compile it
# with the per-thread checkpointer plus the shared long-term memory store.
builder = StateGraph(State)
builder.add_node("call_mode", call_mode)
builder.add_edge(START, "call_mode")
builder.add_edge("call_mode", END)

graph = builder.compile(checkpointer=memory, store=in_memory_store)

async def ask_who(graph):
    """Stream two turns on thread 555 as user 8: introduce, then ask the name.

    BUG FIX: ``user_id`` now lives inside ``configurable`` — call_mode reads
    ``config["configurable"]["user_id"]``, so a top-level key raised KeyError.
    """
    config = {"configurable": {"thread_id": "555", "user_id": "8"}}

    # First turn: the user introduces themself; saved as a long-term memory.
    async for chunk in graph.astream({"messages": ["你好，我是西瓜老师"]}, config, stream_mode="values"):
        chunk["messages"][-1].pretty_print()

    # Second turn: the model should recall the name from the stored memory.
    async for chunk in graph.astream({"messages": ["你知道我叫什么吗？"]}, config, stream_mode="values"):
        chunk["messages"][-1].pretty_print()


asyncio.run(ask_who(graph))

async def find_who(graph):
    """Re-ask the name on the same thread/user — memory should still answer.

    BUG FIX: ``user_id`` moved inside ``configurable`` so call_mode's
    ``config["configurable"]["user_id"]`` lookup no longer raises KeyError.
    """
    config = {"configurable": {"thread_id": "555", "user_id": "8"}}

    async for chunk in graph.astream({"messages": ["你知道我叫什么吗？"]}, config, stream_mode="values"):
        chunk["messages"][-1].pretty_print()


asyncio.run(find_who(graph))

async def is_who(graph):
    """Ask the name on the same thread but as a DIFFERENT user (id 6).

    User 6 has no stored memories, so the model should not know the name.
    BUG FIXES: ``user_id`` moved inside ``configurable`` (call_mode reads it
    there), and the coroutine was defined but never executed even though the
    ("memories", "6") namespace is searched below — run it.
    """
    config = {"configurable": {"thread_id": "555", "user_id": "6"}}

    async for chunk in graph.astream({"messages": ["你知道我叫什么吗？"]}, config, stream_mode="values"):
        chunk["messages"][-1].pretty_print()


asyncio.run(is_who(graph))
# Dump the stored long-term memories for each user.
# BUG FIX: the loop variable was named `memory`, silently shadowing the
# MemorySaver checkpointer bound at module level.
for mem_item in in_memory_store.search(("memories", "8")):
    print(mem_item.value)

for mem_item in in_memory_store.search(("memories", "6")):
    print(mem_item.value)

