from typing import Annotated
from langgraph.graph.message import MessagesState
from langgraph.types import RunnableConfig
from langgraph.store.base import BaseStore
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage, SystemMessage
import uuid

from langgraph_memory.src.config.config import Settings


def update_memory(state: MessagesState, config: RunnableConfig, *, store: BaseStore):
    """Extract a memory from the latest message and persist it in the store.

    Args:
        state: Graph state; the conversation lives under ``state["messages"]``.
        config: Runnable config; ``config["configurable"]["user_id"]`` must be set.
        store: Long-term memory store; entries are written under the
            ``(user_id, "memories")`` namespace with a random UUID key.

    Returns:
        The input state unchanged, so the node composes in the graph.

    Raises:
        KeyError: If ``user_id`` is missing from the configurable section.
    """
    user_id = config["configurable"]["user_id"]
    namespace = (user_id, "memories")

    # Guard: nothing to remember when the conversation is empty.
    if not state["messages"]:
        return state

    # Stores the raw content of the last message as-is. TODO: summarize the
    # merged context asynchronously with an LLM before storing long-term.
    memory_text = state["messages"][-1].content
    memory_id = str(uuid.uuid4())
    store.put(namespace, memory_id, {"memory": memory_text})
    print(f"✅ Memory stored: {memory_text[:50]}...")

    return state

def call_model(state: MessagesState, config: RunnableConfig, *, store: BaseStore):
    """Call the chat model with semantically retrieved memories injected.

    Args:
        state: Graph state; ``state["messages"]`` must hold at least one message.
        config: Runnable config; ``config["configurable"]["user_id"]`` must be set.
        store: Long-term memory store searched under ``(user_id, "memories")``.

    Returns:
        A partial state update ``{"messages": [response]}``; the graph's message
        reducer merges it into the conversation.

    Raises:
        KeyError: If ``user_id`` is missing from the configurable section.
        IndexError: If ``state["messages"]`` is empty.
    """
    user_id = config["configurable"]["user_id"]
    namespace = (user_id, "memories")

    # Semantic search: use the latest message text as the query, keep top 3 hits.
    last_msg = state["messages"][-1].content
    memories = store.search(namespace, query=last_msg, limit=3)
    memory_context = "\n".join(m.value["memory"] for m in memories if "memory" in m.value)

    # Build the system prompt. The previous indented triple-quoted literal
    # leaked the source file's leading whitespace and blank lines into the
    # prompt sent to the model; build it without that padding instead.
    system_prompt = (
        "You are a helpful assistant. Here are some memories about the user:\n"
        f"{memory_context}"
    )
    messages = [SystemMessage(content=system_prompt)] + state["messages"]

    # Invoke the model (network I/O to the configured endpoint).
    model = ChatOpenAI(model=Settings.LLM_NAME, base_url=Settings.LLM_URL)
    response = model.invoke(messages)

    # Return only the new message; the reducer appends it to state.
    return {"messages": [response]}