from langchain_community.chat_message_histories import RedisChatMessageHistory
from langchain_core.messages import trim_messages
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableConfig
from langchain_core.runnables.history import RunnableWithMessageHistory

from a0base.base_llm import pop_llm

# ------------- Prompt template -------------
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant."),
    MessagesPlaceholder("history"),
    ("human", "{question}")
])

# ------------- History trimmer -------------
k = 3  # keep the most recent k conversation rounds (one round = human + AI)

# BUG FIX: max_tokens was 1_000_000, which, combined with a token_counter
# that counts *messages*, meant the trimmer never removed anything — it was
# a no-op and the real trimming happened only via the list slice in
# get_trimmed_history. k rounds == k * 2 messages, so cap at that.
trimmer = trim_messages(
    max_tokens=k * 2,                       # k rounds -> 2*k messages
    strategy="last",                        # keep the newest messages
    token_counter=lambda msgs: len(msgs),   # count messages, not real tokens
    include_system=False,                   # system prompt lives in `prompt`, not history
    allow_partial=False,                    # never keep half a message
)


# ------------- Fetch history + trim -------------
def get_trimmed_history(session_id: str):
    """
    Load the chat history for *session_id* from Redis, trim it to the most
    recent messages, write the trimmed version back (overwriting the old
    key), and return the resulting RedisChatMessageHistory.
    """
    history = RedisChatMessageHistory(
        session_id=session_id,
        url='redis://localhost:6379/0',
        ttl=60 * 60 * 24  # optional: expire the key after 24h
    )
    # Full history -> last k rounds -> trimmer -> clear key -> persist kept messages.
    recent = history.messages[-k * 2:]
    kept = trimmer.invoke(recent)
    history.clear()
    history.add_messages(kept)
    return history


# ------------- Chain with memory -------------
# Wrap the prompt -> LLM pipeline so that every invocation first loads (and
# trims) the session's history from Redis, injects it into the "history"
# placeholder, and appends the new turn back to the store afterwards.
pipeline = prompt | pop_llm
chain = RunnableWithMessageHistory(
    pipeline,
    get_trimmed_history,
    input_messages_key="question",
    history_messages_key="history",
)


# ------------- Chat loop -------------
def chat(user_id: str):
    """Interactive REPL for one user; type 'exit' or 'quit' to leave."""
    # One Redis session per user id.
    session_cfg = RunnableConfig(configurable={"session_id": "session_" + user_id})
    while (question := input(f"{user_id}> ")).lower() not in {"exit", "quit"}:
        answer = chain.invoke({"question": question}, config=session_cfg)
        print("AI:", answer)


# Entry point: start an interactive demo session for the user "alice".
if __name__ == "__main__":
    chat("alice")
