from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import HumanMessage
from langchain_core.runnables import RunnableParallel, ConfigurableFieldSpec
from langchain_core.runnables import RunnableWithMessageHistory
from langchain_ollama import ChatOllama

# In-memory registry of chat histories, keyed by (user_id, conversation_id).
store = {}


def get_session_history(user_id: str, conversation_id: str) -> BaseChatMessageHistory:
    """Return the chat history for this (user, conversation) pair, creating it on first use."""
    key = (user_id, conversation_id)
    try:
        return store[key]
    except KeyError:
        history = ChatMessageHistory()
        store[key] = history
        return history

# Local llama3 chat model served through Ollama; moderate sampling temperature.
model = ChatOllama(model="llama3", temperature=0.5)

# Wrap the model so its reply is returned under the "output_message" key,
# matching the output_messages_key used by the history wrapper below.
runnable = RunnableParallel({"output_message": model})


# Build a RunnableWithMessageHistory that records chat messages automatically.
# The two configurable fields route each invocation to the history selected by
# get_session_history(user_id, conversation_id).
_history_field_specs = [
    ConfigurableFieldSpec(
        id=field_id,
        annotation=str,
        name=field_name,
        description=field_desc,
        default="",
        is_shared=True,
    )
    for field_id, field_name, field_desc in (
        ("user_id", "用户 ID", "用户的唯一标识符。"),
        ("conversation_id", "对话 ID", "对话的唯一标识符。"),
    )
]

with_message_history = RunnableWithMessageHistory(
    runnable,
    get_session_history,
    output_messages_key="output_message",
    history_factory_config=_history_field_specs,
)


# Ask two questions in the same (user_id, conversation_id) session; the second
# turn relies on the history recorded for the first, so order matters.
_session_config = {"configurable": {"user_id": "123", "conversation_id": "baz"}}
for _question in (
    "Simone de Beauvoir 对自由意志的看法是什么？请用中文回复",
    "这与萨特的观点有何不同?请用中文回复",
):
    print(with_message_history.invoke(
        [HumanMessage(content=_question)],
        config=_session_config,
    ))
