from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_community.llms import Tongyi
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough, RunnableLambda
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_community.chat_message_histories import ChatMessageHistory

# Tongyi (Qwen) LLM client from langchain_community; credentials/config come
# from the environment (presumably DASHSCOPE_API_KEY — verify in deployment).
llm = Tongyi()
# Prompt layout: fixed system message, then the prior conversation turns
# (injected under the "history" key), then the latest user input ("input" key).
prompt = ChatPromptTemplate.from_messages([
    ("system", "你好，我是一个万能的聊天机器人。"),
    MessagesPlaceholder(variable_name='history'),  # placeholder for history messages
    ("human", "{input}")
])

# Prompt piped into the LLM. Tongyi is a completion-style LLM (imported from
# langchain_community.llms), so the chain's output is a plain string.
chain = (prompt | llm)

# Single in-memory message history shared by every session in this demo.
memory = ChatMessageHistory()


def get_memory(session_id: str = "") -> ChatMessageHistory:
    """Return the chat history for *session_id*.

    RunnableWithMessageHistory calls this factory positionally with the
    session id taken from the invoke config. This demo keeps one global
    history for all sessions, so the argument is accepted but ignored
    (the default keeps direct zero-argument calls working too).
    """
    return memory


runnable_history = RunnableWithMessageHistory(
    chain,
    get_memory,
    input_messages_key="input",      # key of the latest user message in the input dict
    history_messages_key="history",  # key the prompt's MessagesPlaceholder reads
)

# RunnableWithMessageHistory requires a session_id in the configurable config
# so it can fetch the right history via get_memory; without it, invoke raises
# a missing-keys error before the model is ever called.
session_config = {"configurable": {"session_id": "demo"}}

res = runnable_history.invoke({"input": "你好，我是你大哥。"}, config=session_config)
print(res)

# Second turn: the first exchange is now in `memory`, so the model can answer
# based on what the user said previously.
res = runnable_history.invoke({"input": "你好，请问我是谁？"}, config=session_config)
print(res)