import os

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory
from langchain_openai import ChatOpenAI

# Use setdefault so a real key already present in the environment is NOT
# clobbered by the placeholder (the original unconditionally overwrote it,
# which also made the emptiness check below unreachable in practice).
# NOTE(review): "sk-***" is a placeholder — replace with a real key or,
# better, export DASHSCOPE_API_KEY outside the source file.
os.environ.setdefault("DASHSCOPE_API_KEY", "sk-***")
api_key = os.getenv("DASHSCOPE_API_KEY")
if not api_key:
    raise ValueError("DASHSCOPE_API_KEY environment variable not set!")

# Qwen served through DashScope's OpenAI-compatible endpoint; low temperature
# keeps answers close to deterministic.
llm = ChatOpenAI(
    model="qwen-plus",
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    temperature=0.1,
)

# Prompt layout: system instructions, then any prior conversation turns,
# then the current user input. The history slot is optional so the very
# first turn (no history yet) still renders.
prompt = ChatPromptTemplate.from_messages([
    ("system", "你是一个乐于助人的助手，尽你所能回答所有问题，提供的聊天历史包含与你对话用户的相关信息"),
    MessagesPlaceholder(variable_name="chat_history", optional=True),  # filled with actual messages at run time
    ("user", "{input}")
])

chain = prompt | llm  # base chain: render prompt, then call the model

# 聊天记录存在关系型数据库，或者redis

# In-memory per-session message storage, keyed by session_id.
# In production this would live in a relational database or Redis.
store = {}


def get_session_history(session_id: str):
    """Return the message history for *session_id*, creating it on first use."""
    history = store.get(session_id)
    if history is None:
        # First turn for this session: start an empty in-memory history.
        history = InMemoryChatMessageHistory()
        store[session_id] = history
    return history


# Wrap the base chain so that, per session_id, prior turns are loaded into
# the "chat_history" prompt slot and new turns are appended after each call.
chat_with_message_history = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key="input",
    history_messages_key="chat_history",
)

# Both invocations share one session, so the second turn can recall the
# name given in the first.
session_cfg = {"configurable": {"session_id": "user123"}}

result = chat_with_message_history.invoke({"input": "你好，我是lp"}, config=session_cfg)
print(result)

result1 = chat_with_message_history.invoke({"input": "我的名字叫什么"}, config=session_cfg)
print(result1)

