"""
让llm记住新对话，实现长期记忆
"""

import os

from langchain.chains import LLMChain
from langchain.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    PromptTemplate,
    SystemMessagePromptTemplate,
)
from langchain_community.chat_message_histories import RedisChatMessageHistory
from langchain_community.chat_models import ChatOpenAI, ChatOllama
from langchain_core.runnables.history import RunnableWithMessageHistory

# OneAPI / Redis connection settings.
# SECURITY NOTE(review): the API keys below were committed in plain text.
# They can now be overridden via environment variables of the same name,
# but the committed default values should be treated as leaked and rotated.
ONEAPI_API_BASE = "http://localhost:3000/v1"
ONEAPI_CHAT_API_KEY = os.environ.get(
    "ONEAPI_CHAT_API_KEY", "sk-4Rqi5cIas3Rg5cwh59231eB0B82c42E7AdAe9a17CcE29591")
ONEAPI_CHAT_MODEL = "qwen-plus"
ONEAPI_EMBEDDING_API_KEY = os.environ.get(
    "ONEAPI_EMBEDDING_API_KEY", "sk-4Rqi5cIas3Rg5cwh59231eB0B82c42E7AdAe9a17CcE29591")
ONEAPI_EMBEDDING_MODEL = "text-embedding-v1"
# Redis database 3 holds the chat histories (see RedisChatMessageHistory below).
redis_url = "redis://127.0.0.1:6379/3"

llm = ChatOllama(model="qwen2", temperature=0.1, stream=True)

# Conversation prompt: a fixed system role, the injected chat history,
# then the user's current question.
messages = [
    SystemMessagePromptTemplate.from_template("你是一位得力的ai助手，可以协助用户处理相关事情"),
    # Prior turns are spliced in here by RunnableWithMessageHistory
    # under the key "xbase_chat_history".
    MessagesPlaceholder(variable_name="xbase_chat_history"),
    # The current question fills the "use_question" template variable.
    HumanMessagePromptTemplate.from_template("{use_question}"),
]
prompt = ChatPromptTemplate.from_messages(messages)
# LCEL pipeline: prompt -> model.
llm_chain = prompt | llm
def _redis_history(session_id: str) -> RedisChatMessageHistory:
    """Return the Redis-backed chat history for the given session id."""
    return RedisChatMessageHistory(session_id, url=redis_url)


# Wrap the chain so each invocation loads and saves its session's
# history in Redis automatically.
chain_with_history = RunnableWithMessageHistory(
    llm_chain,
    _redis_history,
    input_messages_key="use_question",
    history_messages_key="xbase_chat_history",
)
# Interactive loop: keep asking questions until the user types "exit".
# (The previous version ran a single one-shot question and left the
# intended loop commented out, so "exit" was never honored.)
while True:
    use_question = input("请输入你要提的问题：")
    if use_question.lower() == "exit":
        break
    response = chain_with_history.invoke(
        {"use_question": use_question},
        # session_id selects the Redis history list; every run of this
        # script shares the "baily_question" session, which is what gives
        # the model its long-term memory across invocations.
        config={"configurable": {
            "session_id": "baily_question"
        }})
    print("response==>", response)
