import os

from dotenv import load_dotenv
from langchain.chat_models import init_chat_model
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

if __name__ == '__main__':
    # Load variables (e.g. DEEPSEEK_API_KEY) from a .env file, overriding
    # any values already present in the process environment.
    load_dotenv(override=True)

    # Prompt: a fixed system persona followed by the running chat history,
    # injected via the "messages" placeholder on every turn.
    prompt = ChatPromptTemplate.from_messages(
        [
            SystemMessage(content="你叫做小智，是一个乐于助人的助手"),
            MessagesPlaceholder(variable_name="messages"),
        ]
    )
    DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
    # Pass the key explicitly — previously it was fetched but never used,
    # silently relying on the provider reading the environment variable.
    model = init_chat_model(
        model="deepseek-chat",
        model_provider="deepseek",
        api_key=DEEPSEEK_API_KEY,
    )

    # prompt -> model -> plain-string output.
    parser = StrOutputParser()
    chain = prompt | model | parser

    # Rolling conversation history fed back into the prompt each turn.
    message_list = []
    print("输入exit结束对话")
    while True:
        user_query = input("你:")
        if user_query.lower() in {"exit", "quit"}:
            break
        # Append the user's message to the history.
        message_list.append(HumanMessage(content=user_query))
        # Invoke the chain with the full accumulated history.
        assistant_reply = chain.invoke({"messages": message_list})
        print("小智:", assistant_reply)

        # Append the assistant's reply so the next turn has full context.
        message_list.append(AIMessage(content=assistant_reply))
        # Keep only the most recent 50 messages to bound prompt size.
        # NOTE(review): an odd cutoff can leave the history starting with an
        # AIMessage; most chat APIs tolerate this — confirm for deepseek.
        message_list = message_list[-50:]