import os
from datetime import datetime

from langchain.chains.conversation.base import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain_openai import OpenAI

# Shared LLM client used by all demos below.
# NOTE(security): the API key was previously hard-coded in source; secrets must
# never be committed. Read it from the environment instead (set OPENAI_API_KEY
# and, optionally, OPENAI_BASE_URL before running).
llm = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY"),
    base_url=os.getenv("OPENAI_BASE_URL", "https://ai.nengyongai.cn/v1"),
    temperature=0,  # deterministic output for reproducible demos
)


def test1():
    """Demo: two-turn conversation where ConversationBufferMemory carries
    context from the first turn into the second.

    Prints each model reply, then dumps the accumulated memory variables.
    """
    # Buffer memory stores the full transcript verbatim.
    memory = ConversationBufferMemory()
    # Chain wiring the shared LLM to the memory.
    conversation = ConversationChain(llm=llm, memory=memory)

    # Turn 1. ('user_input' avoids shadowing the builtin 'input'.)
    user_input = '你好，LangChain是什么？'
    reply = conversation.predict(input=user_input)
    print("AI:", reply)

    # Turn 2: a follow-up question that only makes sense with the
    # first turn's context ("它" refers back to LangChain).
    user_input = '它有哪些核心组件？'
    reply = conversation.predict(input=user_input)
    print("AI:", reply)

    # Show what the memory has accumulated.
    print(memory.load_memory_variables({}))


def test2():
    """Demo: same two-turn conversation as test1, but after each turn an
    extra timestamped copy of the exchange is appended to memory via
    add_timestamp_message, then the accumulated memory is printed.
    """
    # Buffer memory stores the full transcript verbatim.
    memory = ConversationBufferMemory()
    # Chain wiring the shared LLM to the memory.
    conversation = ConversationChain(llm=llm, memory=memory)

    # Turn 1. ('user_input' avoids shadowing the builtin 'input'.)
    user_input = '你好，LangChain是什么？'
    reply = conversation.predict(input=user_input)
    add_timestamp_message(memory, user_input, reply)

    # Turn 2: a context-dependent follow-up question.
    user_input = '它有哪些核心组件？'
    reply = conversation.predict(input=user_input)
    add_timestamp_message(memory, user_input, reply)

    # Show what the memory has accumulated (including timestamped copies).
    print(memory.load_memory_variables({}))


def add_timestamp_message(memory, user_message, model_message, *, fmt='%Y-%m-%d %H:%M:%S'):
    """Save a user/model exchange into *memory*, prefixing both sides with
    the current timestamp.

    Args:
        memory: An object exposing ``save_context(inputs, outputs)``
            (e.g. a LangChain ConversationBufferMemory).
        user_message: The user's message text.
        model_message: The model's reply text.
        fmt: strftime format for the timestamp prefix (keyword-only;
            defaults to the original ``'%Y-%m-%d %H:%M:%S'``).
    """
    timestamp = datetime.now().strftime(fmt)
    memory.save_context(
        {'input': f'{timestamp} 用户: {user_message}'},
        {'output': f'{timestamp} 模型: {model_message}'},
    )
