from langchain.memory import ConversationBufferMemory, ConversationBufferWindowMemory, ConversationSummaryMemory, ConversationSummaryBufferMemory, ConversationTokenBufferMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.chains import ConversationChain

import os

from langchain_openai import ChatOpenAI

# SECURITY: a real-looking API key was hard-coded here and committed to source
# control (a second one sat in a commented-out block). Rotate these keys and
# load them from the environment or a secrets manager instead of the source.
# setdefault() keeps the script runnable as before, but lets an externally
# configured DASHSCOPE_API_KEY take precedence over the embedded fallback.
os.environ.setdefault("DASHSCOPE_API_KEY", 'sk-c44402d7a12c41299bb716af8d7e8bac')

# Chat model served through DashScope's OpenAI-compatible endpoint.
llm = ChatOpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),  # replace here if the env var is not configured
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",  # DashScope base_url
    model="qwen-plus"
)

# Chat prompt: a fixed opening AI greeting, then the accumulated conversation
# history (injected as message objects via MessagesPlaceholder, matching the
# memory's return_messages=True / memory_key='history'), then the new user turn.
# NOTE(review): "数码" in the greeting reads like a typo for "什么" ("how can I
# help you?") — confirm with the author before changing the runtime string.
prompt_template = ChatPromptTemplate.from_messages(
    [
        ('ai', '请问我有数码可以帮助你的吗？'),
        MessagesPlaceholder(variable_name='history'),
        ('human', '{input}')
    ]
)

# Alternative memory backends, kept commented out for experimentation.

# Stores the full transcript verbatim (grows without bound).
# memory = ConversationBufferMemory(
#     return_messages=True,
#     memory_key='history'
# )

# Keeps only the last k=3 exchanges verbatim.
# memory = ConversationBufferWindowMemory(
#     return_messages=True,
#     memory_key='history',
#     k=3
# )

# Active backend: replaces the transcript with a rolling summary produced by
# `llm`, so each save_context triggers an extra summarization LLM call.
memory = ConversationSummaryMemory(
    return_messages=True,
    memory_key='history',
    llm=llm
)

# Hybrid: keeps recent turns verbatim up to max_token_limit, summarizes older ones.
# memory = ConversationSummaryBufferMemory(
#     return_messages=True,
#     memory_key='history',
#     llm=llm,
#     max_token_limit=100
# )

# Verbatim buffer pruned by token count (llm used only for token counting).
# memory = ConversationTokenBufferMemory(
#     return_messages=True,
#     memory_key='history',
#     llm=llm,
#     max_token_limit=100
# )

# Show the (initially empty) stored history.
print(memory.load_memory_variables({}))

# Build the conversation chain, wiring in the custom chat prompt defined above.
# The original code constructed `prompt_template` but never passed it, so the
# chain silently fell back to ConversationChain's default PromptTemplate.
# ConversationChain validates that the prompt's input variables match
# {'history', 'input'}, which this template satisfies.
chain = ConversationChain(
    llm=llm,
    memory=memory,
    prompt=prompt_template
)

# Inspect the prompt actually in use. ChatPromptTemplate has no `.template`
# attribute (only the default PromptTemplate does), so print the object itself.
print(chain.prompt)

# Interactive REPL. ConversationChain loads and saves `memory` automatically
# on every invoke, so only the new user input has to be supplied per turn.
# (A dead earlier version of this loop referenced an undefined `history`
# variable and has been removed.)
while True:
    user_input = input('> ')
    result = chain.invoke({'input': user_input})
    # invoke() returns a dict; the model's reply lives under the chain's
    # 'response' output key. Printing the whole dict also echoed the entire
    # stored history on every turn.
    print(result['response'])
    # Show how the memory backend has updated the stored history.
    print(memory.load_memory_variables({})['history'])
