from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
import os

from langchain_community.llms.tongyi import Tongyi

# SECURITY: never hardcode API secrets in source — the previous revision shipped a
# live DashScope key and silently overwrote any key already set in the environment.
# Read it from the environment instead and fail fast with a clear message.
if not os.environ.get("DASHSCOPE_API_KEY"):
    raise RuntimeError(
        "DASHSCOPE_API_KEY is not set. Export it before running, e.g. "
        "`export DASHSCOPE_API_KEY=sk-...`"
    )
# Tongyi() picks up DASHSCOPE_API_KEY from the environment on its own.
llm = Tongyi()

# Prompt for a chatbot that carries prior conversation context. The template
# text itself is runtime data consumed by the model, so it stays as-is.
template = """
你是一个会话机器人，能够记住上一次会话的内容
上一次会话内容:{chat_history}
新一次对话:{question}
你的回复:
"""

# Build the prompt with the explicit constructor, naming both input
# variables up front instead of letting from_template() infer them.
prompt = PromptTemplate(
    input_variables=["chat_history", "question"],
    template=template,
)

# Buffer memory; memory_key must match the {chat_history} placeholder above.
memory = ConversationBufferMemory(memory_key="chat_history")
# Memory-backed chain: each invoke() appends to `memory`, so the second call
# sees the first exchange via {chat_history}.
chain = LLMChain(
    llm=llm,
    prompt=prompt,
    verbose=True,
    memory=memory,
)

# First turn. invoke() expects a dict keyed by the prompt's input variable.
first = chain.invoke({"question": "你好"})
print(first)

# BUGFIX: the original passed a set literal {"月球距离地球多多少KM"} instead of
# a dict, which raises at runtime — the payload must be {"question": ...}.
res = chain.invoke({"question": "月球距离地球多多少KM"})
print(res)