import os

from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Initialize the Tongyi (DashScope) model with streaming output enabled
from langchain_community.llms.tongyi import Tongyi

# Require the DashScope API key from the environment instead of hardcoding it.
# NOTE(security): a plain-text secret key was previously committed on this line;
# that key is compromised and should be revoked/rotated with DashScope.
if "DASHSCOPE_API_KEY" not in os.environ:
    raise EnvironmentError(
        "DASHSCOPE_API_KEY is not set; export it before running this script."
    )

# Tongyi LLM with streaming enabled: generated tokens are printed to stdout
# as they arrive via the streaming callback handler.
llm = Tongyi(streaming=True, callbacks=[StreamingStdOutCallbackHandler()])

# Prompt template with a single input variable, "question".
template = "Question: {question}"
prompt = PromptTemplate(template=template, input_variables=["question"])

# Wire the prompt and model together into a chain.
chain = LLMChain(llm=llm, prompt=prompt)

# Run the chain; a single-input chain accepts a bare string, which LangChain
# maps to the one declared input variable ("question"). Output streams to
# stdout through the callback above.
chain.invoke("What is the meaning of life?")