import os

from langchain.chains import LLMChain
from langchain.chat_models import QianfanChatEndpoint
from langchain_core.callbacks import StreamingStdOutCallbackHandler
from langchain_core.messages import HumanMessage
from langchain_core.prompts import SystemMessagePromptTemplate, HumanMessagePromptTemplate, ChatPromptTemplate

"""Streaming output demo: chat with Baidu Qianfan through LangChain.

QianfanChatEndpoint is configured with streaming=True so the model's reply
is emitted incrementally; StreamingStdOutCallbackHandler prints each chunk
to stdout as it arrives, character by character.
"""

# SECURITY: never hard-code API credentials in source control — the previous
# version embedded real-looking AK/SK values here. Require them from the
# environment instead (export QIANFAN_AK / QIANFAN_SK before running).
if not (os.environ.get("QIANFAN_AK") and os.environ.get("QIANFAN_SK")):
    raise EnvironmentError(
        "Set the QIANFAN_AK and QIANFAN_SK environment variables before running."
    )

# Enable streaming; the stdout callback handler prints partial results
# as they are produced.
chatllm = QianfanChatEndpoint(
    streaming=True,
    temperature=0.3,  # low temperature -> more deterministic output
    callbacks=[StreamingStdOutCallbackHandler()],
)

# Use `.invoke()` — calling the chat model object directly (`chatllm([...])`)
# is deprecated in current LangChain releases.
response = chatllm.invoke([HumanMessage(content='写一首关于python的歌')])
print(response)