import os

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

# SECURITY: the API key was previously hard-coded here. A secret committed to
# source control must be considered leaked (rotate it!); read it from the
# environment instead. Raises KeyError early if DEEPSEEK_API_KEY is unset,
# which is preferable to a confusing auth failure at request time.
llm = ChatOpenAI(
    api_key=os.environ["DEEPSEEK_API_KEY"],
    base_url="https://api.deepseek.com",
    model="deepseek-chat"
)

# Build the LCEL chain: prompt template -> chat model -> plain-string output.
prompt = ChatPromptTemplate.from_messages([
    ("system", "你是一个历史学家。"),  # system role: "You are a historian."
    ("user", "{question}")
])

chain = prompt | llm | StrOutputParser()


# Streaming invocation
def stream_with_lcel(question: str) -> str:
    """Stream the chain's answer to *question*, echoing chunks as they arrive.

    Each chunk is printed immediately (flushed, no newline) so the user sees
    the response as it is generated.

    Args:
        question: Text substituted into the prompt's ``{question}`` slot.

    Returns:
        The complete response text. (Bug fix: the original accumulated the
        chunks into a local variable but never returned it, so callers
        always received ``None``.)
    """
    pieces = []
    for chunk in chain.stream({"question": question}):
        pieces.append(chunk)
        print(chunk, end='', flush=True)
    # "".join avoids the quadratic behavior of repeated string concatenation.
    return "".join(pieces)


# Guard the script entry point so importing this module does not immediately
# fire a network request to the model API.
if __name__ == "__main__":
    stream_with_lcel(question="清朝有哪些名人？")
