import asyncio

from langchain.chat_models import init_chat_model
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

def sync_stream_conversation(chain):
    """Stream the model's reply synchronously, printing each chunk as it arrives."""
    # Approach 1: blocking iteration over the chain's stream generator.
    payload = {"input": "请介绍一下你自己"}
    for piece in chain.stream(payload):
        print(piece, end="", flush=True)


async def stream_conversation(chain):
    """Stream the model's reply asynchronously, printing each chunk as it arrives."""
    # Approach 2: non-blocking iteration via the chain's async stream.
    payload = {"input": "请介绍一下你自己"}
    async for piece in chain.astream(payload):
        print(piece, end="", flush=True)

if __name__ == "__main__":
    # Initialize the DeepSeek chat model via the provider-agnostic factory.
    llm = init_chat_model(model="deepseek-chat", model_provider="deepseek")

    # Assistant chat chain: prompt -> model -> plain-text output parser.
    template = ChatPromptTemplate(
        [
            ("system", "你叫小智，是一名乐于助人的助手。"),
            ("user", "{input}"),
        ]
    )
    pipeline = template | llm | StrOutputParser()

    # Approach 1: synchronous streaming.
    sync_stream_conversation(pipeline)
    # Approach 2: asynchronous streaming.
    asyncio.run(stream_conversation(pipeline))
