import os

from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

# Chat model served through SiliconFlow's OpenAI-compatible endpoint.
# SECURITY: an API key was hard-coded here. Prefer the SILICONFLOW_API_KEY
# environment variable; the old literal is kept only as a fallback so the
# script still runs unchanged — rotate/revoke that key and delete the fallback.
llm = ChatOpenAI(
    openai_api_base="https://api.siliconflow.cn/v1/",
    openai_api_key=os.environ.get(
        "SILICONFLOW_API_KEY",
        "sk-pdfifkpjdlxvyvgkerbluaotktpznsmpbcvskjauotenxgvz",  # app_key (leaked — rotate!)
    ),
    model_name="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",  # model name
)
# Accumulator for streamed chunks (stays empty while the synchronous
# streaming demo below is commented out).
chunks = []

# Synchronous streaming example, kept for reference:
# for chunk in llm.stream("What color is the sky?"):
#     print(chunk)
#     chunks.append(chunk)
#     print(chunk.content, end="|", flush=True)

print("ok")

### Async invocation
import asyncio

prompt = ChatPromptTemplate.from_template("你是谁")
parser = StrOutputParser()
chain = prompt | llm | parser


async def async_stream():
    """Stream the chain's string output chunk by chunk, '|'-separated."""
    # FIX: the template declares no input variables, so the former
    # {"topic": "程序员"} payload was silently ignored — pass an empty
    # dict to make the (non-)dependency on inputs explicit.
    async for chunk in chain.astream({}):
        print(chunk, end="|", flush=True)

asyncio.run(async_stream())

# Manual event-loop alternative to asyncio.run(), kept for reference:
# loop = asyncio.new_event_loop()
# asyncio.set_event_loop(loop)
# try:
#     loop.run_until_complete(async_stream())
# finally:
#     loop.close()
# JSON streaming demo: JsonOutputParser re-parses the accumulated text on
# every chunk, so each printed value is a progressively more complete object.
# FIX: repaired the garbled prompt text ("中国的人国的人口总数" -> "中国的人口总数").
prompt = ChatPromptTemplate.from_template("请以json格式返回中国的人口总数，性别比例信息")
json_output_parser = JsonOutputParser()  # renamed from jsonOutputParse (PEP 8 snake_case)

print('\n')
chain = prompt | llm | json_output_parser


# FIX: renamed from async_stream, which shadowed the identically named
# coroutine defined earlier in this script.
async def async_stream_json():
    """Stream progressively parsed JSON objects from the chain."""
    # The template declares no input variables, so no payload is needed.
    async for chunk in chain.astream({}):
        print(chunk, flush=True)

asyncio.run(async_stream_json())



# 事件流