import asyncio
import os

from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_deepseek import ChatDeepSeek
import warnings
# Globally suppress DeprecationWarning noise (presumably emitted by the
# langchain dependency stack — verify) so the streamed output stays readable.
warnings.filterwarnings("ignore", category=DeprecationWarning)

# Shared DeepSeek chat-model client used by the streaming examples below.
llm = ChatDeepSeek(
    model="deepseek-chat",   # model name
    temperature=0,           # sampling randomness (0-1); 0 = most deterministic
    max_tokens=None,         # no explicit cap on output tokens
    timeout=None,            # no request timeout
    max_retries=2,           # retry failed requests up to twice
    # Never hard-code API keys in source — read the secret from the
    # environment instead (set DEEPSEEK_API_KEY before running).
    api_key=os.environ.get("DEEPSEEK_API_KEY"),
)

# chunks = []
# for chunk in llm.stream("what color is the sky in usual?"):
#     chunks.append(chunk)  # Collects each chunk in a list
#     print(chunk.content, end="|", flush=True)  # Prints chunk content with "|" separator

# async def stream_response():
#     chunks = []
#     async for chunk in llm.astream("what color is the sky in usual?"):
#         chunks.append(chunk)
#         print(chunk.content, end="|", flush=True)
#     return chunks

# async def stream_response():
#     prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
#     parser = StrOutputParser()
#     chain = prompt | llm | parser
#
#     async for chunk in chain.astream({"topic": "parrot"}):
#         print(chunk, end="|", flush=True)

async def stream_response():
    """Stream the model's JSON answer, printing each partial parse as it arrives.

    Pipes the shared ``llm`` through a ``JsonOutputParser`` and iterates the
    chain's async stream; every yielded item is the best-effort JSON parsed
    from the tokens received so far.
    """
    prompt = (
        "output a list of the countries france, spain and japan and their populations in JSON format. "
        'Use a dict with an outer key of "countries" which contains a list of countries. '
        "Each country should have the key `name` and `population`"
    )
    pipeline = llm | JsonOutputParser()
    async for partial in pipeline.astream(prompt):
        print(partial, flush=True)
# Script entry point: run the async streaming demo on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(stream_response())