
import ollama

# List the locally available models
# response = ollama.list()
#
# print("All models:", response)

# Single (non-streaming) chat with the model
# res = ollama.chat(
#     model="deepseek-r1:1.5b",
#     messages=[{"role": "user", "content": "Why is the sky blue?"}],
#     stream=False
# )
#
# # print("res:", res)
# print(res["message"]["content"])
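# A sketch, assuming a recent ollama client where chat() returns a
# ChatResponse object: the same text is also available via attribute access.
# print(res.message.content)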

# Stream the model's response chunk by chunk

res = ollama.chat(
    model="deepseek-r1:1.5b",
    messages=[{"role": "user", "content": "Why is the sky blue?"}],
    stream=True
)

# Each chunk carries a partial message; print it immediately without a newline.
for chunk in res:
    print(chunk["message"]["content"], end="", flush=True)
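
# End the output line once the stream is exhausted.
print()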
