from langchain_ollama import ChatOllama


def main() -> None:
    """Stream a chat completion from a local Ollama model to stdout.

    Requires a running Ollama server with the ``qwen2.5:7b`` model already
    pulled (``ollama pull qwen2.5:7b``); ChatOllama talks to it over the
    default local endpoint.
    """
    # temperature=0.7 allows moderately varied/creative responses.
    model = ChatOllama(model="qwen2.5:7b", temperature=0.7)

    # (role, content) tuples; LangChain coerces these into chat messages.
    messages = [
        ("user", "请列出中国四大名著"),
    ]

    # Stream chunks as they arrive rather than blocking for the full
    # reply; flush each chunk so the text appears immediately.
    for chunk in model.stream(messages):
        print(chunk.content, end="", flush=True)
    # The streamed content carries no final newline; emit one so the
    # shell prompt does not run into the model output.
    print()


if __name__ == "__main__":
    # Guard the entry point so importing this module performs no network
    # I/O; only direct execution triggers the model call.
    main()