from llama_index.llms.openai import OpenAI
import time
import os

# Module-level OpenAI client shared by all functions below.
# NOTE(review): presumably reads OPENAI_API_KEY from the environment — confirm deployment config.
llm = OpenAI()


def stream_with_params(prompt, end_char="", flush_state=True, description=""):
    """Stream an LLM completion for *prompt*, printing tokens as they arrive.

    Args:
        prompt: Text sent to the model via ``llm.stream_complete``.
        end_char: String appended after each printed token (``print(end=...)``).
        flush_state: Whether stdout is flushed after each token, so output
            appears incrementally rather than line-buffered.
        description: Caller-supplied label. NOTE(review): currently unused by
            the body — confirm whether it was meant to be logged/printed.

    Returns:
        The full response text, i.e. all streamed deltas concatenated.
        (Previously the stream was printed and discarded; returning it is
        backward-compatible since existing callers ignore the result.)
    """
    # stream_complete yields incremental responses; each carries the newly
    # generated text in its .delta attribute.
    handle = llm.stream_complete(prompt)
    pieces = []
    for token in handle:
        print(token.delta, end=end_char, flush=flush_state)
        pieces.append(token.delta)
    print()
    return "".join(pieces)


def main():
    """Demo entry point: stream a sample completion to stdout."""
    prompt = "介绍一下鲁迅"
    stream_with_params(
        prompt,
        end_char="",
        flush_state=True,
        description="流式输出",
    )


if __name__ == "__main__":
    main()
