from helper import record_start_time, record_end_time, read_file_content, init_openai_client

# Kick off a streaming chat-completion request
def send_streaming_request(client, background_content, question_content):
    """Start a streaming chat completion and return the stream object.

    The system prompt carries the background material and the user
    message carries the question; the call is made with stream=True so
    the caller can iterate over incremental chunks.
    """
    conversation = [
        {"role": "system", "content": background_content},
        {"role": "user", "content": question_content},
    ]
    return client.chat.completions.create(
        model="deepseek-r1-250120",
        messages=conversation,
        stream=True,
    )

# Consume a streaming response chunk by chunk
def handle_streaming_response(stream):
    """Print the streamed answer to stdout as chunks arrive.

    Each chunk may carry an empty ``choices`` list (keep-alive /
    metadata frames) or a delta whose ``content`` is ``None`` —
    reasoning models such as deepseek-r1 emit such deltas while
    thinking. Both are skipped; previously a ``None`` content was
    printed as the literal string "None".
    """
    for chunk in stream:
        if not chunk.choices:
            continue
        content = chunk.choices[0].delta.content
        if content is None:  # reasoning/empty delta — nothing printable
            continue
        print(content, end="")
    # Terminate the streamed line once the response is complete.
    print()

# Entry point
def main():
    """Run one timed streaming Q&A round.

    Loads the background and question text from local files, sends a
    streaming request, prints the answer as it arrives, and records the
    elapsed time via the helper timer functions.
    """
    print("----- streaming request -----")
    client = init_openai_client()
    background = read_file_content("background.txt")
    question = read_file_content("question.txt")
    started = record_start_time()
    response_stream = send_streaming_request(client, background, question)
    handle_streaming_response(response_stream)
    # record_end_time presumably reports/returns the elapsed time;
    # its return value is intentionally unused here.
    record_end_time(started)

if __name__ == "__main__":
    main()
