import os
from openai import OpenAI

# OpenAI-compatible client pointed at Alibaba Cloud DashScope.
# Requires the DASHSCOPE_API_KEY environment variable to be set;
# os.getenv returns None if unset, which fails later at request time.
client = OpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
)


def stream_chat_completion() -> str:
    """Stream a qwen-plus chat completion, echoing tokens to stdout.

    Sends a fixed system+user prompt, prints each content delta as it
    arrives, then prints the finish reason and token usage (when the
    server includes them), and returns the accumulated response text.

    Returns:
        The full concatenated assistant response.
    """
    completion = client.chat.completions.create(
        model="qwen-plus",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Django如何集成通义千问大模型"},
        ],
        stream=True,
        temperature=0.9,
        # Ask the server to append a final chunk carrying token usage.
        stream_options={"include_usage": True},
    )

    full_response = ""
    for chunk in completion:
        if chunk.choices:
            choice = chunk.choices[0]

            # Print and accumulate any content carried by this chunk.
            content = getattr(choice.delta, 'content', None)
            if content:
                print(content, end='', flush=True)
                full_response += content

            # NOTE: this must be an independent `if`, not `elif` on the
            # choices test — the final chunk can carry finish_reason
            # (with or without content), and the original elif-chain made
            # this branch unreachable.
            if choice.finish_reason:
                finish_reason = choice.finish_reason
                print(f"\n[Finished: {finish_reason}]", end='', flush=True)

        # The usage-only chunk (include_usage) arrives with empty choices.
        elif getattr(chunk, 'usage', None):
            usage = chunk.usage
            print(f"\n[Usage: prompt_tokens={usage.prompt_tokens}, completion_tokens={usage.completion_tokens}]",
                  end='', flush=True)

    return full_response


if __name__ == "__main__":
    # Stream the answer to stdout, then echo the collected text once more.
    print("通义千问响应:")
    final_text = stream_chat_completion()
    print("\n\n完整响应:", final_text)
