import os

from openai import OpenAI

# OpenAI-compatible client for the SiliconFlow endpoint.
# SECURITY: never hard-code API keys in source. Prefer the environment
# variable; the embedded key remains only as a backward-compatible
# fallback — rotate it and drop the fallback once the env var is set.
client = OpenAI(
    api_key=os.getenv(
        "SILICONFLOW_API_KEY",
        "sk-fzaxlslrehzpfsrcdcadupnqkyrudlwodxircbjchaggxrfd",
    ),
    # base_url must include the scheme prefix ("https://")
    base_url="https://api.siliconflow.cn/v1",
)


# Send a chat-completion request with streaming output.
def stream_response():
    """Stream a DeepSeek-V3 chat completion and print tokens as they arrive.

    Prints the streamed content to stdout without buffering artifacts and
    terminates the line when the stream ends. Requires network access and a
    valid API key on the module-level ``client``.
    """
    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-V3",
        messages=[
            {"role": "user", "content": "中国大模型行业2025年将会迎来哪些机遇和挑战"}
        ],
        stream=True,  # enable streaming output
        temperature=0.7,
        max_tokens=1000,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    for chunk in response:
        # Some chunks carry an empty `choices` list (e.g. a trailing usage
        # chunk), and the final delta's `content` is None — printing it raw
        # would emit the literal string "None". Guard both cases.
        if chunk.choices and chunk.choices[0].delta.content is not None:
            # flush so tokens appear immediately instead of on buffer fill
            print(chunk.choices[0].delta.content, end="", flush=True)
    print()  # terminate the streamed line with a newline


def none_stream_response():
    """Send a non-streaming request to DeepSeek-R1 and print the answer
    together with its reasoning trace.

    NOTE(review): the original comment said this seemed buggy and was
    temporarily unused — confirm before wiring it into the entry point.
    """
    messages = [{"role": "user", "content": "奥运会的传奇名将有哪些？"}]
    response = client.chat.completions.create(
        model="deepseek-ai/DeepSeek-R1",
        messages=messages,
        stream=False,
        max_tokens=4096,
    )
    message = response.choices[0].message
    content = message.content
    # `reasoning_content` is a DeepSeek-R1 extension field; guard against
    # models or SDK versions that don't expose it instead of letting an
    # AttributeError crash the script.
    reasoning_content = getattr(message, "reasoning_content", None)
    print(f"content:{content}")

    print(f"reasoning_content:{reasoning_content}")


# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    stream_response()
