from openai import OpenAI
from globle import configs as cfg

# DeepSeek exposes an OpenAI-compatible endpoint; the legacy
# `https://api.deepseek.com/v1` base URL also still works.
client = OpenAI(
    base_url="https://api.deepseek.com",
    api_key=cfg.DEEPSEEK_API_KEY,
)

# Kick off a streamed chat completion against DeepSeek's OpenAI-compatible API.
response = client.chat.completions.create(
    model="deepseek-chat",  # required: model name
    messages=[
        # Roles: "system" sets global assistant behavior, "user" carries the
        # prompt; an "assistant" entry would inject prior model output when
        # continuing a multi-turn conversation.
        {"role": "system", "content": "你是一个可爱的助手"},
        {"role": "user", "content": "写点土味情话"},
    ],
    temperature=1,  # sampling temperature in [0, 2]; higher = more random
    stream=True,  # stream chunks instead of waiting for the full reply
    # False: do NOT append a final usage-stats chunk to the stream.
    stream_options={"include_usage": False},
)

# Consume the stream and echo tokens as they arrive.
for chunk in response:
    # The final chunk (the one carrying finish_reason) has delta.content=None,
    # and depending on stream_options a chunk may have an empty choices list —
    # guard both so we never print the literal string "None".
    if chunk.choices and chunk.choices[0].delta.content is not None:
        # flush=True so partial output appears immediately while streaming
        print(chunk.choices[0].delta.content, end='', flush=True)
print()  # terminate the streamed line with a newline
