import os

from openai import OpenAI

# OpenAI-compatible client for the DeepSeek API.
#
# NOTE(security): a previous revision hard-coded live API keys (a DeepSeek key
# and a ModelScope token) directly in this file. Those credentials are
# compromised and must be rotated. The key is now read from the environment:
#     export DEEPSEEK_API_KEY=sk-...
client = OpenAI(
    # Alternative OpenAI-compatible endpoint (ModelScope), kept for reference:
    # base_url='https://api-inference.modelscope.cn/v1/',
    base_url='https://api.deepseek.com/v1',
    # Fail fast with a KeyError if the variable is unset rather than sending
    # an unauthenticated request.
    api_key=os.environ['DEEPSEEK_API_KEY'],
)

# Issue a single, non-streaming chat-completion request and keep the full
# response object. The prompt asks (in Chinese): "Hello, who are you?"
chat_messages = [
    {'role': 'user', 'content': '你好，你是谁？'},
]
response = client.chat.completions.create(
    model='deepseek-chat',  # ModelScope Model-Id
    messages=chat_messages,
    stream=False,  # full message returned at once; no delta chunks
)

# Print the assistant's reply. With stream=False the whole message is
# available immediately on the first (and only) choice.
print(response.choices[0].message.content)
# Dead code removed: an unused `done_reasoning` flag and a commented-out
# streaming loop (it iterated chunks and read `delta.reasoning_content` /
# `delta.content`). To stream instead, pass stream=True above and iterate
# `response`, reading `chunk.choices[0].delta`.