import os

from openai import OpenAI



# SECURITY: never commit API keys in source code. The previous hard-coded key
# is compromised by being checked in and must be rotated; read the key from
# the environment instead.
api_key = os.getenv("SILICONFLOW_API_KEY")
if not api_key:
    raise RuntimeError("Set the SILICONFLOW_API_KEY environment variable before running.")

client = OpenAI(api_key=api_key, base_url="https://api.siliconflow.cn/v1")

# Request a streamed chat completion from the SiliconFlow-hosted Qwen model.
response = client.chat.completions.create(
    model='Qwen/Qwen2.5-7B-Instruct',
    messages=[
        {'role': 'user',
         'content': "你是谁"}
    ],
    stream=True
)

# Print tokens as they arrive. In a streamed response, `delta.content` is
# None on role-only and finish chunks, so guard before printing (otherwise
# the literal string "None" is printed). Flush so output appears live.
for chunk in response:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end='', flush=True)
print()  # final newline once the stream completes



# --- Reference only: equivalent non-streaming request via raw `requests` ---
# import requests
#
# url = "https://api.siliconflow.cn/v1/chat/completions"
#
# payload = {
#     "model": "Qwen/QwQ-32B",
#     "messages": [
#         {
#             "role": "user",
#             "content": "What opportunities and challenges will the Chinese large model industry face in 2025?"
#         }
#     ],
#     "stream": False,
#     "max_tokens": 512,
#     "stop": None,
#     "temperature": 0.7,
#     "top_p": 0.7,
#     "top_k": 50,
#     "frequency_penalty": 0.5,
#     "n": 1,
#     "response_format": {"type": "text"},
#     "tools": [
#         {
#             "type": "function",
#             "function": {
#                 "description": "<string>",
#                 "name": "<string>",
#                 "parameters": {},
#                 "strict": False
#             }
#         }
#     ]
# }
# headers = {
#     "Authorization": "Bearer <token>",
#     "Content-Type": "application/json"
# }
#
# response = requests.request("POST", url, json=payload, headers=headers)
#
# print(response.text)
#
#
#
