from openai import OpenAI
import os


def get_response():
    """Stream a chat completion from an OpenAI-compatible endpoint and print each chunk.

    Sends a fixed system/user prompt with ``stream=True`` and prints every
    streamed chunk as it arrives. If the request cannot be created, the error
    is printed and the function returns early.

    Returns:
        None. Output goes to stdout.
    """
    # SECURITY NOTE(review): an API key was previously hardcoded here (and
    # several more sat in commented-out lines). Those keys are leaked and must
    # be rotated. Prefer the OPENAI_API_KEY / OPENAI_BASE_URL environment
    # variables; the fallbacks preserve the original behavior until rotation.
    client = OpenAI(
        api_key=os.environ.get(
            "OPENAI_API_KEY", "sk-NYsoG3VBKDiTdsadsa5379aD3854a93602327"
        ),
        base_url=os.environ.get("OPENAI_BASE_URL", "https://key.wenwen-ai.com/v1"),
    )
    try:
        completion = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {'role': 'system', 'content': 'You are a helpful assistant.'},
                {'role': 'user', 'content': '你是gpt'},
            ],
            stream=True,
            # Optional: makes the final streamed chunk include token-usage info.
            stream_options={"include_usage": True},
        )
    except Exception as e:
        # BUG FIX: the original printed the error and then fell through to
        # `for chunk in completion:`, raising NameError because `completion`
        # was never assigned. Return early instead.
        print(e)
        return

    for chunk in completion:
        print(chunk)



# Run only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    get_response()
