import os

from openai import OpenAI

def get_response(prompt):
    """Send *prompt* to the chat model and return a streaming response iterator.

    The returned object yields chunks (OpenAI streaming format); the caller is
    responsible for consuming the stream.

    Args:
        prompt: The user's message text.

    Returns:
        An iterator of streaming completion chunks.
    """
    # Prefer the key from the environment; fall back to the original literal
    # so existing behavior is preserved when the variable is unset.
    # NOTE(review): do not commit real credentials — set DASHSCOPE_API_KEY instead.
    client = OpenAI(
        api_key=os.environ.get("DASHSCOPE_API_KEY", "yourkey"),
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    )

    # Enable streaming so tokens can be printed as they arrive.
    response_stream = client.chat.completions.create(
        model="qwen2-math-72b-instruct",
        messages=[
            {'role': 'system', 'content': 'You are a helpful assistant.'},
            {'role': 'user', 'content': prompt}
        ],
        temperature=0.8,
        top_p=0.8,
        stream=True
    )
    return response_stream



def main():
    """Run an interactive REPL: read user input, stream the model's reply.

    Type 'exit' (case-insensitive) to quit.
    """
    print("Welcome to the chat with the AI assistant! Type 'exit' to quit.")
    while True:
        print()  # blank separator line between turns
        user_input = input("You: ")
        if user_input.lower() == 'exit':
            print("Exiting the chat. Goodbye!")
            break
        response_stream = get_response(user_input)
        # Plain string: the original used an f-string with no placeholders (F541).
        print("Assistant: ", end='')
        for chunk in response_stream:
            # Some OpenAI-compatible streams emit a trailing chunk with empty
            # `choices` (e.g. a usage summary); skip it to avoid IndexError.
            if not chunk.choices:
                continue
            content = chunk.choices[0].delta.content
            if content:  # content can be None/empty on role-only deltas
                print(content, end='', flush=True)
        # Terminate the streamed line explicitly; previously the loop-top
        # print() was consumed as the terminator, losing the blank separator.
        print()



# Standard script entry-point guard: run the REPL only when executed directly.
if __name__ == '__main__':
    main()