import os

from openai import OpenAI

# Chat-bot backend: OpenAI-compatible client pointed at the aihao123 proxy.
# SECURITY: never commit API keys to source. The key is read from the
# OPENAI_API_KEY environment variable; the inline value is kept only as a
# backward-compatible fallback and should be revoked/rotated.
# NOTE(review): `self` is a misleading name for a module-level client object
# (it reads like a method receiver) — rename to `client` once all call sites
# can be updated in the same change.
self = OpenAI(
    api_key=os.environ.get(
        "OPENAI_API_KEY",
        "sk-on1j51cvbafu31osq7ukpslf3ace04fea1bk7ld3e4g26fjc",
    ),
    base_url="https://api.aihao123.cn/luomacode-api/open-api/v1/"
)

# Running conversation transcript, shared and mutated by chat_stream().
messageHistory = []

def chat_stream(self, query, system_message=None, temperature=1.0):
    """Append *query* to the shared history and return a streaming completion.

    Parameters
    ----------
    self : OpenAI
        The API client (module-level object; despite the name this is not a
        method receiver — the function is a free function).
    query : str
        The user's message; appended to the shared ``messageHistory``.
    system_message : str | None
        Optional system prompt. Kept as a single entry at position 0 of the
        history. The original code *appended* it on every call, which
        scattered system messages through the middle of the transcript on
        repeated calls; for the first call the resulting history is identical.
    temperature : float
        Sampling temperature, passed through to the API unchanged.

    Returns
    -------
    A streaming response iterator from ``chat.completions.create``
    (``stream=True``), yielding chunks with ``choices[0].delta.content``.
    """
    if system_message:
        entry = {"role": "system", "content": system_message}
        # Maintain exactly one system message, always at the head of the
        # transcript: replace the existing one, or insert at position 0.
        if messageHistory and messageHistory[0].get("role") == "system":
            messageHistory[0] = entry
        else:
            messageHistory.insert(0, entry)
    messageHistory.append({"role": "user", "content": query})
    # Model is fixed here; other models such as "gpt-4" can be substituted.
    response = self.chat.completions.create(
        model="gpt-4o",
        messages=messageHistory,
        temperature=temperature,
        stream=True
    )
    return response
# Demo: send one greeting with a system prompt and stream the reply to stdout.
response = chat_stream(self, "你好", "1")
for chunk in response:
    # The final chunk of an OpenAI-style stream carries delta.content=None;
    # guard so we don't print the literal string "None" at the end.
    content = chunk.choices[0].delta.content
    if content:
        print(content, end="", flush=True)