from openai import OpenAI
import os

class ModelClient:
    """Thin wrapper around an OpenAI-compatible chat-completions endpoint.

    Defaults to Alibaba DashScope's compatible-mode API, reading the key
    from the DASHSCOPE_API_KEY environment variable when not supplied.
    """

    def __init__(self, api_key=None, base_url=None):
        """Create the underlying OpenAI client.

        Args:
            api_key: API key; falls back to the DASHSCOPE_API_KEY env var.
            base_url: Endpoint URL; falls back to DashScope compatible mode.
        """
        if api_key is None:
            api_key = os.getenv("DASHSCOPE_API_KEY")
        if base_url is None:
            base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
        self.client = OpenAI(api_key=api_key, base_url=base_url)

    def generate_response(self, prompt, model="qwen-plus"):
        """Send a single-turn prompt to the model and return the reply text.

        Args:
            prompt: User prompt string sent as the sole conversation turn.
            model: Model identifier (default "qwen-plus").

        Returns:
            The model's reply with surrounding whitespace stripped; an empty
            string if the provider returns a message with no text content.

        Raises:
            Exception: re-raises any API/client error after logging it.
        """
        try:
            print(f"正在调用模型{model}, 提示内容: {prompt}")
            completions = self.client.chat.completions.create(
                model=model,
                # End on the user turn: a trailing empty assistant message
                # is invalid/ignored by OpenAI-compatible endpoints and can
                # be rejected by strict providers.
                messages=[
                    {"role": "user", "content": prompt},
                ],
            )
            # message.content may be None (e.g. tool-call responses);
            # guard before stripping.
            response_content = (completions.choices[0].message.content or "").strip()
            print(f"模型生成的响应内容：{response_content}")
            return response_content
        except Exception as e:
            print(f"调用模型{model}失败，错误信息：{e}")
            raise