from abc import ABC, abstractmethod
import os
from openai import OpenAI # OpenAI-compatible API client (e.g. Alibaba Cloud Tongyi Qianwen / DashScope)
from zhipuai import ZhipuAI as ZhipuAIClientSDK # renamed to avoid clashing with the ZhipuAIClient class below

class BaseLLMClient(ABC):
    """Abstract base class that all LLM chat clients must implement."""

    @abstractmethod
    def chat_completions_create(self, messages: list, stream: bool = False, **kwargs) -> dict:
        """Issue a chat-completion request.

        The parameters mirror the ``create`` methods of the ZhipuAI and
        OpenAI SDKs (``messages`` list, optional ``stream`` flag, and any
        extra provider-specific keyword arguments).
        """
        ...

class ZhipuAIClient(BaseLLMClient):
    """ZhipuAI (GLM) implementation of :class:`BaseLLMClient`."""

    def __init__(self, api_key: str, model_name: str):
        """Bind the client to a ZhipuAI API key and model.

        Raises:
            ValueError: if ``api_key`` is empty or missing.
        """
        if not api_key:
            raise ValueError("智谱AI API Key未提供。")
        self.client = ZhipuAIClientSDK(api_key=api_key)
        self.model_name = model_name

    def chat_completions_create(self, messages: list, stream: bool = False, **kwargs) -> dict:
        """Send a chat-completion request through the ZhipuAI SDK.

        The SDK's return value already matches the structure callers expect
        (``choices[0].message.content`` for non-streaming calls, an iterator
        of chunks when ``stream`` is True), so it is returned unmodified.
        """
        # NOTE: an earlier version branched on `stream` here, but both
        # branches returned the raw response unchanged, so the dead
        # conditional was removed.
        return self.client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            stream=stream,
            **kwargs,
        )

class DashScopeOpenAICompatibleClient(BaseLLMClient):
    """Alibaba Cloud Tongyi Qianwen (DashScope) client using the OpenAI-compatible endpoint."""

    def __init__(self, api_key: str, model_name: str, base_url: str):
        """Validate credentials and build the underlying ``openai`` client.

        Raises:
            ValueError: if ``api_key`` or ``base_url`` is empty or missing.
        """
        if not api_key:
            raise ValueError("DashScope API Key未提供。")
        if not base_url:
            raise ValueError("DashScope API Base URL未提供。")
        self.model_name = model_name
        self.client = OpenAI(api_key=api_key, base_url=base_url)

    def chat_completions_create(self, messages: list, stream: bool = False, **kwargs) -> dict:
        """Forward a chat-completion request to the OpenAI-compatible endpoint.

        The ``openai`` library's response object already has the structure
        callers expect, so it is passed through untouched.
        """
        return self.client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            stream=stream,
            **kwargs,
        )

# Additional implementations (e.g. a DoubaoClient) can be added here later.
# class DoubaoClient(BaseLLMClient):
#     def __init__(self, api_key: str, model_name: str, base_url: str | None = None):
#         # ... implement the Doubao model client setup ...
#         pass
#     def chat_completions_create(self, messages: list, stream: bool = False, **kwargs) -> dict:
#         # ... implement the Doubao model API call ...
#         pass 