from abc import ABC, abstractmethod
import json
import httpx
from typing import Dict, Any, Optional, List
import logging

logger = logging.getLogger(__name__)

class BaseLLM(ABC):
    """Abstract base class for async LLM chat clients.

    Concrete providers in this module (GuBee, Volc, DashScope, Ollama)
    implement :meth:`chat_completion` with the same signature so callers
    can swap backends freely.
    """
    
    @abstractmethod
    async def chat_completion(
        self, 
        messages: List[Dict[str, str]], 
        **kwargs
    ) -> Dict[str, Any]:
        """Send a chat request and return the provider's JSON response.

        Args:
            messages: OpenAI-style message dicts (``role``/``content``).
            **kwargs: Provider-specific options such as ``model`` and
                ``temperature``.

        Returns:
            The decoded JSON response body as a dict.
        """
        pass

class GuBeeLLM(BaseLLM):
    """GuBee chat-completion client (OpenAI-style API, bearer-token auth)."""
    
    def __init__(self, api_key: str, base_url: str = "https://open.gubee.cn/api/v1"):
        """
        Args:
            api_key: Bearer token used in the ``Authorization`` header.
            base_url: API root; defaults to the public GuBee endpoint.
        """
        self.api_key = api_key
        self.base_url = base_url
        
    async def chat_completion(
        self, 
        messages: List[Dict[str, str]], 
        **kwargs
    ) -> Dict[str, Any]:
        """POST a chat request and return the decoded JSON response.

        Args:
            messages: OpenAI-style message dicts (``role``/``content``).
            **kwargs: ``model`` (default ``"gubee-v1"``), ``temperature``
                (default ``0.7``), ``timeout`` in seconds (default ``60.0``
                — httpx's implicit 5 s default is too short for LLM
                generation).

        Returns:
            The provider's JSON payload as a dict.

        Raises:
            httpx.HTTPStatusError: On non-2xx responses (previously error
                bodies were silently returned or crashed JSON decoding).
            httpx.HTTPError: On network or timeout failures.
        """
        try:
            # Explicit timeout: httpx defaults to 5 s, which LLM calls
            # routinely exceed.
            timeout = kwargs.get("timeout", 60.0)
            async with httpx.AsyncClient(timeout=timeout) as client:
                response = await client.post(
                    f"{self.base_url}/chat/completions",
                    headers={
                        "Authorization": f"Bearer {self.api_key}",
                        "Content-Type": "application/json"
                    },
                    json={
                        "messages": messages,
                        "model": kwargs.get("model", "gubee-v1"),
                        "temperature": kwargs.get("temperature", 0.7)
                    }
                )
                # Surface 401/429/5xx as exceptions instead of handing an
                # error payload back to the caller as if it were a result.
                response.raise_for_status()
                return response.json()
        except Exception as e:
            # logger.exception records the traceback; lazy %s args avoid
            # eager f-string formatting.
            logger.exception("GuBee API 调用失败: %s", e)
            raise

class VolcLLM(BaseLLM):
    """Volcano Engine chat-completion client (AK/SK header auth)."""
    
    def __init__(self, ak: str, sk: str, base_url: str = "https://api.volc.platform.bytedance.com/"):
        """
        Args:
            ak: Access key ID, sent as the ``Access-Key-ID`` header.
            sk: Secret access key, sent as the ``Secret-Access-Key`` header.
            base_url: API root; defaults to the public Volcano endpoint.
        """
        self.ak = ak
        self.sk = sk
        self.base_url = base_url
        
    async def chat_completion(
        self, 
        messages: List[Dict[str, str]], 
        **kwargs
    ) -> Dict[str, Any]:
        """POST a chat request and return the decoded JSON response.

        Args:
            messages: OpenAI-style message dicts (``role``/``content``).
            **kwargs: ``model`` (default ``"chatglm-v1"``), ``temperature``
                (default ``0.7``), ``timeout`` in seconds (default ``60.0``
                — httpx's implicit 5 s default is too short for LLM
                generation).

        Returns:
            The provider's JSON payload as a dict.

        Raises:
            httpx.HTTPStatusError: On non-2xx responses (previously error
                bodies were silently returned or crashed JSON decoding).
            httpx.HTTPError: On network or timeout failures.
        """
        try:
            # Explicit timeout: httpx defaults to 5 s, which LLM calls
            # routinely exceed.
            timeout = kwargs.get("timeout", 60.0)
            async with httpx.AsyncClient(timeout=timeout) as client:
                response = await client.post(
                    f"{self.base_url}/ml/v1/chat/completions",
                    headers={
                        "Access-Key-ID": self.ak,
                        "Secret-Access-Key": self.sk,
                        "Content-Type": "application/json"
                    },
                    json={
                        "messages": messages,
                        "model": kwargs.get("model", "chatglm-v1"),
                        "temperature": kwargs.get("temperature", 0.7)
                    }
                )
                # Surface 401/429/5xx as exceptions instead of handing an
                # error payload back to the caller as if it were a result.
                response.raise_for_status()
                return response.json()
        except Exception as e:
            # logger.exception records the traceback; lazy %s args avoid
            # eager f-string formatting.
            logger.exception("火山引擎 API 调用失败: %s", e)
            raise

class DashScopeLLM(BaseLLM):
    """Alibaba Cloud DashScope text-generation client (bearer-token auth).

    NOTE: DashScope nests sampling options under a ``parameters`` object,
    unlike the OpenAI-style providers in this module.
    """
    
    def __init__(self, api_key: str, base_url: str = "https://dashscope.aliyuncs.com/api/v1"):
        """
        Args:
            api_key: Bearer token used in the ``Authorization`` header.
            base_url: API root; defaults to the public DashScope endpoint.
        """
        self.api_key = api_key
        self.base_url = base_url
        
    async def chat_completion(
        self, 
        messages: List[Dict[str, str]], 
        **kwargs
    ) -> Dict[str, Any]:
        """POST a generation request and return the decoded JSON response.

        Args:
            messages: Message dicts (``role``/``content``).
            **kwargs: ``model`` (default ``"qwen-v1"``), ``temperature``
                (default ``0.7``), ``timeout`` in seconds (default ``60.0``
                — httpx's implicit 5 s default is too short for LLM
                generation).

        Returns:
            The provider's JSON payload as a dict.

        Raises:
            httpx.HTTPStatusError: On non-2xx responses (previously error
                bodies were silently returned or crashed JSON decoding).
            httpx.HTTPError: On network or timeout failures.
        """
        try:
            # Explicit timeout: httpx defaults to 5 s, which LLM calls
            # routinely exceed.
            timeout = kwargs.get("timeout", 60.0)
            async with httpx.AsyncClient(timeout=timeout) as client:
                response = await client.post(
                    f"{self.base_url}/services/aigc/text-generation/generation",
                    headers={
                        "Authorization": f"Bearer {self.api_key}",
                        "Content-Type": "application/json"
                    },
                    json={
                        "messages": messages,
                        "model": kwargs.get("model", "qwen-v1"),
                        # DashScope expects sampling options nested here.
                        "parameters": {
                            "temperature": kwargs.get("temperature", 0.7)
                        }
                    }
                )
                # Surface 401/429/5xx as exceptions instead of handing an
                # error payload back to the caller as if it were a result.
                response.raise_for_status()
                return response.json()
        except Exception as e:
            # logger.exception records the traceback; lazy %s args avoid
            # eager f-string formatting.
            logger.exception("DashScope API 调用失败: %s", e)
            raise

class OllamaLLM(BaseLLM):
    """Ollama local chat client (no authentication)."""
    
    def __init__(self, base_url: str = "http://localhost:11434"):
        """
        Args:
            base_url: Ollama server root; defaults to the local daemon.
        """
        self.base_url = base_url
        
    async def chat_completion(
        self, 
        messages: List[Dict[str, str]], 
        **kwargs
    ) -> Dict[str, Any]:
        """POST a chat request and return the decoded JSON response.

        Args:
            messages: Message dicts (``role``/``content``).
            **kwargs: ``model`` (default ``"llama2"``), ``temperature``
                (default ``0.7``), ``timeout`` in seconds (default ``60.0``
                — httpx's implicit 5 s default is too short for local
                generation).

        Returns:
            The server's JSON payload as a dict.

        Raises:
            httpx.HTTPStatusError: On non-2xx responses.
            httpx.HTTPError: On network or timeout failures.
        """
        try:
            # Explicit timeout: httpx defaults to 5 s, which local model
            # generation routinely exceeds.
            timeout = kwargs.get("timeout", 60.0)
            async with httpx.AsyncClient(timeout=timeout) as client:
                response = await client.post(
                    f"{self.base_url}/api/chat",
                    json={
                        "messages": messages,
                        "model": kwargs.get("model", "llama2"),
                        # BUG FIX: /api/chat streams NDJSON by default, so
                        # response.json() would fail on the multi-object
                        # body; request a single JSON response instead.
                        "stream": False,
                        "options": {
                            "temperature": kwargs.get("temperature", 0.7)
                        }
                    }
                )
                # Surface HTTP-level failures (404 model missing, 5xx) as
                # exceptions instead of returning an error payload.
                response.raise_for_status()
                return response.json()
        except Exception as e:
            # logger.exception records the traceback; lazy %s args avoid
            # eager f-string formatting.
            logger.exception("Ollama API 调用失败: %s", e)
            raise

class LLMFactory:
    """Factory that constructs a concrete :class:`BaseLLM` by provider name."""
    
    # Required constructor kwargs per provider, used for early validation
    # so missing credentials fail here instead of obscurely at call time.
    _REQUIRED_KEYS: Dict[str, tuple] = {
        "gubee": ("api_key",),
        "volc": ("ak", "sk"),
        "dashscope": ("api_key",),
        "ollama": (),
    }
    
    @staticmethod
    def create_llm(provider: str, **kwargs) -> "BaseLLM":
        """Create an LLM client for the given provider.

        Args:
            provider: One of ``gubee``, ``volc``, ``dashscope``, ``ollama``.
            **kwargs: Provider credentials/options — ``api_key`` for
                gubee/dashscope, ``ak``/``sk`` for volc, optional
                ``base_url`` for ollama.

        Returns:
            BaseLLM: A configured client instance.

        Raises:
            ValueError: If the provider is unknown, or a required
                credential is missing/empty (previously ``None`` was
                passed through silently).
        """
        required = LLMFactory._REQUIRED_KEYS.get(provider)
        if required is None:
            raise ValueError(f"不支持的 LLM 提供商: {provider}")
        # Reject missing or empty credentials up front.
        missing = [key for key in required if not kwargs.get(key)]
        if missing:
            raise ValueError(
                f"LLM 提供商 {provider} 缺少必需参数: {', '.join(missing)}"
            )
        if provider == "gubee":
            return GuBeeLLM(api_key=kwargs["api_key"])
        elif provider == "volc":
            return VolcLLM(ak=kwargs["ak"], sk=kwargs["sk"])
        elif provider == "dashscope":
            return DashScopeLLM(api_key=kwargs["api_key"])
        else:  # ollama — base_url is optional with a local default
            return OllamaLLM(base_url=kwargs.get("base_url", "http://localhost:11434"))
