"""
LLM提供者模块

定义各种LLM提供者，包括本地和云端模型。
"""

from abc import ABC, abstractmethod
from typing import Any, Dict, Optional

from llama_index.core.llms import LLM as BaseLLM
from llama_index.llms.ollama import Ollama

from ..config import config


class BaseLLMProvider(ABC):
    """LLM提供者基类"""

    @abstractmethod
    def get_model(self) -> BaseLLM:
        """获取LLM模型实例"""
        pass

    @abstractmethod
    def get_info(self) -> Dict[str, Any]:
        """获取提供者信息"""
        pass

    @abstractmethod
    def test_connection(self) -> Dict[str, Any]:
        """测试连接"""
        pass


class OllamaLLMProvider(BaseLLMProvider):
    """LLM provider backed by a local Ollama server.

    The underlying ``Ollama`` client is created lazily on first use, so
    constructing the provider itself is cheap and side-effect free.
    """

    def __init__(
        self,
        model_name: str,
        timeout: float = 360.0,
        temperature: float = 0.1,
        max_tokens: Optional[int] = None
    ):
        """Store connection settings; the model is NOT loaded here.

        Args:
            model_name: Name of the Ollama model to load (e.g. "llama3").
            timeout: Per-request timeout in seconds.
            temperature: Sampling temperature forwarded to the model.
            max_tokens: Optional cap on generated tokens; ``None`` leaves
                the backend default in effect.
        """
        self.model_name = model_name
        self.timeout = timeout
        self.temperature = temperature
        self.max_tokens = max_tokens
        # Lazily-created client; populated on the first get_model() call.
        self._model: Optional[Ollama] = None

    def get_model(self) -> Ollama:
        """Return the Ollama model instance, creating it on first call (lazy load)."""
        if self._model is None:
            print(f"正在加载Ollama模型: {self.model_name}")
            print(f"请求超时时间: {self.timeout}秒")

            self._model = Ollama(
                model=self.model_name,
                request_timeout=self.timeout,
                temperature=self.temperature,
                max_tokens=self.max_tokens
            )

            print("Ollama模型加载完成")

        return self._model

    def get_info(self) -> Dict[str, Any]:
        """Return this provider's configuration and load state."""
        return {
            "provider_type": "ollama",
            "model_name": self.model_name,
            "timeout": self.timeout,
            "temperature": self.temperature,
            "max_tokens": self.max_tokens,
            "is_loaded": self._model is not None
        }

    def test_connection(self) -> Dict[str, Any]:
        """Test connectivity to the Ollama service with a short prompt.

        Returns:
            A dict with ``success``, ``response_time`` (seconds, rounded to
            3 decimals), ``response_length`` and ``error`` (None on success,
            the exception message on failure). Never raises.
        """
        # Local import kept to match the file's style; the import itself
        # cannot fail, so it sits outside the try block.
        import time

        try:
            model = self.get_model()
            start_time = time.time()

            # Send a minimal test request to exercise the full round trip.
            response = model.complete("Hello, this is a connection test.")

            response_time = time.time() - start_time

            return {
                "success": True,
                "response_time": round(response_time, 3),
                "response_length": len(str(response)) if response else 0,
                "error": None
            }
        except Exception as e:
            # Connection tests are best-effort: report failure, don't raise.
            return {
                "success": False,
                "response_time": 0.0,
                "response_length": 0,
                "error": str(e)
            }


class CustomOpenAILLMProvider(BaseLLMProvider):
    """Provider for OpenAI-compatible cloud endpoints with arbitrary model names.

    Unlike the Ollama provider, no client state is cached: a fresh adapter
    is built on every ``get_model()`` call.
    """

    def __init__(
        self,
        api_key: str,
        base_url: str,
        model_name: str,
        timeout: int = 60,
        max_retries: int = 3,
        temperature: float = 0.1,
        max_tokens: Optional[int] = None
    ):
        """Store the endpoint configuration.

        Args:
            api_key: API key sent to the OpenAI-compatible endpoint.
            base_url: Base URL of the endpoint.
            model_name: Model identifier; any string the endpoint accepts.
            timeout: Per-request timeout in seconds.
            max_retries: Number of retries on failed requests.
            temperature: Sampling temperature forwarded to the model.
            max_tokens: Optional cap on generated tokens; ``None`` leaves
                the backend default in effect.
        """
        self.api_key = api_key
        self.base_url = base_url
        self.model_name = model_name
        self.timeout = timeout
        self.max_retries = max_retries
        self.temperature = temperature
        self.max_tokens = max_tokens

    def get_model(self) -> BaseLLM:
        """Build and return a new OpenAI-compatible adapter instance."""
        # The custom adapter accepts arbitrary model names, which the stock
        # OpenAI integration would reject.
        from .adapters import CustomOpenAIAPIAdapter
        return CustomOpenAIAPIAdapter(
            api_key=self.api_key,
            api_base=self.base_url,
            model_name=self.model_name,
            timeout=self.timeout,
            max_retries=self.max_retries,
            temperature=self.temperature,
            max_tokens=self.max_tokens
        )

    def get_info(self) -> Dict[str, Any]:
        """Return this provider's configuration and load state."""
        return {
            "provider_type": "openai",
            "model_name": self.model_name,
            "base_url": self.base_url,
            "timeout": self.timeout,
            "max_retries": self.max_retries,
            "temperature": self.temperature,
            "max_tokens": self.max_tokens,
            "is_loaded": True  # Custom providers are always "loaded" as they don't need initialization
        }

    def test_connection(self) -> Dict[str, Any]:
        """Test connectivity to the cloud API with a short prompt.

        Returns:
            A dict with ``success``, ``response_time`` (seconds, rounded to
            3 decimals), ``response_length`` and ``error`` (None on success,
            the exception message on failure). Never raises.
        """
        # Local import kept to match the file's style; the import itself
        # cannot fail, so it sits outside the try block.
        import time

        try:
            model = self.get_model()
            start_time = time.time()

            # Send a minimal test request to exercise the full round trip.
            response = model.complete("Hello, this is a connection test.")

            response_time = time.time() - start_time

            return {
                "success": True,
                "response_time": round(response_time, 3),
                "response_length": len(str(response)) if response else 0,
                "error": None
            }
        except Exception as e:
            # Connection tests are best-effort: report failure, don't raise.
            return {
                "success": False,
                "response_time": 0.0,
                "response_length": 0,
                "error": str(e)
            }