"""OpenAI LLM Provider实现"""

import os
import time
from typing import Dict, Any, Optional, Generator, List
import numpy as np
import logging
from openai import OpenAI
from backend.llm.base import BaseLLMProvider, LLMProviderFactory

# Module-level logger; note the explicit hierarchical name ('llm.openai')
# rather than the conventional __name__ — callers configure this channel.
logger = logging.getLogger('llm.openai')


class OpenAIProvider(BaseLLMProvider):
    """LLM provider backed by the OpenAI Chat Completions and Embeddings APIs.

    The provider is configured from a plain dict and lazily initializes the
    OpenAI client on first use (see ``load_model``).
    """

    def __init__(self, config: Dict[str, Any]):
        """Initialize the OpenAI provider.

        Args:
            config: Settings dict. Recognized keys: ``api_key``,
                ``api_key_env``, ``api_base``, ``model``, ``max_tokens``,
                ``temperature``, ``top_p``, ``embedding_model``,
                ``embedding_dimension``.
        """
        super().__init__(config)

        # API key resolution order: explicit config value, then the
        # environment variable named by ``api_key_env`` (default OPENAI_API_KEY).
        api_key_env = config.get('api_key_env', 'OPENAI_API_KEY')
        self.api_key = config.get('api_key', os.getenv(api_key_env, ''))

        self.api_base = config.get('api_base', os.getenv('OPENAI_API_BASE', 'https://api.openai.com/v1'))
        self.model_name = config.get('model', 'gpt-3.5-turbo')
        self.max_tokens = config.get('max_tokens', 4096)
        self.temperature = config.get('temperature', 0.7)
        self.top_p = config.get('top_p', 0.95)

        # Client is created lazily by load_model(); None means "not initialized".
        self.client = None
        self.embedding_model_name = config.get('embedding_model', 'text-embedding-ada-002')
        self.embedding_dimension = config.get('embedding_dimension', 1536)

    def load_model(self):
        """Initialize the OpenAI client (no local model/tokenizer is loaded).

        Returns:
            True on success, False if the API key is missing or the client
            could not be constructed.
        """
        try:
            if not self.api_key:
                raise ValueError("OpenAI API密钥未配置")

            self.client = OpenAI(
                api_key=self.api_key,
                base_url=self.api_base
            )
            self.model = self.model_name  # for an API provider the model name is the handle
            self.tokenizer = None  # no local tokenizer needed for the OpenAI API

            logger.info(f"成功初始化OpenAI客户端，模型: {self.model_name}")
            return True
        except Exception as e:
            logger.error(f"初始化OpenAI客户端失败: {str(e)}")
            self.client = None
            self.model = None
            return False

    def _chat_params(self, prompt: str, stream: bool, **kwargs) -> Dict[str, Any]:
        """Build the chat-completion payload shared by generate()/generate_stream().

        Per-call kwargs override the instance defaults for ``max_tokens``,
        ``temperature`` and ``top_p``.
        """
        return {
            "model": self.model_name,
            "messages": [{"role": "user", "content": prompt}],
            "max_tokens": kwargs.get("max_tokens", self.max_tokens),
            "temperature": kwargs.get("temperature", self.temperature),
            "top_p": kwargs.get("top_p", self.top_p),
            "stream": stream
        }

    def _fallback_embedding(self) -> np.ndarray:
        """Random placeholder vector returned when the embeddings API is unavailable.

        NOTE(review): callers silently receive non-reproducible random data
        instead of an error — deliberate best-effort behavior preserved from
        the original implementation; errors are only visible in the log.
        """
        return np.random.rand(self.embedding_dimension)

    def generate(self, prompt: str, **kwargs) -> str:
        """Generate a text completion for ``prompt``.

        Args:
            prompt: User prompt; sent as a single user message.
            **kwargs: Optional overrides (``max_tokens``, ``temperature``,
                ``top_p``).

        Returns:
            The generated text, or an error string prefixed with "错误:".
        """
        if not self.client:
            if not self.load_model():
                return "错误: OpenAI客户端未初始化"

        try:
            response = self.client.chat.completions.create(
                **self._chat_params(prompt, False, **kwargs)
            )

            if response and response.choices and len(response.choices) > 0:
                content = response.choices[0].message.content
                # content can be None (no text part in the choice); keep the
                # declared -> str contract instead of leaking None to callers.
                if content is not None:
                    return content

            return "错误: 未获得有效响应"
        except Exception as e:
            logger.error(f"OpenAI生成失败: {str(e)}")
            return f"错误: {str(e)}"

    def generate_stream(self, prompt: str, **kwargs) -> Generator[str, None, None]:
        """Stream a text completion for ``prompt``.

        Args:
            prompt: User prompt; sent as a single user message.
            **kwargs: Optional overrides (``max_tokens``, ``temperature``,
                ``top_p``).

        Yields:
            Text fragments as they arrive; a single "错误: ..." string on failure.
        """
        if not self.client:
            if not self.load_model():
                yield "错误: OpenAI客户端未初始化"
                return

        try:
            response = self.client.chat.completions.create(
                **self._chat_params(prompt, True, **kwargs)
            )

            # Relay only chunks that actually carry text (delta.content may be
            # None for role/annotation-only chunks).
            for chunk in response:
                if chunk and chunk.choices and len(chunk.choices) > 0:
                    delta = chunk.choices[0].delta
                    if delta.content:
                        yield delta.content
        except Exception as e:
            logger.error(f"OpenAI流式生成失败: {str(e)}")
            yield f"错误: {str(e)}"

    def generate_embedding(self, text: str) -> np.ndarray:
        """Embed a single text.

        Args:
            text: Input text.

        Returns:
            The embedding vector, or a random placeholder on failure
            (see ``_fallback_embedding``).
        """
        if not self.client:
            if not self.load_model():
                return self._fallback_embedding()

        try:
            response = self.client.embeddings.create(
                input=[text],
                model=self.embedding_model_name
            )

            if response and response.data and len(response.data) > 0:
                return np.array(response.data[0].embedding)

            return self._fallback_embedding()
        except Exception as e:
            logger.error(f"OpenAI嵌入生成失败: {str(e)}")
            return self._fallback_embedding()

    def generate_embeddings(self, texts: List[str]) -> List[np.ndarray]:
        """Embed a batch of texts in one API call.

        Args:
            texts: Input texts.

        Returns:
            One embedding vector per input text, or random placeholders on
            failure (see ``_fallback_embedding``).
        """
        if not self.client:
            if not self.load_model():
                return [self._fallback_embedding() for _ in texts]

        try:
            response = self.client.embeddings.create(
                input=texts,
                model=self.embedding_model_name
            )

            if response and response.data:
                return [np.array(embedding.embedding) for embedding in response.data]

            return [self._fallback_embedding() for _ in texts]
        except Exception as e:
            logger.error(f"OpenAI批量嵌入生成失败: {str(e)}")
            return [self._fallback_embedding() for _ in texts]

    def close(self):
        """Release references; the OpenAI client needs no explicit shutdown."""
        self.client = None
        self.model = None


# Register this provider with the factory under the 'openai' key so it can be
# instantiated by name from configuration.
LLMProviderFactory.register_provider('openai', OpenAIProvider)