"""
统一的AI服务
整合LLM服务和LangChain功能，消除冗余
"""
import asyncio
import json
from typing import Dict, List, Optional, Any, Union, AsyncGenerator
from dataclasses import dataclass
from abc import ABC, abstractmethod

from openai import AsyncOpenAI
from anthropic import AsyncAnthropic
# 使用 langchain-community 的正确导入路径
from langchain_community.chat_models.openai import ChatOpenAI
from langchain_community.chat_models.anthropic import ChatAnthropic
from langchain.schema import HumanMessage, AIMessage, SystemMessage, BaseMessage
from langchain.memory import ConversationBufferMemory
from langchain.agents import AgentType, initialize_agent, Tool
from langchain.callbacks.base import AsyncCallbackHandler

from app.core.config import settings
from app.core.logging import get_logger

logger = get_logger(__name__)


@dataclass
class AIModelConfig:
    """AI model configuration.

    Carries everything a provider client needs to issue requests: which
    provider/model to use, sampling parameters, and optional credential or
    endpoint overrides.
    """
    provider: str  # provider key, e.g. "openai", "anthropic", "deepseek"
    model_name: str  # provider-specific model identifier
    temperature: float = 0.7  # sampling temperature
    max_tokens: Optional[int] = None  # response token cap; None = provider default
    api_key: Optional[str] = None  # overrides the key read from settings when given
    base_url: Optional[str] = None  # custom endpoint (e.g. an OpenAI-compatible API)
    stream: bool = False  # default streaming mode used by chat_with_config

@dataclass
class ChatMessage:
    """A single chat message in provider-neutral form."""
    role: str  # one of: user, assistant, system
    content: str  # message text
    timestamp: Optional[str] = None  # optional timestamp; not forwarded to providers


class BaseAIClient(ABC):
    """Abstract base for provider-specific AI clients.

    Subclasses wrap one vendor SDK and expose a uniform chat/agent API.
    """

    def __init__(self, config: AIModelConfig):
        """Keep the model configuration; the client is named after its provider."""
        self.config = config
        self.name = config.provider

    @abstractmethod
    async def chat(self, messages: List[ChatMessage], stream: bool = False) -> Union[str, AsyncGenerator[str, None]]:
        """Run a chat completion: full text, or an async token stream when streaming."""
        ...

    @abstractmethod
    async def create_agent(self, tools: List[Tool]) -> Any:
        """Build a tool-using agent backed by this client's model."""
        ...


class OpenAIClient(BaseAIClient):
    """OpenAI chat client.

    Also serves OpenAI-compatible providers (e.g. DeepSeek) when the config
    points `base_url` at their endpoint.
    """

    def __init__(self, config: AIModelConfig):
        super().__init__(config)
        self.client = AsyncOpenAI(
            api_key=self._resolve_api_key(),
            base_url=config.base_url or "https://api.openai.com/v1"
        )

    def _resolve_api_key(self) -> Optional[str]:
        """Prefer the key from the config; fall back to settings.llm.OPENAI_API_KEY."""
        if self.config.api_key:
            return self.config.api_key
        if hasattr(settings, 'llm'):
            return getattr(settings.llm, 'OPENAI_API_KEY', None)
        return None

    async def chat(self, messages: List[ChatMessage], stream: bool = False) -> Union[str, AsyncGenerator[str, None]]:
        """Run an OpenAI chat completion.

        Args:
            messages: Conversation history (roles: user/assistant/system).
            stream: When True, return an async generator of text deltas;
                otherwise return the full response text.

        Raises:
            Re-raises provider errors in non-streaming mode (streaming errors
            are yielded as text by the generator instead).
        """
        openai_messages = [{"role": m.role, "content": m.content} for m in messages]

        try:
            if stream:
                # Calling the async-generator function only creates the
                # generator; errors surface inside it, not here.
                return self._stream_chat(openai_messages)
            response = await self.client.chat.completions.create(
                model=self.config.model_name,
                messages=openai_messages,  # type: ignore
                temperature=self.config.temperature,
                max_tokens=self.config.max_tokens,
                stream=False
            )
            return response.choices[0].message.content or ""
        except Exception as e:
            logger.error(f"OpenAI chat error: {e}")
            raise

    async def _stream_chat(self, messages: List[Dict]) -> AsyncGenerator[str, None]:
        """Yield response text deltas; on failure, yield a single error string."""
        try:
            response = await self.client.chat.completions.create(
                model=self.config.model_name,
                messages=messages,  # type: ignore
                temperature=self.config.temperature,
                max_tokens=self.config.max_tokens,
                stream=True
            )

            async for chunk in response:
                # Some OpenAI-compatible backends emit keep-alive/metadata
                # chunks with an empty `choices` list; guard before indexing
                # to avoid an IndexError mid-stream.
                if chunk.choices and chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            logger.error(f"OpenAI stream chat error: {e}")
            yield f"Error: {str(e)}"

    async def create_agent(self, tools: List[Tool]) -> Any:
        """Create a LangChain zero-shot ReAct agent backed by this model."""
        langchain_model = ChatOpenAI(
            model=self.config.model_name,
            temperature=self.config.temperature,
            max_tokens=self.config.max_tokens,
            api_key=self._resolve_api_key(),
            streaming=True  # type: ignore
        )

        memory = ConversationBufferMemory(memory_key="chat_history")
        return initialize_agent(
            tools, langchain_model,
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            memory=memory, verbose=True
        )


class AnthropicClient(BaseAIClient):
    """Anthropic (Claude) chat client."""

    def __init__(self, config: AIModelConfig):
        super().__init__(config)
        self.client = AsyncAnthropic(
            api_key=self._resolve_api_key()
        )

    def _resolve_api_key(self) -> Optional[str]:
        """Prefer the key from the config; fall back to settings.llm.ANTHROPIC_API_KEY."""
        if self.config.api_key:
            return self.config.api_key
        if hasattr(settings, 'llm'):
            return getattr(settings.llm, 'ANTHROPIC_API_KEY', None)
        return None

    async def chat(self, messages: List[ChatMessage], stream: bool = False) -> Union[str, AsyncGenerator[str, None]]:
        """Run an Anthropic chat completion.

        Anthropic takes the system prompt as a dedicated parameter, so system
        messages are split out of the conversation (if several are present,
        the last one wins).

        Args:
            messages: Conversation history (roles: user/assistant/system).
            stream: When True, return an async generator of text deltas;
                otherwise return the full response text.

        Raises:
            Re-raises provider errors in non-streaming mode.
        """
        system_message = ""
        conversation_messages = []

        for msg in messages:
            if msg.role == "system":
                system_message = msg.content
            else:
                conversation_messages.append({
                    "role": msg.role,
                    "content": msg.content
                })

        try:
            if stream:
                return self._stream_chat(system_message, conversation_messages)
            response = await self.client.messages.create(
                model=self.config.model_name,
                max_tokens=self.config.max_tokens or 4000,
                temperature=self.config.temperature,
                system=system_message,
                messages=conversation_messages  # type: ignore
            )
            # Concatenate the text of the response content blocks. Using
            # str(block) here would return the block's repr (e.g.
            # "TextBlock(text=..., type='text')"), not the message text, so
            # read the `.text` attribute; non-text blocks are skipped.
            return "".join(
                getattr(block, "text", "") or ""
                for block in (response.content or [])
            )
        except Exception as e:
            logger.error(f"Anthropic chat error: {e}")
            raise

    async def _stream_chat(self, system: str, messages: List[Dict]) -> AsyncGenerator[str, None]:
        """Yield response text deltas; on failure, yield a single error string."""
        try:
            async with self.client.messages.stream(
                max_tokens=self.config.max_tokens or 4000,
                model=self.config.model_name,
                temperature=self.config.temperature,
                system=system,
                messages=messages,  # type: ignore
            ) as stream:
                async for text in stream.text_stream:
                    yield text
        except Exception as e:
            logger.error(f"Anthropic stream chat error: {e}")
            yield f"Error: {str(e)}"

    async def create_agent(self, tools: List[Tool]) -> Any:
        """Create a LangChain zero-shot ReAct agent backed by this model."""
        langchain_model = ChatAnthropic(
            model_name=self.config.model_name,
            temperature=self.config.temperature,
            max_tokens=self.config.max_tokens or 4000,
            api_key=self._resolve_api_key()  # type: ignore
        )

        memory = ConversationBufferMemory(memory_key="chat_history")
        return initialize_agent(
            tools, langchain_model,
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            memory=memory, verbose=True
        )


class AIService:
    """Unified AI service.

    Single entry point for chat, streaming, agent creation and batch
    processing across multiple LLM providers; clients are created lazily
    and cached per (provider, model) pair.
    """

    def __init__(self):
        self.clients: Dict[str, BaseAIClient] = {}  # cache keyed by "provider:model"
        self.active_models: Dict[str, Any] = {}
        self.conversation_memories: Dict[str, ConversationBufferMemory] = {}

        # Registered provider -> client class. DeepSeek reuses the OpenAI
        # client because it exposes an OpenAI-compatible API.
        self.providers = {
            "openai": OpenAIClient,
            "anthropic": AnthropicClient,
            "deepseek": OpenAIClient,
        }

    def get_available_providers(self) -> List[str]:
        """Return the names of all registered providers."""
        return list(self.providers.keys())

    def get_available_models(self, provider: str) -> List[str]:
        """Return the known model names for a provider ([] for unknown providers)."""
        model_map = {
            "openai": ["gpt-4", "gpt-4-turbo-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-16k"],
            "anthropic": ["claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"],
            "deepseek": ["deepseek-chat", "deepseek-coder"]
        }
        return model_map.get(provider, [])

    async def create_client(self, config: AIModelConfig) -> BaseAIClient:
        """Create a client for the given configuration and cache it.

        Raises:
            ValueError: if the provider is not registered.
        """
        if config.provider not in self.providers:
            raise ValueError(f"Provider {config.provider} not supported")

        client = self.providers[config.provider](config)
        self.clients[f"{config.provider}:{config.model_name}"] = client
        return client

    async def _get_or_create_client(self, config: AIModelConfig) -> BaseAIClient:
        """Return the cached client for (provider, model), creating it on first use."""
        client_key = f"{config.provider}:{config.model_name}"
        client = self.clients.get(client_key)
        if client is None:
            client = await self.create_client(config)
        return client

    async def chat_with_config(
        self,
        messages: List[ChatMessage],
        config: AIModelConfig
    ) -> Union[str, AsyncGenerator[str, None]]:
        """Chat using an explicit config object (config.stream selects the mode)."""
        client = await self._get_or_create_client(config)
        return await client.chat(messages, config.stream)

    async def chat(
        self,
        messages: List[ChatMessage],
        provider: str = "openai",
        model_name: str = "gpt-3.5-turbo",
        temperature: float = 0.7,
        max_tokens: Optional[int] = None,
        stream: bool = False
    ) -> Union[str, AsyncGenerator[str, None]]:
        """Unified chat interface: build a config and delegate to chat_with_config."""
        config = AIModelConfig(
            provider=provider,
            model_name=model_name,
            temperature=temperature,
            max_tokens=max_tokens,
            stream=stream
        )
        return await self.chat_with_config(messages, config)

    async def create_agent(
        self,
        tools: List[Tool],
        provider: str = "openai",
        model_name: str = "gpt-3.5-turbo",
        temperature: float = 0.7
    ) -> Any:
        """Create a tool-using agent on the chosen provider/model."""
        config = AIModelConfig(
            provider=provider,
            model_name=model_name,
            temperature=temperature
        )
        client = await self._get_or_create_client(config)
        return await client.create_agent(tools)

    async def batch_chat(
        self,
        conversations: List[Dict[str, Any]],
        provider: str = "openai",
        model_name: str = "gpt-3.5-turbo"
    ) -> List[Dict[str, Any]]:
        """Run several conversations concurrently.

        Each item must carry a "messages" list of dicts matching ChatMessage
        fields, and may carry a "conversation_id" that is echoed back in the
        corresponding result dict.
        """
        tasks = [
            self.chat(
                messages=[ChatMessage(**msg) for msg in conv["messages"]],
                provider=provider,
                model_name=model_name,
                stream=False
            )
            for conv in conversations
        ]

        # return_exceptions=True keeps one failed conversation from dropping
        # the whole batch; failures are reported per item below.
        results = await asyncio.gather(*tasks, return_exceptions=True)

        processed_results = []
        for conv, result in zip(conversations, results):
            if isinstance(result, Exception):
                processed_results.append({
                    "success": False,
                    "error": str(result),
                    "conversation_id": conv.get("conversation_id")
                })
            else:
                processed_results.append({
                    "success": True,
                    "response": result,
                    "conversation_id": conv.get("conversation_id")
                })

        return processed_results

    def get_service_stats(self) -> Dict[str, Any]:
        """Return lightweight service metrics for monitoring."""
        return {
            "active_clients": len(self.clients),
            "providers": self.get_available_providers(),
            "conversations": len(self.conversation_memories)
        }


# Predefined tool functions
def search_web(query: str) -> str:
    """Stub web-search tool: echoes the query back as a fake result string."""
    return "搜索结果: " + query


def calculator(expression: str) -> str:
    """Calculator tool: safely evaluate a basic arithmetic expression.

    The expression typically comes from an LLM/agent, i.e. untrusted input,
    so it is evaluated over a whitelisted AST (numbers and +, -, *, /, //,
    %, ** only) instead of `eval`, which would allow arbitrary code
    execution.

    Returns:
        "<expression> = <result>" on success, or "计算错误: <reason>" when the
        expression is invalid or uses a disallowed construct.
    """
    import ast
    import operator

    # Allowed AST operator node -> implementation.
    ops = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        ast.Div: operator.truediv,
        ast.FloorDiv: operator.floordiv,
        ast.Mod: operator.mod,
        ast.Pow: operator.pow,
        ast.USub: operator.neg,
        ast.UAdd: operator.pos,
    }

    def _eval(node):
        """Recursively evaluate a whitelisted arithmetic AST node."""
        if isinstance(node, ast.Expression):
            return _eval(node.body)
        if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
            return node.value
        if isinstance(node, ast.BinOp) and type(node.op) in ops:
            return ops[type(node.op)](_eval(node.left), _eval(node.right))
        if isinstance(node, ast.UnaryOp) and type(node.op) in ops:
            return ops[type(node.op)](_eval(node.operand))
        raise ValueError("unsupported expression")

    try:
        result = _eval(ast.parse(expression, mode="eval"))
        return f"{expression} = {result}"
    except Exception as e:
        return f"计算错误: {str(e)}"


def get_current_time(query: str) -> str:
    """Time tool: report the current local time; the query argument is ignored."""
    from datetime import datetime
    now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    return f"当前时间: {now}"


# Default tool set exposed to agents. These are thin wrappers around the
# stub functions above; the `description` text is what the LLM reads when
# deciding which tool to invoke.
DEFAULT_TOOLS = [
    Tool(
        name="search",
        description="搜索网络信息",
        func=search_web
    ),
    Tool(
        name="calculator",
        description="计算数学表达式",
        func=calculator
    ),
    Tool(
        name="get_time",
        description="获取当前时间",
        func=get_current_time
    ),
]


# Module-level singleton: import `ai_service` instead of constructing
# AIService directly so client caches are shared across the app.
ai_service = AIService()