from abc import ABC, abstractmethod
from typing import Any, Dict, List, Union

from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import BaseMessage


class BaseLLM(ABC):
    """
    Abstract base class for large language models.

    Implements the strategy pattern: every concrete backend must provide
    ``create_llm`` and ``get_model_info``; this base class supplies the
    shared sync/async invoke and stream helpers on top of a lazily
    created, cached LangChain chat-model instance.
    """

    def __init__(self, model_name: str, **kwargs):
        # Backend-specific model identifier.
        self.model_name = model_name
        # Arbitrary configuration captured for subclasses to use.
        self.config = kwargs
        # Cached chat-model instance; created on first use (see _ensure_instance).
        self._llm_instance = None

    @abstractmethod
    def create_llm(self, streaming: bool = False, callbacks=None, **kwargs) -> BaseChatModel:
        """
        Create the underlying LLM instance.

        Args:
            streaming: Whether to enable streaming output.
            callbacks: Optional list of callback handlers.
            **kwargs: Additional backend-specific configuration.

        Returns:
            BaseChatModel: The LLM instance.
        """
        pass

    @abstractmethod
    def get_model_info(self) -> Dict[str, Any]:
        """
        Return information about the model.

        Returns:
            Dict[str, Any]: Model information dictionary.
        """
        pass

    @staticmethod
    def _normalize_messages(messages: Union[str, List[BaseMessage]]) -> List[BaseMessage]:
        """Wrap a bare string prompt into a one-element HumanMessage list."""
        if isinstance(messages, str):
            # Imported locally to keep module import time minimal,
            # matching the original code's pattern.
            from langchain_core.messages import HumanMessage
            return [HumanMessage(content=messages)]
        return messages

    def _ensure_instance(self, **kwargs) -> BaseChatModel:
        """
        Create the LLM instance on first use and return the cached one.

        NOTE(review): kwargs (including ``streaming``) are only honored on
        the *first* call; subsequent calls reuse the cached instance
        unchanged. This preserves the original behavior — call
        reset_instance() to force reconfiguration.
        """
        if self._llm_instance is None:
            self._llm_instance = self.create_llm(**kwargs)
        return self._llm_instance

    def invoke(self, messages: Union[str, List[BaseMessage]], **kwargs) -> str:
        """
        Call the LLM synchronously.

        Args:
            messages: Input prompt string or list of messages.
            **kwargs: Extra configuration forwarded to create_llm on first use.

        Returns:
            str: The LLM response content.
        """
        llm = self._ensure_instance(**kwargs)
        response = llm.invoke(self._normalize_messages(messages))
        return response.content

    async def ainvoke(self, messages: Union[str, List[BaseMessage]], **kwargs) -> str:
        """
        Call the LLM asynchronously.

        Args:
            messages: Input prompt string or list of messages.
            **kwargs: Extra configuration forwarded to create_llm on first use.

        Returns:
            str: The LLM response content.
        """
        llm = self._ensure_instance(**kwargs)
        response = await llm.ainvoke(self._normalize_messages(messages))
        return response.content

    def stream(self, messages: Union[str, List[BaseMessage]], **kwargs):
        """
        Stream a response from the LLM synchronously.

        Args:
            messages: Input prompt string or list of messages.
            **kwargs: Extra configuration forwarded to create_llm on first use.

        Yields:
            str: Non-empty response content chunks.
        """
        llm = self._ensure_instance(streaming=True, **kwargs)
        for chunk in llm.stream(self._normalize_messages(messages)):
            # Skip tool-call/metadata chunks that carry no text content.
            if hasattr(chunk, 'content') and chunk.content:
                yield chunk.content

    async def astream(self, messages: Union[str, List[BaseMessage]], **kwargs):
        """
        Stream a response from the LLM asynchronously.

        Args:
            messages: Input prompt string or list of messages.
            **kwargs: Extra configuration forwarded to create_llm on first use.

        Yields:
            str: Non-empty response content chunks.
        """
        llm = self._ensure_instance(streaming=True, **kwargs)
        async for chunk in llm.astream(self._normalize_messages(messages)):
            # Skip tool-call/metadata chunks that carry no text content.
            if hasattr(chunk, 'content') and chunk.content:
                yield chunk.content

    def get_llm_instance(self, **kwargs) -> BaseChatModel:
        """
        Return the (lazily created) underlying LLM instance.

        Args:
            **kwargs: Configuration forwarded to create_llm on first use.

        Returns:
            BaseChatModel: The cached LLM instance.
        """
        return self._ensure_instance(**kwargs)

    def reset_instance(self):
        """Drop the cached LLM instance so the next call recreates it."""
        self._llm_instance = None

    def __str__(self) -> str:
        return f"{self.__class__.__name__}(model={self.model_name})"

    def __repr__(self) -> str:
        return self.__str__()

    def bind_tools(self, tools):
        """
        Bind tools to the underlying LLM and return the tool-bound model.

        Bug fix: previously returned None when the instance had not been
        created yet; now the instance is created lazily first, consistent
        with invoke()/stream().
        """
        return self._ensure_instance().bind_tools(tools)
