"""
LLM管理器，用于处理基于会话的LLM实例并避免事件循环冲突。
"""

import asyncio
import uuid
import gc

from typing import Dict, Optional
from langchain_openai import ChatOpenAI
from threading import RLock

from config import Config


class LLMManager:
    """Thread-safe singleton that manages one ChatOpenAI instance per session.

    Keeping a separate client per session id avoids event-loop conflicts
    when the same model is driven from several asyncio contexts.
    """

    _lock = RLock()
    _instance = None
    # Per-session cache of ChatOpenAI clients; created lazily in __new__.
    # (String annotation: must not evaluate ChatOpenAI at class-body time.)
    _instances: "Dict[str, ChatOpenAI]"

    def __new__(cls, *args, **kwargs):
        # Create the singleton (and its shared cache) under the lock so
        # concurrent first instantiations cannot race.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super().__new__(cls)
                cls._instances = {}
        return cls._instance

    def get_llm_for_session(self, session_id: Optional[str] = None) -> "ChatOpenAI":
        """Return (creating if needed) the LLM instance for *session_id*.

        If no session_id is given a random UUID is used, so the returned
        instance cannot be looked up again by the caller; prefer passing an
        explicit id and releasing it later with cleanup_session().
        """
        if not session_id:
            session_id = str(uuid.uuid4())

        with self._lock:
            if session_id not in self._instances:
                self._instances[session_id] = ChatOpenAI(
                    model=Config.LLM_MODEL,
                    temperature=0,
                    api_key=Config.LLM_API_KEY,
                    base_url=Config.LLM_URI,
                    extra_body={"enable_thinking": Config.USE_THINKING},
                )
            return self._instances[session_id]

    def cleanup_session(self, session_id: str) -> None:
        """Drop the LLM instance cached for *session_id*, if any."""
        with self._lock:
            # pop() with a default is a single atomic-enough lookup+delete
            # and is a no-op for unknown ids, matching the original behavior.
            self._instances.pop(session_id, None)

    def cleanup_all(self) -> None:
        """Drop every cached LLM instance."""
        with self._lock:
            self._instances.clear()

    def safe_invoke(self, messages, session_id: Optional[str] = None):
        """Synchronously invoke the LLM, cleaning up anonymous sessions.

        Returns the full response object (note: safe_ainvoke returns
        response.content instead — asymmetry kept for backward compatibility).

        Raises:
            Exception: on timeout or any underlying client failure; the
                original exception is chained as __cause__.
        """
        # Anonymous callers get a throwaway id that is removed in finally,
        # so the cache cannot grow without bound.
        session_id_copy = session_id if session_id else str(uuid.uuid4())
        try:
            llm = self.get_llm_for_session(session_id_copy)
            return llm.invoke(messages)
        except asyncio.TimeoutError as e:
            # NOTE(review): no wait_for() is applied on this sync path, so
            # this only fires if the client itself raises TimeoutError.
            raise Exception(f"LLM调用超时: >{Config.LLM_TIMEOUT}秒.") from e
        except Exception as e:
            # Chain the cause so the original traceback is not lost.
            raise Exception(f"LLM调用失败: {e}") from e
        finally:
            if not session_id:
                self.cleanup_session(session_id_copy)
            gc.collect()

    async def safe_ainvoke(self, messages, session_id: Optional[str] = None):
        """Asynchronously invoke the LLM with a Config.LLM_TIMEOUT deadline.

        Returns response.content (note: safe_invoke returns the full
        response object — asymmetry kept for backward compatibility).

        Raises:
            Exception: on timeout or any underlying client failure; the
                original exception is chained as __cause__.
        """
        session_id_copy = session_id if session_id else str(uuid.uuid4())
        try:
            llm = self.get_llm_for_session(session_id_copy)
            response = await asyncio.wait_for(
                llm.ainvoke(messages), timeout=Config.LLM_TIMEOUT
            )
            return response.content
        except asyncio.TimeoutError as e:
            raise Exception(f"LLM调用超时: >{Config.LLM_TIMEOUT}秒.") from e
        except Exception as e:
            raise Exception(f"LLM调用失败: {e}") from e
        finally:
            if not session_id:
                self.cleanup_session(session_id_copy)
            # NOTE(review): gc.collect() briefly blocks the event loop;
            # kept to preserve the original's explicit memory release.
            gc.collect()