"""
LLM管理器，用于处理基于会话的LLM实例并避免事件循环冲突。
"""

import asyncio
import threading
import uuid
import gc

from typing import Dict, Optional
from langchain_openai import ChatOpenAI

from config import Config


class LLMManager:
    """Manage one LLM instance per session to avoid event-loop conflicts."""

    def __init__(self) -> None:
        # Map of session id -> ChatOpenAI instance; all access goes
        # through _lock so concurrent sessions cannot race.
        self._instances: Dict[str, "ChatOpenAI"] = {}
        self._lock = threading.Lock()

    def get_llm_for_session(self, session_id: Optional[str] = None) -> "ChatOpenAI":
        """Return the LLM instance for ``session_id``, creating it if needed.

        Args:
            session_id: Session key. When ``None``, a fresh UUID is
                generated, so the caller gets a dedicated new instance.

        Returns:
            The cached (or newly created) ChatOpenAI instance.
        """
        if session_id is None:
            session_id = str(uuid.uuid4())

        with self._lock:
            if session_id not in self._instances:
                self._instances[session_id] = self.create_llm()
            return self._instances[session_id]

    @staticmethod
    def create_llm() -> "ChatOpenAI":
        """Create a new ChatOpenAI instance configured from the global Config."""
        return ChatOpenAI(
            model=Config.LLM_MODEL,
            temperature=0,
            api_key=Config.LLM_API_KEY,
            base_url=Config.LLM_URI,
            extra_body={"enable_thinking": Config.USE_THINKING},
        )

    def cleanup_session(self, session_id: str) -> None:
        """Drop the LLM instance associated with ``session_id``, if any."""
        with self._lock:
            # pop() with a default is a no-op for unknown sessions.
            self._instances.pop(session_id, None)

    def cleanup_all(self) -> None:
        """Drop every cached LLM instance."""
        with self._lock:
            self._instances.clear()

    async def ainvoke(self, messages, session_id: Optional[str] = None):
        """Invoke the session's LLM with ``messages`` under a timeout.

        Args:
            messages: Messages payload forwarded to ``ChatOpenAI.ainvoke``.
            session_id: Optional session key (see get_llm_for_session).

        Returns:
            The response object from the underlying LLM call.

        Raises:
            Exception: when the call exceeds ``Config.LLM_TIMEOUT`` seconds,
                or when the underlying invocation fails for any reason.
        """
        try:
            llm = self.get_llm_for_session(session_id)
            response = await asyncio.wait_for(llm.ainvoke(messages), timeout=Config.LLM_TIMEOUT)
            return response
        except asyncio.TimeoutError as e:
            # Chain the original exception so the root cause stays in the traceback.
            raise Exception(f"LLM调用超时: >{Config.LLM_TIMEOUT}秒.") from e
        except Exception as e:
            raise Exception(f"LLM调用失败: {e}") from e
        finally:
            # NOTE(review): a full gc.collect() after every call is
            # heavy-handed; kept for behavioral compatibility — confirm
            # it is actually needed before removing.
            gc.collect()

# Process-wide singleton; creation is guarded by a lock so that two
# threads calling get_llm_manager() concurrently cannot each build one.
_llm_manager: Optional[LLMManager] = None
_llm_manager_lock = threading.Lock()


def get_llm_manager() -> LLMManager:
    """Return the global LLMManager instance, creating it on first use.

    Thread-safe: uses double-checked locking so the common path after
    initialization does not acquire the lock.
    """
    global _llm_manager
    if _llm_manager is None:  # fast path, no lock
        with _llm_manager_lock:
            if _llm_manager is None:  # re-check under the lock
                _llm_manager = LLMManager()
    return _llm_manager