"""模型管理器，负责模型的加载、卸载和资源管理"""
import time
from typing import Dict, Any
from threading import Lock
from fin_senti_entity_platform.utils.logger import get_logger
from fin_senti_entity_platform.utils.config_loader import get_config
from fin_senti_entity_platform.model_development.sentiment_analysis.deepseek_sentiment import DeepSeekSentimentModel
from fin_senti_entity_platform.model_development.sentiment_analysis.rnn_trainer import RNNModelTrainer
from fin_senti_entity_platform.model_development.entity_recognition.bert_crf_trainer import BertCRFTrainer
from fin_senti_entity_platform.utils.exceptions import ModelException

logger = get_logger(__name__)

class ModelManager:
    """Singleton manager that loads, unloads and tracks inference models.

    Thread-safety model:
      * ``_lock`` (a non-reentrant class-level ``Lock``) guards the shared
        registries: ``models``, ``model_statuses``, ``_model_usage_count``,
        ``_last_used_time`` and singleton creation.
      * one per-model-type lock in ``model_locks`` serialises the (slow)
        construction of each model so ``_lock`` is never held while a model
        is being built.
      Lock ordering is always per-model lock -> ``_lock``, never the reverse,
      so the two kinds of lock cannot deadlock against each other.
    """
    _instance = None
    _lock = Lock()  # guards singleton creation and the shared registries

    def __new__(cls):
        # Race-free singleton creation: the check-and-assign runs under the lock.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(ModelManager, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on *every* ModelManager() call; initialise the shared
        # state only once, under the lock so two first-time callers cannot
        # both pass the guard.
        with self._lock:
            if getattr(self, '_initialized', False):
                return
            self.config = get_config()
            self.models = {}          # model_type -> loaded model instance
            self.model_statuses = {}  # model_type -> status dict (see load_model)
            self.model_locks = {}     # model_type -> Lock serialising its load
            service_cfg = self.config.get('service', {})
            self._auto_scaling_enabled = service_cfg.get('auto_scaling', False)
            self._max_models = service_cfg.get('max_models', 3)
            self._model_usage_count = {}  # model_type -> get_model() call count
            self._last_used_time = {}     # model_type -> last get_model() time
            self._initialized = True

    def load_model(self, model_type: str, force_reload: bool = False) -> bool:
        """Load the model of the given type.

        Args:
            model_type: one of 'deepseek', 'rnn', 'entity'.
            force_reload: reload even if an instance is already loaded; the
                replaced instance is released via its ``unload()`` if present.

        Returns:
            True on success (or already loaded), False on failure. Load
            errors are recorded in ``model_statuses`` instead of raised.
        """
        with self._lock:
            # Fast path: already loaded and no forced reload requested.
            if model_type in self.models and not force_reload:
                return True

            # Auto-scaling: evict the least-used model when the pool is full
            # and we are about to load a *new* model type. Uses the locked
            # eviction helper — calling unload_model() here would try to
            # re-acquire the non-reentrant _lock and deadlock.
            if (self._auto_scaling_enabled
                    and model_type not in self.models
                    and len(self.models) >= self._max_models):
                self._unload_least_used_model()

            # Make sure the per-model lock exists before we drop _lock.
            if model_type not in self.model_locks:
                self.model_locks[model_type] = Lock()

        # Serialise loading of this model type without holding the global lock.
        with self.model_locks[model_type]:
            # Re-check: another thread may have finished loading this type
            # while we were waiting on the per-model lock.
            if model_type in self.models and not force_reload:
                return True
            try:
                start_time = time.time()

                if model_type == 'deepseek':
                    model = DeepSeekSentimentModel()
                elif model_type == 'rnn':
                    trainer = RNNModelTrainer()
                    trainer.load_model(self.config['model']['rnn']['model_path'])
                    model = trainer
                elif model_type == 'entity':
                    model = BertCRFTrainer()
                else:
                    raise ModelException(f"不支持的模型类型: {model_type}")

                load_time = time.time() - start_time
                # Publish the new model and its status atomically.
                with self._lock:
                    # On force_reload, release the instance being replaced so
                    # its resources are not leaked.
                    old = self.models.get(model_type)
                    if old is not None and hasattr(old, 'unload'):
                        old.unload()
                    self.models[model_type] = model
                    self.model_statuses[model_type] = {
                        'status': 'running',
                        'load_time': load_time,
                        'last_loaded': time.time(),
                        'memory_usage': self._get_model_memory_usage(model_type)
                    }
                    self._model_usage_count[model_type] = 0
                    self._last_used_time[model_type] = time.time()

                logger.info(f"成功加载模型: {model_type}，耗时: {load_time:.2f}秒")
                return True
            except Exception as e:
                logger.error(f"加载模型 {model_type} 失败: {str(e)}")
                with self._lock:
                    self.model_statuses[model_type] = {
                        'status': 'error',
                        'error_message': str(e)
                    }
                return False

    def unload_model(self, model_type: str) -> bool:
        """Unload the given model type.

        Returns:
            True if the model is no longer loaded (including "was never
            loaded"), False if releasing it raised.
        """
        with self._lock:
            return self._unload_model_locked(model_type)

    def _unload_model_locked(self, model_type: str) -> bool:
        """Unload implementation. Caller MUST already hold ``self._lock``.

        Split out of ``unload_model`` so code that already holds the
        non-reentrant lock (e.g. ``_unload_least_used_model`` invoked from
        ``load_model``) does not deadlock trying to re-acquire it.
        """
        if model_type not in self.models:
            return True  # already unloaded: treat as success
        try:
            model = self.models[model_type]
            # Let the model release its own resources if it knows how.
            if hasattr(model, 'unload'):
                model.unload()
            del self.models[model_type]
            # Drop ALL bookkeeping so stale stats don't skew future eviction.
            self.model_statuses.pop(model_type, None)
            self._model_usage_count.pop(model_type, None)
            self._last_used_time.pop(model_type, None)
            logger.info(f"成功卸载模型: {model_type}")
            return True
        except Exception as e:
            logger.error(f"卸载模型 {model_type} 失败: {str(e)}")
            return False

    def _unload_least_used_model(self):
        """Evict the least-used loaded model. Caller MUST hold ``self._lock``.

        The entity-recognition model is never evicted.
        """
        least_used_model = None
        min_usage = float('inf')
        for model_type in self.models:
            if model_type != 'entity':  # keep the entity-recognition model resident
                usage = self._model_usage_count.get(model_type, 0)
                if usage < min_usage:
                    min_usage = usage
                    least_used_model = model_type
        if least_used_model is not None:
            # Uses the locked variant — the original called unload_model()
            # here, which re-acquired the non-reentrant lock and deadlocked.
            self._unload_model_locked(least_used_model)

    def get_model(self, model_type: str) -> Any:
        """Return the loaded model instance, loading it on first use.

        Raises:
            ModelException: if the model fails to load.
        """
        if model_type not in self.models:
            if not self.load_model(model_type):
                raise ModelException(f"模型 {model_type} 加载失败")

        # Update the usage statistics that drive the eviction policy.
        self._model_usage_count[model_type] = self._model_usage_count.get(model_type, 0) + 1
        self._last_used_time[model_type] = time.time()

        return self.models[model_type]

    def get_model_status(self, model_type: str) -> Dict[str, Any]:
        """Return the status dict for a model, or ``{'status': 'not_loaded'}``."""
        if model_type not in self.model_statuses:
            return {'status': 'not_loaded'}
        return self.model_statuses[model_type]

    def _get_model_memory_usage(self, model_type: str) -> Dict[str, Any]:
        """Return the model's estimated memory usage.

        Simplified placeholder — a real implementation could use e.g.
        torch.cuda.memory_allocated to measure actual usage.
        """
        return {'estimated_mb': 0}  # placeholder, needs a real implementation