import os
import time
import json
import logging
import threading
from typing import Dict, List, Optional, Any
from datetime import datetime, timedelta

from app.utils import ensure_directory_exists, get_file_size_str

# Module-level logger scoped to this module's dotted name.
logger = logging.getLogger(__name__)

class CacheManager:
    """本地文件缓存管理服务"""
    
    def __init__(self, app=None):
        self.app = app
        self.cache_dir = ""
        self.cache_timeout = 604800  # 7天
        self.max_cache_size = 2 * 1024 * 1024 * 1024  # 2GB
        self.cache_index_file = ""
        self.cache_index = {}
        self.cleanup_interval = 3600  # 1小时
        self.lock = threading.RLock()
        self._cleanup_timer = None
        
        if app:
            self.init_app(app)
    
    def init_app(self, app):
        """初始化应用"""
        self.app = app
        self.cache_dir = app.config.get('CACHE_DIR', '')
        self.cache_timeout = app.config.get('CACHE_TIMEOUT', 604800)
        self.max_cache_size = app.config.get('MAX_CACHE_SIZE', 2) * 1024 * 1024 * 1024
        
        # 缓存索引文件
        self.cache_index_file = os.path.join(self.cache_dir, 'cache_index.json')
        
        # 确保缓存目录存在
        ensure_directory_exists(self.cache_dir)
        
        # 加载缓存索引
        self.load_cache_index()
        
        # 启动定期清理
        self.start_cleanup_timer()
    
    def load_cache_index(self):
        """加载缓存索引"""
        try:
            if os.path.exists(self.cache_index_file):
                with open(self.cache_index_file, 'r', encoding='utf-8') as f:
                    self.cache_index = json.load(f)
                logger.debug(f"加载缓存索引，共 {len(self.cache_index)} 个条目")
            else:
                self.cache_index = {}
                self.save_cache_index()
        except Exception as e:
            logger.error(f"加载缓存索引失败: {e}")
            self.cache_index = {}
    
    def save_cache_index(self):
        """保存缓存索引"""
        try:
            with self.lock:
                with open(self.cache_index_file, 'w', encoding='utf-8') as f:
                    json.dump(self.cache_index, f, ensure_ascii=False, indent=2)
        except Exception as e:
            logger.error(f"保存缓存索引失败: {e}")
    
    def add_to_cache(self, key: str, file_path: str, metadata: Optional[Dict] = None):
        """添加文件到缓存索引"""
        try:
            with self.lock:
                if not os.path.exists(file_path):
                    return
                
                cache_entry = {
                    'file_path': file_path,
                    'created_at': datetime.now().isoformat(),
                    'last_accessed': datetime.now().isoformat(),
                    'file_size': os.path.getsize(file_path),
                    'access_count': 1,
                    'metadata': metadata or {}
                }
                
                # 如果已存在，更新访问信息
                if key in self.cache_index:
                    old_entry = self.cache_index[key]
                    cache_entry['created_at'] = old_entry.get('created_at', cache_entry['created_at'])
                    cache_entry['access_count'] = old_entry.get('access_count', 0) + 1
                
                self.cache_index[key] = cache_entry
                self.save_cache_index()
                
                logger.debug(f"添加到缓存: {key}")
                
        except Exception as e:
            logger.error(f"添加缓存条目失败 {key}: {e}")
    
    def get_from_cache(self, key: str) -> Optional[str]:
        """从缓存获取文件路径"""
        try:
            with self.lock:
                if key not in self.cache_index:
                    return None
                
                cache_entry = self.cache_index[key]
                file_path = cache_entry['file_path']
                
                # 检查文件是否存在
                if not os.path.exists(file_path):
                    # 文件不存在，移除缓存条目
                    del self.cache_index[key]
                    self.save_cache_index()
                    return None
                
                # 检查是否过期
                created_at = datetime.fromisoformat(cache_entry['created_at'])
                if datetime.now() - created_at > timedelta(seconds=self.cache_timeout):
                    # 缓存过期，移除条目
                    self.remove_from_cache(key)
                    return None
                
                # 更新访问信息
                cache_entry['last_accessed'] = datetime.now().isoformat()
                cache_entry['access_count'] = cache_entry.get('access_count', 0) + 1
                self.save_cache_index()
                
                return file_path
                
        except Exception as e:
            logger.error(f"从缓存获取文件失败 {key}: {e}")
            return None
    
    def remove_from_cache(self, key: str, delete_file: bool = True):
        """从缓存移除条目"""
        try:
            with self.lock:
                if key not in self.cache_index:
                    return
                
                cache_entry = self.cache_index[key]
                file_path = cache_entry['file_path']
                
                # 删除文件
                if delete_file and os.path.exists(file_path):
                    try:
                        os.unlink(file_path)
                        logger.debug(f"删除缓存文件: {file_path}")
                    except OSError as e:
                        logger.warning(f"删除缓存文件失败 {file_path}: {e}")
                
                # 移除索引条目
                del self.cache_index[key]
                self.save_cache_index()
                
        except Exception as e:
            logger.error(f"移除缓存条目失败 {key}: {e}")
    
    def is_cached(self, key: str) -> bool:
        """检查是否已缓存"""
        return self.get_from_cache(key) is not None
    
    def clear_cache(self, cache_type: str = 'all'):
        """清理缓存"""
        try:
            with self.lock:
                if cache_type == 'all':
                    # 清理所有缓存
                    for key in list(self.cache_index.keys()):
                        self.remove_from_cache(key, delete_file=True)
                    
                elif cache_type == 'expired':
                    # 只清理过期缓存
                    self.cleanup_expired_cache()
                    
                elif cache_type == 'unused':
                    # 清理长期未使用的缓存
                    self.cleanup_unused_cache()
                
                logger.info(f"缓存清理完成: {cache_type}")
                
        except Exception as e:
            logger.error(f"清理缓存失败: {e}")
    
    def cleanup_expired_cache(self):
        """清理过期缓存"""
        try:
            with self.lock:
                expired_keys = []
                now = datetime.now()
                
                for key, cache_entry in self.cache_index.items():
                    created_at = datetime.fromisoformat(cache_entry['created_at'])
                    if now - created_at > timedelta(seconds=self.cache_timeout):
                        expired_keys.append(key)
                
                for key in expired_keys:
                    self.remove_from_cache(key)
                
                if expired_keys:
                    logger.info(f"清理过期缓存 {len(expired_keys)} 个条目")
                    
        except Exception as e:
            logger.error(f"清理过期缓存失败: {e}")
    
    def cleanup_unused_cache(self, unused_days: int = 30):
        """清理长期未使用的缓存"""
        try:
            with self.lock:
                unused_keys = []
                cutoff_time = datetime.now() - timedelta(days=unused_days)
                
                for key, cache_entry in self.cache_index.items():
                    last_accessed = datetime.fromisoformat(cache_entry['last_accessed'])
                    if last_accessed < cutoff_time:
                        unused_keys.append(key)
                
                for key in unused_keys:
                    self.remove_from_cache(key)
                
                if unused_keys:
                    logger.info(f"清理未使用缓存 {len(unused_keys)} 个条目")
                    
        except Exception as e:
            logger.error(f"清理未使用缓存失败: {e}")
    
    def enforce_cache_size_limit(self):
        """强制执行缓存大小限制"""
        try:
            with self.lock:
                # 计算当前缓存大小
                total_size = sum(
                    entry.get('file_size', 0) 
                    for entry in self.cache_index.values()
                )
                
                if total_size <= self.max_cache_size:
                    return
                
                # 按最后访问时间排序，删除最旧的文件
                sorted_entries = sorted(
                    self.cache_index.items(),
                    key=lambda x: x[1]['last_accessed']
                )
                
                bytes_to_remove = total_size - self.max_cache_size
                removed_size = 0
                removed_count = 0
                
                for key, entry in sorted_entries:
                    if removed_size >= bytes_to_remove:
                        break
                    
                    removed_size += entry.get('file_size', 0)
                    removed_count += 1
                    self.remove_from_cache(key)
                
                logger.info(f"强制清理缓存: 删除 {removed_count} 个文件，释放 {get_file_size_str(removed_size)}")
                
        except Exception as e:
            logger.error(f"强制清理缓存失败: {e}")
    
    def get_cache_stats(self) -> Dict[str, Any]:
        """获取缓存统计信息"""
        try:
            with self.lock:
                total_files = len(self.cache_index)
                total_size = sum(
                    entry.get('file_size', 0) 
                    for entry in self.cache_index.values()
                )
                
                # 计算过期文件数量
                now = datetime.now()
                expired_count = 0
                
                for entry in self.cache_index.values():
                    created_at = datetime.fromisoformat(entry['created_at'])
                    if now - created_at > timedelta(seconds=self.cache_timeout):
                        expired_count += 1
                
                return {
                    'total_files': total_files,
                    'total_size': total_size,
                    'total_size_str': get_file_size_str(total_size),
                    'expired_count': expired_count,
                    'cache_dir': self.cache_dir,
                    'max_cache_size': self.max_cache_size,
                    'max_cache_size_str': get_file_size_str(self.max_cache_size),
                    'cache_timeout': self.cache_timeout,
                    'usage_percentage': (total_size / self.max_cache_size) * 100 if self.max_cache_size > 0 else 0
                }
                
        except Exception as e:
            logger.error(f"获取缓存统计失败: {e}")
            return {}
    
    def get_popular_cache_entries(self, limit: int = 10) -> List[Dict]:
        """获取热门缓存条目"""
        try:
            with self.lock:
                sorted_entries = sorted(
                    self.cache_index.items(),
                    key=lambda x: x[1].get('access_count', 0),
                    reverse=True
                )
                
                return [
                    {
                        'key': key,
                        'access_count': entry.get('access_count', 0),
                        'file_size': entry.get('file_size', 0),
                        'last_accessed': entry.get('last_accessed', ''),
                        'created_at': entry.get('created_at', '')
                    }
                    for key, entry in sorted_entries[:limit]
                ]
                
        except Exception as e:
            logger.error(f"获取热门缓存条目失败: {e}")
            return []
    
    def start_cleanup_timer(self):
        """启动定期清理定时器"""
        def cleanup_task():
            try:
                self.cleanup_expired_cache()
                self.enforce_cache_size_limit()
            except Exception as e:
                logger.error(f"定期清理任务失败: {e}")
            finally:
                # 重新启动定时器
                self.start_cleanup_timer()
        
        if self._cleanup_timer:
            self._cleanup_timer.cancel()
        
        self._cleanup_timer = threading.Timer(self.cleanup_interval, cleanup_task)
        self._cleanup_timer.daemon = True
        self._cleanup_timer.start()
        
        logger.debug(f"启动缓存清理定时器，间隔 {self.cleanup_interval} 秒")
    
    def stop_cleanup_timer(self):
        """停止定期清理定时器"""
        if self._cleanup_timer:
            self._cleanup_timer.cancel()
            self._cleanup_timer = None
            logger.debug("停止缓存清理定时器")
    
    def generate_cache_key(self, *args) -> str:
        """生成缓存键"""
        import hashlib
        key_string = '|'.join(str(arg) for arg in args)
        return hashlib.md5(key_string.encode()).hexdigest()

# Module-level singleton; created unconfigured (no app) and expected to be
# configured later via cache_manager.init_app(app).
cache_manager = CacheManager()