"""
本地缓存管理模块

提供本地缓存目录管理功能，包括缓存同步、文件上传/下载到本地缓存等。
"""

import os
import shutil
from pathlib import Path
from typing import Optional, List
from .client import MinIOClient


class CacheManager:
    """Local cache manager.

    Manages the local cache directory tree and synchronizes it with MinIO
    object storage: per-file download into / upload from the cache, bulk
    project sync in both directions, and cache inspection / cleanup.
    """

    def __init__(self, cache_root: Optional[str] = None, config_path: Optional[str] = None):
        """Initialize the cache manager.

        Args:
            cache_root: Cache root directory. When None it is read from the
                MinIO client configuration (``cache.root_dir``).
            config_path: Optional configuration file path, forwarded to
                MinIOClient.
        """
        self.minio_client = MinIOClient(config_path)

        if cache_root is None:
            cache_root = self.minio_client.config['cache']['root_dir']

        self.cache_root = Path(cache_root).resolve()

        # Each bucket type maps to its own subdirectory of the cache root.
        self.bucket_dirs = {
            'datasets': self.cache_root / 'datasets',
            'models': self.cache_root / 'models',
            'results': self.cache_root / 'results'
        }

        # Eagerly create the directory tree unless disabled in the config.
        if self.minio_client.config['cache'].get('auto_create', True):
            self._ensure_cache_dirs()

    def _ensure_cache_dirs(self) -> None:
        """Create every bucket cache directory (idempotent)."""
        for cache_dir in self.bucket_dirs.values():
            cache_dir.mkdir(parents=True, exist_ok=True)

    def _validate_bucket_type(self, bucket_type: str) -> None:
        """Raise ValueError if *bucket_type* is not a known bucket.

        Centralizes the check so every public method reports unknown bucket
        types with the same error instead of a raw KeyError.
        """
        if bucket_type not in self.bucket_dirs:
            raise ValueError(f"无效的存储桶类型: {bucket_type}")

    def get_cache_path(
        self,
        bucket_type: str,
        project_name: str,
        filename: str
    ) -> Path:
        """Return the local cache path for a file.

        Args:
            bucket_type: Bucket type ('datasets', 'models' or 'results').
            project_name: Project name.
            filename: File name (may contain subdirectory components).

        Returns:
            Path of the file inside the local cache.

        Raises:
            ValueError: If bucket_type is unknown.
        """
        self._validate_bucket_type(bucket_type)
        return self.bucket_dirs[bucket_type] / project_name / filename

    def upload_to_cache(
        self,
        bucket_type: str,
        project_name: str,
        remote_filename: str,
        show_progress: bool = True
    ) -> bool:
        """Download a file from MinIO into the local cache.

        NOTE(review): the name is historical — despite "upload" this method
        pulls data FROM MinIO INTO the cache. Kept for API compatibility.

        Args:
            bucket_type: Bucket type.
            project_name: Project name.
            remote_filename: Remote file name.
            show_progress: Whether to display a progress bar.

        Returns:
            True on success.
        """
        cache_path = self.get_cache_path(bucket_type, project_name, remote_filename)

        # Ensure the parent directory exists before the client writes into it.
        cache_path.parent.mkdir(parents=True, exist_ok=True)

        return self.minio_client.download_file(
            bucket_type=bucket_type,
            project_name=project_name,
            remote_filename=remote_filename,
            local_file_path=str(cache_path),
            show_progress=show_progress
        )

    def upload_from_cache(
        self,
        bucket_type: str,
        project_name: str,
        filename: str,
        show_progress: bool = True
    ) -> bool:
        """Upload a file from the local cache to MinIO.

        Args:
            bucket_type: Bucket type.
            project_name: Project name.
            filename: File name.
            show_progress: Whether to display a progress bar.

        Returns:
            True on success; False if the cached file does not exist or the
            upload fails.
        """
        cache_path = self.get_cache_path(bucket_type, project_name, filename)

        if not cache_path.exists():
            print(f"✗ 缓存文件不存在: {cache_path}")
            return False

        return self.minio_client.upload_file(
            bucket_type=bucket_type,
            project_name=project_name,
            local_file_path=str(cache_path),
            remote_filename=filename,
            show_progress=show_progress
        )

    def sync_project_to_cache(
        self,
        bucket_type: str,
        project_name: str,
        overwrite: bool = False
    ) -> int:
        """Download an entire project from MinIO into the local cache.

        Args:
            bucket_type: Bucket type.
            project_name: Project name.
            overwrite: Whether to overwrite files already cached locally.

        Returns:
            Number of files downloaded successfully.
        """
        print(f"正在同步项目到本地缓存: {bucket_type}/{project_name}")

        # Enumerate the project's objects in MinIO.
        objects = self.minio_client.list_objects(bucket_type, project_name)

        if not objects:
            print("未找到文件")
            return 0

        success_count = 0

        for obj in objects:
            object_name = obj['name']
            # Object names look like "{env}/{project_name}/<relative path>";
            # keep everything after the second slash so subdirectories survive.
            parts = object_name.split('/')
            if len(parts) >= 3:
                filename = '/'.join(parts[2:])
            else:
                filename = parts[-1]

            cache_path = self.get_cache_path(bucket_type, project_name, filename)

            # Skip files already cached unless overwriting was requested.
            if cache_path.exists() and not overwrite:
                # Fixed: include the file name in the skip message.
                print(f"⊙ 跳过已存在: {filename}")
                continue

            if self.upload_to_cache(bucket_type, project_name, filename, show_progress=False):
                success_count += 1

        print(f"✓ 同步完成，下载了 {success_count}/{len(objects)} 个文件")
        return success_count

    def sync_project_from_cache(
        self,
        bucket_type: str,
        project_name: str,
        overwrite: bool = False
    ) -> int:
        """Upload an entire locally-cached project to MinIO.

        Args:
            bucket_type: Bucket type.
            project_name: Project name.
            overwrite: Whether to overwrite objects already present in MinIO.

        Returns:
            Number of files uploaded successfully.

        Raises:
            ValueError: If bucket_type is unknown (was a bare KeyError before).
        """
        self._validate_bucket_type(bucket_type)

        print(f"正在同步本地缓存到 MinIO: {bucket_type}/{project_name}")

        project_cache_dir = self.bucket_dirs[bucket_type] / project_name

        if not project_cache_dir.exists():
            print(f"✗ 缓存目录不存在: {project_cache_dir}")
            return 0

        # Collect every file below the project directory, as paths relative
        # to it (subdirectories included).
        local_files = [
            path.relative_to(project_cache_dir)
            for path in sorted(project_cache_dir.rglob('*'))
            if path.is_file()
        ]

        if not local_files:
            print("未找到本地文件")
            return 0

        success_count = 0

        for rel_path in local_files:
            filename = str(rel_path)

            # Skip objects that already exist in MinIO unless overwriting.
            if not overwrite and self.minio_client.object_exists(
                bucket_type, project_name, filename
            ):
                # Fixed: include the file name in the skip message.
                print(f"⊙ 跳过已存在: {filename}")
                continue

            if self.upload_from_cache(bucket_type, project_name, filename, show_progress=False):
                success_count += 1

        print(f"✓ 同步完成，上传了 {success_count}/{len(local_files)} 个文件")
        return success_count

    def clear_cache(
        self,
        bucket_type: Optional[str] = None,
        project_name: Optional[str] = None,
        confirm: bool = True
    ) -> bool:
        """Delete cached files.

        Args:
            bucket_type: Bucket type; when None the whole cache is cleared
                (project_name is then ignored).
            project_name: Project name; when None the whole bucket's cache
                is cleared.
            confirm: Whether to prompt for interactive confirmation.

        Returns:
            True on success (also when the target directory did not exist),
            False if the user declined or deletion failed.

        Raises:
            ValueError: If bucket_type is given but unknown.
        """
        if bucket_type is None:
            # Clear the entire cache root.
            target_dir = self.cache_root
            target_desc = "所有缓存"
        else:
            self._validate_bucket_type(bucket_type)
            if project_name is None:
                # Clear the whole bucket's cache.
                target_dir = self.bucket_dirs[bucket_type]
                target_desc = f"{bucket_type} 桶的所有缓存"
            else:
                # Clear a single project's cache.
                target_dir = self.bucket_dirs[bucket_type] / project_name
                target_desc = f"{bucket_type}/{project_name} 的缓存"

        if not target_dir.exists():
            print(f"缓存目录不存在: {target_dir}")
            return True

        # Interactive confirmation before destructive removal.
        if confirm:
            response = input(f"确认要删除 {target_desc} 吗？(y/N): ")
            if response.lower() != 'y':
                print("取消清理")
                return False

        try:
            shutil.rmtree(target_dir)
            # Recreate the directory skeleton when clearing a bucket or a
            # project (clearing everything intentionally removes the root).
            if bucket_type is not None:
                self._ensure_cache_dirs()
            print(f"✓ 已清理 {target_desc}")
            return True
        except Exception as e:
            print(f"✗ 清理失败: {e}")
            return False

    def list_cache(
        self,
        bucket_type: Optional[str] = None,
        project_name: Optional[str] = None
    ) -> List[Path]:
        """List cached files.

        Args:
            bucket_type: Bucket type; when None all caches are listed
                (project_name is then ignored).
            project_name: Project name; when None the whole bucket's cache
                is listed.

        Returns:
            List of cached file paths (empty if the target does not exist).

        Raises:
            ValueError: If bucket_type is given but unknown.
        """
        if bucket_type is None:
            # List the entire cache.
            target_dir = self.cache_root
        else:
            self._validate_bucket_type(bucket_type)
            if project_name is None:
                # List the whole bucket's cache.
                target_dir = self.bucket_dirs[bucket_type]
            else:
                # List a single project's cache.
                target_dir = self.bucket_dirs[bucket_type] / project_name

        if not target_dir.exists():
            return []

        # Recursive walk; directories themselves are not reported.
        return [path for path in target_dir.rglob('*') if path.is_file()]

    def get_cache_size(
        self,
        bucket_type: Optional[str] = None,
        project_name: Optional[str] = None
    ) -> int:
        """Compute the total size of cached files.

        Args:
            bucket_type: Bucket type; when None the whole cache is measured.
            project_name: Project name; when None the whole bucket is measured.

        Returns:
            Total size in bytes.
        """
        files = self.list_cache(bucket_type, project_name)
        # Guard with exists() in case a file vanishes between listing and stat.
        total_size = sum(f.stat().st_size for f in files if f.exists())
        return total_size

