"""
MinIO 客户端核心实现

提供与MinIO对象存储交互的核心功能，包括文件上传、下载、列表、删除等操作。
"""

import os
import sys
from pathlib import Path
from typing import List, Optional, Dict, Any
import fnmatch
from minio import Minio
from minio.error import S3Error
import yaml
from tqdm import tqdm


class MinIOClient:
    """High-level MinIO client.

    Wraps the ``minio`` SDK to provide file upload, download, listing and
    deletion, with multi-bucket management and dev/prod environment
    separation via an optional prefix on every object path.
    """

    def __init__(self, config_path: Optional[str] = None):
        """Initialize the MinIO client.

        Args:
            config_path: Optional configuration file path. Configuration is
                resolved in this order:
                1. the path supplied here,
                2. ``config/config.yaml`` in the current working directory,
                3. the module's bundled default configuration.
        """
        self.config = self._load_config(config_path)
        self.client = self._create_client()
        # Mapping of logical bucket type (e.g. datasets/models/results) -> bucket name.
        self.buckets = self.config['buckets']
        self.environment = self._get_environment()
        # When True, every object path is prefixed with the environment name.
        self.use_env_prefix = self.config['environment'].get('use_env_prefix', True)

    @staticmethod
    def _resolve_config_file(config_path: Optional[str]) -> Path:
        """Locate the configuration file.

        Tries, in order: the user-supplied path, ``config/config.yaml`` in the
        working directory, then the default config bundled next to this module.

        Raises:
            FileNotFoundError: If no configuration file can be located.
        """
        if config_path and Path(config_path).exists():
            return Path(config_path)

        cwd_config = Path('config/config.yaml')
        if cwd_config.exists():
            return cwd_config

        default_config = Path(__file__).parent / 'config' / 'config.yaml'
        if default_config.exists():
            return default_config

        raise FileNotFoundError(
            "无法找到配置文件。请提供配置文件路径或确保 config/config.yaml 存在。"
        )

    def _load_config(self, config_path: Optional[str] = None) -> Dict[str, Any]:
        """Load the YAML configuration, applying environment-variable overrides.

        Environment variables (``MINIO_ENDPOINT``, ``MINIO_ACCESS_KEY``,
        ``MINIO_SECRET_KEY``, ``MINIO_SECURE``, ``ENV``) take precedence over
        values from the file.

        Args:
            config_path: Optional configuration file path.

        Returns:
            The parsed configuration dictionary.

        Raises:
            FileNotFoundError: If no configuration file can be located.
        """
        config_file = self._resolve_config_file(config_path)

        with open(config_file, 'r', encoding='utf-8') as f:
            config = yaml.safe_load(f)

        env = os.environ
        if 'MINIO_ENDPOINT' in env:
            config['minio']['endpoint'] = env['MINIO_ENDPOINT']
        if 'MINIO_ACCESS_KEY' in env:
            config['minio']['access_key'] = env['MINIO_ACCESS_KEY']
        if 'MINIO_SECRET_KEY' in env:
            config['minio']['secret_key'] = env['MINIO_SECRET_KEY']
        if 'MINIO_SECURE' in env:
            # Any value other than (case-insensitive) "true" disables TLS.
            config['minio']['secure'] = env['MINIO_SECURE'].lower() == 'true'
        if 'ENV' in env:
            config['environment']['mode'] = env['ENV']

        return config

    def _create_client(self) -> Minio:
        """Create the underlying ``Minio`` SDK client from the loaded config.

        Returns:
            A configured ``Minio`` client instance.
        """
        minio_cfg = self.config['minio']
        return Minio(
            endpoint=minio_cfg['endpoint'],
            access_key=minio_cfg['access_key'],
            secret_key=minio_cfg['secret_key'],
            secure=minio_cfg['secure']
        )

    def _get_environment(self) -> str:
        """Return the current environment identifier (e.g. ``dev`` or ``prod``)."""
        return self.config['environment']['mode']

    def _build_object_path(self, project_name: str, filename: str) -> str:
        """Build the object key for a file within a project.

        Args:
            project_name: Project name.
            filename: File name (may contain ``/``-separated subdirectories).

        Returns:
            ``{env}/{project}/{filename}`` when the environment prefix is
            enabled, otherwise ``{project}/{filename}``.
        """
        # BUG FIX: the filename was previously dropped from the generated key,
        # so every object operation targeted the wrong path.
        if self.use_env_prefix:
            return f"{self.environment}/{project_name}/{filename}"
        return f"{project_name}/{filename}"

    def _relative_object_path(self, object_name: str) -> str:
        """Strip the environment/project prefix from a full object key.

        Object keys are laid out as ``{env}/{project}/{relative_path}`` (or
        ``{project}/{relative_path}`` when the env prefix is disabled). Falls
        back to the last path component for unexpectedly short keys.
        """
        parts = object_name.split('/')
        skip = 2 if self.use_env_prefix else 1
        if len(parts) > skip:
            return '/'.join(parts[skip:])
        return parts[-1]

    def ensure_bucket_exists(self, bucket_name: str) -> bool:
        """Ensure the bucket exists, creating it if necessary.

        Args:
            bucket_name: Bucket name.

        Returns:
            True on success, False if the bucket operation failed.
        """
        try:
            if not self.client.bucket_exists(bucket_name):
                self.client.make_bucket(bucket_name)
                print(f"✓ 创建存储桶: {bucket_name}")
            return True
        except S3Error as e:
            print(f"✗ 存储桶操作失败 {bucket_name}: {e}")
            return False

    def init_buckets(self) -> bool:
        """Initialize every bucket declared in the configuration.

        Returns:
            True if all buckets were created/verified successfully.
        """
        print("初始化 MinIO 存储桶...")
        success = True
        for bucket_name in self.buckets.values():
            if not self.ensure_bucket_exists(bucket_name):
                success = False
        if success:
            print("✓ 所有存储桶初始化完成")
        return success

    def upload_file(
        self,
        bucket_type: str,
        project_name: str,
        local_file_path: str,
        remote_filename: Optional[str] = None,
        show_progress: bool = True
    ) -> bool:
        """Upload a local file to MinIO.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            local_file_path: Local file path.
            remote_filename: Remote file name; defaults to the local basename.
            show_progress: Whether to display a progress bar.

        Returns:
            True if the upload succeeded.
        """
        if bucket_type not in self.buckets:
            print(f"✗ 无效的存储桶类型: {bucket_type}")
            return False

        bucket_name = self.buckets[bucket_type]

        # Create the bucket on demand so first uploads don't fail.
        self.ensure_bucket_exists(bucket_name)

        if not os.path.exists(local_file_path):
            print(f"✗ 本地文件不存在: {local_file_path}")
            return False

        if remote_filename is None:
            remote_filename = os.path.basename(local_file_path)

        object_name = self._build_object_path(project_name, remote_filename)

        try:
            file_size = os.path.getsize(local_file_path)

            if show_progress and self.config['transfer'].get('show_progress', True):
                # fput_object does not expose per-byte callbacks here, so the
                # bar is completed in one step once the transfer returns.
                with tqdm(total=file_size, unit='B', unit_scale=True, desc=f"上传 {remote_filename}") as pbar:
                    self.client.fput_object(
                        bucket_name,
                        object_name,
                        local_file_path
                    )
                    pbar.update(file_size - pbar.n)
            else:
                self.client.fput_object(
                    bucket_name,
                    object_name,
                    local_file_path
                )

            print(f"✓ 上传成功: {bucket_name}/{object_name}")
            return True

        except S3Error as e:
            print(f"✗ 上传失败: {e}")
            return False
        except Exception as e:
            print(f"✗ 上传出错: {e}")
            return False

    def download_file(
        self,
        bucket_type: str,
        project_name: str,
        remote_filename: str,
        local_file_path: str,
        show_progress: bool = True
    ) -> bool:
        """Download a file from MinIO.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            remote_filename: Remote file name.
            local_file_path: Local destination path.
            show_progress: Whether to display a progress bar.

        Returns:
            True if the download succeeded.
        """
        if bucket_type not in self.buckets:
            print(f"✗ 无效的存储桶类型: {bucket_type}")
            return False

        bucket_name = self.buckets[bucket_type]
        object_name = self._build_object_path(project_name, remote_filename)

        try:
            # Make sure the destination directory exists.
            os.makedirs(os.path.dirname(os.path.abspath(local_file_path)), exist_ok=True)

            if show_progress and self.config['transfer'].get('show_progress', True):
                try:
                    # Stat first so the progress bar knows the total size.
                    stat = self.client.stat_object(bucket_name, object_name)
                    file_size = stat.size

                    with tqdm(total=file_size, unit='B', unit_scale=True, desc=f"下载 {remote_filename}") as pbar:
                        self.client.fget_object(
                            bucket_name,
                            object_name,
                            local_file_path
                        )
                        pbar.update(file_size)
                except Exception:
                    # Size lookup failed — download without a progress bar.
                    self.client.fget_object(
                        bucket_name,
                        object_name,
                        local_file_path
                    )
            else:
                self.client.fget_object(
                    bucket_name,
                    object_name,
                    local_file_path
                )

            print(f"✓ 下载成功: {local_file_path}")
            return True

        except S3Error as e:
            print(f"✗ 下载失败: {e}")
            return False
        except Exception as e:
            print(f"✗ 下载出错: {e}")
            return False

    def list_objects(
        self,
        bucket_type: str,
        project_name: Optional[str] = None,
        prefix: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List objects in a bucket.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name; when None, all projects are listed.
            prefix: Custom key prefix; takes precedence over ``project_name``.

        Returns:
            A list of dicts with ``name``, ``size``, ``last_modified`` and
            ``etag`` per object; empty list on error.
        """
        if bucket_type not in self.buckets:
            print(f"✗ 无效的存储桶类型: {bucket_type}")
            return []

        bucket_name = self.buckets[bucket_type]

        # Derive the listing prefix from env/project when none was supplied.
        if prefix is None and project_name is not None:
            if self.use_env_prefix:
                prefix = f"{self.environment}/{project_name}/"
            else:
                prefix = f"{project_name}/"
        elif prefix is None:
            if self.use_env_prefix:
                prefix = f"{self.environment}/"
            else:
                prefix = ""

        try:
            objects = self.client.list_objects(bucket_name, prefix=prefix, recursive=True)
            return [
                {
                    'name': obj.object_name,
                    'size': obj.size,
                    'last_modified': obj.last_modified,
                    'etag': obj.etag
                }
                for obj in objects
            ]

        except S3Error as e:
            print(f"✗ 列出对象失败: {e}")
            return []

    def delete_object(
        self,
        bucket_type: str,
        project_name: str,
        remote_filename: str
    ) -> bool:
        """Delete an object.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            remote_filename: Remote file name.

        Returns:
            True if the deletion succeeded.
        """
        if bucket_type not in self.buckets:
            print(f"✗ 无效的存储桶类型: {bucket_type}")
            return False

        bucket_name = self.buckets[bucket_type]
        object_name = self._build_object_path(project_name, remote_filename)

        try:
            self.client.remove_object(bucket_name, object_name)
            print(f"✓ 删除成功: {bucket_name}/{object_name}")
            return True

        except S3Error as e:
            print(f"✗ 删除失败: {e}")
            return False

    def object_exists(
        self,
        bucket_type: str,
        project_name: str,
        remote_filename: str
    ) -> bool:
        """Check whether an object exists.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            remote_filename: Remote file name.

        Returns:
            True if the object exists (stat succeeded).
        """
        if bucket_type not in self.buckets:
            return False

        bucket_name = self.buckets[bucket_type]
        object_name = self._build_object_path(project_name, remote_filename)

        try:
            self.client.stat_object(bucket_name, object_name)
            return True
        except Exception:
            # Any stat failure (missing object, connectivity) reports "absent".
            return False

    def get_object_info(
        self,
        bucket_type: str,
        project_name: str,
        remote_filename: str
    ) -> Optional[Dict[str, Any]]:
        """Return object metadata.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            remote_filename: Remote file name.

        Returns:
            A dict with name/size/last_modified/etag/content_type, or None
            when the object does not exist.
        """
        if bucket_type not in self.buckets:
            return None

        bucket_name = self.buckets[bucket_type]
        object_name = self._build_object_path(project_name, remote_filename)

        try:
            stat = self.client.stat_object(bucket_name, object_name)
            return {
                'name': object_name,
                'size': stat.size,
                'last_modified': stat.last_modified,
                'etag': stat.etag,
                'content_type': stat.content_type
            }
        except S3Error:
            return None

    def upload_directory(
        self,
        bucket_type: str,
        project_name: str,
        local_dir_path: str,
        remote_dir_prefix: str = "",
        pattern: str = "*",
        recursive: bool = True,
        show_progress: bool = True
    ) -> Dict[str, Any]:
        """Upload a whole directory to MinIO.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            local_dir_path: Local directory path.
            remote_dir_prefix: Optional remote directory prefix.
            pattern: Glob pattern for file matching (e.g. "*.mp4", "**/*.txt").
            recursive: Whether to descend into subdirectories.
            show_progress: Whether to display a progress bar.

        Returns:
            A dict with ``success`` (count), ``failed`` (count) and ``files``
            (relative paths of the uploaded files).
        """
        empty_result = {'success': 0, 'failed': 0, 'files': []}

        if bucket_type not in self.buckets:
            print(f"✗ 无效的存储桶类型: {bucket_type}")
            return empty_result

        local_path = Path(local_dir_path)
        if not local_path.exists():
            print(f"✗ 目录不存在: {local_dir_path}")
            return empty_result

        if not local_path.is_dir():
            print(f"✗ 不是目录: {local_dir_path}")
            return empty_result

        # Collect matching files (rglob descends, glob stays at the top level).
        globber = local_path.rglob if recursive else local_path.glob
        files_to_upload = [p for p in globber(pattern) if p.is_file()]

        if not files_to_upload:
            print(f"✗ 未找到匹配的文件: {pattern}")
            return empty_result

        print(f"找到 {len(files_to_upload)} 个文件待上传")

        success_count = 0
        failed_count = 0
        uploaded_files = []

        iterator = tqdm(files_to_upload, desc="上传文件") if show_progress else files_to_upload

        for file_path in iterator:
            # Preserve the directory structure relative to the upload root.
            rel_path = file_path.relative_to(local_path)

            if remote_dir_prefix:
                remote_filename = f"{remote_dir_prefix}/{rel_path}"
            else:
                remote_filename = str(rel_path)

            try:
                success = self.upload_file(
                    bucket_type=bucket_type,
                    project_name=project_name,
                    local_file_path=str(file_path),
                    remote_filename=remote_filename,
                    show_progress=False  # per-file progress would clash with the batch bar
                )

                if success:
                    success_count += 1
                    uploaded_files.append(str(rel_path))
                else:
                    failed_count += 1
            except Exception as e:
                print(f"✗ 上传失败 {file_path}: {e}")
                failed_count += 1

        print(f"\n上传完成: 成功 {success_count}, 失败 {failed_count}")

        return {
            'success': success_count,
            'failed': failed_count,
            'files': uploaded_files
        }

    def download_directory(
        self,
        bucket_type: str,
        project_name: str,
        local_dir_path: str,
        remote_dir_prefix: str = "",
        pattern: str = "*",
        show_progress: bool = True
    ) -> Dict[str, Any]:
        """Download a whole directory from MinIO.

        Args:
            bucket_type: Bucket type (e.g. datasets, models, results).
            project_name: Project name.
            local_dir_path: Local destination directory.
            remote_dir_prefix: Optional remote directory prefix.
            pattern: Glob pattern for file matching (e.g. "*.mp4", "**/*.txt").
            show_progress: Whether to display a progress bar.

        Returns:
            A dict with ``success`` (count), ``failed`` (count) and ``files``
            (relative paths of the downloaded files).
        """
        empty_result = {'success': 0, 'failed': 0, 'files': []}

        if bucket_type not in self.buckets:
            print(f"✗ 无效的存储桶类型: {bucket_type}")
            return empty_result

        local_path = Path(local_dir_path)
        local_path.mkdir(parents=True, exist_ok=True)

        objects = self.list_objects(bucket_type, project_name)

        if not objects:
            print(f"✗ 未找到文件")
            return empty_result

        # Select the objects matching the requested prefix and pattern.
        files_to_download = []
        for obj in objects:
            object_name = obj['name']

            # Path relative to {env}/{project} (or {project} without env prefix).
            relative_path = self._relative_object_path(object_name)

            if remote_dir_prefix:
                if not relative_path.startswith(remote_dir_prefix + '/'):
                    continue
                relative_path = relative_path[len(remote_dir_prefix) + 1:]

            # Match against the bare path and against any nesting depth.
            if fnmatch.fnmatch(relative_path, pattern) or fnmatch.fnmatch(relative_path, f"**/{pattern}"):
                files_to_download.append({
                    'object_name': object_name,
                    'relative_path': relative_path,
                    'size': obj['size']
                })

        if not files_to_download:
            print(f"✗ 未找到匹配的文件: {pattern}")
            return empty_result

        print(f"找到 {len(files_to_download)} 个文件待下载")

        success_count = 0
        failed_count = 0
        downloaded_files = []

        iterator = tqdm(files_to_download, desc="下载文件") if show_progress else files_to_download

        for file_info in iterator:
            relative_path = file_info['relative_path']

            local_file_path = local_path / relative_path
            local_file_path.parent.mkdir(parents=True, exist_ok=True)

            # download_file re-adds the env/project prefix, so pass the name
            # including any remote_dir_prefix component.
            actual_filename = self._relative_object_path(file_info['object_name'])

            try:
                success = self.download_file(
                    bucket_type=bucket_type,
                    project_name=project_name,
                    remote_filename=actual_filename,
                    local_file_path=str(local_file_path),
                    show_progress=False  # per-file progress would clash with the batch bar
                )

                if success:
                    success_count += 1
                    downloaded_files.append(relative_path)
                else:
                    failed_count += 1
            except Exception as e:
                print(f"✗ 下载失败 {relative_path}: {e}")
                failed_count += 1

        print(f"\n下载完成: 成功 {success_count}, 失败 {failed_count}")

        return {
            'success': success_count,
            'failed': failed_count,
            'files': downloaded_files
        }

