"""
增强版云盘客户端

为各个SDK实现内部管理逻辑的封装，提供高级功能如缓存、重试、批量操作等。
"""

from typing import Dict, List, Optional, Any, Union, Callable
import os
import time
import hashlib
import threading
from pathlib import Path
from concurrent.futures import ThreadPoolExecutor, Future, as_completed
import logging

from .base import BaseCloudProvider, CloudFile, UploadResult, DownloadResult, FileType
from .unified_client import UnifiedCloudClient


class CacheEntry:
    """A single cached value with TTL-based expiry and access bookkeeping."""

    def __init__(self, data: Any, ttl: int = 300):
        now = time.time()
        self.data = data
        self.created_at = now
        self.ttl = ttl
        self.access_count = 0
        self.last_access = now

    def is_expired(self) -> bool:
        """Return True once the entry's age exceeds its TTL."""
        age = time.time() - self.created_at
        return age > self.ttl

    def access(self) -> Any:
        """Record one access and return the stored data."""
        self.last_access = time.time()
        self.access_count += 1
        return self.data


class FileCache:
    """Thread-safe TTL cache for directory listings.

    Entries expire after their TTL. When the cache is full, the entry with
    the oldest creation time is evicted. Lookup hits and misses are counted
    so that get_stats() can report a true hit rate.
    """

    def __init__(self, max_size: int = 1000, default_ttl: int = 300):
        self.max_size = max_size
        self.default_ttl = default_ttl
        self._cache: Dict[str, CacheEntry] = {}
        self._lock = threading.RLock()
        # Lookup counters backing the hit-rate statistic.
        self._hits = 0
        self._misses = 0

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value for key, or None if absent or expired."""
        with self._lock:
            entry = self._cache.get(key)
            if entry and not entry.is_expired():
                self._hits += 1
                return entry.access()
            if entry:
                # Drop the expired entry eagerly.
                del self._cache[key]
            self._misses += 1
            return None

    def set(self, key: str, data: Any, ttl: Optional[int] = None):
        """Store data under key with the given TTL (default_ttl if None)."""
        with self._lock:
            # Purge expired entries so they don't count toward max_size.
            self._cleanup_expired()

            # Still at capacity after the purge: evict the oldest entry.
            if len(self._cache) >= self.max_size:
                self._evict_oldest()

            ttl = ttl or self.default_ttl
            self._cache[key] = CacheEntry(data, ttl)

    def invalidate(self, key: str):
        """Remove a single key from the cache (no-op if absent)."""
        with self._lock:
            self._cache.pop(key, None)

    def invalidate_prefix(self, prefix: str):
        """Remove every cached key that starts with the given prefix."""
        with self._lock:
            keys_to_remove = [k for k in self._cache.keys() if k.startswith(prefix)]
            for key in keys_to_remove:
                del self._cache[key]

    def clear(self):
        """Remove all entries (lookup counters are kept)."""
        with self._lock:
            self._cache.clear()

    def _cleanup_expired(self):
        """Drop all expired entries. Caller must hold the lock."""
        expired_keys = [
            key for key, entry in self._cache.items()
            if entry.is_expired()
        ]
        for key in expired_keys:
            del self._cache[key]

    def _evict_oldest(self):
        """Evict the entry with the oldest creation time. Caller holds lock."""
        if not self._cache:
            return

        oldest_key = min(
            self._cache.keys(),
            key=lambda k: self._cache[k].created_at
        )
        del self._cache[oldest_key]

    def get_stats(self) -> Dict[str, Any]:
        """Return cache statistics, including a real lookup hit rate."""
        with self._lock:
            total_access = sum(entry.access_count for entry in self._cache.values())
            lookups = self._hits + self._misses
            return {
                "size": len(self._cache),
                "max_size": self.max_size,
                "total_access": total_access,
                "hits": self._hits,
                "misses": self._misses,
                # BUG FIX: was len(cache)/total_access, which is a size-to-access
                # ratio, not a hit rate. Use hits over total lookups instead.
                "hit_rate": (self._hits / lookups) if lookups else 0
            }


class RetryConfig:
    """Parameters for exponential-backoff retries."""

    def __init__(self, max_attempts: int = 3, base_delay: float = 1.0,
                 max_delay: float = 60.0, backoff_factor: float = 2.0):
        self.max_attempts = max_attempts
        self.base_delay = base_delay
        self.max_delay = max_delay
        self.backoff_factor = backoff_factor

    def get_delay(self, attempt: int) -> float:
        """Return the backoff delay for a 1-based attempt, capped at max_delay."""
        exponent = attempt - 1
        raw_delay = self.base_delay * self.backoff_factor ** exponent
        return raw_delay if raw_delay < self.max_delay else self.max_delay


class BatchOperation:
    """Runs a queue of upload / download / delete tasks on a thread pool."""

    def __init__(self, operation_type: str, max_workers: int = 5):
        self.operation_type = operation_type
        self.max_workers = max_workers
        self.tasks: List[Dict[str, Any]] = []
        self.results: List[Any] = []
        self.errors: List[Exception] = []

    def add_task(self, **kwargs):
        """Queue one task, described entirely by keyword arguments."""
        self.tasks.append(kwargs)

    def execute(self, client: 'EnhancedCloudClient') -> List[Any]:
        """Run every queued task concurrently; collect results and errors."""
        if not self.tasks:
            return []

        def _submit(pool, task):
            # Map the operation type to the matching client call; unknown
            # types are skipped (mirrors the original behavior).
            if self.operation_type == "upload":
                return pool.submit(
                    client.upload_file,
                    task["local_path"],
                    task["remote_path"],
                    task.get("callback")
                )
            if self.operation_type == "download":
                return pool.submit(
                    client.download_file,
                    task["remote_path"],
                    task["local_path"],
                    task.get("callback")
                )
            if self.operation_type == "delete":
                return pool.submit(client.delete_file, task["remote_path"])
            return None

        with ThreadPoolExecutor(max_workers=self.max_workers) as pool:
            pending = [
                fut for fut in (_submit(pool, task) for task in self.tasks)
                if fut is not None
            ]

            # Gather outcomes as they finish; failures go to self.errors.
            for fut in as_completed(pending):
                try:
                    self.results.append(fut.result())
                except Exception as exc:
                    self.errors.append(exc)

        return self.results


class EnhancedCloudClient:
    """Enhanced cloud-storage client.

    Builds high-level features on top of the unified client:
    1. Directory-listing cache
    2. Automatic retry with exponential backoff
    3. Batch operation support
    4. Progress monitoring via per-call callbacks
    5. File integrity verification (MD5 hooks; the remote-side comparison
       is currently a placeholder)
    """

    def __init__(self, max_workers: int = 5, enable_cache: bool = True,
                 cache_ttl: int = 300, retry_config: Optional[RetryConfig] = None):
        """
        Initialize the enhanced client.

        Args:
            max_workers: maximum number of worker threads
            enable_cache: whether to enable the listing cache
            cache_ttl: cache TTL in seconds
            retry_config: retry policy (defaults to RetryConfig())
        """
        self.logger = logging.getLogger(self.__class__.__name__)

        # Underlying unified client that talks to the mounted providers.
        self.client = UnifiedCloudClient(max_workers=max_workers)

        # Listing cache. NOTE: self.cache exists only when enable_cache is
        # True, so every access below is guarded by self.enable_cache.
        self.enable_cache = enable_cache
        if enable_cache:
            self.cache = FileCache(default_ttl=cache_ttl)

        # Retry policy shared by all operations.
        self.retry_config = retry_config or RetryConfig()

        # Operation counters; guarded by _stats_lock.
        self._stats = {
            "cache_hits": 0,
            "cache_misses": 0,
            "retry_attempts": 0,
            "successful_operations": 0,
            "failed_operations": 0
        }
        self._stats_lock = threading.Lock()

        self.logger.info("增强版云盘客户端初始化完成")

    def mount_provider(self, mount_point: str, provider_type: str,
                      config: Dict[str, Any], auto_login: bool = True) -> bool:
        """Mount a provider; delegates to the underlying unified client."""
        return self.client.mount_provider(mount_point, provider_type, config, auto_login)

    def unmount_provider(self, mount_point: str) -> bool:
        """Unmount a provider and drop cached listings under its mount point."""
        success = self.client.unmount_provider(mount_point)
        if success and self.enable_cache:
            # BUG FIX: listing cache keys are "list:<path>" (see
            # list_directory), so a bare mount_point prefix never matched and
            # stale listings survived the unmount.
            self.cache.invalidate_prefix(f"list:{mount_point}")
        return success

    def list_directory(self, path: str = "/", use_cache: bool = True) -> List[CloudFile]:
        """
        List a directory's contents (with optional caching).

        Args:
            path: directory path
            use_cache: whether to consult the cache first

        Returns:
            List[CloudFile]: the files in the directory
        """
        cache_key = f"list:{path}"

        # Fast path: serve from cache when enabled and permitted.
        if self.enable_cache and use_cache:
            cached_result = self.cache.get(cache_key)
            if cached_result is not None:
                with self._stats_lock:
                    self._stats["cache_hits"] += 1
                return cached_result

        # Cache miss (or caching bypassed): count it, then hit the backend.
        if self.enable_cache:
            with self._stats_lock:
                self._stats["cache_misses"] += 1

        def operation():
            return self.client.list_directory(path)

        result = self._execute_with_retry(operation, f"list_directory({path})")

        # Populate the cache only with non-empty results.
        if self.enable_cache and result:
            self.cache.set(cache_key, result)

        return result or []

    def upload_file(self, local_path: str, remote_path: str,
                   callback: Optional[Callable] = None,
                   verify_checksum: bool = True) -> UploadResult:
        """
        Upload a file (with retry and optional checksum verification).

        Args:
            local_path: local file path
            remote_path: remote destination path
            callback: progress callback
            verify_checksum: whether to verify the checksum

        Returns:
            UploadResult: the upload outcome
        """
        # Compute the local file's checksum up front.
        local_checksum = None
        if verify_checksum and os.path.exists(local_path):
            local_checksum = self._calculate_file_checksum(local_path)

        def operation():
            result = self.client.upload_file(local_path, remote_path, callback)

            # Placeholder: remote checksum comparison would go here.
            if verify_checksum and result.success and local_checksum:
                pass

            return result

        result = self._execute_with_retry(operation, f"upload_file({local_path} -> {remote_path})")

        # The parent directory's listing is now stale; drop it.
        if self.enable_cache and result and result.success:
            parent_path = str(Path(remote_path).parent)
            self.cache.invalidate_prefix(f"list:{parent_path}")

        return result

    def download_file(self, remote_path: str, local_path: str,
                     callback: Optional[Callable] = None,
                     verify_checksum: bool = True) -> DownloadResult:
        """
        Download a file (with retry and optional checksum verification).

        Args:
            remote_path: remote file path
            local_path: local destination path
            callback: progress callback
            verify_checksum: whether to verify the checksum

        Returns:
            DownloadResult: the download outcome
        """
        def operation():
            result = self.client.download_file(remote_path, local_path, callback)

            # Placeholder: local integrity verification would go here.
            if verify_checksum and result.success and os.path.exists(local_path):
                pass

            return result

        return self._execute_with_retry(operation, f"download_file({remote_path} -> {local_path})")

    def delete_file(self, remote_path: str) -> bool:
        """
        Delete a remote file (with retry).

        Args:
            remote_path: remote file path

        Returns:
            bool: True if the deletion succeeded
        """
        def operation():
            return self.client.delete_file(remote_path)

        result = self._execute_with_retry(operation, f"delete_file({remote_path})")

        # The parent directory's listing is now stale; drop it.
        if self.enable_cache and result:
            parent_path = str(Path(remote_path).parent)
            self.cache.invalidate_prefix(f"list:{parent_path}")

        return result

    def create_directory(self, remote_path: str) -> bool:
        """Create a remote directory (with retry)."""
        def operation():
            return self.client.create_directory(remote_path)

        result = self._execute_with_retry(operation, f"create_directory({remote_path})")

        # The parent directory's listing is now stale; drop it.
        if self.enable_cache and result:
            parent_path = str(Path(remote_path).parent)
            self.cache.invalidate_prefix(f"list:{parent_path}")

        return result

    def batch_upload(self, file_pairs: List[tuple], max_workers: int = 3) -> BatchOperation:
        """
        Upload many files concurrently.

        Args:
            file_pairs: pairs of [(local_path, remote_path), ...]
            max_workers: maximum concurrency

        Returns:
            BatchOperation: the executed batch (results/errors populated)
        """
        batch = BatchOperation("upload", max_workers)

        for local_path, remote_path in file_pairs:
            batch.add_task(local_path=local_path, remote_path=remote_path)

        batch.execute(self)
        return batch

    def batch_download(self, file_pairs: List[tuple], max_workers: int = 3) -> BatchOperation:
        """
        Download many files concurrently.

        Args:
            file_pairs: pairs of [(remote_path, local_path), ...]
            max_workers: maximum concurrency

        Returns:
            BatchOperation: the executed batch (results/errors populated)
        """
        batch = BatchOperation("download", max_workers)

        for remote_path, local_path in file_pairs:
            batch.add_task(remote_path=remote_path, local_path=local_path)

        batch.execute(self)
        return batch

    def batch_delete(self, remote_paths: List[str], max_workers: int = 3) -> BatchOperation:
        """
        Delete many remote files concurrently.

        Args:
            remote_paths: remote file paths to delete
            max_workers: maximum concurrency

        Returns:
            BatchOperation: the executed batch (results/errors populated)
        """
        batch = BatchOperation("delete", max_workers)

        for remote_path in remote_paths:
            batch.add_task(remote_path=remote_path)

        batch.execute(self)
        return batch

    def _execute_with_retry(self, operation: Callable, operation_name: str) -> Any:
        """
        Run an operation, retrying on exception per self.retry_config.

        Args:
            operation: zero-argument callable to run
            operation_name: human-readable name used in log messages

        Returns:
            Any: the operation's result

        Raises:
            Exception: the last exception, once all attempts are exhausted
        """
        last_exception = None

        for attempt in range(1, self.retry_config.max_attempts + 1):
            try:
                result = operation()

                with self._stats_lock:
                    self._stats["successful_operations"] += 1

                return result

            except Exception as e:
                last_exception = e

                # NOTE: counted on every failed attempt, including the first
                # (so "retry_attempts" is really "failed attempts").
                with self._stats_lock:
                    self._stats["retry_attempts"] += 1

                if attempt < self.retry_config.max_attempts:
                    delay = self.retry_config.get_delay(attempt)
                    self.logger.warning(
                        f"操作失败 {operation_name} (尝试 {attempt}/{self.retry_config.max_attempts}): {e}, "
                        f"{delay}秒后重试"
                    )
                    time.sleep(delay)
                else:
                    self.logger.error(f"操作最终失败 {operation_name}: {e}")

        # All attempts failed.
        with self._stats_lock:
            self._stats["failed_operations"] += 1

        # Re-raise the last exception so callers see the real failure.
        if last_exception:
            raise last_exception

        return None

    def _calculate_file_checksum(self, file_path: str) -> str:
        """Return the file's MD5 hex digest, or "" if it cannot be read."""
        hash_md5 = hashlib.md5()
        try:
            with open(file_path, "rb") as f:
                # Hash in 4 KiB chunks to keep memory flat for large files.
                for chunk in iter(lambda: f.read(4096), b""):
                    hash_md5.update(chunk)
            return hash_md5.hexdigest()
        except Exception as e:
            self.logger.error(f"计算文件校验和失败 {file_path}: {e}")
            return ""

    def clear_cache(self):
        """Drop every cached listing (no-op when caching is disabled)."""
        if self.enable_cache:
            self.cache.clear()
            self.logger.info("缓存已清空")

    def get_cache_stats(self) -> Dict[str, Any]:
        """Return cache statistics, or an empty dict if caching is disabled."""
        if self.enable_cache:
            return self.cache.get_stats()
        return {}

    def get_stats(self) -> Dict[str, Any]:
        """Return combined operation, provider, and cache statistics."""
        with self._stats_lock:
            stats = self._stats.copy()

        # Merge in the underlying client's per-provider statistics.
        base_stats = self.client.get_provider_stats()
        stats.update(base_stats)

        if self.enable_cache:
            stats["cache"] = self.get_cache_stats()

        return stats

    def health_check(self) -> Dict[str, Any]:
        """Return the base client's health report plus enhanced-feature state."""
        base_health = self.client.health_check()

        enhanced_health = {
            "cache_enabled": self.enable_cache,
            "retry_config": {
                "max_attempts": self.retry_config.max_attempts,
                "base_delay": self.retry_config.base_delay
            }
        }

        base_health["enhanced_features"] = enhanced_health
        return base_health

    def shutdown(self):
        """Clear the cache and shut down the underlying client."""
        self.logger.info("正在关闭增强版云盘客户端...")

        if self.enable_cache:
            self.cache.clear()

        self.client.shutdown()
        self.logger.info("增强版云盘客户端已关闭")

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()