import io
import json
import logging
import math
import os
import uuid
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional

from fastapi import HTTPException
from sqlalchemy import func, and_, or_
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session

from app.core.config import settings
from app.database.models import BackupTask, BackupFile, File, Directory
from app.utils.minio_client import minio_client

# 配置日志记录器
logger = logging.getLogger(__name__)

def create_backup_task(
    db: Session, 
    user_id: int, 
    task_name: str, 
    source_directory_ids: Optional[List[int]] = None, 
    schedule_type: str = "manual",
    is_incremental: bool = True
) -> BackupTask:
    """Create a new backup task and prepare its MinIO storage prefix.

    Args:
        db: Database session.
        user_id: Owner user ID.
        task_name: Human-readable task name.
        source_directory_ids: IDs of directories to back up; when empty or
            None, all of the user's files are in scope.
        schedule_type: Schedule type ("manual", "daily", "weekly", "monthly").
        is_incremental: Whether the task performs incremental backups.

    Returns:
        BackupTask: The newly created backup task record.

    Raises:
        HTTPException: 500 when the database write or MinIO setup fails.
    """
    try:
        # Derive a unique storage prefix for this task's backup objects.
        task_uuid = str(uuid.uuid4())
        backup_path = f"backup/{user_id}/{task_uuid}"
        
        # Keep only directories that exist, belong to this user, and are live.
        if source_directory_ids:
            valid_directories = db.query(Directory).filter(
                and_(
                    Directory.id.in_(source_directory_ids),
                    Directory.user_id == user_id,
                    Directory.is_deleted == False
                )
            ).all()
            
            valid_directory_ids = [directory.id for directory in valid_directories]
            
            # Log (but do not fail on) IDs the user cannot back up.
            if len(valid_directory_ids) != len(source_directory_ids):
                invalid_ids = set(source_directory_ids) - set(valid_directory_ids)
                logger.warning(f"用户 {user_id} 尝试备份不存在或无权限的目录: {invalid_ids}")
                source_directory_ids = valid_directory_ids
        
        # Persist the task record.
        backup_task = BackupTask(
            user_id=user_id,
            task_name=task_name,
            backup_path=backup_path,
            status="pending",
            schedule_type=schedule_type,
            is_incremental=is_incremental,
            source_directory_ids=json.dumps(source_directory_ids) if source_directory_ids else "[]"
        )
        
        db.add(backup_task)
        db.commit()
        db.refresh(backup_task)
        
        # Ensure the MinIO bucket and the task's prefix exist.
        try:
            if not minio_client.bucket_exists(settings.MINIO_BUCKET_NAME):
                minio_client.make_bucket(settings.MINIO_BUCKET_NAME)
                
            # Write a marker object so the task prefix is visible.
            # Bug fix: the declared length must match the payload size —
            # the previous hard-coded 18 understated the 20-byte payload.
            marker_payload = b"backup task metadata"
            minio_client.put_object(
                bucket_name=settings.MINIO_BUCKET_NAME,
                object_name=f"{backup_path}/.metadata",
                data=io.BytesIO(marker_payload),
                length=len(marker_payload)
            )
            
        except Exception as e:
            # Storage setup failed: mark the task failed and surface the error.
            logger.error(f"创建备份存储路径失败: {str(e)}")
            db.query(BackupTask).filter(BackupTask.id == backup_task.id).update({"status": "failed"})
            db.commit()
            raise HTTPException(status_code=500, detail=f"创建备份存储失败: {str(e)}")
        
        return backup_task
        
    except SQLAlchemyError as e:
        db.rollback()
        logger.error(f"创建备份任务数据库错误: {str(e)}")
        raise HTTPException(status_code=500, detail=f"创建备份任务失败: {str(e)}")

def get_backup_task(db: Session, task_id: int, user_id: int) -> BackupTask:
    """Fetch a backup task by ID, enforcing ownership.

    Args:
        db: Database session.
        task_id: Backup task ID.
        user_id: User ID used for the permission check.

    Returns:
        BackupTask: The matching task.

    Raises:
        HTTPException: 404 when the task does not exist, 403 when it
            belongs to a different user.
    """
    found = db.query(BackupTask).filter(BackupTask.id == task_id).first()

    if found is None:
        raise HTTPException(status_code=404, detail="备份任务不存在")

    if found.user_id != user_id:
        raise HTTPException(status_code=403, detail="无权访问该备份任务")

    return found

def update_backup_task(
    db: Session, 
    task_id: int, 
    user_id: int, 
    task_name: Optional[str] = None, 
    source_directory_ids: Optional[List[int]] = None, 
    schedule_type: Optional[str] = None,
    is_incremental: Optional[bool] = None
) -> BackupTask:
    """Update the mutable fields of a backup task.

    Only arguments that are not None are applied. Directory IDs are
    validated against the user's own, non-deleted directories first.

    Args:
        db: Database session.
        task_id: Backup task ID.
        user_id: User ID used for the permission check.
        task_name: New task name, if any.
        source_directory_ids: New list of source directory IDs, if any.
        schedule_type: New schedule type, if any.
        is_incremental: New incremental-backup flag, if any.

    Returns:
        BackupTask: The refreshed task record.

    Raises:
        HTTPException: 404/403 from the ownership check, 500 on DB failure.
    """
    task = get_backup_task(db, task_id, user_id)

    # Collect the scalar fields that were actually provided.
    changes = {
        field: value
        for field, value in (
            ("task_name", task_name),
            ("schedule_type", schedule_type),
            ("is_incremental", is_incremental),
        )
        if value is not None
    }

    if source_directory_ids is not None:
        # Restrict to directories that exist, belong to the user, and are live.
        owned = db.query(Directory).filter(
            and_(
                Directory.id.in_(source_directory_ids),
                Directory.user_id == user_id,
                Directory.is_deleted == False
            )
        ).all()
        owned_ids = [directory.id for directory in owned]

        # Warn about IDs that were silently dropped by the validation above.
        if len(owned_ids) != len(source_directory_ids):
            invalid_ids = set(source_directory_ids) - set(owned_ids)
            logger.warning(f"用户 {user_id} 尝试更新备份任务，包含不存在或无权限的目录: {invalid_ids}")

        changes["source_directory_ids"] = json.dumps(owned_ids)

    if not changes:
        return task

    try:
        db.query(BackupTask).filter(BackupTask.id == task_id).update(changes)
        db.commit()
        db.refresh(task)
    except SQLAlchemyError as e:
        db.rollback()
        logger.error(f"更新备份任务数据库错误: {str(e)}")
        raise HTTPException(status_code=500, detail=f"更新备份任务失败: {str(e)}")

    return task

def delete_backup_task(db: Session, task_id: int, user_id: int, delete_backup_files: bool = False) -> Dict[str, Any]:
    """Delete a backup task and its file records, optionally purging storage.

    Args:
        db: Database session.
        task_id: Backup task ID.
        user_id: User ID used for the permission check.
        delete_backup_files: Also remove the task's objects from MinIO.

    Returns:
        Dict: Deletion summary (task_id, flags, record count).

    Raises:
        HTTPException: 404/403 from the ownership check, 500 on DB failure.
    """
    task = get_backup_task(db, task_id, user_id)

    try:
        # Gather this task's file records before removing anything.
        file_records = db.query(BackupFile).filter(BackupFile.backup_task_id == task_id).all()

        # Best-effort removal of the backed-up objects in MinIO.
        if delete_backup_files and task.backup_path:
            try:
                stored_objects = minio_client.list_objects(
                    bucket_name=settings.MINIO_BUCKET_NAME,
                    prefix=task.backup_path,
                    recursive=True
                )
                for stored in stored_objects:
                    minio_client.remove_object(
                        bucket_name=settings.MINIO_BUCKET_NAME,
                        object_name=stored.object_name
                    )
            except Exception as e:
                # Storage cleanup failure does not block the DB deletion.
                logger.error(f"删除备份文件存储错误: {str(e)}")

        # Remove the file records, then the task itself, in one transaction.
        for record in file_records:
            db.delete(record)
        db.delete(task)
        db.commit()

        return {
            "task_id": task_id,
            "deleted": True,
            "backup_files_deleted": delete_backup_files,
            "deleted_file_records": len(file_records)
        }

    except SQLAlchemyError as e:
        db.rollback()
        logger.error(f"删除备份任务数据库错误: {str(e)}")
        raise HTTPException(status_code=500, detail=f"删除备份任务失败: {str(e)}")

def list_backup_tasks(
    db: Session, 
    user_id: int, 
    page: int = 1, 
    page_size: int = 20,
    schedule_type: Optional[str] = None,
    status: Optional[str] = None
) -> Dict[str, Any]:
    """List a user's backup tasks with pagination and optional filters.

    Args:
        db: Database session.
        user_id: User ID.
        page: 1-based page number.
        page_size: Items per page (clamped to 1..100).
        schedule_type: Optional schedule-type filter.
        status: Optional status filter.

    Returns:
        Dict: Paginated result (items, total, page, page_size, total_pages).
            Each task gains a transient `source_directories` attribute with
            id/name/path of its source directories.
    """
    # Base query restricted to the user's tasks.
    query = db.query(BackupTask).filter(BackupTask.user_id == user_id)

    if schedule_type:
        query = query.filter(BackupTask.schedule_type == schedule_type)

    if status:
        query = query.filter(BackupTask.status == status)

    total = query.count()

    # Bug fix: clamp page_size BEFORE deriving total_pages. Previously an
    # out-of-range page_size was used for total_pages but then clamped for
    # the query, making the pagination metadata inconsistent.
    page_size = max(min(page_size, 100), 1)
    total_pages = math.ceil(total / page_size) if total > 0 else 1
    page = max(min(page, total_pages), 1)

    # Fetch the requested page, newest tasks first.
    tasks = query.order_by(BackupTask.created_at.desc()) \
                 .offset((page - 1) * page_size) \
                 .limit(page_size) \
                 .all()

    # Attach source-directory details to each task for the response.
    for task in tasks:
        try:
            source_directory_ids = json.loads(task.source_directory_ids) if task.source_directory_ids else []

            if source_directory_ids:
                directories = db.query(Directory).filter(
                    Directory.id.in_(source_directory_ids)
                ).all()

                task.source_directories = [
                    {
                        "id": directory.id,
                        "name": directory.name,
                        "path": directory.path
                    }
                    for directory in directories
                ]
            else:
                task.source_directories = []

        except Exception as e:
            # A malformed source_directory_ids value must not break listing.
            logger.error(f"解析备份任务源目录信息错误: {str(e)}")
            task.source_directories = []

    return {
        "items": tasks,
        "total": total,
        "page": page,
        "page_size": page_size,
        "total_pages": total_pages
    }

def execute_backup(db: Session, task_id: int, user_id: int, force_full_backup: bool = False) -> Dict[str, Any]:
    """Run a backup task, copying the selected files into its backup prefix.

    Args:
        db: Database session.
        task_id: Backup task ID.
        user_id: User ID used for the permission check.
        force_full_backup: When True, perform a full backup even if the
            task is configured as incremental.

    Returns:
        Dict: Run summary (counts, backup type, per-file failures if any).

    Raises:
        HTTPException: 400 when the task is already running, 404/403 from
            the ownership check, 500 on execution failure.
    """
    task = get_backup_task(db, task_id, user_id)

    # Refuse to start a second concurrent run of the same task.
    if task.status == "running":
        raise HTTPException(status_code=400, detail="备份任务正在执行中")

    # Mark the task as running before doing any work.
    try:
        db.query(BackupTask).filter(BackupTask.id == task_id).update({"status": "running"})
        db.commit()
        db.refresh(task)
    except SQLAlchemyError as e:
        db.rollback()
        logger.error(f"更新备份任务状态错误: {str(e)}")
        raise HTTPException(status_code=500, detail=f"更新备份任务状态失败: {str(e)}")

    try:
        # Incremental mode requires: configured incremental, not forced full,
        # and a previous run to diff against.
        is_incremental = task.is_incremental and not force_full_backup and task.last_backup_time is not None

        # Base query: the user's live (non-deleted) files.
        query = db.query(File).filter(File.user_id == user_id, File.is_deleted == False)

        # Restrict to the task's source directories when any are configured.
        source_directory_ids = json.loads(task.source_directory_ids) if task.source_directory_ids else []
        if source_directory_ids:
            query = query.filter(File.directory_id.in_(source_directory_ids))

        # Incremental: only files uploaded since the last successful run.
        # (is_incremental already implies last_backup_time is not None, so
        # the previous redundant re-check was dropped.)
        if is_incremental:
            query = query.filter(File.upload_time > task.last_backup_time)

        files_to_backup = query.all()

        # Nothing to do: record a successful (empty) run and return early.
        if not files_to_backup:
            db.query(BackupTask).filter(BackupTask.id == task_id).update({
                "status": "completed",
                "last_backup_time": datetime.now()
            })
            db.commit()

            return {
                "task_id": task_id,
                "status": "completed",
                "files_processed": 0,
                "backup_type": "incremental" if is_incremental else "full",
                "message": "没有新文件需要备份"
            }

        # Per-run statistics.
        backup_results = {
            "success": [],
            "failed": [],
            "total_size": 0
        }

        # Copy each file into the task's backup prefix; failures are
        # collected per file instead of aborting the whole run.
        for file in files_to_backup:
            try:
                source_object = file.file_path
                target_object = f"{task.backup_path}/{file.id}_{os.path.basename(file.file_path)}"

                # Skip files whose object is missing from storage.
                try:
                    minio_client.stat_object(settings.MINIO_BUCKET_NAME, source_object)
                except Exception as e:
                    logger.error(f"源文件不存在: {source_object}, 错误: {str(e)}")
                    backup_results["failed"].append({
                        "file_id": file.id,
                        "file_name": file.file_name,
                        "error": f"源文件不存在: {str(e)}"
                    })
                    continue

                # Server-side copy within the same bucket.
                minio_client.copy_object(
                    bucket_name=settings.MINIO_BUCKET_NAME,
                    object_name=target_object,
                    source_bucket_name=settings.MINIO_BUCKET_NAME,
                    source_object_name=source_object
                )

                # Record the backed-up file.
                backup_file = BackupFile(
                    backup_task_id=task.id,
                    file_id=file.id,
                    backup_path=target_object,
                    file_size=file.file_size,
                    backup_type="incremental" if is_incremental else "full",
                    status="completed"
                )
                db.add(backup_file)

                backup_results["success"].append({
                    "file_id": file.id,
                    "file_name": file.file_name,
                    "backup_path": target_object
                })
                backup_results["total_size"] += file.file_size

            except Exception as e:
                logger.error(f"备份文件 {file.id} 错误: {str(e)}")
                backup_results["failed"].append({
                    "file_id": file.id,
                    "file_name": file.file_name,
                    "error": str(e)
                })

        # Persist all BackupFile records at once.
        db.commit()

        # Refresh the task's aggregate statistics from the committed records.
        db.query(BackupTask).filter(BackupTask.id == task_id).update({
            "status": "completed",
            "last_backup_time": datetime.now(),
            "total_file_count": db.query(BackupFile).filter(BackupFile.backup_task_id == task_id).count(),
            "total_file_size": db.query(func.sum(BackupFile.file_size)) \
                               .filter(BackupFile.backup_task_id == task_id) \
                               .scalar() or 0
        })
        db.commit()

        return {
            "task_id": task_id,
            "status": "completed",
            "backup_type": "incremental" if is_incremental else "full",
            "files_processed": len(files_to_backup),
            "success_count": len(backup_results["success"]),
            "failed_count": len(backup_results["failed"]),
            "total_backup_size": backup_results["total_size"],
            "failed_files": backup_results["failed"] if backup_results["failed"] else None
        }

    except Exception as e:
        logger.error(f"执行备份任务 {task_id} 错误: {str(e)}")
        # Bug fix: roll back before the status update — after a
        # SQLAlchemyError the session is in an aborted state and the
        # update/commit below would itself fail, masking the real error.
        db.rollback()
        db.query(BackupTask).filter(BackupTask.id == task_id).update({"status": "failed"})
        db.commit()

        raise HTTPException(status_code=500, detail=f"执行备份任务失败: {str(e)}")

def list_backup_files(
    db: Session, 
    task_id: int, 
    user_id: int, 
    page: int = 1, 
    page_size: int = 20,
    backup_type: Optional[str] = None,
    status: Optional[str] = None
) -> Dict[str, Any]:
    """List a backup task's file records with pagination and filters.

    Args:
        db: Database session.
        task_id: Backup task ID.
        user_id: User ID used for the permission check.
        page: 1-based page number.
        page_size: Items per page (clamped to 1..100).
        backup_type: Optional backup-type filter ("incremental"/"full").
        status: Optional status filter.

    Returns:
        Dict: Paginated result (items, total, page, page_size, total_pages).
            Each record gains a transient `file_info` attribute describing
            the original file, or a marker when it no longer exists.

    Raises:
        HTTPException: 404/403 from the task ownership check.
    """
    # Ownership check (raises if the task is missing or not the user's).
    task = get_backup_task(db, task_id, user_id)

    query = db.query(BackupFile).filter(BackupFile.backup_task_id == task_id)

    if backup_type:
        query = query.filter(BackupFile.backup_type == backup_type)

    if status:
        query = query.filter(BackupFile.status == status)

    total = query.count()

    # Bug fix: clamp page_size BEFORE deriving total_pages so the metadata
    # matches the page size actually used for the query below.
    page_size = max(min(page_size, 100), 1)
    total_pages = math.ceil(total / page_size) if total > 0 else 1
    page = max(min(page, total_pages), 1)

    # Fetch the requested page, most recent backups first.
    backup_files = query.order_by(BackupFile.backup_time.desc()) \
                       .offset((page - 1) * page_size) \
                       .limit(page_size) \
                       .all()

    # Look up the original files in one query (skip when the page is empty
    # to avoid emitting an IN () clause).
    file_ids = [bf.file_id for bf in backup_files]
    files = db.query(File).filter(File.id.in_(file_ids)).all() if file_ids else []
    file_map = {f.id: f for f in files}

    # Attach original-file details to each backup record.
    for bf in backup_files:
        if bf.file_id in file_map:
            file = file_map[bf.file_id]
            bf.file_info = {
                "id": file.id,
                "file_name": file.file_name,
                "file_size": file.file_size,
                "file_type": file.file_type,
                "upload_time": file.upload_time,
                "exists": True
            }
        else:
            bf.file_info = {
                "id": bf.file_id,
                "exists": False,
                "message": "原始文件已不存在"
            }

    return {
        "items": backup_files,
        "total": total,
        "page": page,
        "page_size": page_size,
        "total_pages": total_pages
    }

def get_backup_file_url(db: Session, backup_file_id: int, user_id: int) -> Dict[str, Any]:
    """Return a temporary presigned download URL for a backup file.

    Args:
        db: Database session.
        backup_file_id: ID of the BackupFile record.
        user_id: User ID used for the permission check.

    Returns:
        Dict: The presigned URL, its lifetime in seconds, and file info.

    Raises:
        HTTPException: 404 when the record or its task is missing, 403 on
            ownership mismatch, 500 when URL generation fails.
    """
    backup_file = db.query(BackupFile).filter(BackupFile.id == backup_file_id).first()

    if not backup_file:
        raise HTTPException(status_code=404, detail="备份文件记录不存在")

    # Ownership is checked through the parent backup task.
    task = db.query(BackupTask).filter(BackupTask.id == backup_file.backup_task_id).first()

    if not task:
        raise HTTPException(status_code=404, detail="备份任务不存在")

    if task.user_id != user_id:
        raise HTTPException(status_code=403, detail="无权访问该备份文件")

    try:
        # Bug fix: `timedelta` was used here without ever being imported,
        # so every call raised NameError; it is now imported at module level.
        url = minio_client.presigned_get_object(
            bucket_name=settings.MINIO_BUCKET_NAME,
            object_name=backup_file.backup_path,
            expires=timedelta(hours=1)
        )

        return {
            "url": url,
            "expires": 3600,  # lifetime in seconds (1 hour, matching the presign above)
            "file_info": {
                "id": backup_file.id,
                "backup_path": backup_file.backup_path,
                "file_size": backup_file.file_size,
                "backup_time": backup_file.backup_time
            }
        }

    except Exception as e:
        logger.error(f"获取备份文件URL错误: {str(e)}")
        raise HTTPException(status_code=500, detail=f"获取备份文件访问URL失败: {str(e)}")

def restore_from_backup(
    db: Session, 
    backup_file_id: int, 
    user_id: int, 
    overwrite_existing: bool = False
) -> Dict[str, Any]:
    """Restore a single file from its backup copy.

    The backup is restored as a brand-new file when the original no longer
    exists or when overwriting was not requested; otherwise the original
    object is overwritten in place.

    Args:
        db: Database session.
        backup_file_id: ID of the BackupFile record to restore.
        user_id: User ID used for the permission check.
        overwrite_existing: Overwrite the original file instead of
            creating a new one, when the original still exists.

    Returns:
        Dict: Restore outcome including the affected file's ID/name/path.

    Raises:
        HTTPException: 404 when the record or its task is missing, 403 on
            ownership mismatch, 500 when the restore fails.
    """
    record = db.query(BackupFile).filter(BackupFile.id == backup_file_id).first()

    if not record:
        raise HTTPException(status_code=404, detail="备份文件记录不存在")

    # Ownership is checked through the parent backup task.
    parent_task = db.query(BackupTask).filter(BackupTask.id == record.backup_task_id).first()

    if not parent_task:
        raise HTTPException(status_code=404, detail="备份任务不存在")

    if parent_task.user_id != user_id:
        raise HTTPException(status_code=403, detail="无权访问该备份文件")

    original_file = db.query(File).filter(File.id == record.file_id).first()

    # Restore into a new file unless the original still exists AND the
    # caller explicitly asked to overwrite it.
    restore_as_new = original_file is None or not overwrite_existing

    try:
        if restore_as_new:
            # Recover the original file name from the backup object name,
            # stripping the "<id>_" prefix added when the backup was made.
            restored_name = os.path.basename(record.backup_path)
            if "_" in restored_name:
                restored_name = restored_name.split("_", 1)[1]

            # Build a fresh storage path for the restored copy.
            from app.services.file_service import build_storage_path
            destination_path = build_storage_path(
                user_id=user_id,
                file_type="document",  # default type; could be inferred from the extension
                create_time=datetime.now(),
                original_filename=restored_name,
                file_hash=str(uuid.uuid4()),  # fresh unique identifier
                directory_id=0  # place in the root directory
            )

            # Server-side copy of the backup object to the new location.
            minio_client.copy_object(
                bucket_name=settings.MINIO_BUCKET_NAME,
                object_name=destination_path,
                source_bucket_name=settings.MINIO_BUCKET_NAME,
                source_object_name=record.backup_path
            )

            # Create the new file record pointing at the restored object.
            restored_file = File(
                user_id=user_id,
                file_name=restored_name,
                file_path=destination_path,
                file_type="document",  # default type
                file_size=record.file_size,
                hash=str(uuid.uuid4()),
                extension=os.path.splitext(restored_name)[1][1:] if "." in restored_name else "",
                mime_type="application/octet-stream",  # default MIME type
                directory_id=0  # root directory
            )

            db.add(restored_file)
            db.commit()
            db.refresh(restored_file)

            return {
                "success": True,
                "message": "文件已从备份恢复为新文件",
                "is_new_file": True,
                "file_id": restored_file.id,
                "file_name": restored_file.file_name,
                "file_path": restored_file.file_path
            }

        # Overwrite the original object in place with the backup copy.
        minio_client.copy_object(
            bucket_name=settings.MINIO_BUCKET_NAME,
            object_name=original_file.file_path,
            source_bucket_name=settings.MINIO_BUCKET_NAME,
            source_object_name=record.backup_path
        )

        # Bump the file's timestamp to reflect the restore.
        db.query(File).filter(File.id == original_file.id).update({
            "upload_time": datetime.now()
        })
        db.commit()

        return {
            "success": True,
            "message": "文件已从备份恢复（覆盖原文件）",
            "is_new_file": False,
            "file_id": original_file.id,
            "file_name": original_file.file_name,
            "file_path": original_file.file_path
        }

    except Exception as e:
        db.rollback()
        logger.error(f"从备份恢复文件错误: {str(e)}")
        raise HTTPException(status_code=500, detail=f"从备份恢复文件失败: {str(e)}")

def batch_restore_from_backup(
    db: Session, 
    task_id: int, 
    user_id: int, 
    backup_file_ids: List[int] = None,
    overwrite_existing: bool = False
) -> Dict[str, Any]:
    """Restore multiple files from a backup task in one call.

    Args:
        db: Database session.
        task_id: Backup task ID.
        user_id: User ID used for the permission check.
        backup_file_ids: Specific BackupFile IDs to restore; when empty or
            None, every file recorded for the task is restored.
        overwrite_existing: Whether to overwrite originals that still exist.

    Returns:
        Dict: Per-file restore outcomes plus aggregate counts.

    Raises:
        HTTPException: 404/403 from the task ownership check.
    """
    # Ownership check (raises if the task is missing or not the user's).
    get_backup_task(db, task_id, user_id)

    # Select the backup records to restore.
    selection = db.query(BackupFile).filter(BackupFile.backup_task_id == task_id)

    if backup_file_ids:
        selection = selection.filter(BackupFile.id.in_(backup_file_ids))

    targets = selection.all()

    if not targets:
        return {
            "success": True,
            "message": "没有找到需要恢复的备份文件",
            "restored_count": 0,
            "failed_count": 0
        }

    restored = []
    failures = []

    # Restore each file independently; one failure does not stop the rest.
    for target in targets:
        try:
            outcome = restore_from_backup(
                db=db,
                backup_file_id=target.id,
                user_id=user_id,
                overwrite_existing=overwrite_existing
            )

            restored.append({
                "backup_file_id": target.id,
                "original_file_id": target.file_id,
                "new_file_id": outcome.get("file_id"),
                "file_name": outcome.get("file_name"),
                "is_new_file": outcome.get("is_new_file", True)
            })

        except Exception as e:
            logger.error(f"恢复备份文件 {target.id} 错误: {str(e)}")
            failures.append({
                "backup_file_id": target.id,
                "original_file_id": target.file_id,
                "error": str(e)
            })

    return {
        "success": True,
        "message": f"批量恢复完成: {len(restored)} 成功, {len(failures)} 失败",
        "restored_count": len(restored),
        "failed_count": len(failures),
        "restored_files": restored,
        "failed_files": failures
    }