#!/usr/bin/env python3
# scripts/backup.py - 数据备份脚本
"""
数据备份脚本
"""

import argparse
import json
import math
import os
import shutil
import zipfile
from datetime import datetime
from pathlib import Path

def create_backup(backup_name=None, include_logs=True, include_config=True, include_data=True):
    """Create a backup of the app's config/data/logs and source files.

    Args:
        backup_name: Name for the backup directory; defaults to a
            timestamped name like ``backup_YYYYMMDD_HHMMSS``.
        include_logs: Copy the ``logs/`` directory when it exists.
        include_config: Copy the ``config/`` directory when it exists.
        include_data: Copy the ``data/`` directory when it exists.

    Returns:
        Path of the created backup directory (under ``backups/``).
    """
    if backup_name is None:
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_name = f"backup_{timestamp}"

    backup_dir = Path("backups") / backup_name
    backup_dir.mkdir(parents=True, exist_ok=True)

    print(f"🔄 开始创建备份: {backup_name}")

    backup_info = {
        "backup_name": backup_name,
        "created_at": datetime.now().isoformat(),
        "included_items": []
    }

    # Optional top-level directories, copied verbatim when present.
    # dirs_exist_ok=True lets the same backup name be re-used (previously
    # copytree raised FileExistsError on a second run with --name).
    optional_dirs = [
        ("config", include_config, "✅ 配置文件已备份"),
        ("data", include_data, "✅ 数据文件已备份"),
        ("logs", include_logs, "✅ 日志文件已备份"),
    ]
    for dir_name, enabled, done_message in optional_dirs:
        if enabled and Path(dir_name).exists():
            shutil.copytree(dir_name, backup_dir / dir_name, dirs_exist_ok=True)
            backup_info["included_items"].append(dir_name)
            print(done_message)

    # Application source: a few well-known files plus the package dirs.
    app_files = ["main.py", "requirements.txt", "README.md"]
    app_backup_dir = backup_dir / "app"
    app_backup_dir.mkdir(exist_ok=True)

    for file_name in app_files:
        if Path(file_name).exists():
            shutil.copy2(file_name, app_backup_dir / file_name)

    for pkg_dir in ("pages", "utils"):
        if Path(pkg_dir).exists():
            shutil.copytree(pkg_dir, app_backup_dir / pkg_dir, dirs_exist_ok=True)

    backup_info["included_items"].append("app")
    print("✅ 应用程序文件已备份")

    # Persist a manifest so list_backups()/restore can describe this backup.
    with open(backup_dir / "backup_info.json", 'w', encoding='utf-8') as f:
        json.dump(backup_info, f, ensure_ascii=False, indent=2)

    backup_size = get_directory_size(backup_dir)

    print(f"✅ 备份完成!")
    print(f"📁 备份位置: {backup_dir}")
    print(f"📊 备份大小: {format_size(backup_size)}")

    return backup_dir

def compress_backup(backup_dir, delete_original=True):
    """Zip-compress a backup directory into ``<backup_dir>.zip``.

    Args:
        backup_dir: Path of the backup directory to compress.
        delete_original: Remove the uncompressed directory afterwards.

    Returns:
        Path of the created zip archive.
    """
    source = Path(backup_dir)
    archive = source.with_suffix('.zip')

    print(f"🗜️ 正在压缩备份: {source.name}")

    # Entries keep the backup folder name as their top-level prefix
    # (paths are taken relative to the parent of the backup directory).
    with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as bundle:
        members = (p for p in source.rglob('*') if p.is_file())
        for member in members:
            bundle.write(member, member.relative_to(source.parent))

    if delete_original:
        shutil.rmtree(source)
        print(f"🗑️ 原始备份文件夹已删除")

    compressed_size = archive.stat().st_size
    print(f"✅ 压缩完成: {archive}")
    print(f"📊 压缩后大小: {format_size(compressed_size)}")

    return archive

def list_backups():
    """List all backups found under ``backups/``.

    Prints a human-readable summary of each directory backup (one with a
    readable ``backup_info.json`` manifest) and each ``.zip`` backup.
    A corrupt or incomplete manifest no longer aborts the whole listing;
    that entry is reported and skipped.

    Returns:
        A list of dicts describing each backup: name, type, path, size,
        created_at, and -- for directory backups -- the included items.
    """
    backups_dir = Path("backups")

    if not backups_dir.exists():
        print("📁 备份目录不存在")
        return []

    backups = []

    print("📋 现有备份列表:")
    print("-" * 60)

    for item in sorted(backups_dir.iterdir()):
        if item.is_dir():
            # Directory backup: described by its manifest. Directories
            # without a manifest are silently ignored (original behavior);
            # unreadable/corrupt manifests are skipped with a warning
            # instead of raising and killing the listing.
            info_file = item / "backup_info.json"
            if not info_file.exists():
                continue
            try:
                with open(info_file, 'r', encoding='utf-8') as f:
                    info = json.load(f)
                created_at = datetime.fromisoformat(info['created_at'])
                included_items = info['included_items']
            except (OSError, ValueError, KeyError) as e:
                # json.JSONDecodeError and fromisoformat errors are ValueError
                print(f"⚠️ 跳过损坏的备份 {item.name}: {e}")
                continue

            size = get_directory_size(item)

            print(f"📁 {item.name}")
            print(f"   📅 创建时间: {created_at.strftime('%Y-%m-%d %H:%M:%S')}")
            print(f"   📊 大小: {format_size(size)}")
            print(f"   📦 包含: {', '.join(included_items)}")
            print()

            backups.append({
                'name': item.name,
                'type': 'directory',
                'path': str(item),
                'size': size,
                'created_at': created_at,
                'items': included_items
            })

        elif item.suffix == '.zip':
            # Zip backup: no manifest, so size/time come from the file.
            # NOTE(review): st_ctime is metadata-change time on Unix and
            # creation time only on Windows -- confirm which is wanted.
            size = item.stat().st_size
            created_at = datetime.fromtimestamp(item.stat().st_ctime)

            print(f"🗜️ {item.name}")
            print(f"   📅 创建时间: {created_at.strftime('%Y-%m-%d %H:%M:%S')}")
            print(f"   📊 大小: {format_size(size)}")
            print()

            backups.append({
                'name': item.name,
                'type': 'zip',
                'path': str(item),
                'size': size,
                'created_at': created_at
            })

    if not backups:
        print("📭 没有找到备份文件")

    return backups

def restore_backup(backup_path, target_dir="."):
    """Restore a backup (directory or ``.zip``) into *target_dir*.

    Args:
        backup_path: Path to a backup directory or zip archive.
        target_dir: Destination directory; defaults to the current one.

    Returns:
        True on success, False when the backup is missing or its format
        is not recognized.
    """
    source = Path(backup_path)
    destination = Path(target_dir)

    # Guard clause: nothing to restore.
    if not source.exists():
        print(f"❌ 备份文件不存在: {source}")
        return False

    print(f"🔄 正在恢复备份: {source.name}")

    if source.is_dir():
        restore_directory_backup(source, destination)
    elif source.suffix == '.zip':
        restore_zip_backup(source, destination)
    else:
        print(f"❌ 不支持的备份格式")
        return False

    print("✅ 备份恢复完成!")
    return True

def restore_directory_backup(backup_path, target_dir):
    """Copy the contents of a directory backup into *target_dir*.

    The ``backup_info.json`` manifest is not restored.  Files and
    directories that already exist at the destination are overwritten
    (with a warning printed first).
    """
    for entry in backup_path.iterdir():
        # The manifest describes the backup itself; it is not app data.
        if entry.name == "backup_info.json":
            continue

        destination = target_dir / entry.name
        already_there = destination.exists()

        if entry.is_dir():
            if already_there:
                print(f"⚠️ 目录已存在，将被覆盖: {destination}")
                shutil.rmtree(destination)
            shutil.copytree(entry, destination)
        else:
            if already_there:
                print(f"⚠️ 文件已存在，将被覆盖: {destination}")
            shutil.copy2(entry, destination)

        print(f"✅ 已恢复: {entry.name}")

def restore_zip_backup(backup_path, target_dir):
    """Extract a zip backup into *target_dir*.

    compress_backup() stores every entry under a top-level
    ``<backup_name>/`` folder; that prefix is stripped here so a zip
    restore produces the same layout as a directory restore (previously
    everything landed in ``target_dir/<backup_name>/`` instead of
    ``target_dir``).  The ``backup_info.json`` manifest is skipped, and
    entries containing ``..`` components (zip-slip) are ignored.
    """
    backup_path = Path(backup_path)
    target_dir = Path(target_dir)
    # The archive is named <backup_name>.zip, so stem is the prefix.
    prefix = backup_path.stem + "/"

    with zipfile.ZipFile(backup_path, 'r') as zipf:
        for member in zipf.infolist():
            name = member.filename
            # Strip the archive's own top-level folder when present;
            # prefix-less archives extract unchanged.
            if name.startswith(prefix):
                name = name[len(prefix):]
            if not name or name == "backup_info.json":
                continue
            # Refuse entries that would escape target_dir.
            if ".." in Path(name).parts:
                continue

            dest = target_dir / name
            if member.is_dir():
                dest.mkdir(parents=True, exist_ok=True)
            else:
                dest.parent.mkdir(parents=True, exist_ok=True)
                with zipf.open(member) as src, open(dest, 'wb') as out:
                    shutil.copyfileobj(src, out)

    print(f"✅ 已解压到: {target_dir}")

def cleanup_old_backups(keep_count=10):
    """Delete the oldest backups, keeping the *keep_count* most recent."""
    backups = list_backups()

    if len(backups) <= keep_count:
        print(f"📋 备份数量 ({len(backups)}) 未超过保留数量 ({keep_count})")
        return

    # Newest first; everything past the first keep_count entries goes.
    ordered = sorted(backups, key=lambda b: b['created_at'], reverse=True)
    stale = ordered[keep_count:]

    print(f"🗑️ 准备删除 {len(stale)} 个旧备份:")

    for backup in stale:
        victim = Path(backup['path'])
        if not victim.exists():
            continue
        if victim.is_dir():
            shutil.rmtree(victim)
        else:
            victim.unlink()
        print(f"   ❌ 已删除: {backup['name']}")
def get_directory_size(directory):
    """Return the total size in bytes of all files under *directory*."""
    return sum(
        entry.stat().st_size
        for entry in Path(directory).rglob('*')
        if entry.is_file()
    )

def format_size(size_bytes):
    """Format a byte count as a human-readable string, e.g. ``1.5 KB``.

    Args:
        size_bytes: Size in bytes.  Non-positive values render as "0B"
            (previously a negative value raised a math domain error).

    Returns:
        A string such as "1.0 KB" or "2.37 GB", capped at TB: sizes of
        a petabyte or more are expressed in TB instead of raising
        IndexError as the original did.
    """
    if size_bytes <= 0:
        # Also guards math.log below, which rejects non-positive input.
        return "0B"

    size_names = ["B", "KB", "MB", "GB", "TB"]
    # Clamp the exponent so sizes >= 1024**5 don't index past the table.
    exponent = min(int(math.floor(math.log(size_bytes, 1024))), len(size_names) - 1)
    scaled = round(size_bytes / math.pow(1024, exponent), 2)
    return f"{scaled} {size_names[exponent]}"

def main():
    """CLI entry point: parse arguments and dispatch to the backup actions."""
    parser = argparse.ArgumentParser(description='Streamlit应用数据备份工具')
    parser.add_argument('action', choices=['create', 'list', 'restore', 'cleanup'],
                        help='操作类型')
    parser.add_argument('--name', '-n', help='备份名称')
    parser.add_argument('--compress', '-c', action='store_true', help='压缩备份')
    parser.add_argument('--path', '-p', help='备份路径(用于恢复)')
    parser.add_argument('--keep', '-k', type=int, default=10, help='保留备份数量')
    parser.add_argument('--no-logs', action='store_true', help='不备份日志')
    parser.add_argument('--no-config', action='store_true', help='不备份配置')
    parser.add_argument('--no-data', action='store_true', help='不备份数据')

    args = parser.parse_args()

    # Flat guard-style dispatch: each action handles itself and returns.
    if args.action == 'create':
        created = create_backup(
            backup_name=args.name,
            include_logs=not args.no_logs,
            include_config=not args.no_config,
            include_data=not args.no_data,
        )
        if args.compress:
            compress_backup(created)
        return

    if args.action == 'list':
        list_backups()
        return

    if args.action == 'restore':
        if not args.path:
            print("❌ 请指定要恢复的备份路径 (--path)")
            return
        restore_backup(args.path)
        return

    if args.action == 'cleanup':
        cleanup_old_backups(args.keep)

if __name__ == "__main__":
    main()