#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
项目备份管理器
提供完整项目备份和恢复功能
"""

import fnmatch
import hashlib
import json
import os
import shutil
import tarfile
import zipfile
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Tuple

from ..builtin_config_module.builtin_config import BuiltInConfig
from ..utils_module.logger import ObfuscationLogger

class ProjectBackupManager:
    """
    Project backup manager.

    Creates and restores backups of complete projects (.tar.gz archives
    with a JSON metadata sidecar), single files, and incremental
    archives, all rooted under ``config.backup_dir``.
    """

    def __init__(self, config: "BuiltInConfig", logger: "ObfuscationLogger"):
        # config supplies backup_dir and dry_run; logger records each step.
        self.config = config
        self.logger = logger
        self.backup_info = {}

    def backup_single_file(self, file_path: str) -> Optional[str]:
        """
        Back up a single file into <backup_dir>/single_files.

        :param file_path: path of the file to back up
        :return: path of the backup copy, or None if the source is missing
        """
        if not os.path.exists(file_path):
            return None

        backup_dir = os.path.join(self.config.backup_dir, "single_files")
        os.makedirs(backup_dir, exist_ok=True)

        filename = os.path.basename(file_path)
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        # BUG FIX: the original formatted the literal "(unknown)" instead of
        # the source filename, so every backup made within the same second
        # collided on one name and overwrote the previous copy.
        backup_filename = f"{timestamp}_{filename}"
        backup_path = os.path.join(backup_dir, backup_filename)

        shutil.copy2(file_path, backup_path)
        return backup_path

    def restore_single_file(self, backup_path: str, target_path: str) -> bool:
        """
        Restore a single file from a backup copy.

        :param backup_path: path of the backup file
        :param target_path: destination path to restore to
        :return: True on success, False if the backup is missing or the
                 copy failed (best-effort; never raises)
        """
        try:
            if os.path.exists(backup_path):
                shutil.copy2(backup_path, target_path)
                return True
        except OSError:
            # Narrowed from a bare `except Exception`: only filesystem
            # errors are expected here, and they map to the False result.
            pass
        return False

    def create_full_backup(self, project_path: str, backup_name: Optional[str] = None) -> str:
        """
        Create a full project backup as a .tar.gz archive plus a JSON
        info sidecar, excluding VCS and build artifacts.

        :param project_path: root directory of the project to back up
        :param backup_name: archive base name; auto-generated when omitted
        :return: path of the created archive; in dry-run mode the
                 extension-less base path is returned (kept for
                 compatibility with the original behavior)
        :raises FileNotFoundError: if project_path does not exist
        """
        if not os.path.exists(project_path):
            raise FileNotFoundError(f"项目路径不存在: {project_path}")

        # Derive a default name from the project directory and a timestamp.
        if not backup_name:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            project_name = Path(project_path).name
            backup_name = f"{project_name}_backup_{timestamp}"

        backup_base_dir = os.path.join(self.config.backup_dir, "full_backups")
        os.makedirs(backup_base_dir, exist_ok=True)
        backup_path = os.path.join(backup_base_dir, backup_name)

        self.logger.log_operation("开始完整备份", f"项目: {project_path}")

        try:
            total_size = self._calculate_directory_size(project_path)
            self.logger.log_operation("项目大小", f"{self._format_size(total_size)}")

            # Files to archive, with excluded directories/files filtered out.
            files_to_backup = self._get_files_to_backup(project_path)

            # Metadata consumed by restore/list/incremental operations.
            backup_info = {
                'source_path': project_path,
                'backup_path': backup_path,
                'timestamp': datetime.now().isoformat(),
                'file_count': len(files_to_backup),
                'total_size': total_size,
                'project_structure': self._analyze_project_structure(project_path)
            }

            if self.config.dry_run:
                self.logger.log_operation("模拟备份", f"将备份 {len(files_to_backup)} 个文件")
            else:
                # Compress everything into one tar.gz, storing paths
                # relative to the project root.
                archive_path = f"{backup_path}.tar.gz"
                with tarfile.open(archive_path, 'w:gz') as tar:
                    for file_path in files_to_backup:
                        rel_path = os.path.relpath(file_path, project_path)
                        tar.add(file_path, arcname=rel_path)

                info_file = f"{backup_path}_info.json"
                with open(info_file, 'w', encoding='utf-8') as f:
                    json.dump(backup_info, f, indent=2, ensure_ascii=False)

                self.logger.log_operation("备份完成", f"备份文件: {archive_path}")
                return archive_path

        except Exception as e:
            self.logger.log_error(f"创建备份失败: {str(e)}")
            raise

        # Only the dry-run path reaches here; return the base path
        # (without extension) exactly as the original implementation did.
        return backup_path

    def restore_from_backup(self, backup_path: str, restore_path: Optional[str] = None) -> str:
        """
        Restore a project from a full backup archive.

        :param backup_path: backup archive path (.tar.gz suffix optional)
        :param restore_path: restore destination; defaults to the source
               path recorded in the backup's info file
        :return: the path the project was (or, in dry-run, would be)
                 restored to
        :raises FileNotFoundError: if the archive does not exist
        :raises ValueError: if no restore path can be determined
        """
        if not backup_path.endswith('.tar.gz'):
            backup_path = f"{backup_path}.tar.gz"

        if not os.path.exists(backup_path):
            raise FileNotFoundError(f"备份文件不存在: {backup_path}")

        # Load the sidecar metadata if present; it supplies the default
        # restore destination.
        info_file = backup_path.replace('.tar.gz', '_info.json')
        if os.path.exists(info_file):
            with open(info_file, 'r', encoding='utf-8') as f:
                backup_info = json.load(f)
        else:
            backup_info = {}

        if not restore_path:
            restore_path = backup_info.get('source_path', '')
            if not restore_path:
                raise ValueError("无法确定恢复路径，请指定restore_path参数")

        self.logger.log_operation("开始恢复", f"从备份: {backup_path}")

        try:
            if self.config.dry_run:
                self.logger.log_operation("模拟恢复", f"将恢复到: {restore_path}")
                # BUG FIX: the original fell through and returned None in
                # dry-run mode despite the declared `-> str` return type.
                return restore_path

            # Move any existing version aside before extracting, so a
            # failed restore does not destroy the current project.
            if os.path.exists(restore_path):
                temp_backup = f"{restore_path}_temp_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
                shutil.move(restore_path, temp_backup)
                self.logger.log_operation("临时备份", f"当前版本已移动到: {temp_backup}")

            os.makedirs(restore_path, exist_ok=True)

            # NOTE(review): extractall is only safe for archives this tool
            # created itself; an untrusted archive could carry path-traversal
            # entries (consider filter="data" once on Python 3.12+).
            with tarfile.open(backup_path, 'r:gz') as tar:
                tar.extractall(restore_path)

            self.logger.log_operation("恢复完成", f"项目已恢复到: {restore_path}")
            return restore_path

        except Exception as e:
            self.logger.log_error(f"恢复失败: {str(e)}")
            raise

    def list_backups(self) -> List[Dict]:
        """
        List all full backups, newest first.

        :return: one dict per archive with filename, path, size, mtime,
                 and (when the sidecar exists) the stored backup info
        """
        backup_base_dir = os.path.join(self.config.backup_dir, "full_backups")
        if not os.path.exists(backup_base_dir):
            return []

        backups = []
        for file in os.listdir(backup_base_dir):
            if not file.endswith('.tar.gz'):
                continue
            backup_path = os.path.join(backup_base_dir, file)
            info_file = backup_path.replace('.tar.gz', '_info.json')

            backup_data = {
                'filename': file,
                'path': backup_path,
                'size': os.path.getsize(backup_path),
                'modified': datetime.fromtimestamp(os.path.getmtime(backup_path)).isoformat()
            }

            # Attach sidecar metadata when available.
            if os.path.exists(info_file):
                with open(info_file, 'r', encoding='utf-8') as f:
                    backup_data['info'] = json.load(f)

            backups.append(backup_data)

        return sorted(backups, key=lambda x: x['modified'], reverse=True)

    def clean_old_backups(self, keep_count: int = 5):
        """
        Delete old full backups, keeping the most recent N.

        :param keep_count: number of backups to keep
        """
        backups = self.list_backups()

        if len(backups) <= keep_count:
            self.logger.log_operation("备份清理", f"当前备份数量: {len(backups)}，无需清理")
            return

        # list_backups() is newest-first, so everything past keep_count
        # is older and eligible for deletion (archive + sidecar).
        for backup in backups[keep_count:]:
            try:
                os.remove(backup['path'])
                info_file = backup['path'].replace('.tar.gz', '_info.json')
                if os.path.exists(info_file):
                    os.remove(info_file)
                self.logger.log_operation("删除旧备份", backup['filename'])
            except Exception as e:
                # Best-effort cleanup: log and continue with the rest.
                self.logger.log_error(f"删除备份失败: {backup['filename']}, {str(e)}")

    def create_incremental_backup(self, project_path: str, base_backup_path: str) -> str:
        """
        Create an incremental backup containing only files modified since
        the base backup's timestamp.

        :param project_path: project root directory
        :param base_backup_path: path of the base full-backup archive
        :return: path of the incremental archive, or the base backup path
                 when nothing changed
        :raises FileNotFoundError: if the base backup does not exist
        """
        if not os.path.exists(base_backup_path):
            raise FileNotFoundError(f"基础备份不存在: {base_backup_path}")

        # The base backup's sidecar provides the reference timestamp.
        info_file = base_backup_path.replace('.tar.gz', '_info.json')
        with open(info_file, 'r', encoding='utf-8') as f:
            base_info = json.load(f)

        modified_files = self._get_modified_files(project_path, base_info['timestamp'])

        if not modified_files:
            self.logger.log_operation("增量备份", "没有文件被修改，跳过备份")
            return base_backup_path

        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        incremental_name = f"incremental_{timestamp}.tar.gz"
        backup_base_dir = os.path.join(self.config.backup_dir, "full_backups")
        # Robustness: the directory may not exist yet if no full backup
        # was ever created through this instance.
        os.makedirs(backup_base_dir, exist_ok=True)
        incremental_path = os.path.join(backup_base_dir, incremental_name)

        with tarfile.open(incremental_path, 'w:gz') as tar:
            for file_path in modified_files:
                rel_path = os.path.relpath(file_path, project_path)
                tar.add(file_path, arcname=rel_path)

        incremental_info = {
            'type': 'incremental',
            'base_backup': base_backup_path,
            'timestamp': datetime.now().isoformat(),
            'modified_files': len(modified_files),
            'files': [os.path.relpath(f, project_path) for f in modified_files]
        }

        info_file = incremental_path.replace('.tar.gz', '_info.json')
        with open(info_file, 'w', encoding='utf-8') as f:
            json.dump(incremental_info, f, indent=2, ensure_ascii=False)

        self.logger.log_operation("增量备份完成", f"备份了 {len(modified_files)} 个修改的文件")
        return incremental_path

    def _get_files_to_backup(self, project_path: str) -> List[str]:
        """
        Collect the files to archive, skipping VCS metadata, build
        output, IDE state, and similar noise.

        :param project_path: project root directory
        :return: absolute paths of files to include
        """
        exclude_patterns = [
            '.git', '.svn', '.hg',
            'build', 'DerivedData', 'Pods',
            'node_modules', '.gradle', '.idea',
            '*.pyc', '__pycache__', '.DS_Store',
            'xcuserdata', '*.xcworkspace/xcuserdata',
            '*.xcodeproj/xcuserdata'
        ]

        def _excluded(name: str) -> bool:
            # BUG FIX: the original tested every pattern with substring
            # `in`, so glob patterns such as '*.pyc' never matched and
            # compiled bytecode was backed up. Globs now use fnmatch;
            # plain patterns keep the original substring semantics.
            for pattern in exclude_patterns:
                if '*' in pattern:
                    if fnmatch.fnmatch(name, pattern):
                        return True
                elif pattern in name:
                    return True
            return False

        files_to_backup = []
        for root, dirs, files in os.walk(project_path):
            # Prune excluded directories in place so os.walk skips them.
            dirs[:] = [d for d in dirs if not _excluded(d)]

            for file in files:
                if not _excluded(file):
                    files_to_backup.append(os.path.join(root, file))

        return files_to_backup

    def _get_modified_files(self, project_path: str, base_timestamp: str) -> List[str]:
        """
        Find files whose mtime is later than the base backup's timestamp.

        :param project_path: project root directory
        :param base_timestamp: ISO-format timestamp of the base backup
        :return: absolute paths of modified files
        """
        base_time = datetime.fromisoformat(base_timestamp).timestamp()
        modified_files = []

        for root, dirs, files in os.walk(project_path):
            for file in files:
                file_path = os.path.join(root, file)
                if os.path.getmtime(file_path) > base_time:
                    modified_files.append(file_path)

        return modified_files

    def _calculate_directory_size(self, path: str) -> int:
        """
        Compute the total size of all files under a directory.

        :param path: directory path
        :return: total size in bytes
        """
        total_size = 0
        for dirpath, dirnames, filenames in os.walk(path):
            for filename in filenames:
                file_path = os.path.join(dirpath, filename)
                # Guard against broken symlinks / races during the walk.
                if os.path.exists(file_path):
                    total_size += os.path.getsize(file_path)
        return total_size

    def _format_size(self, size: int) -> str:
        """
        Format a byte count as a human-readable string.

        :param size: size in bytes
        :return: e.g. "1.50 KB"
        """
        for unit in ['B', 'KB', 'MB', 'GB']:
            if size < 1024.0:
                return f"{size:.2f} {unit}"
            size /= 1024.0
        return f"{size:.2f} TB"

    def _analyze_project_structure(self, project_path: str) -> Dict:
        """
        Detect the project type (iOS/Android) from marker files in the
        project root.

        :param project_path: project root directory
        :return: dict with 'type', 'config_files', and platform markers
        """
        structure = {
            'type': 'unknown',
            'main_files': [],
            'config_files': []
        }

        for file in os.listdir(project_path):
            # iOS markers
            if file.endswith('.xcworkspace'):
                structure['type'] = 'ios'
                structure['workspace'] = file
            elif file.endswith('.xcodeproj'):
                structure['type'] = 'ios'
                structure['project'] = file
            elif file == 'Podfile':
                structure['config_files'].append('Podfile')
            elif file == 'Cartfile':
                structure['config_files'].append('Cartfile')
            # Android markers
            elif file in ('build.gradle', 'build.gradle.kts'):
                structure['type'] = 'android'
                structure['config_files'].append(file)
            elif file in ('settings.gradle', 'settings.gradle.kts'):
                structure['config_files'].append(file)
            elif file == 'gradle.properties':
                structure['config_files'].append(file)

        return structure