#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys
import json
import time
import shutil
import subprocess
import threading
from datetime import datetime, timedelta
import hashlib
import logging

class BackupTask:
    """A single backup job: what to back up, where to push it, and how.

    Instances are plain attribute bags; the manager persists them by
    dumping ``__dict__`` to JSON and restores them by updating
    ``__dict__`` on a fresh instance, so every attribute must be
    JSON-serializable.
    """

    def __init__(self, task_id=None, name="", source_path="", repo_url="",
                 branch="main", scheduled=False, schedule_time="",
                 username="", email="", exclude_patterns="",
                 incremental=True, resume_transfer=True, large_file_chunk=True,
                 created_at=None, last_backup=None):
        # Nanosecond timestamp as the default id: the previous
        # second-resolution default (str(int(time.time()))) collided when
        # two tasks were created within the same second.
        self.id = task_id or str(time.time_ns())
        self.name = name
        self.source_path = source_path            # directory to back up
        self.repo_url = repo_url                  # remote git repository URL
        self.branch = branch                      # branch to push to
        self.scheduled = scheduled                # run automatically on a schedule?
        self.schedule_time = schedule_time        # daily run time, "HH:MM"
        self.username = username                  # git user.name (optional)
        self.email = email                        # git user.email (optional)
        self.exclude_patterns = exclude_patterns  # newline-separated glob patterns
        self.incremental = incremental            # skip files whose content is unchanged
        self.resume_transfer = resume_transfer    # try cloning the existing repo first
        self.large_file_chunk = large_file_chunk  # declared option; not read by the manager here
        self.created_at = created_at or datetime.now().isoformat()
        self.last_backup = last_backup            # ISO timestamp of last successful backup
        # Runtime state; not meaningful across restarts.
        self.status = "idle"  # idle | running | success | failed
        self.progress = 0     # 0-100
        self.message = ""     # human-readable progress/result message

class BackupManager:
    """Owns the backup task list: persistence, scheduling and execution.

    Tasks are persisted as JSON at ``config/tasks.json`` relative to the
    package root; per-task result logs live in ``logs/`` and temporary
    git working copies in ``temp/``. Each backup runs in a daemon
    thread: clone (or init) the remote repo into a fresh temp dir, copy
    the source tree in, commit, and push.
    """

    def __init__(self):
        self.tasks = []
        # Map of currently running tasks:
        # {task_id: {'task': BackupTask, 'process': subprocess.Popen}}
        self.running_tasks = {}

        base_dir = os.path.dirname(os.path.dirname(__file__))
        self.tasks_file = os.path.join(base_dir, "config", "tasks.json")
        self.temp_dir = os.path.join(base_dir, "temp")
        self.logs_dir = os.path.join(base_dir, "logs")

        # Make sure the working directories exist up front.
        os.makedirs(self.temp_dir, exist_ok=True)
        os.makedirs(self.logs_dir, exist_ok=True)

        self.setup_logging()

    def setup_logging(self):
        """Configure file + console logging and bind ``self.logger``."""
        log_file = os.path.join(self.logs_dir, "backup_manager.log")

        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            handlers=[
                logging.FileHandler(log_file, encoding='utf-8'),
                logging.StreamHandler()
            ]
        )

        self.logger = logging.getLogger("BackupManager")

    def load_tasks(self):
        """Load the task list from the JSON file.

        Returns True on success (a missing file leaves the current list
        untouched), False on a read/parse error.
        """
        try:
            if os.path.exists(self.tasks_file):
                with open(self.tasks_file, 'r', encoding='utf-8') as f:
                    tasks_data = json.load(f)
                # Rebuild the in-memory list; persisted attributes simply
                # overwrite the defaults of a fresh BackupTask.
                self.tasks = []
                for task_data in tasks_data:
                    task = BackupTask()
                    task.__dict__.update(task_data)
                    self.tasks.append(task)

            self.logger.info(f"加载了 {len(self.tasks)} 个备份任务")
            return True
        except Exception as e:
            self.logger.error(f"加载任务列表失败: {e}")
            return False

    def save_tasks(self):
        """Persist the task list as JSON. Returns True on success."""
        try:
            # Ensure the config directory exists before writing.
            os.makedirs(os.path.dirname(self.tasks_file), exist_ok=True)

            with open(self.tasks_file, 'w', encoding='utf-8') as f:
                json.dump([task.__dict__ for task in self.tasks], f,
                          indent=4, ensure_ascii=False)

            self.logger.info("任务列表保存成功")
            return True
        except Exception as e:
            self.logger.error(f"保存任务列表失败: {e}")
            return False

    def add_task(self, name, source_path, repo_url, **kwargs):
        """Validate and add a new backup task.

        Returns a (success: bool, message: str) tuple.
        """
        if not name or not source_path or not repo_url:
            return False, "任务名称、源目录和仓库URL不能为空"

        if not os.path.exists(source_path):
            return False, "源目录不存在"

        # Task names double as user-facing identifiers; keep them unique.
        for task in self.tasks:
            if task.name == name:
                return False, "任务名称已存在"

        task = BackupTask(
            name=name,
            source_path=source_path,
            repo_url=repo_url,
            branch=kwargs.get('branch', 'main'),
            scheduled=kwargs.get('scheduled', False),
            schedule_time=kwargs.get('schedule_time', ''),
            username=kwargs.get('username', ''),
            email=kwargs.get('email', ''),
            exclude_patterns=kwargs.get('exclude_patterns', ''),
            incremental=kwargs.get('incremental', True),
            resume_transfer=kwargs.get('resume_transfer', True),
            large_file_chunk=kwargs.get('large_file_chunk', True)
        )

        self.tasks.append(task)
        self.save_tasks()

        self.logger.info(f"添加新任务: {name}")
        return True, "任务添加成功"

    def delete_task(self, task_id):
        """Delete a task by id, stopping it first if it is running."""
        for i, task in enumerate(self.tasks):
            if task.id == task_id:
                if task_id in self.running_tasks:
                    self.stop_task(task_id)

                task_name = self.tasks[i].name
                del self.tasks[i]
                self.save_tasks()

                self.logger.info(f"删除任务: {task_name}")
                return True, "任务删除成功"

        return False, "任务不存在"

    def update_task(self, task_id, **kwargs):
        """Update existing attributes of a task; unknown keys are ignored."""
        for task in self.tasks:
            if task.id == task_id:
                for key, value in kwargs.items():
                    if hasattr(task, key):
                        setattr(task, key, value)

                self.save_tasks()
                return True, "任务更新成功"

        return False, "任务不存在"

    def get_task(self, task_id):
        """Return the task with the given id, or None."""
        for task in self.tasks:
            if task.id == task_id:
                return task
        return None

    def run_task(self, task_id, progress_callback=None):
        """Start a backup in a background daemon thread.

        *progress_callback* (if given) is invoked with the task object at
        each phase transition. Returns (success, message).
        """
        task = self.get_task(task_id)
        if not task:
            return False, "任务不存在"

        if task.status == "running":
            return False, "任务正在运行中"

        # Mark running before the thread is scheduled, so a second
        # run_task() call cannot slip past the guard above and start the
        # same task twice.
        task.status = "running"

        backup_thread = threading.Thread(
            target=self._run_backup_task,
            args=(task, progress_callback),
            daemon=True
        )
        backup_thread.start()

        return True, "任务已开始执行"

    def stop_task(self, task_id):
        """Stop a running task by terminating its current git subprocess."""
        if task_id not in self.running_tasks:
            return False, "任务未在运行"

        entry = self.running_tasks[task_id]
        task = entry['task']
        process = entry['process']

        # Ask politely first; escalate to kill if it will not exit.
        try:
            process.terminate()
            process.wait(timeout=5)
        except (OSError, subprocess.TimeoutExpired):
            try:
                process.kill()
                process.wait(timeout=5)
            except (OSError, subprocess.TimeoutExpired):
                pass  # best effort; the daemon thread will still unwind

        task.status = "idle"
        task.message = "任务已停止"

        del self.running_tasks[task_id]

        self.logger.info(f"停止任务: {task.name}")
        return True, "任务已停止"

    def _run_backup_task(self, task, progress_callback=None):
        """Thread body: run the full backup pipeline for *task*.

        clone/init -> configure git -> copy files -> commit -> push,
        updating task.status/progress/message along the way, writing a
        per-task result log, and always cleaning up the temp working copy.
        """
        task.status = "running"
        task.message = "准备开始备份..."
        task.progress = 0

        if progress_callback:
            progress_callback(task)

        start_time = datetime.now()

        # A fresh timestamped temp dir per run avoids permission leftovers
        # from a previous backup.
        temp_task_dir = os.path.join(self.temp_dir, f"{task.id}_{int(time.time())}")
        os.makedirs(temp_task_dir, exist_ok=True)

        try:
            repo_path = os.path.join(temp_task_dir, "repo")
            if os.path.exists(repo_path):
                shutil.rmtree(repo_path, onerror=self._handle_remove_readonly)

            # Prefer cloning the existing remote (keeps history and lets
            # incremental hashing compare against the last backup); fall
            # back to a brand-new repository if the clone fails.
            clone_success = False
            if task.resume_transfer:
                try:
                    self._clone_repo(task.repo_url, repo_path, task)
                    clone_success = True
                except Exception as e:
                    self.logger.warning(f"克隆仓库失败: {e}")
                    task.message = f"克隆仓库失败: {e}"

            if not clone_success:
                os.makedirs(repo_path, exist_ok=True)
                self._init_repo(repo_path, task)

            self._configure_git(repo_path, task)

            task.message = "正在复制文件..."
            task.progress = 10
            if progress_callback:
                progress_callback(task)

            self._copy_files(task.source_path, repo_path, task)

            task.message = "正在提交更改..."
            task.progress = 80
            if progress_callback:
                progress_callback(task)

            commit_message = f"备份于 {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
            self._commit_changes(repo_path, task, commit_message)

            task.message = "正在推送到远程仓库..."
            task.progress = 90
            if progress_callback:
                progress_callback(task)

            self._push_to_remote(repo_path, task)

            task.status = "success"
            task.message = "备份完成"
            task.progress = 100
            task.last_backup = datetime.now().isoformat()

            end_time = datetime.now()
            duration = (end_time - start_time).total_seconds()

            self._write_task_log(task, [
                f"备份任务: {task.name}\n",
                f"开始时间: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n",
                f"结束时间: {end_time.strftime('%Y-%m-%d %H:%M:%S')}\n",
                f"耗时: {duration:.2f} 秒\n",
                f"状态: 成功\n",
                f"源目录: {task.source_path}\n",
                f"目标仓库: {task.repo_url}\n",
                f"分支: {task.branch}\n",
            ])

            self.logger.info(f"备份任务 {task.name} 完成，耗时 {duration:.2f} 秒")

        except Exception as e:
            task.status = "failed"
            task.message = f"备份失败: {e}"
            task.progress = 0

            self._write_task_log(task, [
                f"备份任务: {task.name}\n",
                f"开始时间: {start_time.strftime('%Y-%m-%d %H:%M:%S')}\n",
                f"结束时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n",
                f"状态: 失败\n",
                f"错误信息: {e}\n",
                f"源目录: {task.source_path}\n",
                f"目标仓库: {task.repo_url}\n",
            ])

            self.logger.error(f"备份任务 {task.name} 失败: {e}")

        finally:
            # Remove the temp working copy regardless of outcome.
            try:
                if os.path.exists(temp_task_dir):
                    shutil.rmtree(temp_task_dir, onerror=self._handle_remove_readonly)
            except OSError as e:
                self.logger.error(f"清理临时目录失败: {e}")

            # Drop any registered subprocess handle for this task.
            if task.id in self.running_tasks:
                del self.running_tasks[task.id]

            if progress_callback:
                progress_callback(task)

    def _write_task_log(self, task, lines):
        """Overwrite the per-task result log with *lines*."""
        log_file = os.path.join(self.logs_dir, f"{task.id}.log")
        with open(log_file, 'w', encoding='utf-8') as f:
            f.writelines(lines)

    @staticmethod
    def _handle_remove_readonly(func, path, exc):
        """shutil.rmtree onerror hook: clear the read-only bit and retry.

        Needed mainly on Windows, where git object files are read-only.
        """
        import stat
        if not os.access(path, os.W_OK):
            os.chmod(path, stat.S_IWUSR)
            func(path)

    def _run_git(self, args, cwd=None, task=None):
        """Run ``git *args`` and return (returncode, combined output).

        stderr is merged into stdout. When *task* is given, the Popen
        handle is registered in ``self.running_tasks`` so stop_task()
        can terminate the long-running clone/init.
        """
        process = subprocess.Popen(
            ["git"] + list(args),
            cwd=cwd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            encoding='utf-8'
        )

        if task is not None:
            self.running_tasks[task.id] = {'task': task, 'process': process}

        output, _ = process.communicate()
        return process.returncode, output

    def _clone_repo(self, repo_url, repo_path, task):
        """Clone the remote repository into *repo_path*. Raises on failure."""
        rc, output = self._run_git(["clone", repo_url, repo_path], task=task)
        if rc != 0:
            raise Exception(f"克隆仓库失败: {output}")

    def _init_repo(self, repo_path, task):
        """Initialise a fresh repository and point ``origin`` at the remote."""
        rc, output = self._run_git(["init"], cwd=repo_path, task=task)
        if rc != 0:
            raise Exception(f"初始化仓库失败: {output}")

        # Adding the remote can fail if 'origin' already exists; that is
        # harmless, so the result is deliberately ignored.
        self._run_git(["remote", "add", "origin", task.repo_url], cwd=repo_path)

    def _configure_git(self, repo_path, task):
        """Set repo-local git identity from the task, where provided."""
        if task.username:
            self._run_git(["config", "user.name", task.username], cwd=repo_path)

        if task.email:
            self._run_git(["config", "user.email", task.email], cwd=repo_path)

    def _copy_files(self, source_path, repo_path, task):
        """Copy the source tree into the repo working copy.

        Honours the task's exclude patterns plus a built-in default list,
        and — in incremental mode — skips files whose MD5 matches the copy
        already present in the cloned repo. Updates task.progress/message
        as it goes (polled by the UI thread; no callback is available here).
        """
        exclude_patterns = []
        if task.exclude_patterns:
            exclude_patterns = [pattern.strip()
                                for pattern in task.exclude_patterns.split('\n')
                                if pattern.strip()]

        # Built-in excludes: VCS metadata, IDE folders, caches, archives,
        # logs and other volatile files that do not belong in a backup.
        exclude_patterns.extend([
            '.git', '.svn', '.idea', '.vscode', '.DS_Store',
            '__pycache__', 'node_modules', 'vendor', 'cache', 'tmp',
            'logs', 'uploads', 'backup', 'temp', 'runtime', 'storage',
            '*.zip', '*.rar', '*.7z', '*.tar.*', '*.gz', '*.bz2',
            '*.log', '*.tmp', '*.sql', '*.bak', '*.cache',
            '*.swp', '*.pid', '*.lock', '*.sock', '*.session'
        ])

        # First pass: count the files to copy so progress can be reported.
        total_files = 0
        for root, dirs, files in os.walk(source_path):
            # Pruning dirs in place stops os.walk from descending into them.
            dirs[:] = [d for d in dirs
                       if not self._should_exclude(d, exclude_patterns, is_dir=True)]
            total_files += sum(1 for f in files
                               if not self._should_exclude(f, exclude_patterns))

        if total_files == 0:
            return  # nothing to copy

        # Second pass: copy, preserving the relative layout.
        copied_count = 0
        for root, dirs, files in os.walk(source_path):
            dirs[:] = [d for d in dirs
                       if not self._should_exclude(d, exclude_patterns, is_dir=True)]

            for file in files:
                if self._should_exclude(file, exclude_patterns):
                    continue

                source_file = os.path.join(root, file)
                rel_path = os.path.relpath(source_file, source_path)
                dest_file = os.path.join(repo_path, rel_path)
                os.makedirs(os.path.dirname(dest_file), exist_ok=True)

                # Incremental mode: skip files whose content is unchanged
                # relative to the cloned copy.
                if task.incremental and os.path.exists(dest_file):
                    try:
                        if self._get_file_hash(source_file) == self._get_file_hash(dest_file):
                            continue
                    except OSError:
                        pass  # hashing failed — copy unconditionally

                shutil.copy2(source_file, dest_file)
                copied_count += 1

                # Progress spans 10-80%; the copy phase sits between the
                # setup (10%) and commit (80%) milestones.
                task.progress = 10 + int((copied_count / total_files) * 70)
                task.message = f"正在复制文件 ({copied_count}/{total_files})"

    def _should_exclude(self, name, exclude_patterns, is_dir=False):
        """Return True if *name* matches any exclude pattern.

        Patterns containing ``*`` or ``?`` are matched with fnmatch;
        anything else must equal the name exactly. *is_dir* is kept for
        API compatibility — exact matching is identical for files and
        directories.
        """
        import fnmatch  # hoisted out of the loop; stdlib, cheap after first import

        for pattern in exclude_patterns:
            if not pattern:
                continue

            if '*' in pattern or '?' in pattern:
                if fnmatch.fnmatch(name, pattern):
                    return True
            elif pattern == name:
                return True

        return False

    def _get_file_hash(self, file_path):
        """Return the MD5 hex digest of a file, read in 4 KiB chunks.

        MD5 is used only for change detection, not for security.
        """
        hash_md5 = hashlib.md5()
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()

    def _commit_changes(self, repo_path, task, message):
        """Stage everything and commit; a silent no-op when nothing changed."""
        rc, output = self._run_git(["add", "."], cwd=repo_path)
        if rc != 0:
            raise Exception(f"添加文件到暂存区失败: {output}")

        # An empty `git status --porcelain` means nothing to commit
        # (git commit would fail with a non-zero exit otherwise).
        rc, output = self._run_git(["status", "--porcelain"], cwd=repo_path)
        if not output.strip():
            return

        rc, output = self._run_git(["commit", "-m", message], cwd=repo_path)
        if rc != 0:
            raise Exception(f"提交更改失败: {output}")

    def _push_to_remote(self, repo_path, task):
        """Push the branch, setting upstream if it does not exist remotely."""
        # ls-remote --exit-code returns non-zero when the ref is absent.
        rc, _ = self._run_git(["ls-remote", "--exit-code", "origin", task.branch],
                              cwd=repo_path)

        if rc == 0:
            push_args = ["push", "origin", task.branch]
        else:
            # New remote branch: push with -u to establish tracking.
            push_args = ["push", "-u", "origin", task.branch]

        rc, output = self._run_git(push_args, cwd=repo_path)
        if rc != 0:
            raise Exception(f"推送到远程仓库失败: {output}")

    def check_scheduled_tasks(self):
        """Start any scheduled task whose time matches now (once per day)."""
        now = datetime.now()
        current_time = now.strftime("%H:%M")
        current_date = now.strftime("%Y-%m-%d")

        for task in self.tasks:
            if not task.scheduled or task.status == "running":
                continue

            if not task.schedule_time:
                continue

            if task.schedule_time != current_time:
                continue

            # Skip if it already ran today. last_backup is ISO-8601
            # ("YYYY-MM-DDTHH:MM:SS..."), so the date is its first 10
            # characters. The previous split(" ")[0] never matched —
            # isoformat separates date and time with "T", not a space —
            # so the task was restarted on every check within the minute.
            if task.last_backup and task.last_backup[:10] == current_date:
                continue

            self.logger.info(f"开始执行计划任务: {task.name}")
            self.run_task(task.id)

    def get_task_logs(self, task_id):
        """Return the contents of a task's result log, or an error message."""
        log_file = os.path.join(self.logs_dir, f"{task_id}.log")

        if not os.path.exists(log_file):
            return "日志文件不存在"

        try:
            with open(log_file, 'r', encoding='utf-8') as f:
                return f.read()
        except Exception as e:
            return f"读取日志文件失败: {e}"