# autosync.py
import os
import shutil
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import filecmp
from concurrent.futures import ThreadPoolExecutor
import threading


class SyncHandler(FileSystemEventHandler):
    """Watchdog event handler that mirrors files from a source folder into a
    destination folder.

    Copies are dispatched to a small thread pool so the observer's event
    thread is never blocked by I/O. Modification events are debounced per
    file path, because editors typically fire several modify events in quick
    succession for a single save.
    """

    def __init__(self, source_folder, destination_folder):
        self.source_folder = source_folder
        self.destination_folder = destination_folder
        # Thread pool so file copies run off the observer's event thread.
        self.executor = ThreadPoolExecutor(max_workers=4)
        # Debounce bookkeeping: file path -> timestamp of last handled event.
        self.last_processed = {}
        self.debounce_time = 1.0  # seconds to ignore repeat events per path

    def on_modified(self, event):
        # NOTE(review): this handler was previously commented out, which left
        # the debounce state above as dead code and meant changes to existing
        # files were never synced (only newly created files were). Restored
        # with the original debounce logic.
        if not event.is_directory:
            current_time = time.time()
            file_path = event.src_path
            # Only handle a path again once the debounce window has elapsed.
            last_time = self.last_processed.get(file_path, 0)
            if current_time - last_time > self.debounce_time:
                self.last_processed[file_path] = current_time
                print(f"已修改文件: {event.src_path}")
                self.executor.submit(self.sync_file, event.src_path)

    def on_created(self, event):
        if not event.is_directory:
            print(f"已创建文件: {event.src_path}")
            self.executor.submit(self.sync_file, event.src_path)

    def sync_file(self, file_path):
        """Copy *file_path* to the mirrored location under the destination
        folder, skipping the copy when the destination is byte-identical."""
        try:
            relative_path = os.path.relpath(file_path, self.source_folder)
            destination_path = os.path.join(self.destination_folder,
                                            relative_path)

            # Skip when the destination already holds identical content
            # (full byte comparison, not just size/mtime).
            if os.path.exists(destination_path) and filecmp.cmp(
                    file_path, destination_path, shallow=False):
                print(f"文件相同，跳过同步: {relative_path}")
                return

            # Create the destination directory if it does not exist.
            os.makedirs(os.path.dirname(destination_path), exist_ok=True)

            # copy2 preserves metadata (timestamps) along with content.
            shutil.copy2(file_path, destination_path)
            print(f"已同步文件: {relative_path}")
        except Exception as e:
            # Best-effort sync: log and keep the watcher alive.
            print(f"同步文件失败 {file_path}: {e}")

    def close(self):
        """Shut down the thread pool (waiting for pending copies) and drop
        the debounce records."""
        self.executor.shutdown(wait=True)
        self.last_processed.clear()


def sync_file_worker(args):
    """Thread-pool worker: sync one file and update the shared progress line.

    Args:
        args: Tuple of ``(source_file, destination_file, relative_path,
            progress_lock, synced_files, total_files)`` where
            ``synced_files`` is a single-element list used as a mutable
            shared counter across worker threads.

    Errors are caught and printed so one bad file never aborts the batch.
    """
    source_file, destination_file, relative_path, progress_lock, synced_files, total_files = args
    try:
        # Skip the copy when the destination already holds identical content
        # (full byte comparison, not just size/mtime).
        if os.path.exists(destination_file) and filecmp.cmp(
                source_file, destination_file, shallow=False):
            pass
        else:
            # Ensure the parent directory exists so the worker is safe even
            # when called outside sync_folder's pre-created tree.
            parent_dir = os.path.dirname(destination_file)
            if parent_dir:
                os.makedirs(parent_dir, exist_ok=True)
            shutil.copy2(source_file, destination_file)

        # The counter and the single-line console update are shared across
        # threads; guard both with the lock.
        with progress_lock:
            synced_files[0] += 1
            progress = (synced_files[0] / total_files) * 100
            print(f"\r同步进度: {progress:.1f}% ({synced_files[0]}/{total_files})",
                  end='',
                  flush=True)
    except Exception as e:
        print(f"\n同步文件失败 {relative_path}: {e}")


def sync_folder(source, destination):
    """Sync the full contents of *source* into *destination* using a thread
    pool, printing a progress line as files complete.

    Args:
        source: Root directory to copy from.
        destination: Root directory to copy into (created as needed).
    """
    try:
        # Pass 1: collect every file to sync with its destination path.
        files_to_sync = []
        for root, dirs, files in os.walk(source):
            for file in files:
                source_file = os.path.join(root, file)
                relative_path = os.path.relpath(source_file, source)
                destination_file = os.path.join(destination, relative_path)
                files_to_sync.append(
                    (source_file, destination_file, relative_path))

        total_files = len(files_to_sync)
        if total_files == 0:
            print("没有文件需要同步")
            return

        synced_files = [0]  # single-element list: mutable shared counter
        progress_lock = threading.Lock()  # guards counter + console output

        # Pass 2: mirror the directory structure before copying.
        for root, dirs, files in os.walk(source):
            relative_path = os.path.relpath(root, source)
            destination_dir = os.path.join(
                destination,
                relative_path) if relative_path != '.' else destination
            # exist_ok avoids the check-then-create race of an exists() test
            # (another thread/process may create the dir in between).
            os.makedirs(destination_dir, exist_ok=True)

        # Copy files in parallel; workers report progress under the lock.
        with ThreadPoolExecutor(max_workers=4) as executor:
            args_list = [(src, dst, rel, progress_lock, synced_files,
                          total_files) for src, dst, rel in files_to_sync]
            futures = [
                executor.submit(sync_file_worker, args) for args in args_list
            ]
            # Wait for every task; workers catch their own per-file errors.
            for future in futures:
                future.result()

        print("\n完整同步完成")
    except Exception as e:
        print(f"\n同步失败: {e}")


def main():
    """Entry point: watch the source folder and mirror changes to the
    destination folder until interrupted with Ctrl-C."""
    # Source/destination were swapped in an earlier revision: the network
    # share is now the source and the local disk is the mirror.
    source_folder = r"\\192.168.11.123\yolo-universal-training\my_dataset"
    destination_folder = r"D:\my_dataset_from_server"

    # Optional one-shot full sync before watching:
    # sync_folder(source_folder, destination_folder)

    # Wire up the recursive file watcher.
    handler = SyncHandler(source_folder, destination_folder)
    watcher = Observer()
    watcher.schedule(handler, source_folder, recursive=True)

    watcher.start()
    print(f"开始监控 {source_folder} 并同步到 {destination_folder}")

    try:
        # Idle loop; the observer runs on its own thread.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        watcher.stop()
        print("同步服务已停止")
        handler.close()  # drain and shut down the copy thread pool

    watcher.join()


# Script entry point: only run the watcher when executed directly.
if __name__ == "__main__":
    main()
