import concurrent.futures
import logging
import os
import pathlib
import threading
import time
from typing import Any, Callable, Dict, List, Optional

# Configure logging to both a file and the console.
# FileHandler raises FileNotFoundError if the target directory is missing,
# so make sure 'logs/' exists before configuring the handlers.
os.makedirs('logs', exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('logs/parallel_scanner.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger('parallel_scanner')

from scanner.scanner import Scanner


class ParallelScanner(Scanner):
    """Scanner subclass that walks directory trees with thread pools.

    Directory traversal is I/O bound, so independent subtrees are scanned
    concurrently.  Shared progress counters are guarded by a reentrant lock
    so that nested helper calls cannot deadlock.
    """

    def __init__(self,
                 progress_callback: Optional[Callable[[int, int], None]] = None,
                 file_callback: Optional[Callable[[Dict[str, Any]], None]] = None,
                 max_workers: Optional[int] = None):
        """
        Initialize the parallel scanner.

        :param progress_callback: called with (scanned_count, total_count)
        :param file_callback: called once per discovered file-info dict
        :param max_workers: maximum worker threads; defaults to the CPU count
        """
        super().__init__(progress_callback=progress_callback, file_callback=file_callback)
        # os.cpu_count() may return None; fall back to 1 so pool-size
        # arithmetic (min/max) never sees None.
        self.max_workers = max_workers or os.cpu_count() or 1
        # RLock, not Lock: _safe_add_file calls _safe_increment_scanned_files
        # while already holding the lock; a plain Lock would deadlock there.
        self._lock = threading.RLock()
        self._executor = None  # optional shared executor, shut down in start_scan

    def _pool_size(self, task_count: int) -> int:
        """Return a thread-pool size bounded by the task count, never below 1.

        ThreadPoolExecutor raises ValueError for max_workers <= 0, so clamp.
        """
        return max(1, min(self.max_workers, task_count))

    def _should_stop(self) -> bool:
        """True when the scan was cancelled or the external stop event fired."""
        if self._cancelled:
            return True
        stop = getattr(self, '_stop_event', None)
        return bool(stop and stop())

    def _safe_increment_scanned_files(self) -> None:
        """Thread-safely bump the scanned-file counter and report progress."""
        with self._lock:
            self._scanned_files += 1
            if self.progress_callback:
                self.progress_callback(self._scanned_files, self._total_files)

    def _safe_add_file(self, file_info: Dict[str, Any]) -> None:
        """Thread-safely report a discovered item and advance the progress counter."""
        with self._lock:
            if self.file_callback:
                self.file_callback(file_info)
            # Nested acquisition is safe because self._lock is reentrant.
            self._safe_increment_scanned_files()

    def scan_empty_directories(self,
                               folders: List[str],
                               recursive: bool = True,
                               ignore_patterns: Optional[List[str]] = None,
                               max_depth: Optional[int] = None,
                               stop_event: Optional[Callable[[], bool]] = None) -> List[Dict[str, Any]]:
        """
        Scan for empty directories (parallel version).

        :param folders: root directories to scan
        :param recursive: whether to recurse into subdirectories
        :param ignore_patterns: file/directory patterns to skip
        :param max_depth: maximum recursion depth (None = unlimited)
        :param stop_event: callable returning True when the scan should abort
        :return: list of info dicts, one per empty directory found
        """
        if self._is_scanning:
            logger.warning('扫描已在进行中')
            return []

        self._is_scanning = True
        self._is_paused = False
        self._cancelled = False
        self._total_files = 0
        self._scanned_files = 0

        try:
            empty_dirs: List[Dict[str, Any]] = []
            self._stop_event = stop_event

            # Filter out missing roots once, warning for each.
            existing = []
            for folder in folders:
                if os.path.exists(folder):
                    existing.append(folder)
                else:
                    logger.warning(f'文件夹不存在: {folder}')

            if existing:
                # Pass 1: count directories so progress totals are meaningful.
                with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(existing))) as executor:
                    count_futures = [
                        executor.submit(self._count_directories_for_empty_scan,
                                        folder, recursive, ignore_patterns, max_depth)
                        for folder in existing
                    ]
                    concurrent.futures.wait(count_futures)

                # Pass 2: scan each root in parallel and merge the results.
                with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(existing))) as executor:
                    scan_futures = [
                        executor.submit(self._scan_empty_directories_recursive,
                                        folder, recursive, ignore_patterns, max_depth, 0)
                        for folder in existing
                    ]
                    for future in concurrent.futures.as_completed(scan_futures):
                        try:
                            empty_dirs.extend(future.result())
                        except Exception as e:
                            logger.error(f'扫描目录时出错: {str(e)}')

            logger.info(f'空目录扫描完成，共找到 {len(empty_dirs)} 个空目录')
            return empty_dirs

        finally:
            # State cleanup only.  The original returned a value from this
            # finally block, which referenced an undefined name and would
            # have swallowed the real return value (and any exception).
            self._is_scanning = False
            self._stop_event = None

    def _count_directories_for_empty_scan(self, folder: str, recursive: bool, ignore_patterns: Optional[List[str]], max_depth: Optional[int] = None, current_depth: int = 0) -> None:
        """
        Count directories for the empty-directory scan (used for progress).

        Recurses in parallel; totals accumulate into self._total_files.
        """
        try:
            # The current directory itself counts as one unit of work.
            with self._lock:
                self._total_files += 1

            if recursive:
                # Collect eligible subdirectories of this folder.
                subdirs = []
                for item in os.listdir(folder):
                    item_path = os.path.join(folder, item)
                    if self._should_ignore(item_path, ignore_patterns):
                        continue
                    if os.path.isdir(item_path):
                        if max_depth is None or current_depth < max_depth:
                            subdirs.append(item_path)

                # Count the subtrees concurrently.
                if subdirs:
                    with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(subdirs))) as executor:
                        futures = [
                            executor.submit(self._count_directories_for_empty_scan,
                                            subdir, recursive, ignore_patterns,
                                            max_depth, current_depth + 1)
                            for subdir in subdirs
                        ]
                        concurrent.futures.wait(futures)
        except PermissionError:
            logger.warning(f'无权限访问文件夹: {folder}')
        except Exception as e:
            logger.error(f'计算目录数量时出错: {str(e)}')

    def _scan_empty_directories_recursive(self, folder: str, recursive: bool, ignore_patterns: Optional[List[str]], max_depth: Optional[int] = None, current_depth: int = 0) -> List[Dict[str, Any]]:
        """
        Recursively scan *folder* for empty directories (parallel version).

        :return: info dicts for every empty directory found under *folder*
        """
        empty_dirs: List[Dict[str, Any]] = []

        if self._should_stop():
            return empty_dirs

        # Honour pause requests; bail out if cancelled while paused.
        while self._is_paused:
            if self._should_stop():
                return empty_dirs
            time.sleep(0.1)

        try:
            # Collect the direct, non-ignored subdirectories.
            subdirs = []
            for item in os.listdir(folder):
                item_path = os.path.join(folder, item)
                if self._should_ignore(item_path, ignore_patterns):
                    continue
                if os.path.isdir(item_path):
                    subdirs.append(item_path)

            if subdirs:
                # Check emptiness of each subdirectory concurrently.
                with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(subdirs))) as executor:
                    emptiness = {
                        executor.submit(self._is_directory_empty, subdir, ignore_patterns): subdir
                        for subdir in subdirs
                    }
                    for future in concurrent.futures.as_completed(emptiness):
                        subdir = emptiness[future]
                        try:
                            if future.result():
                                # Normalise to the platform's path separators.
                                normalized_subdir = os.path.normpath(subdir)
                                folder_stats = os.stat(normalized_subdir)
                                empty_dir_info = {
                                    'path': normalized_subdir,
                                    'name': os.path.basename(normalized_subdir),
                                    'size': 0,  # an empty directory has no content
                                    'modified_time': folder_stats.st_mtime,
                                    'created_time': folder_stats.st_ctime,
                                    'accessed_time': folder_stats.st_atime,
                                    'type': 'empty_directory'
                                }
                                empty_dirs.append(empty_dir_info)
                                self._safe_add_file(empty_dir_info)
                        except Exception as e:
                            logger.error(f'处理目录 {subdir} 时出错: {str(e)}')

                # Recurse into the subdirectories when allowed.  This whole
                # branch sits inside "if subdirs": the original built a pool
                # with max_workers=0 when there were no subdirectories,
                # which raises ValueError.
                if recursive and (max_depth is None or current_depth < max_depth):
                    with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(subdirs))) as executor:
                        recursions = {
                            executor.submit(self._scan_empty_directories_recursive,
                                            subdir, recursive, ignore_patterns,
                                            max_depth, current_depth + 1): subdir
                            for subdir in subdirs
                        }
                        for future in concurrent.futures.as_completed(recursions):
                            try:
                                empty_dirs.extend(future.result())
                            except Exception as e:
                                logger.error(f'递归扫描目录 {recursions[future]} 时出错: {str(e)}')

            # Finished this directory: advance progress.
            self._safe_increment_scanned_files()

        except PermissionError:
            logger.warning(f'无权限访问文件夹: {folder}')
            self._safe_increment_scanned_files()  # keep progress moving on errors
        except Exception as e:
            logger.error(f'扫描空目录时出错: {folder}, 错误: {str(e)}')
            self._safe_increment_scanned_files()

        return empty_dirs

    def _count_files(self, folder: str, recursive: bool, ignore_patterns: Optional[List[str]], max_depth: Optional[int] = None, current_depth: int = 0) -> None:
        """Count files under *folder* in parallel, accumulating self._total_files."""
        try:
            # Tally files in this directory and collect eligible subdirectories.
            items = os.listdir(folder)
            file_count = 0
            subdirs = []

            for item in items:
                item_path = os.path.join(folder, item)
                if self._should_ignore(item_path, ignore_patterns):
                    continue
                if os.path.isfile(item_path):
                    file_count += 1
                elif recursive and os.path.isdir(item_path):
                    if max_depth is None or current_depth < max_depth:
                        subdirs.append(item_path)

            with self._lock:
                self._total_files += file_count

            # Count the subtrees concurrently.
            if subdirs:
                with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(subdirs))) as executor:
                    futures = [
                        executor.submit(self._count_files, subdir, recursive,
                                        ignore_patterns, max_depth, current_depth + 1)
                        for subdir in subdirs
                    ]
                    concurrent.futures.wait(futures)
        except PermissionError:
            logger.warning(f'无权限访问文件夹: {folder}')
        except Exception as e:
            logger.error(f'计算文件数量时出错: {str(e)}')

    def _scan_folder(self, folder: str, recursive: bool, ignore_patterns: Optional[List[str]], max_depth: Optional[int] = None, current_depth: int = 0) -> List[Dict[str, Any]]:
        """Scan *folder* in parallel and return the collected file-info dicts."""
        file_info_list: List[Dict[str, Any]] = []

        if self._cancelled:
            return file_info_list

        # Honour pause requests; bail out if cancelled while paused.
        while self._is_paused:
            if self._cancelled:
                return file_info_list
            time.sleep(0.1)

        try:
            items = os.listdir(folder)
            files = []
            subdirs = []

            # Split entries into files and (eligible) subdirectories.
            for item in items:
                item_path = os.path.join(folder, item)
                if self._should_ignore(item_path, ignore_patterns):
                    continue
                if os.path.isfile(item_path):
                    files.append(item_path)
                elif recursive and os.path.isdir(item_path):
                    if max_depth is None or current_depth < max_depth:
                        subdirs.append(item_path)

            # Gather file metadata concurrently.
            if files:
                with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(files))) as executor:
                    futures = {executor.submit(self._get_file_info, fp): fp for fp in files}
                    for future in concurrent.futures.as_completed(futures):
                        file_path = futures[future]
                        try:
                            file_info = future.result()
                            if file_info:
                                file_info_list.append(file_info)
                                self._safe_add_file(file_info)
                        except Exception as e:
                            logger.error(f'处理文件 {file_path} 时出错: {str(e)}')

            # Recurse into subdirectories concurrently.
            if subdirs:
                with concurrent.futures.ThreadPoolExecutor(max_workers=self._pool_size(len(subdirs))) as executor:
                    futures = {
                        executor.submit(self._scan_folder, subdir, recursive,
                                        ignore_patterns, max_depth, current_depth + 1): subdir
                        for subdir in subdirs
                    }
                    for future in concurrent.futures.as_completed(futures):
                        try:
                            file_info_list.extend(future.result())
                        except Exception as e:
                            logger.error(f'扫描子目录 {futures[future]} 时出错: {str(e)}')

            if self._cancelled:
                return file_info_list

        except PermissionError:
            logger.warning(f'无权限访问文件夹: {folder}')
        except Exception as e:
            logger.error(f'扫描文件夹时出错: {str(e)}')

        return file_info_list

    def start_scan(self, *args, **kwargs):
        """Delegate to the base scanner, then make sure any pool is released."""
        try:
            return super().start_scan(*args, **kwargs)
        finally:
            # Release worker threads even if the scan raised.
            if getattr(self, '_executor', None):
                self._executor.shutdown(wait=False)

    def cancel_scan(self):
        """Cancel the scan; the base class flips the shared cancellation flag."""
        super().cancel_scan()


if __name__ == '__main__':
    # Smoke-test the parallel scanner against the current directory.
    def on_progress(done: int, total: int) -> None:
        print(f'已扫描: {done}/{total} 文件')

    def on_file(info: Dict[str, Any]) -> None:
        print(f'发现文件: {info["path"]}, 大小: {info["size"]} 字节')

    scanner = ParallelScanner(
        progress_callback=on_progress,
        file_callback=on_file,
        max_workers=4,
    )
    results = scanner.start_scan(
        ['.'],  # scan the current directory
        recursive=True,
        ignore_patterns=['.git/', '.trae/', '*.log', 'tmp/'],
    )
    print(f'扫描完成，共找到 {len(results)} 个文件')
    print(f'并行扫描器测试完成')

# 性能优化说明:
# 1. 使用ThreadPoolExecutor实现并行处理
# 2. 对I/O密集型操作（如文件系统访问）并行化以提高性能
# 3. 使用锁机制确保线程安全
# 4. 对目录和文件处理分别进行并行化
# 5. 根据CPU核心数动态调整线程数
# 6. 实现了暂停、继续和取消功能，确保资源正确释放
# 7. 通过回调函数保持与UI的交互
# 8. 避免了过多的线程创建，通过线程池复用线程
# 9. 针对大目录结构优化了任务分配策略
# 10. 处理了异常和边缘情况，确保稳定性