import os
import time
import logging
from typing import List, Dict, Any, Optional, Callable, Union
from .parallel_scanner import ParallelScanner
from .parallel_hash_calculator import ParallelHashCalculator

logger = logging.getLogger(__name__)


class PipelineManager:
    """
    Parallel-processing pipeline manager that coordinates parallel
    directory scanning and file-hash calculation.
    """

    def __init__(self, max_workers: Optional[int] = None, scan_depth: Union[str, int] = -1, hash_algorithm: str = 'md5'):
        """
        Initialize the pipeline manager.

        :param max_workers: Maximum number of worker threads; defaults to
            the CPU count (or 4 if that cannot be determined).
        :param scan_depth: Scan depth. -1 means unlimited. The strings
            'recursive' and 'shallow' map to -1 and 0 respectively; any
            other string is parsed as an integer, falling back to -1.
        :param hash_algorithm: Hash algorithm name, default 'md5'.
        """
        self.max_workers = max_workers or os.cpu_count() or 4
        # Keep the caller-supplied value (possibly a string) for inspection;
        # the scanner always receives the normalized numeric depth below.
        self.scan_depth = scan_depth
        self.hash_algorithm = hash_algorithm

        # Parallel worker components.
        self.scanner = ParallelScanner(
            max_workers=self.max_workers,
            scan_depth=self._normalize_scan_depth(scan_depth),
        )
        self.hash_calculator = ParallelHashCalculator(
            max_workers=self.max_workers,
            hash_algorithm=self.hash_algorithm,
        )

        # Processing state shared by the scan and hash stages.
        self._is_processing = False
        self._total_files = 0
        self._processed_files = 0

    @staticmethod
    def _normalize_scan_depth(scan_depth: Union[str, int]) -> int:
        """
        Convert a scan-depth value to its numeric form.

        'recursive' -> -1, 'shallow' -> 0; any other string is parsed as an
        integer, with -1 (unlimited) as the fallback for unparsable input.
        Integer inputs are returned unchanged.

        :param scan_depth: Raw scan-depth value (string alias or integer).
        :return: Numeric scan depth; -1 means unlimited.
        """
        if isinstance(scan_depth, str):
            if scan_depth == 'recursive':
                return -1
            if scan_depth == 'shallow':
                return 0
            try:
                return int(scan_depth)
            except ValueError:
                return -1
        return scan_depth

    @property
    def is_processing(self) -> bool:
        """Whether a processing run is currently in progress."""
        return self._is_processing

    def _combined_progress_callback(self, stage: str, progress: int, total: int,
                                  user_callback: Optional[Callable[[str, int, int], None]] = None):
        """
        Merge per-stage progress into the shared counters and forward the
        combined state to the user-supplied callback.

        :param stage: Current stage ('scan' or 'hash').
        :param progress: Progress within the current stage.
        :param total: Total units reported for the current stage.
        :param user_callback: Optional user progress callback receiving
            (stage, processed_files, total_files).
        """
        if stage == 'scan':
            # Scan stage: 'total' is the file count reported by the scanner.
            # NOTE(review): when several directories are scanned, each scan
            # overwrites these counters with per-directory values; the final
            # total is fixed up after all scans in process_directories.
            self._total_files = total
            self._processed_files = progress
        elif stage == 'hash':
            # Hash stage: only the processed count advances.
            self._processed_files = progress

        if user_callback:
            user_callback(stage, self._processed_files, self._total_files)

    def process_directories(self, directories: List[str],
                          progress_callback: Optional[Callable[[str, int, int], None]] = None,
                          stop_event: Optional[Callable[[], bool]] = None) -> List[Dict[str, Any]]:
        """
        Scan multiple directories and compute hashes for every file found.

        :param directories: Directories to process.
        :param progress_callback: Progress callback receiving
            (stage, processed_files, total_files).
        :param stop_event: Callable returning True when processing should stop.
        :return: List of per-file info dicts (with hashes); an empty list
            when the input is empty, processing is stopped, or no files are
            found.
        """
        if not directories:
            logger.warning('没有目录需要处理')
            return []

        self._is_processing = True
        self._total_files = 0
        self._processed_files = 0

        try:
            logger.info(f'开始处理目录，共 {len(directories)} 个目录，线程数: {self.max_workers}')
            start_time = time.time()

            all_files_info = []

            # Stage 1: scan every directory for file metadata.
            # The callback is loop-invariant, so build it once.
            scan_progress_callback = lambda progress, total: \
                self._combined_progress_callback('scan', progress, total, progress_callback)

            for directory in directories:
                if stop_event and stop_event():
                    logger.info('处理操作已停止')
                    break

                logger.info(f'正在扫描目录: {directory}')

                files_info = self.scanner.scan_directory(
                    directory, scan_progress_callback, stop_event
                )

                if stop_event and stop_event():
                    logger.info('处理操作已停止')
                    break

                all_files_info.extend(files_info)

            # All scans done: fix the total file count for the hash stage.
            self._total_files = len(all_files_info)

            if stop_event and stop_event():
                logger.info('处理操作已停止')
                return []

            if not all_files_info:
                logger.warning('未找到任何文件')
                return []

            logger.info(f'目录扫描完成，共找到 {len(all_files_info)} 个文件')

            # Stage 2: compute file hashes.
            logger.info(f'开始计算文件哈希值，算法: {self.hash_algorithm}')

            # Restart the processed counter for the hash stage.
            self._processed_files = 0

            hash_progress_callback = lambda progress, total: \
                self._combined_progress_callback('hash', progress, total, progress_callback)

            processed_files = self.hash_calculator.calculate_hashes(
                all_files_info, hash_progress_callback, stop_event
            )

            end_time = time.time()
            logger.info(f'所有目录处理完成，耗时: {end_time - start_time:.2f} 秒')

            return processed_files

        finally:
            # Always clear the busy flag, even on stop or error.
            self._is_processing = False

    def process_directory(self, directory: str,
                        progress_callback: Optional[Callable[[str, int, int], None]] = None,
                        stop_event: Optional[Callable[[], bool]] = None) -> List[Dict[str, Any]]:
        """
        Scan a single directory and compute hashes for the files found.

        Convenience wrapper around :meth:`process_directories`.

        :param directory: Directory to process.
        :param progress_callback: Progress callback receiving
            (stage, processed_files, total_files).
        :param stop_event: Callable returning True when processing should stop.
        :return: List of per-file info dicts.
        """
        return self.process_directories([directory], progress_callback, stop_event)

    def set_max_workers(self, max_workers: int) -> None:
        """
        Set the maximum number of worker threads on the pipeline and on
        both underlying components. Non-positive values are rejected.

        :param max_workers: Maximum number of worker threads (must be > 0).
        """
        if max_workers > 0:
            self.max_workers = max_workers
            self.scanner.max_workers = max_workers
            self.hash_calculator.max_workers = max_workers
            logger.info(f'最大工作线程数已设置为: {max_workers}')
        else:
            logger.error(f'无效的最大工作线程数: {max_workers}')

    def set_scan_depth(self, scan_depth: Union[str, int]) -> None:
        """
        Set the scan depth.

        Accepts the same values as ``__init__``: -1 for unlimited, the
        strings 'recursive'/'shallow', or an integer (previously only
        integers worked here; string aliases were silently passed through
        unconverted).

        :param scan_depth: Scan depth; -1 means unlimited.
        """
        self.scan_depth = scan_depth
        # The scanner only understands numeric depths.
        self.scanner.scan_depth = self._normalize_scan_depth(scan_depth)
        logger.info(f'扫描深度已设置为: {scan_depth}')

    def set_hash_algorithm(self, algorithm: str) -> bool:
        """
        Set the hash algorithm.

        The local attribute is only updated when the calculator accepts
        the algorithm, keeping the two in sync.

        :param algorithm: Hash algorithm name.
        :return: True if the algorithm was accepted, False otherwise.
        """
        result = self.hash_calculator.set_hash_algorithm(algorithm)
        if result:
            self.hash_algorithm = algorithm
        return result

    def get_supported_hash_algorithms(self) -> list:
        """
        Get the list of supported hash algorithms.

        :return: Supported hash algorithm names, as reported by the
            hash calculator.
        """
        return self.hash_calculator.get_supported_algorithms()

    def cancel(self) -> None:
        """
        Request cancellation of the current operation.

        This is a no-op placeholder: actual stopping must be implemented
        by the caller via the ``stop_event`` callable passed to the
        processing methods.
        """
        logger.warning('取消操作请求已接收，但需要通过stop_event实现')

    def scan_and_calculate_hash(self, directory: str,
                               progress_callback: Optional[Callable[[str, int, int], None]] = None,
                               stop_event: Optional[Callable[[], bool]] = None) -> List[Dict[str, Any]]:
        """
        Scan a directory and compute file hashes.

        Alias for :meth:`process_directory`, kept for API compatibility;
        now delegates instead of duplicating the call.

        :param directory: Directory to scan.
        :param progress_callback: Progress callback receiving
            (stage, processed_files, total_files).
        :param stop_event: Callable returning True when processing should stop.
        :return: List of per-file info dicts.
        """
        return self.process_directory(directory, progress_callback, stop_event)