"""
工具模块
包含日志管理、文件操作等通用工具函数
"""
import logging
import os
import json
import yaml
import logging.handlers
import requests
import time
from pathlib import Path
from typing import Dict, Any, Optional, List, Tuple
from datetime import datetime
import cv2
import numpy as np
from config import config
import shutil
import subprocess

class StructuredFormatter(logging.Formatter):
    """Formatter that renders each log record as one JSON line."""

    # Optional context attributes copied into the JSON payload when present
    # on the record (set via `extra=` or a LoggerAdapter).
    _OPTIONAL_FIELDS = ("task_id", "error_code", "details", "user_id", "request_id")

    def format(self, record):
        """Serialize *record* to a JSON string (non-ASCII preserved)."""
        entry = {
            "timestamp": datetime.fromtimestamp(record.created).isoformat(),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
            "module": record.module,
            "function": record.funcName,
            "line": record.lineno,
        }

        # Copy optional context fields in a fixed order.
        for field in self._OPTIONAL_FIELDS:
            if hasattr(record, field):
                entry[field] = getattr(record, field)

        # Attach formatted traceback when exception info is present.
        if record.exc_info:
            entry["exception"] = self.formatException(record.exc_info)

        return json.dumps(entry, ensure_ascii=False)

class LogManager:
    """Process-wide logging manager.

    Configures the root logger once (console handler + rotating JSON file
    handler) and hands out cached, optionally task-scoped loggers.  All state
    is class-level, so the class acts as a singleton.
    """

    # True once initialize_logging() has configured the root logger.
    _initialized = False
    # Cache of loggers/adapters keyed by name or "task_<task_id>".
    _loggers = {}
    # NOTE(review): dictConfig-style structure that is never passed to
    # logging.config.dictConfig anywhere in this file — looks unused;
    # confirm before relying on or removing it.
    _log_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "root": {
            "level": "INFO",
            "handlers": ["console", "file"]
        }
    }
    
    @classmethod
    def initialize_logging(cls, log_level: str = "INFO", log_dir: Optional[Path] = None):
        """Initialize the global logging configuration (idempotent).

        Args:
            log_level: level name for the root logger and file handler.
            log_dir: directory for log files; defaults to config.paths.logs_dir.
        """
        if cls._initialized:
            return
            
        cls._initialized = True
        
        # Make sure the log directory exists.
        if log_dir is None:
            log_dir = config.paths.logs_dir
        log_dir.mkdir(parents=True, exist_ok=True)
        
        # Configure the root logger.
        root_logger = logging.getLogger()
        root_logger.setLevel(getattr(logging, log_level.upper()))
        
        # Remove pre-existing handlers (iterate over a copy while mutating).
        for handler in root_logger.handlers[:]:
            root_logger.removeHandler(handler)
        
        # Console handler: human-readable format, pinned at INFO.
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
        console_handler.setFormatter(console_formatter)
        
        # Rotating file handler: 10MB per file, 5 backups kept.
        log_file = log_dir / "app.log"
        file_handler = logging.handlers.RotatingFileHandler(
            log_file, 
            maxBytes=10*1024*1024,  # 10MB
            backupCount=5,
            encoding='utf-8'
        )
        file_handler.setLevel(getattr(logging, log_level.upper()))
        
        # File output is structured JSON (see StructuredFormatter).
        structured_formatter = StructuredFormatter()
        file_handler.setFormatter(structured_formatter)
        
        # Attach both handlers to the root logger.
        root_logger.addHandler(console_handler)
        root_logger.addHandler(file_handler)
        
        # Quiet noisy third-party libraries.
        logging.getLogger("urllib3").setLevel(logging.WARNING)
        logging.getLogger("requests").setLevel(logging.WARNING)
        logging.getLogger("PIL").setLevel(logging.WARNING)
        
        logging.info("日志系统初始化完成")
    
    @classmethod
    def get_logger(cls, name: str, task_id: Optional[str] = None) -> logging.Logger:
        """Return a cached logger; with task_id it is wrapped in TaskLoggerAdapter.

        Note: despite the annotation, the cached object may be a
        logging.LoggerAdapter when task_id is given.
        """
        if not cls._initialized:
            cls.initialize_logging()
        
        # Cache key combines name and task id when a task id is supplied.
        logger_key = f"{name}_{task_id}" if task_id else name
        
        if logger_key not in cls._loggers:
            logger = logging.getLogger(logger_key)
            
            # Task-specific loggers get the task id injected into each record.
            if task_id:
                logger = TaskLoggerAdapter(logger, {"task_id": task_id})
            
            cls._loggers[logger_key] = logger
        
        return cls._loggers[logger_key]
    
    @classmethod
    def create_task_logger(cls, task_id: str, log_level: str = "INFO") -> logging.Logger:
        """Create a task-scoped logger writing to logs_dir/<task_id>/<task_id>.log."""
        if not cls._initialized:
            cls.initialize_logging()
        
        # Ensure the per-task log directory exists.
        task_log_dir = config.paths.logs_dir / task_id
        task_log_dir.mkdir(parents=True, exist_ok=True)
        
        # Dedicated named logger for this task.
        logger_name = f"task_{task_id}"
        logger = logging.getLogger(logger_name)
        logger.setLevel(getattr(logging, log_level.upper()))
        
        # Close and drop handlers left by a previous call with this task id,
        # so repeated calls do not duplicate file handlers.
        for handler in logger.handlers[:]:
            handler.close()
            logger.removeHandler(handler)
        
        # Per-task file handler at DEBUG so the file captures everything.
        task_log_file = task_log_dir / f"{task_id}.log"
        task_handler = logging.FileHandler(task_log_file, encoding='utf-8')
        task_handler.setLevel(logging.DEBUG)
        
        # More verbose line format for task files (function name + line no).
        task_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        task_handler.setFormatter(task_formatter)
        logger.addHandler(task_handler)
        
        # Wrap in TaskLoggerAdapter so each record carries the task id.
        adapter = TaskLoggerAdapter(logger, {"task_id": task_id})
        cls._loggers[logger_name] = adapter
        
        return adapter
    
    @classmethod
    def setup_logger(cls, name: str, level: str = "INFO", log_file: Optional[Path] = None) -> logging.Logger:
        """Compatibility shim that delegates to get_logger().

        NOTE(review): the `level` and `log_file` arguments are accepted but
        ignored — confirm whether any caller relies on them.
        """
        return cls.get_logger(name)
    
    @classmethod
    def cleanup_task_logger(cls, task_id: str):
        """Close a task logger's handlers and evict it from the cache."""
        logger_name = f"task_{task_id}"
        if logger_name in cls._loggers:
            logger = cls._loggers[logger_name]
            if hasattr(logger, 'logger'):  # unwrap TaskLoggerAdapter
                actual_logger = logger.logger
            else:
                actual_logger = logger
            
            # Close all handlers so the underlying log file is released.
            for handler in actual_logger.handlers[:]:
                handler.close()
                actual_logger.removeHandler(handler)
            
            del cls._loggers[logger_name]
    
    @classmethod
    def get_log_stats(cls) -> Dict[str, Any]:
        """Return a snapshot of manager state plus on-disk *.log files."""
        stats = {
            "initialized": cls._initialized,
            "active_loggers": len(cls._loggers),
            "logger_names": list(cls._loggers.keys()),
            "log_directory": str(config.paths.logs_dir),
            "log_files": []
        }
        
        # Enumerate *.log files recursively under the log directory.
        if config.paths.logs_dir.exists():
            for log_file in config.paths.logs_dir.rglob("*.log"):
                try:
                    file_stat = log_file.stat()
                    stats["log_files"].append({
                        "path": str(log_file.relative_to(config.paths.logs_dir)),
                        "size": file_stat.st_size,
                        "modified": datetime.fromtimestamp(file_stat.st_mtime).isoformat()
                    })
                except OSError:
                    # File vanished or is unreadable — skip it.
                    continue
        
        return stats

class TaskLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter that injects task context (e.g. task_id) into every record."""

    def process(self, msg, kwargs):
        """Merge the adapter's context into the call's ``extra`` dict.

        Adapter context wins on key collisions (unchanged behavior).

        BUG FIX: the previous implementation called ``.update()`` directly on
        the caller-supplied ``extra`` dict, mutating the caller's object (and
        crashing on ``extra=None``). We now build a fresh merged dict.
        """
        merged = dict(kwargs.get('extra') or {})
        merged.update(self.extra)
        kwargs['extra'] = merged
        return msg, kwargs

class FileManager:
    """File-system helpers for JSON/YAML reading, writing and size reporting."""

    @staticmethod
    def safe_read_json(file_path: Path) -> Dict[str, Any]:
        """
        Read and parse a JSON file.

        Args:
            file_path: path to the JSON file

        Returns:
            Parsed dictionary.

        Raises:
            FileNotFoundError: the file does not exist
            json.JSONDecodeError: the content is not valid JSON
        """
        if not file_path.exists():
            raise FileNotFoundError(f"文件不存在: {file_path}")

        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                return json.load(f)
        except json.JSONDecodeError as e:
            # BUG FIX: JSONDecodeError requires (msg, doc, pos); the old
            # single-argument re-raise crashed with a TypeError instead of
            # raising the documented exception. Re-raise with proper args
            # and chain the original cause.
            raise json.JSONDecodeError(f"JSON格式错误: {e.msg}", e.doc, e.pos) from e

    @staticmethod
    def safe_write_json(file_path: Path, data: Dict[str, Any]) -> None:
        """
        Write *data* to a JSON file, creating parent directories as needed.

        Args:
            file_path: destination path
            data: payload to serialize (non-ASCII kept, 2-space indent)
        """
        file_path.parent.mkdir(parents=True, exist_ok=True)
        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=2)

    @staticmethod
    def safe_read_yaml(file_path: Path) -> Dict[str, Any]:
        """
        Read and parse a YAML file using yaml.safe_load.

        Args:
            file_path: path to the YAML file

        Returns:
            Parsed content (shape depends on the document).

        Raises:
            FileNotFoundError: the file does not exist
        """
        if not file_path.exists():
            raise FileNotFoundError(f"文件不存在: {file_path}")

        with open(file_path, 'r', encoding='utf-8') as f:
            return yaml.safe_load(f)

    @staticmethod
    def get_file_size(file_path: Path) -> int:
        """
        Return the file size in bytes, or 0 if the file does not exist.

        Args:
            file_path: path to inspect
        """
        return file_path.stat().st_size if file_path.exists() else 0

    @staticmethod
    def format_file_size(file_path: Path) -> str:
        """
        Format a file's size as a human-readable string (e.g. "1.2 MB").

        Args:
            file_path: path to inspect

        Returns:
            Formatted size, or "未知" when the file is missing or unreadable.
        """
        try:
            if not file_path.exists():
                return "未知"

            size_bytes = FileManager.get_file_size(file_path)

            # Pick the largest unit that keeps the value below 1024.
            if size_bytes < 1024:
                return f"{size_bytes} B"
            elif size_bytes < 1024 * 1024:
                return f"{size_bytes / 1024:.1f} KB"
            elif size_bytes < 1024 * 1024 * 1024:
                return f"{size_bytes / (1024 * 1024):.1f} MB"
            else:
                return f"{size_bytes / (1024 * 1024 * 1024):.1f} GB"

        except Exception as e:
            LogManager.get_logger(__name__).warning(f"格式化文件大小失败: {e}")
            return "未知"

class ValidationUtils:
    """Validation helpers for training parameters and dataset layout."""

    @staticmethod
    def validate_training_params(params: Dict[str, Any]) -> List[str]:
        """Validate a training-parameter dict.

        Returns:
            A list of error messages; empty means the parameters are valid.
        """
        errors: List[str] = []

        # Required keys must be present and truthy.
        for required in ('model_type',):
            if not params.get(required):
                errors.append(f"缺少必需参数: {required}")

        # Numeric parameters: convert first, then range-check.
        if 'epochs' in params:
            try:
                epochs = int(params['epochs'])
            except (ValueError, TypeError):
                errors.append("epochs 必须是有效的整数")
            else:
                if not 1 <= epochs <= 1000:
                    errors.append("epochs 必须在1-1000之间")

        if 'batch_size' in params:
            try:
                batch_size = int(params['batch_size'])
            except (ValueError, TypeError):
                errors.append("batch_size 必须是有效的整数")
            else:
                if not 1 <= batch_size <= 128:
                    errors.append("batch_size 必须在1-128之间")

        if 'learning_rate' in params:
            try:
                lr = float(params['learning_rate'])
            except (ValueError, TypeError):
                errors.append("learning_rate 必须是有效的浮点数")
            else:
                if not 0 < lr <= 1:
                    errors.append("learning_rate 必须在0.0-1.0之间")

        return errors

    @staticmethod
    def validate_model_type(model_type: str) -> bool:
        """Return True when *model_type* is a supported YOLOv8 variant."""
        return model_type in {'yolov8n', 'yolov8s', 'yolov8m', 'yolov8l', 'yolov8x'}

    @staticmethod
    def validate_dataset_path(dataset_path: Path) -> bool:
        """Check that a dataset directory has images/ and labels/ and at
        least one image file inside images/."""
        if not dataset_path.exists():
            return False

        # Both subdirectories must be present.
        if any(not (dataset_path / sub).exists() for sub in ('images', 'labels')):
            return False

        # At least one file with a recognized image extension.
        allowed = {'.jpg', '.jpeg', '.png', '.bmp'}
        return any(
            entry.suffix.lower() in allowed
            for entry in (dataset_path / 'images').iterdir()
            if entry.is_file()
        )

class DatasetUtils:
    """Helpers for YOLO-format dataset configuration and inspection."""

    @staticmethod
    def create_yolo_config(dataset_path: Path, class_names: List[str]) -> str:
        """Write a YOLO dataset.yaml into *dataset_path* and return its path.

        Note: train and val both point at images/ (no split).
        """
        # Renamed local (was `config`) to avoid shadowing the module-level
        # config import.
        yolo_cfg = {
            'path': str(dataset_path.absolute()),
            'train': 'images',
            'val': 'images',
            'nc': len(class_names),
            'names': dict(enumerate(class_names)),
        }

        target = dataset_path / "dataset.yaml"
        with open(target, 'w', encoding='utf-8') as fh:
            yaml.dump(yolo_cfg, fh, default_flow_style=False, allow_unicode=True)

        return str(target)

    @staticmethod
    def get_class_names_from_dataset(dataset_path: Path) -> List[str]:
        """Return class names from cls_dict.yaml, falling back to the
        built-in default list when the file is missing or unreadable."""
        cls_dict_path = dataset_path / "cls_dict.yaml"
        if cls_dict_path.exists():
            try:
                with open(cls_dict_path, 'r', encoding='utf-8') as fh:
                    mapping = yaml.safe_load(fh)
                return list(mapping.keys())
            except Exception as exc:
                LogManager.get_logger(__name__).warning(f"读取cls_dict.yaml失败: {exc}")

        # Default class names.
        return ['bj_bpmh', 'bj_bpps', 'bj_wkps', 'hxq_gjbs', 'hxq_yfps',
                'jyz_pl', 'sly_bjbmyw', 'sly_dmyw', 'yw_gkxfw', 'yw_nc']

    @staticmethod
    def analyze_class_distribution(label_files: List[Path]) -> Dict[str, int]:
        """Count annotations per class id across YOLO label files.

        Returns:
            Mapping from class id (as string) to occurrence count.
        """
        counts: Dict[str, int] = {}

        for label_file in label_files:
            try:
                with open(label_file, 'r') as fh:
                    for raw in fh:
                        line = raw.strip()
                        if not line:
                            continue
                        key = str(int(line.split()[0]))
                        counts[key] = counts.get(key, 0) + 1
            except Exception as exc:
                LogManager.get_logger(__name__).warning(f"分析标签文件失败 {label_file}: {exc}")
                continue

        return counts

class ModelEvaluationUtils:
    """Helpers for extracting and formatting model evaluation metrics."""

    @staticmethod
    def extract_evaluation_metrics(val_results) -> Dict[str, float]:
        """Pull mAP/precision/recall values from a validation-result object.

        Missing attributes keep their 0.0 default; any unexpected error is
        logged and the partially-filled dict is returned.
        """
        metrics = {
            "map_50": 0.0,
            "map_95": 0.0,
            "precision": 0.0,
            "recall": 0.0,
        }

        # (output key, source attribute on val_results.box)
        attr_map = (
            ("map_50", "map50"),
            ("map_95", "map"),
            ("precision", "mp"),
            ("recall", "mr"),
        )

        try:
            box = getattr(val_results, "box", None)
            if box is not None:
                for key, attr in attr_map:
                    if hasattr(box, attr):
                        metrics[key] = float(getattr(box, attr))
        except Exception as exc:
            LogManager.get_logger(__name__).error(f"提取评估指标失败: {exc}")

        return metrics

    @staticmethod
    def format_evaluation_results(metrics: Dict[str, float]) -> str:
        """Render the metrics dict as a single readable summary string."""
        parts = [
            f"mAP@0.5: {metrics.get('map_50', 0):.3f}",
            f"mAP@0.5:0.95: {metrics.get('map_95', 0):.3f}",
            f"Precision: {metrics.get('precision', 0):.3f}",
            f"Recall: {metrics.get('recall', 0):.3f}",
        ]
        return ", ".join(parts)

class HTTPClient:
    """Thin wrapper around requests.Session with pooling, retries and a
    uniform ``(success, payload)`` result shape.

    Failure payloads carry "error", and where known "http_status" and
    "elapsed_time" keys.
    """

    def __init__(self, base_url: str, timeout: int = 30, max_retries: int = 3):
        """
        Args:
            base_url: service root URL; trailing slash is stripped.
            timeout: per-request timeout in seconds.
            max_retries: retry budget for transient 5xx responses.
        """
        self.base_url = base_url.rstrip('/')
        self.timeout = timeout
        self.max_retries = max_retries
        self.session = requests.Session()

        # Connection pool plus automatic retries on transient 5xx errors.
        adapter = requests.adapters.HTTPAdapter(
            pool_connections=10,
            pool_maxsize=20,
            max_retries=requests.adapters.Retry(
                total=max_retries,
                backoff_factor=0.3,
                status_forcelist=[500, 502, 503, 504]
            )
        )
        self.session.mount("http://", adapter)
        self.session.mount("https://", adapter)

        self.logger = LogManager.get_logger("HTTPClient")

    def get(self, endpoint: str, params: Dict = None) -> Tuple[bool, Dict]:
        """Send a GET request."""
        return self._request("GET", endpoint, params=params)

    def post(self, endpoint: str, data: Dict = None, json_data: Dict = None, files: Dict = None) -> Tuple[bool, Dict]:
        """Send a POST request (form data, JSON body and/or file upload)."""
        return self._request("POST", endpoint, data=data, json=json_data, files=files)

    def put(self, endpoint: str, data: Dict = None, json_data: Dict = None) -> Tuple[bool, Dict]:
        """Send a PUT request."""
        return self._request("PUT", endpoint, data=data, json=json_data)

    def delete(self, endpoint: str) -> Tuple[bool, Dict]:
        """Send a DELETE request."""
        return self._request("DELETE", endpoint)

    def _request(self, method: str, endpoint: str, **kwargs) -> Tuple[bool, Dict]:
        """Send an HTTP request and normalize the outcome.

        Returns:
            (True, parsed body or {"text": ...}) for 200/201/202, otherwise
            (False, error payload with "http_status"/"elapsed_time").
        """
        url = f"{self.base_url}/{endpoint.lstrip('/')}"
        start_time = time.time()

        try:
            self.logger.debug(f"发送{method}请求: {url}")

            response = self.session.request(
                method=method,
                url=url,
                timeout=self.timeout,
                **kwargs
            )

            elapsed_time = time.time() - start_time

            # Success statuses: try JSON first, fall back to raw text.
            if response.status_code in [200, 201, 202]:
                try:
                    result = response.json()
                    self.logger.debug(f"请求成功: {method} {url} - {response.status_code} - 耗时: {elapsed_time:.2f}s")
                    return True, result
                except ValueError:
                    # Non-JSON body (json.JSONDecodeError subclasses ValueError).
                    self.logger.debug(f"请求成功(非JSON): {method} {url} - {response.status_code} - 耗时: {elapsed_time:.2f}s")
                    return True, {"text": response.text}
            else:
                self.logger.error(f"请求失败: {method} {url} - {response.status_code} - 耗时: {elapsed_time:.2f}s - 响应: {response.text[:200]}")
                try:
                    error_data = response.json()
                    error_data["http_status"] = response.status_code
                    error_data["elapsed_time"] = elapsed_time
                except (ValueError, TypeError):
                    # BUG FIX: was a bare `except:` that swallowed everything
                    # (incl. KeyboardInterrupt). ValueError: body not JSON;
                    # TypeError: JSON body is not a dict (e.g. a list), so
                    # item assignment above failed.
                    error_data = {
                        "error": response.text,
                        "http_status": response.status_code,
                        "elapsed_time": elapsed_time
                    }
                return False, error_data

        except requests.exceptions.Timeout:
            elapsed_time = time.time() - start_time
            self.logger.error(f"请求超时: {method} {url} - 耗时: {elapsed_time:.2f}s (超时阈值: {self.timeout}s)")
            return False, {"error": "请求超时", "timeout": self.timeout, "elapsed_time": elapsed_time}
        except requests.exceptions.ConnectionError as e:
            elapsed_time = time.time() - start_time
            self.logger.error(f"连接错误: {method} {url} - 耗时: {elapsed_time:.2f}s - 错误: {str(e)}")
            return False, {"error": "连接错误", "detail": str(e), "elapsed_time": elapsed_time}
        except Exception as e:
            elapsed_time = time.time() - start_time
            self.logger.error(f"请求异常: {method} {url} - 耗时: {elapsed_time:.2f}s - 错误: {str(e)}")
            return False, {"error": str(e), "elapsed_time": elapsed_time}

def ensure_directory(path: Path) -> Path:
    """Create *path* (including parents) when missing and return it."""
    if not path.is_dir():
        path.mkdir(parents=True, exist_ok=True)
    return path

def get_file_size(file_path: Path) -> int:
    """Return the file's size in bytes, or 0 when it does not exist."""
    return file_path.stat().st_size if file_path.exists() else 0

def format_file_size(size_bytes: int) -> str:
    """Format a byte count as a human-readable string.

    Args:
        size_bytes: size in bytes (non-negative).

    Returns:
        e.g. "0 B", "500.0 B", "1.5 KB", "2.0 MB".
    """
    if size_bytes == 0:
        return "0 B"
    size_names = ["B", "KB", "MB", "GB", "TB"]
    import math  # local import kept so this edit is self-contained
    # BUG FIX: clamp the unit index so sizes >= 1024**5 no longer raise
    # IndexError; such sizes are now reported in TB.
    i = min(int(math.floor(math.log(size_bytes, 1024))), len(size_names) - 1)
    p = math.pow(1024, i)
    s = round(size_bytes / p, 2)
    return f"{s} {size_names[i]}"

def clean_temp_files(temp_dir: Path, max_age_hours: int = 24):
    """Delete regular files in *temp_dir* older than *max_age_hours*.

    Silently does nothing when the directory is missing; failures to delete
    individual files are logged and skipped.
    """
    if not temp_dir.exists():
        return

    now = datetime.now()
    cutoff_seconds = max_age_hours * 3600
    for entry in temp_dir.iterdir():
        if not entry.is_file():
            continue
        age = now - datetime.fromtimestamp(entry.stat().st_mtime)
        if age.total_seconds() > cutoff_seconds:
            try:
                entry.unlink()
            except Exception as exc:
                logging.warning(f"无法删除临时文件 {entry}: {exc}")

def copy_file(src: Path, dst: Path) -> bool:
    """Copy *src* to *dst* (metadata preserved), creating parent dirs.

    Returns:
        True on success, False on any error (which is logged).
    """
    try:
        dst.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src, dst)
    except Exception as exc:
        logging.error(f"复制文件失败 {src} -> {dst}: {exc}")
        return False
    return True

def move_file(src: Path, dst: Path) -> bool:
    """Move *src* to *dst*, creating parent directories.

    Returns:
        True on success, False on any error (which is logged).
    """
    try:
        dst.parent.mkdir(parents=True, exist_ok=True)
        shutil.move(str(src), str(dst))
    except Exception as exc:
        logging.error(f"移动文件失败 {src} -> {dst}: {exc}")
        return False
    return True

def delete_file(file_path: Path) -> bool:
    """Delete *file_path* when present; a missing file counts as success.

    Returns:
        True on success (or no-op), False when deletion fails (logged).
    """
    try:
        if file_path.exists():
            file_path.unlink()
    except Exception as exc:
        logging.error(f"删除文件失败 {file_path}: {exc}")
        return False
    return True

def compress_directory(src_dir: Path, dst_file: Path, format: str = 'zip') -> bool:
    """Compress *src_dir* into an archive at *dst_file*.

    Args:
        src_dir: directory to archive.
        dst_file: target archive path; its suffix is stripped and the
            format's extension is appended by shutil.make_archive.
        format: 'zip' or 'tar'.

    Returns:
        True on success, False on error or unsupported format.
    """
    try:
        dst_file.parent.mkdir(parents=True, exist_ok=True)
        if format == 'zip':
            shutil.make_archive(str(dst_file.with_suffix('')), 'zip', str(src_dir))
        elif format == 'tar':
            shutil.make_archive(str(dst_file.with_suffix('')), 'tar', str(src_dir))
        else:
            # BUG FIX: unsupported formats previously fell through and
            # returned True without creating any archive.
            logging.error(f"不支持的压缩格式: {format}")
            return False
        return True
    except Exception as e:
        logging.error(f"压缩目录失败 {src_dir} -> {dst_file}: {e}")
        return False

def extract_archive(archive_path: Path, dst_dir: Path) -> bool:
    """Unpack *archive_path* into *dst_dir*, creating the directory first.

    Returns:
        True on success, False on any error (which is logged).
    """
    try:
        dst_dir.mkdir(parents=True, exist_ok=True)
        shutil.unpack_archive(str(archive_path), str(dst_dir))
    except Exception as exc:
        logging.error(f"解压文档失败 {archive_path} -> {dst_dir}: {exc}")
        return False
    return True

def run_command(command: List[str], cwd: Optional[Path] = None, timeout: int = 300) -> tuple:
    """Run an external command and capture its output.

    Args:
        command: argv list (no shell interpretation).
        cwd: optional working directory.
        timeout: seconds before the command is aborted.

    Returns:
        (success, stdout, stderr); on timeout or launch failure success is
        False and stderr carries the reason.
    """
    workdir = None if cwd is None else str(cwd)
    try:
        proc = subprocess.run(
            command,
            cwd=workdir,
            capture_output=True,
            text=True,
            timeout=timeout
        )
    except subprocess.TimeoutExpired:
        return False, "", "命令执行超时"
    except Exception as exc:
        return False, "", str(exc)
    return proc.returncode == 0, proc.stdout, proc.stderr

def load_json_file(file_path: Path) -> Optional[Dict[str, Any]]:
    """Load a JSON file, returning None (and logging) on any failure."""
    try:
        return json.loads(Path(file_path).read_text(encoding='utf-8'))
    except Exception as exc:
        logging.error(f"加载JSON文件失败 {file_path}: {exc}")
        return None

def save_json_file(data: Dict[str, Any], file_path: Path) -> bool:
    """Write *data* as pretty-printed JSON, creating parent directories.

    Returns:
        True on success, False on any failure (which is logged).
    """
    try:
        file_path.parent.mkdir(parents=True, exist_ok=True)
        serialized = json.dumps(data, ensure_ascii=False, indent=2)
        with open(file_path, 'w', encoding='utf-8') as fh:
            fh.write(serialized)
    except Exception as exc:
        logging.error(f"保存JSON文件失败 {file_path}: {exc}")
        return False
    return True

def setup_logging():
    """Configure root logging from the project config object.

    Reads the level and format from config.log, writes to the console plus
    logs_dir/application.log, and applies per-library levels from
    config.log.third_party_levels.

    NOTE(review): logging.basicConfig is a no-op when the root logger already
    has handlers (e.g. after LogManager.initialize_logging has run) — confirm
    the intended call order between the two setup paths.
    """
    log_config = config.log
    
    # Create the log directory.
    config.paths.logs_dir.mkdir(parents=True, exist_ok=True)
    
    # Configure the root logger.
    logging.basicConfig(
        level=getattr(logging, log_config.level.upper()),
        format=log_config.format,
        handlers=[
            logging.StreamHandler(),
            logging.FileHandler(config.paths.logs_dir / "application.log")
        ]
    )
    
    # Set third-party library log levels.
    for lib_name, level in log_config.third_party_levels.items():
        logging.getLogger(lib_name).setLevel(getattr(logging, level.upper()))

def validate_dataset_structure(dataset_path: Path) -> tuple:
    """Check that a dataset directory has the required files and subdirs.

    Required: dataset.yaml, cls_dict.yaml, images/, labels/.

    Returns:
        (is_valid, missing_items) where missing_items is a list of
        "文件: ..." / "目录: ..." strings.
    """
    required_files = ('dataset.yaml', 'cls_dict.yaml')
    required_dirs = ('images', 'labels')

    missing_items = [
        f"文件: {name}" for name in required_files
        if not (dataset_path / name).exists()
    ]
    missing_items += [
        f"目录: {name}" for name in required_dirs
        if not (dataset_path / name).is_dir()
    ]

    return not missing_items, missing_items

def count_files_in_directory(directory: Path, extensions: Optional[List[str]] = None) -> int:
    """Count regular files directly inside *directory* (non-recursive).

    Args:
        directory: directory to scan.
        extensions: optional list of lowercase suffixes (e.g. ['.jpg']);
            when None, every file counts.

    Returns:
        Number of matching files; 0 when the directory is missing.
    """
    if not directory.exists():
        return 0

    return sum(
        1
        for entry in directory.iterdir()
        if entry.is_file() and (extensions is None or entry.suffix.lower() in extensions)
    )

def get_directory_size(directory: Path) -> int:
    """Return the total size in bytes of all files under *directory*
    (recursive); 0 when the directory does not exist."""
    if not directory.exists():
        return 0

    return sum(
        entry.stat().st_size
        for entry in directory.rglob('*')
        if entry.is_file()
    )