"""
文件操作工具模块

提供文件和目录操作的工具函数。
"""

import gzip
import json
import os
import re
import shutil
from pathlib import Path
from typing import Dict, List, Optional, Any, Union, Iterator

from .logger import get_module_logger

logger = get_module_logger(__name__)


class FileManager:
    """File manager.

    Provides a unified interface for file and directory operations.
    Relative paths are resolved against ``base_path``; absolute paths
    are used as-is.
    """

    def __init__(self, base_path: Optional[Union[str, Path]] = None):
        """Initialize the file manager.

        Args:
            base_path: Base path for resolving relative paths.
                Defaults to the current working directory.
        """
        self.base_path = Path(base_path) if base_path else Path.cwd()
        logger.debug(f"文件管理器初始化，基础路径: {self.base_path}")

    def _resolve(self, path: Union[str, Path]) -> Path:
        """Resolve *path* against ``base_path`` unless it is absolute."""
        p = Path(path)
        return p if p.is_absolute() else self.base_path / p

    def ensure_dir(self, path: Union[str, Path]) -> Path:
        """Ensure a directory exists, creating it (and parents) if needed.

        Args:
            path: Directory path (absolute, or relative to ``base_path``).

        Returns:
            The resolved directory path object.
        """
        dir_path = self._resolve(path)
        dir_path.mkdir(parents=True, exist_ok=True)
        return dir_path

    def get_file_size(self, file_path: Union[str, Path]) -> int:
        """Get the size of a file.

        Args:
            file_path: File path.

        Returns:
            File size in bytes, or 0 if the file does not exist.
        """
        path = self._resolve(file_path)
        return path.stat().st_size if path.exists() else 0

    def list_files(self, directory: Union[str, Path], pattern: str = "*") -> List[Path]:
        """List files in a directory matching a glob pattern.

        Args:
            directory: Directory path.
            pattern: Glob pattern (non-recursive unless pattern uses ``**``).

        Returns:
            List of matching paths; empty if the directory does not exist.
        """
        dir_path = self._resolve(directory)
        if not dir_path.exists():
            return []
        return list(dir_path.glob(pattern))

    def remove_file(self, file_path: Union[str, Path]) -> bool:
        """Delete a file.

        Args:
            file_path: File path.

        Returns:
            True if the file existed and was deleted, False otherwise
            (including on OS errors, which are logged).
        """
        try:
            path = self._resolve(file_path)
            if path.exists():
                path.unlink()
                logger.debug(f"文件删除成功: {path}")
                return True
            return False
        except OSError as e:
            logger.error(f"删除文件失败: {file_path}, {e}")
            return False

    def copy_file(self, src: Union[str, Path], dst: Union[str, Path]) -> bool:
        """Copy a file, creating the destination directory if needed.

        Args:
            src: Source file path.
            dst: Destination file path.

        Returns:
            True on success, False on failure (errors are logged).
        """
        try:
            src_path = self._resolve(src)
            dst_path = self._resolve(dst)

            # Make sure the destination directory exists.
            dst_path.parent.mkdir(parents=True, exist_ok=True)

            # copy2 preserves metadata (mtime etc.) in addition to content.
            shutil.copy2(src_path, dst_path)
            logger.debug(f"文件复制成功: {src_path} -> {dst_path}")
            return True
        except OSError as e:
            logger.error(f"复制文件失败: {src} -> {dst}, {e}")
            return False

    def move_file(self, src: Union[str, Path], dst: Union[str, Path]) -> bool:
        """Move a file, creating the destination directory if needed.

        Args:
            src: Source file path.
            dst: Destination file path.

        Returns:
            True on success, False on failure (errors are logged).
        """
        try:
            src_path = self._resolve(src)
            dst_path = self._resolve(dst)

            # Make sure the destination directory exists.
            dst_path.parent.mkdir(parents=True, exist_ok=True)

            shutil.move(str(src_path), str(dst_path))
            logger.debug(f"文件移动成功: {src_path} -> {dst_path}")
            return True
        except OSError as e:
            logger.error(f"移动文件失败: {src} -> {dst}, {e}")
            return False


def parse_size_string(size_str: str) -> int:
    """Parse a human-readable size string into a number of bytes.

    Args:
        size_str: Size string such as "100MB", "1.5GB" or "512B".
            A bare number (e.g. "1024") is interpreted as bytes.
            Case-insensitive; surrounding whitespace is ignored.

    Returns:
        Size in bytes (fractional results are truncated to int).

    Raises:
        ValueError: If the string is not a valid size expression.
    """
    size_str = size_str.upper().strip()

    # Unit multipliers (binary, i.e. 1KB = 1024B).
    units = {
        'B': 1,
        'KB': 1024,
        'MB': 1024 ** 2,
        'GB': 1024 ** 3,
        'TB': 1024 ** 4,
    }

    # Number followed by an optional unit; a missing unit means bytes.
    match = re.match(r'^(\d+(?:\.\d+)?)\s*([KMGT]?B)?$', size_str)
    if not match:
        raise ValueError(f"无效的大小格式: {size_str}")

    number = float(match.group(1))
    unit = match.group(2) or 'B'

    return int(number * units[unit])


def format_size(size_bytes: int) -> str:
    """Format a byte count as a human-readable string.

    Args:
        size_bytes: Number of bytes.

    Returns:
        A string such as "512B", "1.5KB" or "2.0GB". Plain bytes are
        rendered without a decimal point; larger units use one decimal.
    """
    if size_bytes == 0:
        return "0B"

    units = ('B', 'KB', 'MB', 'GB', 'TB')
    value = float(size_bytes)
    idx = 0

    # Scale down by 1024 until the value fits the unit (capped at TB).
    while value >= 1024 and idx < len(units) - 1:
        value /= 1024
        idx += 1

    return f"{int(value)}B" if idx == 0 else f"{value:.1f}{units[idx]}"


def read_json_file(file_path: Union[str, Path]) -> Dict[str, Any]:
    """Load and return the contents of a UTF-8 encoded JSON file.

    Args:
        file_path: Path of the JSON file to read.

    Returns:
        The deserialized JSON data.
    """
    return json.loads(Path(file_path).read_text(encoding='utf-8'))


def write_json_file(file_path: Union[str, Path], data: Dict[str, Any], indent: int = 2) -> None:
    """Serialize *data* as JSON and write it to *file_path* (UTF-8).

    The parent directory is created if it does not exist. Non-ASCII
    characters are written as-is (``ensure_ascii=False``).

    Args:
        file_path: Destination file path.
        data: JSON-serializable data.
        indent: Indentation width for pretty-printing.
    """
    target = Path(file_path)
    target.parent.mkdir(parents=True, exist_ok=True)

    payload = json.dumps(data, indent=indent, ensure_ascii=False)
    target.write_text(payload, encoding='utf-8')


def read_gzip_file(file_path: Union[str, Path]) -> bytes:
    """Read a gzip-compressed file and return the decompressed bytes.

    Args:
        file_path: Path of the gzip file.

    Returns:
        The decompressed file contents.
    """
    with gzip.open(file_path, 'rb') as handle:
        contents = handle.read()
    return contents


def write_gzip_file(file_path: Union[str, Path], data: bytes) -> None:
    """Compress *data* with gzip and write it to *file_path*.

    The parent directory is created if it does not exist.

    Args:
        file_path: Destination file path.
        data: Raw bytes to compress.
    """
    target = Path(file_path)
    target.parent.mkdir(parents=True, exist_ok=True)

    with gzip.open(target, 'wb') as handle:
        handle.write(data)


def get_available_space(path: Union[str, Path]) -> int:
    """Get the disk space available to the current user.

    Uses ``shutil.disk_usage`` instead of ``os.statvfs``: on POSIX it
    computes the same value (f_frsize * f_bavail), and it also works on
    Windows, where ``os.statvfs`` does not exist.

    Args:
        path: Any path on the filesystem of interest.

    Returns:
        Available space in bytes.
    """
    return shutil.disk_usage(path).free


def cleanup_old_files(directory: Union[str, Path], max_age_days: int, pattern: str = "*") -> int:
    """Delete files in a directory older than a given age.

    Args:
        directory: Directory to scan (files matched by a glob pattern).
        max_age_days: Maximum age in days; strictly older files are removed.
        pattern: Glob pattern selecting candidate files.

    Returns:
        Number of files deleted (0 if the directory does not exist).
    """
    import time

    target_dir = Path(directory)
    if not target_dir.exists():
        return 0

    # Files modified before this timestamp are considered expired.
    cutoff = time.time() - max_age_days * 24 * 3600
    deleted = 0

    for candidate in target_dir.glob(pattern):
        if not candidate.is_file():
            continue
        if candidate.stat().st_mtime >= cutoff:
            continue
        try:
            candidate.unlink()
            deleted += 1
            logger.debug(f"删除旧文件: {candidate}")
        except Exception as e:
            logger.warning(f"删除文件失败: {candidate}, {e}")

    logger.info(f"清理完成，删除了 {deleted} 个旧文件")
    return deleted
