"""
工具函数模块

提供各种辅助函数，包括文件操作、数据处理、格式化等。
"""

import os
import re
import json
import hashlib
from pathlib import Path
from typing import Any, Dict, List, Optional, Union, Tuple
from datetime import datetime, timedelta
import pandas as pd
import numpy as np

from .logger import get_logger, log_function_call, log_execution_time


logger = get_logger(__name__)


@log_function_call
def ensure_directory(path: Union[str, Path]) -> Path:
    """
    Create a directory (including parents) if it does not exist.

    Args:
        path: Directory path to create.

    Returns:
        The directory as a Path object.
    """
    directory = Path(path)
    directory.mkdir(parents=True, exist_ok=True)
    return directory


@log_function_call
def get_file_size(file_path: Union[str, Path]) -> int:
    """
    Return the size of a file in bytes.

    Args:
        file_path: Path of the file to inspect.

    Returns:
        File size in bytes; 0 when the file does not exist.
    """
    target = Path(file_path)
    return target.stat().st_size if target.exists() else 0


@log_function_call
def get_file_extension(file_path: Union[str, Path]) -> str:
    """
    Return a file's extension, lower-cased.

    Args:
        file_path: Path of the file.

    Returns:
        The extension including the leading dot ("" when there is none).
    """
    suffix = Path(file_path).suffix
    return suffix.lower()


@log_function_call
def is_excel_file(file_path: Union[str, Path]) -> bool:
    """
    Check whether a path looks like an Excel workbook by extension.

    Args:
        file_path: Path of the file.

    Returns:
        True for .xlsx/.xls/.xlsm/.xlsb (case-insensitive), else False.
    """
    return Path(file_path).suffix.lower() in {'.xlsx', '.xls', '.xlsm', '.xlsb'}


@log_function_call
def get_excel_sheets(file_path: Union[str, Path]) -> List[str]:
    """
    List the worksheet names of an Excel workbook.

    Args:
        file_path: Path to the Excel file.

    Returns:
        Worksheet names; an empty list if the file cannot be read.
    """
    try:
        # Use the context manager so the underlying file handle is closed;
        # the previous version leaked the open ExcelFile.
        with pd.ExcelFile(file_path) as excel_file:
            return list(excel_file.sheet_names)
    except Exception as e:
        logger.error(f"读取Excel工作表失败: {e}")
        return []


@log_function_call
def format_file_size(size_bytes: int) -> str:
    """
    Render a byte count as a human-readable string.

    Args:
        size_bytes: Size in bytes.

    Returns:
        String such as "1.5 KB"; "0 B" for zero.
    """
    if size_bytes == 0:
        return "0 B"

    value = float(size_bytes)
    for unit in ("B", "KB", "MB", "GB"):
        if value < 1024:
            return f"{value:.1f} {unit}"
        value /= 1024
    # Anything that survived four divisions is reported in TB.
    return f"{value:.1f} TB"


@log_function_call
def format_number(number: Union[int, float]) -> str:
    """
    Format a number with thousands separators.

    Args:
        number: Value to format.

    Returns:
        Integers (and whole floats) as "1,234"; other floats with two
        decimals; anything else via str().
    """
    if isinstance(number, float):
        return f"{int(number):,}" if number.is_integer() else f"{number:,.2f}"
    if isinstance(number, int):
        return f"{number:,}"
    return str(number)


@log_function_call
def calculate_percentage(value: float, total: float) -> float:
    """
    Compute value as a percentage of total.

    Args:
        value: Part.
        total: Whole.

    Returns:
        Percentage; 0.0 when total is zero (avoids ZeroDivisionError).
    """
    # Keep the original operation order so float results match exactly.
    return (value / total) * 100 if total != 0 else 0.0


@log_function_call
def calculate_change_rate(current: float, previous: float) -> float:
    """
    Compute percentage change from previous to current.

    Args:
        current: New value.
        previous: Baseline value.

    Returns:
        Change in percent; 0.0 when both are zero, +inf when only the
        baseline is zero.
    """
    if previous != 0:
        return ((current - previous) / previous) * 100
    return float('inf') if current != 0 else 0.0


@log_function_call
def calculate_moving_average(data: List[float], window: int = 7) -> List[float]:
    """
    Compute a trailing moving average with a warm-up ramp.

    For index i < window-1 the average covers data[0..i]; from then on it
    covers the last `window` points.

    Args:
        data: Input series.
        window: Window size.

    Returns:
        List of averages, same length as data. If len(data) < window the
        values are returned unchanged (as a new list, so the caller's
        list is never aliased).
    """
    if len(data) < window:
        return list(data)  # defensive copy instead of returning the input object

    # Prefix sums make each window sum O(1): O(n) total instead of
    # O(n * window) re-summing per index.
    prefix = [0.0]
    for value in data:
        prefix.append(prefix[-1] + value)

    result = []
    for i in range(len(data)):
        lo = max(0, i - window + 1)  # warm-up ramp: window start clamps at 0
        result.append((prefix[i + 1] - prefix[lo]) / (i + 1 - lo))
    return result


@log_function_call
def find_peak_value(data: List[float]) -> Tuple[int, float]:
    """
    Locate the maximum value in a series.

    Args:
        data: Input series.

    Returns:
        (index of first maximum, maximum value); (-1, 0.0) for empty input.
    """
    if not data:
        return -1, 0.0

    peak = max(data)
    return data.index(peak), peak


@log_function_call
def find_trend_direction(data: List[float], window: int = 5) -> str:
    """
    Classify the recent trend of a series.

    Compares the average of the older half of the last `window` points
    with the newer half; a change beyond +/-5% counts as a trend.

    Args:
        data: Input series.
        window: Number of trailing points to inspect.

    Returns:
        'up', 'down', or 'stable'.
    """
    if len(data) < window:
        return 'stable'

    recent = data[-window:]
    half = window // 2
    older_avg = sum(recent[:half]) / len(recent[:half])
    newer_avg = sum(recent[half:]) / len(recent[half:])

    # Percentage change of newer vs. older (inlined change-rate logic:
    # a zero baseline with a nonzero newer average counts as +inf).
    if older_avg == 0:
        change = 0.0 if newer_avg == 0 else float('inf')
    else:
        change = ((newer_avg - older_avg) / older_avg) * 100

    if change > 5:
        return 'up'
    if change < -5:
        return 'down'
    return 'stable'


@log_function_call
def validate_date_format(date_str: str, format_str: str = "%Y-%m-%d") -> bool:
    """
    Check whether a string parses as a date in the given format.

    Args:
        date_str: Candidate date string.
        format_str: strptime format to validate against.

    Returns:
        True if the string is a valid date, False otherwise.
    """
    try:
        datetime.strptime(date_str, format_str)
    except ValueError:
        return False
    return True


@log_function_call
def parse_date_range(start_date: str, end_date: str) -> List[str]:
    """
    Expand an inclusive date range into a list of day strings.

    Args:
        start_date: First day, "YYYY-MM-DD".
        end_date: Last day, "YYYY-MM-DD".

    Returns:
        Every day from start to end inclusive as "YYYY-MM-DD" strings;
        empty list when parsing fails or end precedes start.
    """
    try:
        start = datetime.strptime(start_date, "%Y-%m-%d")
        end = datetime.strptime(end_date, "%Y-%m-%d")
    except ValueError as e:
        logger.error(f"解析日期范围失败: {e}")
        return []

    # Negative span yields an empty range, matching the original loop.
    span_days = (end - start).days
    return [
        (start + timedelta(days=offset)).strftime("%Y-%m-%d")
        for offset in range(span_days + 1)
    ]


@log_function_call
def generate_file_hash(file_path: Union[str, Path]) -> str:
    """
    Compute the MD5 digest of a file, reading in 4 KiB chunks.

    Args:
        file_path: File to hash.

    Returns:
        Hex digest string; "" when the file is missing or unreadable.
    """
    path = Path(file_path)
    if not path.exists():
        return ""

    digest = hashlib.md5()
    try:
        with path.open("rb") as fh:
            while True:
                block = fh.read(4096)
                if not block:
                    break
                digest.update(block)
        return digest.hexdigest()
    except Exception as e:
        logger.error(f"生成文件哈希失败: {e}")
        return ""


@log_function_call
def clean_filename(filename: str) -> str:
    """
    Sanitize a filename for safe use on common filesystems.

    Replaces the characters <>:"/\\|?* with underscores, strips leading
    and trailing dots/spaces, and guarantees a non-empty result.

    Args:
        filename: Raw filename.

    Returns:
        Sanitized filename; "unnamed_file" if nothing remains.
    """
    sanitized = re.sub(r'[<>:"/\\|?*]', '_', filename).strip('. ')
    return sanitized or "unnamed_file"


@log_function_call
def create_backup_file(file_path: Union[str, Path]) -> Path:
    """
    Copy a file next to itself under a timestamped "_backup_" name.

    Args:
        file_path: File to back up.

    Returns:
        Path of the backup; the original path when the source is missing
        or the copy fails.
    """
    source = Path(file_path)
    if not source.exists():
        return source

    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_path = source.with_name(f"{source.stem}_backup_{stamp}{source.suffix}")

    try:
        import shutil
        shutil.copy2(source, backup_path)
        logger.info(f"文件备份已创建: {backup_path}")
        return backup_path
    except Exception as e:
        logger.error(f"创建文件备份失败: {e}")
        return source


@log_function_call
def safe_json_dump(data: Any, file_path: Union[str, Path], indent: int = 2) -> bool:
    """
    Persist data as UTF-8 JSON, backing up any existing file first.

    Args:
        data: JSON-serializable payload.
        file_path: Destination path; parent directories are created.
        indent: Indentation width for pretty-printing.

    Returns:
        True on success, False when serialization or I/O fails.
    """
    target = Path(file_path)
    try:
        target.parent.mkdir(parents=True, exist_ok=True)

        # Keep the previous version recoverable before overwriting.
        if target.exists():
            create_backup_file(target)

        with target.open('w', encoding='utf-8') as fh:
            json.dump(data, fh, ensure_ascii=False, indent=indent)

        logger.info(f"JSON数据已保存: {target}")
        return True
    except Exception as e:
        logger.error(f"保存JSON数据失败: {e}")
        return False


@log_function_call
def safe_json_load(file_path: Union[str, Path]) -> Optional[Any]:
    """
    Load JSON data from a file without raising.

    Args:
        file_path: Source file path.

    Returns:
        Parsed JSON data, or None when the file is missing or unreadable.
    """
    source = Path(file_path)
    try:
        if not source.exists():
            logger.warning(f"JSON文件不存在: {source}")
            return None

        with source.open('r', encoding='utf-8') as fh:
            payload = json.load(fh)

        logger.info(f"JSON数据已加载: {source}")
        return payload
    except Exception as e:
        logger.error(f"加载JSON数据失败: {e}")
        return None


@log_function_call
def get_system_info() -> Dict[str, Any]:
    """
    Collect basic platform, memory and disk information.

    Returns:
        Dict with platform/Python/CPU details, formatted memory totals,
        and per-device disk usage (unreadable partitions are skipped).
    """
    import platform
    import psutil

    memory = psutil.virtual_memory()
    info = {
        'platform': platform.platform(),
        'python_version': platform.python_version(),
        'architecture': platform.architecture()[0],
        'processor': platform.processor(),
        'memory_total': format_file_size(memory.total),
        'memory_available': format_file_size(memory.available),
        'disk_usage': {},
    }

    for partition in psutil.disk_partitions():
        try:
            usage = psutil.disk_usage(partition.mountpoint)
            info['disk_usage'][partition.device] = {
                'total': format_file_size(usage.total),
                'used': format_file_size(usage.used),
                'free': format_file_size(usage.free),
                'percent': f"{usage.percent:.1f}%",
            }
        except Exception:
            # Some mount points (e.g. CD drives, restricted mounts) are
            # not statable; skip them rather than fail the whole report.
            continue

    return info


@log_function_call
def format_duration(seconds: float) -> str:
    """
    Render a duration in the largest convenient unit.

    Args:
        seconds: Duration in seconds.

    Returns:
        String in seconds (< 60), minutes (< 3600), or hours otherwise.
    """
    if seconds >= 3600:
        return f"{seconds / 3600:.1f}小时"
    if seconds >= 60:
        return f"{seconds / 60:.1f}分钟"
    return f"{seconds:.1f}秒"


@log_function_call
def retry_on_error(max_retries: int = 3, delay: float = 1.0):
    """
    Decorator factory: retry the wrapped function on any exception.

    The wrapped callable is attempted up to ``max_retries + 1`` times;
    the last exception is re-raised after the final failure.

    Args:
        max_retries: Number of retries after the first attempt.
        delay: Seconds to sleep between attempts.

    Returns:
        A decorator applying the retry policy.
    """
    # Local imports keep the module's existing style (cf. shutil/platform
    # elsewhere in this file) and are now hoisted out of the per-call path.
    import functools
    import time

    def decorator(func):
        @functools.wraps(func)  # preserve __name__/__doc__ — was missing,
        def wrapper(*args, **kwargs):  # which also garbled the log messages below
            for attempt in range(max_retries + 1):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    if attempt == max_retries:
                        logger.error(f"函数 {func.__name__} 最终执行失败: {e}")
                        raise
                    logger.warning(f"函数 {func.__name__} 第 {attempt + 1} 次执行失败: {e}")
                    time.sleep(delay)
            # Unreachable: the loop always returns or re-raises on the
            # final attempt, so no trailing return is needed.
        return wrapper
    return decorator