#!/usr/bin/env python3
"""
文件工具模块
负责文件操作和数据处理
"""

import os
import json
import csv
import pickle
import shutil
from typing import Any, Dict, List, Optional
from datetime import datetime

from config.settings import setup_logging

logger = setup_logging()


def ensure_directory(directory_path: str) -> bool:
    """Ensure a directory exists, creating it (and any parents) if needed.

    Args:
        directory_path: Path of the directory to guarantee.

    Returns:
        True if the directory exists (or was created), False on failure.
    """
    try:
        # Check first only to decide whether to log; the creation itself
        # relies on exist_ok=True, which is atomic w.r.t. concurrent
        # creators (a plain exists()-then-makedirs check is a TOCTOU race).
        # Unlike the old os.path.exists() guard, makedirs raises if a
        # regular *file* occupies the path, so we no longer report success
        # when the path exists but is not a directory.
        newly_created = not os.path.isdir(directory_path)
        os.makedirs(directory_path, exist_ok=True)
        if newly_created:
            logger.debug(f"创建目录: {directory_path}")
        return True
    except Exception as e:
        logger.error(f"创建目录失败 {directory_path}: {e}")
        return False


def save_json(data: Any, file_path: str, ensure_ascii: bool = False, indent: int = 2) -> bool:
    """Serialize *data* to a JSON file, creating parent directories as needed.

    Args:
        data: JSON-serializable object to write.
        file_path: Destination path.
        ensure_ascii: Escape non-ASCII characters when True.
        indent: Indentation width for pretty-printing.

    Returns:
        True on success, False otherwise.
    """
    try:
        # Create the parent directory first so open() cannot fail on it.
        parent = os.path.dirname(file_path)
        if parent and not ensure_directory(parent):
            return False

        with open(file_path, 'w', encoding='utf-8') as handle:
            json.dump(data, handle, ensure_ascii=ensure_ascii, indent=indent)

        logger.debug(f"JSON数据已保存至: {file_path}")
        return True
    except Exception as e:
        logger.error(f"保存JSON文件失败 {file_path}: {e}")
        return False


def load_json(file_path: str) -> Optional[Any]:
    """Deserialize and return the contents of a JSON file.

    Args:
        file_path: Path of the JSON file to read.

    Returns:
        The parsed data, or None if the file is missing or unreadable.
    """
    try:
        if not os.path.exists(file_path):
            logger.warning(f"JSON文件不存在: {file_path}")
            return None

        with open(file_path, 'r', encoding='utf-8') as handle:
            payload = json.load(handle)

        logger.debug(f"从JSON文件加载数据: {file_path}")
        return payload
    except Exception as e:
        logger.error(f"加载JSON文件失败 {file_path}: {e}")
        return None


def save_csv(data: List[Dict], file_path: str, encoding: str = 'utf-8-sig') -> bool:
    """Write a list of row dicts to a CSV file.

    Columns are emitted in deterministic first-seen order across the rows.
    (The previous set-based key collection produced a hash-randomized
    column order that differed between runs.)

    Args:
        data: Rows to write; each dict maps column name -> value.
        file_path: Destination path.
        encoding: File encoding; the default BOM-prefixed UTF-8 keeps
            Excel happy with non-ASCII content.

    Returns:
        True on success, False if there is no data or writing failed.
    """
    if not data:
        logger.warning("没有数据可保存为CSV")
        return False

    try:
        # Ensure the destination directory exists.
        directory = os.path.dirname(file_path)
        if directory and not ensure_directory(directory):
            return False

        # Union of all keys in first-seen order; dicts preserve insertion
        # order, so the header layout is stable from run to run.
        seen = {}
        for row in data:
            seen.update(dict.fromkeys(row))
        fieldnames = list(seen)

        with open(file_path, 'w', newline='', encoding=encoding) as f:
            # DictWriter fills missing keys with '' (restval default),
            # so rows with differing key sets are still written cleanly.
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
            writer.writerows(data)

        logger.debug(f"CSV数据已保存至: {file_path}")
        return True
    except Exception as e:
        logger.error(f"保存CSV文件失败 {file_path}: {e}")
        return False


def load_csv(file_path: str, encoding: str = 'utf-8-sig') -> Optional[List[Dict]]:
    """Read a CSV file into a list of row dicts.

    Args:
        file_path: Path of the CSV file.
        encoding: File encoding (default strips/accepts a UTF-8 BOM).

    Returns:
        One dict per data row (header row supplies the keys), or None
        if the file is missing or unreadable.
    """
    try:
        if not os.path.exists(file_path):
            logger.warning(f"CSV文件不存在: {file_path}")
            return None

        with open(file_path, 'r', encoding=encoding) as handle:
            rows = list(csv.DictReader(handle))

        logger.debug(f"从CSV文件加载数据: {file_path}, 共 {len(rows)} 行")
        return rows
    except Exception as e:
        logger.error(f"加载CSV文件失败 {file_path}: {e}")
        return None


def save_pickle(data: Any, file_path: str) -> bool:
    """Serialize *data* to a pickle file, creating parent directories as needed.

    Args:
        data: Object to pickle.
        file_path: Destination path.

    Returns:
        True on success, False otherwise.
    """
    try:
        parent = os.path.dirname(file_path)
        if parent and not ensure_directory(parent):
            return False

        with open(file_path, 'wb') as handle:
            pickle.dump(data, handle)

        logger.debug(f"数据已pickle保存至: {file_path}")
        return True
    except Exception as e:
        logger.error(f"保存pickle文件失败 {file_path}: {e}")
        return False


def load_pickle(file_path: str) -> Optional[Any]:
    """Deserialize and return the contents of a pickle file.

    NOTE(review): pickle.load can execute arbitrary code embedded in the
    file — only use this on trusted, locally produced files.

    Args:
        file_path: Path of the pickle file.

    Returns:
        The unpickled object, or None if the file is missing or unreadable.
    """
    try:
        if not os.path.exists(file_path):
            logger.warning(f"pickle文件不存在: {file_path}")
            return None

        with open(file_path, 'rb') as handle:
            payload = pickle.load(handle)

        logger.debug(f"从pickle文件加载数据: {file_path}")
        return payload
    except Exception as e:
        logger.error(f"加载pickle文件失败 {file_path}: {e}")
        return None


def save_text(text: str, file_path: str, encoding: str = 'utf-8') -> bool:
    """Write a string to a file, creating parent directories as needed.

    Args:
        text: Text to write.
        file_path: Destination path.
        encoding: File encoding.

    Returns:
        True on success, False otherwise.
    """
    try:
        parent = os.path.dirname(file_path)
        if parent and not ensure_directory(parent):
            return False

        with open(file_path, 'w', encoding=encoding) as handle:
            handle.write(text)

        logger.debug(f"文本已保存至: {file_path}")
        return True
    except Exception as e:
        logger.error(f"保存文本文件失败 {file_path}: {e}")
        return False


def load_text(file_path: str, encoding: str = 'utf-8') -> Optional[str]:
    """Read and return the full contents of a text file.

    Args:
        file_path: Path of the text file.
        encoding: File encoding.

    Returns:
        The file contents as one string, or None if the file is missing
        or unreadable.
    """
    try:
        if not os.path.exists(file_path):
            logger.warning(f"文本文件不存在: {file_path}")
            return None

        with open(file_path, 'r', encoding=encoding) as handle:
            contents = handle.read()

        logger.debug(f"从文本文件加载数据: {file_path}")
        return contents
    except Exception as e:
        logger.error(f"加载文本文件失败 {file_path}: {e}")
        return None


def get_file_size(file_path: str) -> Optional[int]:
    """Return the size of a file in bytes.

    Args:
        file_path: Path of the file to measure.

    Returns:
        Size in bytes, or None if the file does not exist or stat fails.
    """
    try:
        if not os.path.exists(file_path):
            return None
        return os.path.getsize(file_path)
    except Exception as e:
        logger.error(f"获取文件大小失败 {file_path}: {e}")
        return None


def list_files(directory: str, pattern: str = None) -> List[str]:
    """List the files directly inside *directory*, sorted by path.

    Args:
        directory: Directory to scan (non-recursive).
        pattern: Optional glob-style filename pattern, e.g. '*.json';
            None matches every file.

    Returns:
        Sorted list of matching file paths, or [] if the directory is
        missing or unreadable.
    """
    # Hoisted out of the per-file loop: the original re-ran
    # `import fnmatch` on every matching iteration.
    import fnmatch

    try:
        if not os.path.exists(directory):
            logger.warning(f"目录不存在: {directory}")
            return []

        files = []
        for filename in os.listdir(directory):
            file_path = os.path.join(directory, filename)
            if not os.path.isfile(file_path):
                continue  # skip subdirectories, symlinked dirs, etc.
            if pattern is None or fnmatch.fnmatch(filename, pattern):
                files.append(file_path)

        return sorted(files)
    except Exception as e:
        logger.error(f"列出文件失败 {directory}: {e}")
        return []


def backup_file(file_path: str, backup_dir: str = "backups") -> bool:
    """Copy a file into *backup_dir* under a timestamped name.

    Args:
        file_path: File to back up.
        backup_dir: Directory that receives the copy (created if missing).

    Returns:
        True on success, False otherwise.
    """
    try:
        if not os.path.exists(file_path):
            logger.warning(f"要备份的文件不存在: {file_path}")
            return False

        if not ensure_directory(backup_dir):
            return False

        # Name the copy <stem>_<YYYYmmdd_HHMMSS><ext> so successive
        # backups of the same file do not overwrite each other.
        stem, extension = os.path.splitext(os.path.basename(file_path))
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        destination = os.path.join(backup_dir, f"{stem}_{stamp}{extension}")

        # copy2 preserves file metadata (timestamps) along with contents.
        shutil.copy2(file_path, destination)

        logger.info(f"文件已备份: {file_path} -> {destination}")
        return True
    except Exception as e:
        logger.error(f"备份文件失败 {file_path}: {e}")
        return False


def clean_directory(directory: str, pattern: str = None, keep_count: int = 10) -> bool:
    """Delete the oldest files in a directory, keeping the newest *keep_count*.

    Args:
        directory: Directory to clean.
        pattern: Optional glob filename pattern restricting which files count.
        keep_count: Number of most-recently-modified files to retain.

    Returns:
        True on success (including when nothing needed deleting),
        False on error.
    """
    try:
        files = list_files(directory, pattern)
        if len(files) <= keep_count:
            return True

        # Oldest first by modification time.
        files_sorted = sorted(files, key=os.path.getmtime)

        # Slice by explicit count: the original `[:-keep_count]` slice
        # evaluated to `[:0]` (empty) when keep_count == 0 and silently
        # deleted nothing instead of clearing the directory.
        files_to_delete = files_sorted[:len(files_sorted) - keep_count]
        for file_path in files_to_delete:
            os.remove(file_path)
            logger.debug(f"删除旧文件: {file_path}")

        logger.info(f"清理目录完成: {directory}, 删除了 {len(files_to_delete)} 个文件")
        return True
    except Exception as e:
        logger.error(f"清理目录失败 {directory}: {e}")
        return False


class DataExporter:
    """Export pandas DataFrames to one or more on-disk formats."""

    def __init__(self, output_dir: str = "exports"):
        """
        Initialize the exporter.

        Args:
            output_dir: Directory that receives exported files
                (created immediately if missing).
        """
        self.output_dir = output_dir
        ensure_directory(output_dir)

    def export_dataframe(self, df, filename: str, formats: List[str] = None) -> Dict[str, bool]:
        """
        Export a DataFrame to each requested format.

        Args:
            df: pandas DataFrame to export.
            filename: Base file name without extension.
            formats: Formats to produce, any of ['csv', 'json', 'excel'];
                defaults to ['csv', 'json'].

        Returns:
            Mapping of format name -> success flag; unrecognized formats
            map to False.
        """
        if formats is None:
            formats = ['csv', 'json']

        results = {}

        for format_type in formats:
            # Bug fix: the path previously embedded the literal string
            # "(unknown)" instead of the caller-supplied base filename,
            # leaving the `filename` parameter unused and making every
            # export of a given format overwrite the last one.
            file_path = os.path.join(self.output_dir, f"{filename}.{format_type}")

            try:
                if format_type == 'csv':
                    # utf-8-sig BOM keeps Excel happy with non-ASCII text.
                    df.to_csv(file_path, index=False, encoding='utf-8-sig')
                    results['csv'] = True
                elif format_type == 'json':
                    df.to_json(file_path, orient='records', force_ascii=False, indent=2)
                    results['json'] = True
                elif format_type == 'excel':
                    df.to_excel(file_path, index=False)
                    results['excel'] = True
                else:
                    results[format_type] = False
                    continue

                logger.info(f"DataFrame已导出为 {format_type}: {file_path}")

            except Exception as e:
                logger.error(f"导出 {format_type} 失败: {e}")
                results[format_type] = False

        return results


if __name__ == "__main__":
    # Smoke-test the file utilities in a throwaway directory.
    test_dir = "test_data"
    ensure_directory(test_dir)

    # Round-trip a dict through JSON.
    json_file = os.path.join(test_dir, "test.json")
    save_json({"name": "测试", "value": 123}, json_file)
    loaded_data = load_json(json_file)
    print(f"JSON测试: {loaded_data}")

    # Round-trip a list of rows through CSV.
    csv_file = os.path.join(test_dir, "test.csv")
    save_csv([{"name": "Alice", "age": 25}, {"name": "Bob", "age": 30}], csv_file)
    loaded_csv = load_csv(csv_file)
    print(f"CSV测试: {loaded_csv}")

    # Remove the scratch directory (shutil is already imported at module top).
    shutil.rmtree(test_dir)