#!/usr/bin/env python3
"""
Log management utility.

Provides log compression, cleanup, analysis and error-export commands
for the project's ``logs/`` directory.
"""

import sys
import argparse
from pathlib import Path
import gzip
import shutil
from datetime import datetime, timedelta

# Make the project root and its src/ directory importable so that
# ``utils.config`` resolves when this script is run directly.
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))
sys.path.insert(0, str(project_root / "src"))

# NOTE: this script runs standalone and does not depend on the project's
# logging system; only BASE_DIR is pulled in from the config module.
from utils.config import BASE_DIR


def compress_old_logs(log_dir: Path, days: int = 7) -> None:
    """Gzip-compress ``*.log`` files older than *days*, then delete the originals.

    The compressed archive keeps the original file's timestamps
    (via ``shutil.copystat``) so that age-based cleanup still sees the
    log's true age instead of restarting its retention clock.

    Args:
        log_dir: Directory searched recursively for ``*.log`` files.
        days: Age threshold in days; files modified more recently are kept as-is.
    """
    cutoff_ts = (datetime.now() - timedelta(days=days)).timestamp()

    for log_file in log_dir.rglob("*.log"):
        if log_file.stat().st_mtime >= cutoff_ts:
            continue
        # rglob("*.log") can only yield names ending in ".log", so no
        # extra ".gz" suffix check is needed here.
        compressed_file = log_file.with_name(log_file.name + '.gz')
        print(f"压缩日志文件: {log_file} -> {compressed_file}")

        with open(log_file, 'rb') as f_in, gzip.open(compressed_file, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

        # Preserve mtime/atime on the archive: cleanup_logs() matches
        # "*.log*" (which includes .gz files) and filters by mtime, so a
        # fresh timestamp would keep an old archive alive for another
        # full retention period.
        shutil.copystat(log_file, compressed_file)
        # Remove the original only after the archive is fully written.
        log_file.unlink()


def cleanup_logs(log_dir: Path, days: int = 30) -> None:
    """Delete log files (rotated and compressed ones included) older than *days*.

    Args:
        log_dir: Directory searched recursively for ``*.log*`` files.
        days: Retention threshold in days, measured against file mtime.
    """
    threshold = (datetime.now() - timedelta(days=days)).timestamp()
    removed = []

    # "*.log*" also matches rotated/compressed names such as app.log.1 / app.log.gz.
    for candidate in log_dir.rglob("*.log*"):
        if candidate.stat().st_mtime >= threshold:
            continue
        candidate.unlink()
        removed.append(candidate)

    print(f"删除了 {len(removed)} 个过期日志文件")
    for path in removed:
        print(f"  - {path}")


def analyze_logs(log_dir: Path) -> None:
    """Print summary statistics for all ``*.log*`` files under *log_dir*.

    Reports total file count and size, per-directory (type) totals, and
    per-day totals for the ten most recent modification dates.
    """
    total_files = 0
    total_size = 0
    by_type: dict = {}
    by_date: dict = {}

    for entry in log_dir.rglob("*.log*"):
        st = entry.stat()
        total_files += 1
        total_size += st.st_size

        # Group by the containing directory's name ("type").
        type_bucket = by_type.setdefault(entry.parent.name, {'files': 0, 'size': 0})
        type_bucket['files'] += 1
        type_bucket['size'] += st.st_size

        # Group by the file's modification date.
        day = datetime.fromtimestamp(st.st_mtime).strftime('%Y-%m-%d')
        date_bucket = by_date.setdefault(day, {'files': 0, 'size': 0})
        date_bucket['files'] += 1
        date_bucket['size'] += st.st_size

    print(f"日志统计信息:")
    print(f"  总文件数: {total_files}")
    print(f"  总大小: {total_size / 1024 / 1024:.2f} MB")

    print(f"\n按类型统计:")
    for log_type, data in by_type.items():
        print(f"  {log_type}: {data['files']} 文件, {data['size'] / 1024 / 1024:.2f} MB")

    print(f"\n最近的日志 (按日期):")
    for day, data in sorted(by_date.items(), reverse=True)[:10]:
        print(f"  {day}: {data['files']} 文件, {data['size'] / 1024 / 1024:.2f} MB")


def export_error_logs(log_dir: Path, output_file: Path, days: int = 7) -> None:
    """Write every ERROR/CRITICAL line from recent logs into *output_file*.

    Scans ``*.log`` files modified within the last *days* days; unreadable
    files are reported but skipped (best-effort).

    Args:
        log_dir: Directory searched recursively for ``*.log`` files.
        output_file: Destination text file (overwritten).
        days: Look-back window in days, measured against file mtime.
    """
    threshold = (datetime.now() - timedelta(days=days)).timestamp()
    matches = []

    for log_file in log_dir.rglob("*.log"):
        if log_file.stat().st_mtime < threshold:
            continue
        try:
            with open(log_file, 'r', encoding='utf-8') as fh:
                for line_num, text in enumerate(fh, 1):
                    # Plain substring match against standard level names.
                    if 'ERROR' in text or 'CRITICAL' in text:
                        matches.append(f"{log_file.name}:{line_num} | {text.strip()}")
        except Exception as exc:
            # Best-effort: report and continue with the remaining files.
            print(f"读取文件失败 {log_file}: {exc}")

    with open(output_file, 'w', encoding='utf-8') as out:
        out.write(f"错误日志导出 - {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
        out.write(f"时间范围: 最近 {days} 天\n")
        out.write("=" * 80 + "\n\n")
        out.writelines(entry + "\n" for entry in matches)

    print(f"已导出 {len(matches)} 条错误日志到: {output_file}")


def main():
    """CLI entry point: parse arguments and dispatch to the selected action."""
    parser = argparse.ArgumentParser(description="日志管理工具")
    parser.add_argument("action", choices=['compress', 'cleanup', 'analyze', 'export-errors'],
                        help="要执行的操作")
    parser.add_argument("--days", type=int, default=30,
                        help="天数阈值 (默认: 30)")
    parser.add_argument("--output", type=str,
                        help="输出文件路径 (用于export-errors)")
    args = parser.parse_args()

    log_dir = BASE_DIR / "logs"
    # Bail out early if there is nothing to manage.
    if not log_dir.exists():
        print(f"日志目录不存在: {log_dir}")
        return

    action = args.action
    if action == "compress":
        compress_old_logs(log_dir, args.days)
    elif action == "cleanup":
        cleanup_logs(log_dir, args.days)
    elif action == "analyze":
        analyze_logs(log_dir)
    else:
        # Remaining choice is "export-errors"; derive a dated default
        # filename when --output is not given.
        if args.output:
            target = Path(args.output)
        else:
            target = BASE_DIR / f"error_logs_{datetime.now().strftime('%Y%m%d')}.txt"
        export_error_logs(log_dir, target, args.days)


if __name__ == "__main__":
    main()
