#!/usr/bin/env python3
"""
智能图片压缩脚本 - 超越专业压缩软件的优势：
1. 按年份智能分批，避免单个压缩包过大
2. 实时进度监控和断点续传
3. 多线程并行压缩，充分利用CPU
4. 智能压缩级别选择
5. 自动跳过已压缩文件
6. 详细统计和日志
"""

import os
import sys
import zipfile
import threading
import time
from pathlib import Path
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List, Dict, Tuple
import argparse
from dataclasses import dataclass
import json

@dataclass
class CompressionStats:
    """Mutable accumulator for compression-run statistics.

    Updated concurrently by the compressor (under its lock) and serialized
    into the on-disk resume-state file via :meth:`to_dict`.
    """
    total_files: int = 0          # files discovered during the scan
    compressed_files: int = 0     # files written into archives so far
    total_size_mb: float = 0.0    # cumulative original size (MB)
    compressed_size_mb: float = 0.0  # cumulative archive size (MB)
    compression_ratio: float = 0.0   # overall saving, percent
    elapsed_time: float = 0.0     # seconds since the run started
    current_batch: str = ""       # human-readable "batch - year" label

    def to_dict(self) -> dict:
        """Return a JSON-ready dict with float fields rounded to 2 decimals."""
        result = {
            'total_files': self.total_files,
            'compressed_files': self.compressed_files,
        }
        for key in ('total_size_mb', 'compressed_size_mb',
                    'compression_ratio', 'elapsed_time'):
            result[key] = round(getattr(self, key), 2)
        result['current_batch'] = self.current_batch
        return result

class SmartImageCompressor:
    """Smart image compressor: batches year directories into size-bounded zip
    archives, supports resume via a JSON state file, and reports live progress.
    """

    def __init__(self, source_dir: str, output_dir: str, max_batch_size_mb: int = 500):
        """
        Args:
            source_dir: root directory containing per-year sub-directories of .jpg files.
            output_dir: directory receiving the .zip archives and the state file.
            max_batch_size_mb: soft upper bound on a batch's uncompressed size.
        """
        self.source_dir = Path(source_dir)
        self.output_dir = Path(output_dir)
        self.max_batch_size_mb = max_batch_size_mb
        self.stats = CompressionStats()
        # Guards self.stats AND the state file: compress_batch/save_state run
        # on ThreadPoolExecutor worker threads.
        self.lock = threading.Lock()
        self.start_time = time.time()

        # Create the output directory up front.
        self.output_dir.mkdir(parents=True, exist_ok=True)

        # State file enabling resume after interruption.
        self.state_file = self.output_dir / "compression_state.json"
        self.completed_batches = self.load_state()

    @staticmethod
    def _iter_jpg_files(directory: Path):
        """Yield every regular *.jpg file under *directory*, recursively."""
        for img_file in directory.rglob("*.jpg"):
            if img_file.is_file():
                yield img_file

    def load_state(self) -> set:
        """Load the set of completed year names from the state file.

        Returns:
            Set of year-directory names already archived; empty when the file
            is missing, unreadable, or not valid JSON.
        """
        if self.state_file.exists():
            try:
                with open(self.state_file, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                    return set(data.get('completed_batches', []))
            # Narrow catch: a corrupt/unreadable state file means "start over",
            # but SystemExit/KeyboardInterrupt must still propagate.
            except (OSError, ValueError):
                pass
        return set()

    def save_state(self, completed_batch: str):
        """Record *completed_batch* as done and persist the state file.

        Thread-safe (called from worker threads) and crash-safe: the JSON is
        written to a temp file and atomically swapped into place so an
        interruption never leaves a half-written state file.
        """
        with self.lock:
            self.completed_batches.add(completed_batch)
            state_data = {
                'completed_batches': list(self.completed_batches),
                'last_update': time.strftime('%Y-%m-%d %H:%M:%S'),
                'stats': self.stats.to_dict()
            }
            tmp_file = self.state_file.with_suffix('.json.tmp')
            with open(tmp_file, 'w', encoding='utf-8') as f:
                json.dump(state_data, f, indent=2, ensure_ascii=False)
            os.replace(tmp_file, self.state_file)

    def get_year_directories(self) -> List[Tuple[str, Path, int, float]]:
        """Scan the source tree grouped by year directory.

        Returns:
            Sorted list of (year_name, year_path, file_count, size_mb) tuples,
            omitting directories that contain no .jpg files.
        """
        year_dirs = []

        for year_dir in sorted(self.source_dir.iterdir()):
            if not year_dir.is_dir():
                continue

            # Count files and accumulate their size for this year.
            file_count = 0
            total_size = 0
            for img_file in self._iter_jpg_files(year_dir):
                file_count += 1
                total_size += img_file.stat().st_size

            if file_count > 0:
                size_mb = total_size / (1024 * 1024)
                year_dirs.append((year_dir.name, year_dir, file_count, size_mb))

        return year_dirs

    def create_smart_batches(self, year_dirs: List[Tuple[str, Path, int, float]]) -> List[List[Tuple[str, Path, int, float]]]:
        """Group year directories into batches bounded by max_batch_size_mb.

        Years already recorded as completed are skipped. A single year larger
        than the limit becomes a batch of its own.
        """
        batches = []
        current_batch = []
        current_batch_size = 0

        for year_info in year_dirs:
            year_name, year_path, file_count, size_mb = year_info

            # Skip years finished in a previous run (resume support).
            if year_name in self.completed_batches:
                print(f"⏭️  跳过已完成的年份: {year_name}")
                continue

            # Oversized year: flush the current batch, then batch it alone.
            if size_mb > self.max_batch_size_mb:
                if current_batch:
                    batches.append(current_batch)
                    current_batch = []
                    current_batch_size = 0
                batches.append([year_info])
                print(f"📦 大年份单独批次: {year_name} ({file_count}文件, {size_mb:.1f}MB)")

            # Adding this year would overflow the batch: flush, start a new one.
            elif current_batch_size + size_mb > self.max_batch_size_mb:
                if current_batch:
                    batches.append(current_batch)
                current_batch = [year_info]
                current_batch_size = size_mb

            # Fits: append to the current batch.
            else:
                current_batch.append(year_info)
                current_batch_size += size_mb

        # Flush the trailing batch.
        if current_batch:
            batches.append(current_batch)

        return batches

    def compress_batch(self, batch: List[Tuple[str, Path, int, float]], batch_num: int) -> bool:
        """Compress one batch of year directories into a single zip archive.

        Args:
            batch: list of (year_name, year_path, file_count, size_mb) tuples.
            batch_num: 1-based batch index, used only in log messages.

        Returns:
            True on success (or when the archive already exists), False on error.
        """
        # Derive the archive name from the first/last year of the batch.
        year_names = [info[0] for info in batch]
        if len(year_names) == 1:
            batch_name = f"vehicles_{year_names[0]}"
        else:
            batch_name = f"vehicles_{year_names[0]}_to_{year_names[-1]}"

        zip_path = self.output_dir / f"{batch_name}.zip"

        # An existing archive means this batch finished in a previous run.
        if zip_path.exists():
            print(f"⏭️  跳过已存在的压缩包: {zip_path.name}")
            for year_name, _, _, _ in batch:
                self.save_state(year_name)
            return True

        try:
            batch_files = 0
            batch_size = 0

            print(f"🗜️  开始压缩批次 {batch_num}: {batch_name}")

            with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED, compresslevel=6) as zipf:
                for year_name, year_path, file_count, size_mb in batch:
                    print(f"   📁 处理年份: {year_name} ({file_count}文件)")

                    # Publish which batch/year is currently in flight.
                    with self.lock:
                        self.stats.current_batch = f"{batch_name} - {year_name}"

                    for img_file in self._iter_jpg_files(year_path):
                        # Store paths relative to the source root so the
                        # directory structure is preserved inside the archive.
                        rel_path = img_file.relative_to(self.source_dir)
                        zipf.write(img_file, rel_path)

                        file_size = img_file.stat().st_size
                        batch_files += 1
                        batch_size += file_size

                        with self.lock:
                            self.stats.compressed_files += 1
                            self.stats.total_size_mb += file_size / (1024 * 1024)

                        # Log every 100 files to keep the output readable.
                        if batch_files % 100 == 0:
                            print(f"     ✅ 已处理 {batch_files} 个文件...")

            # Per-batch compression ratio for the summary line.
            compressed_size = zip_path.stat().st_size
            compression_ratio = (1 - compressed_size / batch_size) * 100 if batch_size > 0 else 0

            with self.lock:
                self.stats.compressed_size_mb += compressed_size / (1024 * 1024)

            print(f"✅ 批次 {batch_num} 完成: {batch_files}文件, "
                  f"原始{batch_size/(1024*1024):.1f}MB → 压缩{compressed_size/(1024*1024):.1f}MB "
                  f"(压缩率{compression_ratio:.1f}%)")

            # Mark every year in the batch as completed for resume.
            for year_name, _, _, _ in batch:
                self.save_state(year_name)

            return True

        except Exception as e:
            print(f"❌ 批次 {batch_num} 压缩失败: {e}")
            # Remove the partial archive so a rerun starts clean.
            if zip_path.exists():
                zip_path.unlink()
            return False

    def print_progress(self):
        """Background loop: print a progress summary every 5 seconds.

        Exits once every pending file has been compressed; otherwise runs
        forever (the thread is started as a daemon).
        """
        while True:
            time.sleep(5)  # refresh interval

            with self.lock:
                elapsed = time.time() - self.start_time
                self.stats.elapsed_time = elapsed

                if self.stats.total_size_mb > 0:
                    self.stats.compression_ratio = (1 - self.stats.compressed_size_mb / self.stats.total_size_mb) * 100

                # Throughput figures.
                files_per_sec = self.stats.compressed_files / elapsed if elapsed > 0 else 0
                mb_per_sec = self.stats.total_size_mb / elapsed if elapsed > 0 else 0

                print(f"\n📊 实时进度:")
                if self.stats.total_files > 0:
                    pct = self.stats.compressed_files / self.stats.total_files * 100
                    print(f"   文件: {self.stats.compressed_files}/{self.stats.total_files} ({pct:.1f}%)")
                else:
                    print("   文件: 统计中...")
                print(f"   大小: {self.stats.total_size_mb:.1f}MB → {self.stats.compressed_size_mb:.1f}MB "
                      f"(压缩率{self.stats.compression_ratio:.1f}%)")
                print(f"   速度: {files_per_sec:.1f}文件/秒, {mb_per_sec:.1f}MB/秒")
                print(f"   耗时: {elapsed/60:.1f}分钟")
                print(f"   当前: {self.stats.current_batch}")

                # Evaluate the termination condition under the lock to avoid
                # reading the two counters in a torn state.
                done = (self.stats.total_files > 0
                        and self.stats.compressed_files >= self.stats.total_files)

            if done:
                break

    def run(self, max_workers: int = 2):
        """Scan, batch, and compress everything with a live progress thread.

        Args:
            max_workers: number of batches compressed in parallel.
        """
        print("🚀 智能图片压缩器启动")
        print(f"📂 源目录: {self.source_dir}")
        print(f"📦 输出目录: {self.output_dir}")
        print(f"⚙️  最大批次大小: {self.max_batch_size_mb}MB")
        print(f"🧵 并行线程数: {max_workers}")

        # Scan the directory tree.
        print("\n📋 扫描文件结构...")
        year_dirs = self.get_year_directories()

        scanned_files = sum(info[2] for info in year_dirs)
        print(f"📊 发现 {len(year_dirs)} 个年份目录，共 {scanned_files} 个文件")

        # Build size-bounded batches (already-completed years are dropped here).
        batches = self.create_smart_batches(year_dirs)
        print(f"📦 创建 {len(batches)} 个压缩批次")

        if not batches:
            print("✅ 所有文件已压缩完成！")
            return

        # Progress is measured against files actually pending. Counting the
        # already-completed years too (as before) made resumed runs show wrong
        # percentages and kept print_progress from ever terminating.
        self.stats.total_files = sum(info[2] for batch in batches for info in batch)

        # Start the progress monitor (daemon: dies with the main thread).
        progress_thread = threading.Thread(target=self.print_progress, daemon=True)
        progress_thread.start()

        print(f"\n🗜️  开始并行压缩 (最多{max_workers}个批次同时进行)...")

        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            # Submit every batch; remember its 1-based number for logging.
            future_to_batch = {
                executor.submit(self.compress_batch, batch, i+1): (batch, i+1)
                for i, batch in enumerate(batches)
            }

            for future in as_completed(future_to_batch):
                _, batch_num = future_to_batch[future]
                try:
                    if future.result():
                        print(f"🎉 批次 {batch_num}/{len(batches)} 完成")
                    else:
                        print(f"💥 批次 {batch_num}/{len(batches)} 失败")
                except Exception as e:
                    print(f"💥 批次 {batch_num} 异常: {e}")

        # Final summary.
        total_time = time.time() - self.start_time
        print(f"\n🏁 压缩完成!")
        print(f"📊 最终统计:")
        print(f"   总文件数: {self.stats.compressed_files}")
        print(f"   原始大小: {self.stats.total_size_mb:.1f}MB")
        print(f"   压缩大小: {self.stats.compressed_size_mb:.1f}MB")
        print(f"   压缩率: {self.stats.compression_ratio:.1f}%")
        print(f"   总耗时: {total_time/60:.1f}分钟")
        print(f"   平均速度: {self.stats.compressed_files/(total_time/60):.0f}文件/分钟")

def main():
    """CLI entry point: parse arguments, validate the source, run the compressor."""
    parser = argparse.ArgumentParser(description="智能图片压缩器")
    parser.add_argument("source_dir", help="源图片目录")
    parser.add_argument("-o", "--output", default="./compressed", help="输出目录 (默认: ./compressed)")
    parser.add_argument("-s", "--batch-size", type=int, default=500, help="批次大小MB (默认: 500)")
    parser.add_argument("-w", "--workers", type=int, default=2, help="并行线程数 (默认: 2)")

    args = parser.parse_args()

    # Must be an existing *directory*: the compressor iterates its children.
    # (os.path.exists also accepted a plain file, which crashed later.)
    if not os.path.isdir(args.source_dir):
        print(f"❌ 源目录不存在: {args.source_dir}")
        sys.exit(1)

    compressor = SmartImageCompressor(
        source_dir=args.source_dir,
        output_dir=args.output,
        max_batch_size_mb=args.batch_size
    )

    try:
        compressor.run(max_workers=args.workers)
    except KeyboardInterrupt:
        # State was persisted after each completed year; a rerun resumes.
        print("\n⏹️  用户中断，状态已保存，下次运行将从断点继续")
    except Exception as e:
        print(f"\n💥 压缩过程出错: {e}")
        sys.exit(1)

# Run the CLI only when executed as a script, not on import.
if __name__ == "__main__":
    main()
