#!/usr/bin/env python3
"""dedupe.py

Find and optionally delete duplicate images in a directory.

Strategy:
- Exact duplicates: compare file size + sha1
- Perceptual duplicates: use imagehash (perceptual hash, phash) and hamming distance

Usage:
    python dedupe.py --path /path/to/images [--delete] [--hash-size 8] [--max-distance 5] [--threads 4]

Defaults to dry-run; use --delete to actually remove files.
"""

from __future__ import annotations

import argparse
import hashlib
import os
import sys
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Tuple

try:
    from PIL import Image
except Exception:
    print("Pillow is required. Run: pip install -r requirements.txt", file=sys.stderr)
    raise

try:
    import imagehash
except Exception:
    print("imagehash is required. Run: pip install -r requirements.txt", file=sys.stderr)
    raise


@dataclass
class ImageInfo:
    """Per-file metadata computed during the scan phase."""

    path: Path  # location of the image file on disk
    size: int  # file size in bytes (from stat)
    sha1: str | None  # hex SHA-1 of the raw file bytes; used for exact-duplicate matching
    phash: imagehash.ImageHash | None  # perceptual hash; None if the file could not be decoded as an image


def sha1_of_file(path: Path, buf_size: int = 65536) -> str:
    """Return the hex SHA-1 digest of the file at *path*.

    The file is read in chunks of *buf_size* bytes so large files are
    hashed without loading them fully into memory.
    """
    digest = hashlib.sha1()
    with path.open("rb") as fh:
        chunk = fh.read(buf_size)
        while chunk:
            digest.update(chunk)
            chunk = fh.read(buf_size)
    return digest.hexdigest()


def compute_phash(path: Path, hash_size: int = 8) -> imagehash.ImageHash:
    """Compute the perceptual (DCT-based) hash of the image at *path*.

    Args:
        path: Image file to hash.
        hash_size: Side length of the hash grid; 8 yields a 64-bit hash.

    Raises:
        RuntimeError: if the file cannot be opened or decoded as an image.
            The original exception is chained as ``__cause__`` so the root
            cause appears in tracebacks.
    """
    try:
        with Image.open(path) as img:
            return imagehash.phash(img, hash_size=hash_size)
    except Exception as e:
        # `from e` preserves the original traceback; the bare raise in the
        # previous version discarded the exception chain.
        raise RuntimeError(f"Error computing phash for {path}: {e}") from e


def collect_images(paths: List[Path], recursive: bool = True) -> List[Path]:
    """Gather image files from the given files and/or directories.

    Args:
        paths: Files and/or directories to scan. Non-existent entries are
            silently skipped (matches prior behavior).
        recursive: When False, only the top level of each directory is scanned.

    Returns:
        Image paths in discovery order, de-duplicated by resolved location.
    """
    exts = {".jpg", ".jpeg", ".png", ".bmp", ".gif", ".tiff", ".webp"}
    result: List[Path] = []
    seen: set = set()

    def add(p: Path) -> None:
        # De-duplicate by resolved path: the same file reached twice (e.g.
        # passed both directly and via a scanned directory) would otherwise
        # form a bogus "duplicate group" of the file with itself downstream,
        # and --delete could then remove the only copy.
        key = p.resolve()
        if key not in seen:
            seen.add(key)
            result.append(p)

    for p in paths:
        if p.is_file():
            if p.suffix.lower() in exts:
                add(p)
        elif p.is_dir():
            for root, _dirs, files in os.walk(p):
                for fn in files:
                    if Path(fn).suffix.lower() in exts:
                        add(Path(root) / fn)
                if not recursive:
                    # os.walk is top-down, so breaking after the first
                    # iteration limits the scan to the directory itself.
                    break
    return result


def find_duplicates(files: List[Path], hash_size: int = 8, max_distance: int = 5, threads: int = 4) -> List[List[Path]]:
    """Return groups of duplicate files.

    Two passes:
      1. Exact duplicates: identical (size, sha1) pairs.
      2. Perceptual duplicates among the remainder: phash hamming distance
         <= *max_distance* (pairwise O(n^2) scan).

    Files that cannot be stat'd or read are skipped with a warning instead
    of aborting the whole run (previously an exception raised inside
    ``ex.map`` crashed the scan).
    """

    def process(path: Path) -> ImageInfo | None:
        # Tolerate files that vanish or become unreadable between listing
        # and hashing: report and skip rather than propagate out of ex.map.
        try:
            size = path.stat().st_size
            sha = sha1_of_file(path)
        except OSError as e:
            print(f"Skipping unreadable file {path}: {e}", file=sys.stderr)
            return None
        try:
            ph = compute_phash(path, hash_size=hash_size)
        except Exception:
            # Not decodable as an image; exact (sha1) matching still applies.
            ph = None
        return ImageInfo(path=path, size=size, sha1=sha, phash=ph)

    with ThreadPoolExecutor(max_workers=threads) as ex:
        infos = [info for info in ex.map(process, files) if info is not None]

    # Pass 1 -- exact duplicates: group by (size, sha1).
    by_sha: Dict[Tuple[int, str], List[Path]] = defaultdict(list)
    for info in infos:
        by_sha[(info.size, info.sha1)].append(info.path)

    groups: List[List[Path]] = []
    used = set()
    for g in by_sha.values():
        if len(g) > 1:
            groups.append(sorted(g))
            used.update(g)

    # Pass 2 -- perceptual duplicates among the remaining files.
    # NOTE: this is a pairwise O(n^2) comparison, not bucketed.
    remaining = [i for i in infos if i.path not in used and i.phash is not None]
    for i, a in enumerate(remaining):
        if a.path in used:
            continue
        group = [a.path]
        for b in remaining[i + 1:]:
            if b.path in used:
                continue
            try:
                dist = a.phash - b.phash  # hamming distance between hashes
            except Exception:
                continue
            if dist <= max_distance:
                group.append(b.path)
                used.add(b.path)
        if len(group) > 1:
            groups.append(sorted(group))
            used.update(group)

    return groups


def delete_groups(groups: List[List[Path]], do_delete: bool = False) -> List[Tuple[Path, List[Path]]]:
    """Keep the first (sorted) file of each group and delete the rest.

    When *do_delete* is False (dry run) nothing is removed; the files that
    WOULD be removed are still reported. Returns (kept, [deleted...]) pairs.
    """
    summary: List[Tuple[Path, List[Path]]] = []
    for members in groups:
        ordered = sorted(members)
        keep = ordered[0]
        removed: List[Path] = []
        for candidate in ordered[1:]:
            if not do_delete:
                # Dry run: record what would be deleted without touching disk.
                removed.append(candidate)
                continue
            try:
                candidate.unlink()
            except Exception as e:
                print(f"Failed to delete {candidate}: {e}", file=sys.stderr)
            else:
                removed.append(candidate)
        summary.append((keep, removed))
    return summary


def parse_args(argv: List[str] | None = None) -> argparse.Namespace:
    """Build the CLI parser and parse *argv*.

    Args:
        argv: Argument list to parse; None (the default, and the previous
            behavior) means ``sys.argv[1:]``. Accepting an explicit list is
            backward compatible and makes the parser testable.
    """
    p = argparse.ArgumentParser(description="Find and optionally delete duplicate images")
    p.add_argument("path", nargs="+", help="Files or directories to scan")
    p.add_argument("--hash-size", type=int, default=8, help="Hash size for phash (8 = 64-bit)")
    p.add_argument("--max-distance", type=int, default=5, help="Max hamming distance to consider as duplicate")
    p.add_argument("--threads", type=int, default=4, help="Worker threads for hashing")
    p.add_argument("--delete", action="store_true", help="Actually delete duplicates (default: dry-run)")
    p.add_argument("--no-recursive", dest="recursive", action="store_false", help="Don't recurse directories")
    return p.parse_args(argv)


def main(argv: List[str] | None = None) -> int:
    """CLI entry point: scan for duplicate images, report, optionally delete.

    Returns a process exit code: 0 on success, including when no files or
    no duplicates are found.
    """
    # NOTE(review): the previous `parse_args() if argv is None else parse_args()`
    # called the same thing on both branches, so *argv* was always ignored and
    # parse_args() read sys.argv. Behavior is unchanged here; TODO: thread
    # argv through to the parser.
    args = parse_args()
    paths = [Path(p) for p in args.path]
    files = collect_images(paths, recursive=args.recursive)
    if not files:
        print("No image files found.")
        return 0
    print(f"Found {len(files)} image files. Computing hashes...")
    groups = find_duplicates(files, hash_size=args.hash_size, max_distance=args.max_distance, threads=args.threads)
    if not groups:
        print("No duplicates found.")
        return 0
    print(f"Found {len(groups)} groups of duplicates:")
    for g in groups:
        print("Group:")
        for p in g:
            print("  ", p)
    if args.delete:
        print("Deleting duplicates (keeping first file in each group)...")
    else:
        print("Dry run: not deleting. Rerun with --delete to remove files.")
    results = delete_groups(groups, do_delete=args.delete)
    print("Summary:")
    for keep, dels in results:
        print(f"Keep: {keep}")
        for d in dels:
            print(f"  Delete: {d}")
    return 0


if __name__ == "__main__":
    # SystemExit carries main()'s integer return value to the shell as the exit code.
    raise SystemExit(main())
