#!/usr/bin/env python3
"""
SfM Pipeline Script
Converted from pipeline_SfM.ipynb

This script runs SfM reconstruction from scratch on a set of images.
We use the South-Building dataset for demonstration.
"""

import os
import subprocess
import sys
from pathlib import Path

# Enable IPython's autoreload extension when this script runs inside an
# interactive shell; a plain `python` invocation skips this silently,
# and a missing IPython package is reported but non-fatal.
try:
    from IPython import get_ipython

    shell = get_ipython()
    if shell is not None:
        shell.run_line_magic('load_ext', 'autoreload')
        shell.run_line_magic('autoreload', '2')
except ImportError:
    print("IPython not available, skipping autoreload")

from hloc import (
    extract_features,
    match_features,
    reconstruction,
    visualization,
    pairs_from_retrieval,
)


def download_dataset(
    images_dir="datasets/South-Building/images/",
    url="https://cvg-data.inf.ethz.ch/local-feature-evaluation-schoenberger2017/South-Building.zip",
    datasets_dir="datasets",
):
    """Download and extract the South-Building dataset if it doesn't exist.

    Args:
        images_dir: Directory whose existence indicates the dataset is ready.
        url: Archive URL fetched with ``wget`` when the dataset is absent.
        datasets_dir: Directory the archive is downloaded to and unzipped in.

    Returns:
        True if the dataset is present (already or after a successful
        download + extraction), False on any download/extraction failure.
    """
    images = Path(images_dir)
    if images.exists():
        print("Dataset already exists, skipping download.")
        return True

    print("Downloading South-Building dataset...")

    # Create datasets directory if it doesn't exist
    datasets = Path(datasets_dir)
    datasets.mkdir(parents=True, exist_ok=True)

    # Download dataset. FileNotFoundError is raised when the `wget` binary
    # itself is missing — previously this crashed the script uncaught.
    try:
        subprocess.run(["wget", url, "-P", str(datasets)], check=True)
        print("Dataset downloaded successfully!")
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        print(f"Error downloading dataset: {e}")
        print("Please ensure wget is installed and you have internet connection.")
        return False

    # Extract dataset; the archive name is the last URL component.
    archive = datasets / Path(url).name
    try:
        subprocess.run(["unzip", "-q", str(archive), "-d", str(datasets)], check=True)
        print("Dataset extracted successfully!")
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        print(f"Error extracting dataset: {e}")
        return False

    return True


def setup_paths():
    """Setup all necessary paths for the pipeline.

    Returns a 4-tuple: (image directory, output directory, retrieval-pairs
    file, SfM model directory). The output directory is created on disk.
    """
    outputs = Path("outputs/sfm/")
    outputs.mkdir(parents=True, exist_ok=True)  # ensure the output tree exists

    return (
        Path("datasets/South-Building/images/"),
        outputs,
        outputs / "pairs-netvlad.txt",
        outputs / "sfm_superpoint+superglue",
    )


def main() -> None:
    """Main pipeline execution.

    Runs the full pipeline end-to-end: path setup, dataset download,
    retrieval-based pair selection, local feature extraction and matching,
    COLMAP reconstruction, and 2D visualizations. Exits with status 1 if
    the dataset download fails or any pipeline step raises.
    """
    print("Starting SfM Pipeline...")
    
    # Setup paths (also creates the output directory as a side effect)
    images, outputs, sfm_pairs, sfm_dir = setup_paths()
    
    # Download dataset if needed
    if not download_dataset():
        print("Failed to download dataset. Exiting.")
        sys.exit(1)
    
    # Configuration: preset configs shipped with hloc — NetVLAD for global
    # retrieval, SuperPoint (Aachen preset) for local features, SuperGlue
    # for matching.
    retrieval_conf = extract_features.confs["netvlad"]
    feature_conf = extract_features.confs["superpoint_aachen"]
    matcher_conf = match_features.confs["superglue"]
    
    print(f"Images directory: {images}")
    print(f"Output directory: {outputs}")
    
    try:
        # Step 1: Find image pairs via image retrieval
        print("\n=== Step 1: Image Retrieval ===")
        print("Extracting global descriptors with NetVLAD...")
        retrieval_path = extract_features.main(retrieval_conf, images, outputs)
        print(f"Retrieval features saved to: {retrieval_path}")
        
        print("Finding image pairs...")
        # num_matched=5: each image is paired with its 5 nearest retrieval
        # neighbors (per hloc's pairs_from_retrieval).
        pairs_from_retrieval.main(retrieval_path, sfm_pairs, num_matched=5)
        print(f"Image pairs saved to: {sfm_pairs}")
        
        # Step 2: Extract and match local features
        print("\n=== Step 2: Local Feature Extraction and Matching ===")
        print("Extracting local features with SuperPoint...")
        feature_path = extract_features.main(feature_conf, images, outputs)
        print(f"Local features saved to: {feature_path}")
        
        print("Matching features with SuperGlue...")
        # feature_conf["output"] names the feature file the matcher reads.
        match_path = match_features.main(
            matcher_conf, sfm_pairs, feature_conf["output"], outputs
        )
        print(f"Feature matches saved to: {match_path}")
        
        # Step 3: 3D reconstruction (COLMAP, driven by hloc)
        print("\n=== Step 3: 3D Reconstruction ===")
        print("Running COLMAP reconstruction...")
        model = reconstruction.main(sfm_dir, images, sfm_pairs, feature_path, match_path)
        print(f"3D model saved to: {sfm_dir}")
        
        # Step 4: Visualization — three 2D renderings of the model over
        # n=5 sample images, colored by different per-point statistics.
        print("\n=== Step 4: Visualization ===")
        print("Generating visualizations...")
        
        print("Visualizing by visibility...")
        visualization.visualize_sfm_2d(model, images, color_by="visibility", n=5)
        
        print("Visualizing by track length...")
        visualization.visualize_sfm_2d(model, images, color_by="track_length", n=5)
        
        print("Visualizing by depth...")
        visualization.visualize_sfm_2d(model, images, color_by="depth", n=5)
        
        print("\n=== Pipeline Completed Successfully! ===")
        print(f"Results saved in: {outputs}")
        
    except Exception as e:
        # Top-level boundary: report the failure with a traceback, then
        # exit non-zero so callers/CI can detect the failure.
        print(f"Error during pipeline execution: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
