import os
import argparse
import json
import numpy as np
import SimpleITK as sitk
from tqdm import tqdm
import multiprocessing



def load_mha(file_path: str) -> np.ndarray:
    """Read an MHA file, reorient it to LPI, and return it as a numpy array."""
    reoriented = sitk.DICOMOrient(sitk.ReadImage(file_path), 'LPI')
    return sitk.GetArrayFromImage(reoriented)


def create_sliding_windows(
    volume: np.ndarray, 
    window_size: int, 
    stride: int
) -> list[tuple[int, np.ndarray]]:
    """
    Perform sliding-window sampling along the Z axis of a 3D volume.
    
    Args:
        volume: 3D volume array, shape=[Z, Y, X]
        window_size: Window size along Z axis
        stride: Sliding stride
    
    Returns:
        List of tuples (start_idx, window_data), where start_idx is the Z
        index at which the window begins in `volume`.
    """
    z_length = volume.shape[0]
    windows = []
    
    # Regular windows at multiples of `stride`
    for start_idx in range(0, z_length - window_size + 1, stride):
        windows.append((start_idx, volume[start_idx : start_idx + window_size]))
    
    # Handle tail region if not covered by stride: add one extra window
    # flush against the end of the volume.
    last_start_idx = stride * ((z_length - window_size) // stride)
    if last_start_idx + window_size < z_length:
        tail_start = z_length - window_size
        # BUGFIX: previously recorded len(slice_data) - window_size (always 0)
        # as the start index instead of the actual tail start position.
        windows.append((tail_start, volume[tail_start:]))
    
    return windows


def sample_volume(args):
    """
    Perform Z-axis sliding window sampling on an image/label MHA pair and
    generate SeriesMeta.json in the per-series output folder.
    
    Args:
        args: Tuple of (image_path, label_path, output_dir, window_size,
            stride, ensure_slice_foreground) — packed into one argument so
            the function works with multiprocessing.Pool.imap_unordered.
    
    Returns:
        Dict {series_id: meta} on success, or a tuple
        (filename, error_message) on failure.
    """
    # BUGFIX: unpack outside the try block so `image_path` is always bound
    # when the except clause builds its error tuple (previously a failure
    # during unpacking raised NameError instead of a clean error report).
    image_path, label_path, output_dir, window_size, stride, ensure_slice_foreground = args
    try:
        # Load image and label
        image = load_mha(image_path)
        label = load_mha(label_path)
        if image.shape != label.shape:
            raise RuntimeError(f"Image and label shape mismatch: image={image.shape}, label={label.shape}")
        if image.shape[0] < window_size:
            tqdm.write(f"Z length smaller than window size, skipping: {image_path}")
            return { 
                os.path.basename(image_path.replace('.mha', '')): {
                    "num_patches": 0,
                    "anno_available": False,
                    "cropped_center": None,
                }
            }
        
        # Per-series output folder (created lazily on first saved patch)
        series_id = os.path.splitext(os.path.basename(image_path))[0]
        series_folder = os.path.join(output_dir, series_id)
        
        # Generate sliding windows; shapes match, so image and label
        # windows are aligned pairwise.
        image_windows = create_sliding_windows(image, window_size, stride)
        label_windows = create_sliding_windows(label, window_size, stride)
        
        # For JSON recording
        existed_classes: dict[str, list[int]] = {}
        cropped_center: list[tuple[float, float, float]] = []
        
        # Get Y, X sizes (note: volume.shape = [Z, Y, X])
        _, height, width = image.shape
        
        # Iterate and save window samples
        for idx, ((z_start, img_window), (_, label_window)) in enumerate(zip(image_windows, label_windows)):
            # Skip windows unless EVERY slice contains foreground (nonzero
            # label). NOTE(review): the CLI help says "at least one
            # foreground slice", which would be a weaker `.any()` check —
            # confirm which semantics is intended.
            if ensure_slice_foreground and not bool(label_window.any(axis=(1, 2)).all()):
                continue
            
            # Save npz
            os.makedirs(series_folder, exist_ok=True)
            sample_name = f"{idx}.npz"
            save_path = os.path.join(series_folder, sample_name)
            np.savez_compressed(save_path, img=img_window, gt_seg_map=label_window)
            
            # Record unique label classes in this window
            existed_classes[sample_name] = np.unique(label_window).tolist()
            
            # Compute window center (Z actual range, XY midpoints)
            z_end = z_start + window_size
            cropped_center.append(((z_start + z_end) / 2, height / 2, width / 2))
        
        num_patches = len(existed_classes)
        anno_available = num_patches > 0
        
        # If at least one patch was kept, record the patch shape (all
        # windows share the same shape, so the first one suffices).
        patch_shape = image_windows[0][1].shape if num_patches > 0 else None
        
        # Generate SeriesMeta.json
        if anno_available:
            metadata_path = os.path.join(series_folder, "SeriesMeta.json")
            with open(metadata_path, "w", encoding="utf-8") as f:
                json.dump(
                    {
                        "series_id": series_id,
                        "shape": patch_shape,
                        "num_patches": num_patches,
                        "anno_available": anno_available,
                        "class_within_patch": existed_classes,
                        "cropped_center": cropped_center,
                    },
                    f,
                    indent=4
                )
        
        return {
            os.path.basename(series_folder): {
                "num_patches": num_patches,
                "anno_available": anno_available,
                "cropped_center": cropped_center,
            }
        }
        
    except Exception as e:
        return os.path.basename(image_path), str(e)


def process_dataset(
    data_dir: str,
    output_dir: str,
    window_size: int,
    stride: int,
    use_mp: bool = False,
    num_workers: int|None = None,
    ensure_slice_foreground: bool = False,
) -> None:
    """
    Traverse image/ and label/ under data_dir, perform sliding window sampling.
    Outputs and JSON metadata are saved under output_dir.
    """
    # Check directories
    image_dir = os.path.join(data_dir, "image")
    label_dir = os.path.join(data_dir, "label")
    for d in [image_dir, label_dir]:
        if not os.path.exists(d):
            raise FileNotFoundError(f"Directory does not exist: {d}")
    # Find all image files
    image_files = [f for f in os.listdir(image_dir) if f.endswith(".mha")]
    if not image_files:
        print(f"Warning: No MHA files found in {image_dir}.")
        return
    print(f"Found {len(image_files)} MHA files in {image_dir}, processing...")
    
    # Assemble tasks
    tasks = []
    for img_file in image_files:
        image_path = os.path.join(image_dir, img_file)
        label_path = os.path.join(label_dir, img_file)
        if not os.path.exists(label_path):
            print(f"Warning: Missing label file for {img_file}: {label_path}")
            continue
        tasks.append((image_path, label_path, output_dir, window_size, stride, ensure_slice_foreground))
    
    # Process files
    results = {}
    if use_mp:
        num_workers = num_workers or max(1, multiprocessing.cpu_count() - 1)
        print(f"Using multiprocessing with {num_workers} workers")
        with multiprocessing.Pool(processes=num_workers) as pool:
            for result in tqdm(pool.imap_unordered(sample_volume, tasks),
                               total=len(tasks), 
                               desc="Processing", 
                               dynamic_ncols=True):
                if isinstance(result, tuple):
                    series_id, error = result
                    print(f"Error while processing {series_id}: {error}")
                else:
                    results.update(result)
    else:
        for t in tqdm(tasks, desc="Processing", dynamic_ncols=True):
            result = sample_volume(t)
            if isinstance(result, tuple):
                series_id, error = result
                print(f"Error while processing {series_id}: {error}")
            else:
                results.update(result)
    
    cropped_series_meta = {
        "data_dir": data_dir,
        "output_dir": output_dir,
        "window_size": window_size,
        "stride": stride,
        "num_series": len(results),
        "num_patches": sum([one_series_meta["num_patches"]
                            for one_series_meta in results.values()]),
        "anno_available": [series_id
                           for series_id, series_meta in results.items()
                           if series_meta["anno_available"] is True],
    }
    json.dump(cropped_series_meta, 
              open(os.path.join(output_dir, "crop_meta.json"), "w", encoding="utf-8"), indent=4)
    print(f"All done. Metadata saved to {os.path.join(output_dir, 'crop_meta.json')}.")


def main():
    """CLI entry point: parse arguments and run the dataset sampling."""
    parser = argparse.ArgumentParser(
        description="Sliding-window sampling along Z axis for 3D medical MHA volumes and generate JSON metadata."
    )
    parser.add_argument("data_root", help="Dataset root directory (contains image and label subfolders)")
    parser.add_argument("output_dir", help="Output directory for sampled patches")
    parser.add_argument("--window-size", type=int, default=64, help="Window size along Z axis")
    parser.add_argument("--stride", type=int, default=32, help="Sliding stride")
    parser.add_argument("--mp", action="store_true", help="Use multiprocessing")
    parser.add_argument("--num-workers", type=int, help="Number of worker processes (default: CPU cores - 1)")
    parser.add_argument("--ensure-slice-foreground", action="store_true", help="Ensure each sampled window has at least one foreground slice")
    opts = parser.parse_args()

    # Output directory is created up front so process_dataset can write into it.
    os.makedirs(opts.output_dir, exist_ok=True)

    try:
        process_dataset(
            data_dir=opts.data_root,
            output_dir=opts.output_dir,
            window_size=opts.window_size,
            stride=opts.stride,
            use_mp=opts.mp,
            num_workers=opts.num_workers,
            ensure_slice_foreground=opts.ensure_slice_foreground,
        )
    except Exception as exc:
        # Surface unexpected failures instead of crashing with a bare traceback.
        print(f"[FATAL] Exception occurred during execution: {exc}")
        import traceback
        traceback.print_exc()


# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()