"""
    SpatialProcessing

Module for spatial processing of observations in GSI data assimilation.
This module implements sophisticated algorithms for optimal spatial distribution
and processing of observations including thinning, super-observations, and
spatial quality control for improved analysis performance.

# Key Features

- **Observation Thinning**: Systematic reduction of observation density
- **Super-Observation Creation**: Combining nearby observations optimally
- **Spatial Quality Control**: Location-based quality assessment
- **Grid-Based Processing**: Efficient spatial organization and access
- **Distance-Based Operations**: Great circle distance calculations and operations
- **Spatial Interpolation**: Observation value interpolation to model grid

# Processing Methods

## Observation Thinning
- **Regular Thinning**: Uniform spatial grid thinning
- **Adaptive Thinning**: Density-based adaptive thinning
- **Quality-Based Thinning**: Retain highest quality observations
- **Platform-Specific Thinning**: Different thinning for different platforms

## Super-Observation Processing
- **Weighted Averaging**: Error-weighted combination of observations
- **Spatial Correlation**: Account for spatial observation correlations
- **Quality Propagation**: Combine quality control information
- **Representative Selection**: Choose most representative observation locations

"""
module SpatialProcessing

using LinearAlgebra
using Statistics
using NearestNeighbors
using Dates
using ..ObservationTypes
using ..GSICoreAnalysis: AbstractAnalysisConfig

# Include configuration types
include("SpatialProcessingConfig.jl")

# Export main types and functions
export AbstractSpatialProcessor, ObservationThinning, SuperObservation
export SpatialGrid, SpatialProcessingConfig, SpatialProcessingResult
export thin_observations, create_super_observations, spatial_quality_control
export compute_distances, interpolate_to_grid, optimize_observation_distribution
export temporal_thinning, process_observations, validate_spatial_processing

# Re-export configuration types
export ThinningConfig, SuperObservationConfig, ProcessingPipeline
export ThinningMethod, SuperObservationMethod
export SIMPLE_AVERAGE, QUALITY_WEIGHTED, ERROR_WEIGHTED, OPTIMAL_INTERPOLATION
export create_standard_thinning_pipeline, create_standard_superob_pipeline, create_comprehensive_pipeline

# Abstract base type for spatial processing methods
abstract type AbstractSpatialProcessor{T <: AbstractFloat} end

"""
    SpatialGrid{T}

Regular spatial grid for organizing observations spatially.

Cells are addressed `[lat_index, lon_index]`; `cell_indices[i, j]` holds the
indices (into the caller's observation arrays) of all observations assigned to
that cell, and `observation_counts[i, j]` caches the per-cell count.
Constructed by `create_spatial_grid`.
"""
struct SpatialGrid{T <: AbstractFloat}
    lon_min::T                 # Western grid edge (degrees)
    lon_max::T                 # Eastern grid edge (degrees)
    lat_min::T                 # Southern grid edge (degrees)
    lat_max::T                 # Northern grid edge (degrees)
    dlon::T                    # Grid spacing in longitude
    dlat::T                    # Grid spacing in latitude
    nlon::Int                  # Number of longitude grid points
    nlat::Int                  # Number of latitude grid points
    
    # Grid cell assignments for observations
    cell_indices::Matrix{Vector{Int}}  # [nlat × nlon] -> observation indices
    observation_counts::Matrix{Int}    # [nlat × nlon] -> count per cell
end

"""
    ObservationThinning{T} <: AbstractSpatialProcessor{T}

Systematic observation thinning processor.

Parameters controlling how observations are reduced to at most one per grid
cell; applied by `thin_observations`. Built by the `SpatialProcessor`
constructor when thinning is enabled in the configuration.
"""
struct ObservationThinning{T} <: AbstractSpatialProcessor{T}
    thinning_distance::T       # Minimum distance between kept observations (km)
    grid_spacing::T           # Regular grid spacing for thinning (km)
    quality_threshold::T      # Minimum quality for retained observations
    platform_priorities::Dict{String, Int}  # Platform priority rankings
    preserve_best_quality::Bool              # Keep highest quality in each grid cell
end

"""
    SuperObservation{T} <: AbstractSpatialProcessor{T}

Super-observation creation processor.

Parameters controlling how nearby observations are merged into single
super-observations; applied by `create_super_observations`. Built by the
`SpatialProcessor` constructor when super-observations are enabled.
"""
struct SuperObservation{T} <: AbstractSpatialProcessor{T}
    combination_radius::T     # Radius for combining observations (km)
    minimum_obs_count::Int    # Minimum observations needed for super-ob
    maximum_obs_count::Int    # Maximum observations per super-ob
    error_correlation_model::Symbol  # :exponential, :gaussian, :spherical
    correlation_length::T     # Spatial correlation length scale (km)
    quality_weighting::Bool   # Weight by observation quality
end

"""
    SpatialProcessingConfig{T}

Configuration for spatial processing operations.

Groups the settings for thinning, super-observation creation, the lat/lon
grid, spatial quality control, and performance options. Consumed by the
`SpatialProcessor{T}(config)` constructor.
"""
struct SpatialProcessingConfig{T <: AbstractFloat}
    # Thinning Configuration
    thinning_enabled::Bool
    thinning_distance_km::T
    adaptive_thinning::Bool
    quality_based_thinning::Bool
    
    # Super-observation Configuration
    superob_enabled::Bool
    superob_radius_km::T      # Combination radius for super-observations (km)
    min_obs_per_superob::Int
    max_obs_per_superob::Int
    
    # Grid Configuration
    grid_lon_spacing::T       # Degrees
    grid_lat_spacing::T       # Degrees
    
    # Quality Control
    spatial_qc_enabled::Bool
    spatial_consistency_check::Bool
    neighbor_distance_km::T   # Search radius used by spatial QC neighbor lookup (km)
    
    # Performance Options
    parallel_processing::Bool
    use_spatial_index::Bool   # Build a BallTree for neighbor queries when true
end

"""
    SpatialProcessingResult{T}

Result of spatial processing with detailed statistics.

Returned by `create_super_observations(::SpatialProcessor, ...)`; carries the
merged super-observations, per-observation QC outputs, and bookkeeping counts.
"""
struct SpatialProcessingResult{T <: AbstractFloat}
    # Processed observations
    thinned_indices::Vector{Int}       # Indices of observations after thinning
    superob_locations::Vector{Tuple{T, T}}  # (lat, lon) of super-observations
    superob_values::Vector{T}          # Super-observation values
    superob_errors::Vector{T}          # Super-observation error estimates
    superob_counts::Vector{Int}        # Number of obs per super-observation
    
    # Quality control results
    spatial_qc_flags::Vector{Int}      # Spatial QC flags for each observation
    consistency_check_results::Vector{Bool}  # Spatial consistency results
    
    # Processing statistics
    original_count::Int                # Observation count before processing
    thinned_count::Int                 # Observation count retained after thinning
    superob_count::Int                 # Number of super-observations produced
    processing_time::Float64           # Wall-clock seconds spent processing
    memory_usage::Int                  # Bytes; currently always 0 (not tracked)
end

"""
    SpatialProcessor{T}

Main spatial processing orchestrator.

Holds the configuration plus processing state: the thinning and
super-observation processors (built in the constructor when enabled in the
config) and a spatial grid / ball tree that are populated lazily on first use.
"""
mutable struct SpatialProcessor{T <: AbstractFloat}
    config::SpatialProcessingConfig{T}
    thinning_processor::Union{ObservationThinning{T}, Nothing}  # `nothing` when thinning disabled
    superob_processor::Union{SuperObservation{T}, Nothing}      # `nothing` when super-obs disabled
    spatial_grid::Union{SpatialGrid{T}, Nothing}                # Built lazily by `thin_observations`
    spatial_tree::Union{BallTree, Nothing}  # For efficient spatial queries (built lazily)
    
    # Processing state
    last_grid_update::Int
    processing_statistics::Dict{String, Any}
end

# Constructor: build the optional thinning / super-observation processors from
# the configuration. The spatial grid and tree are left as `nothing` and are
# created lazily on first use.
function SpatialProcessor{T}(config::SpatialProcessingConfig{T}) where {T}
    thinning = nothing
    if config.thinning_enabled
        thinning = ObservationThinning{T}(
            config.thinning_distance_km,
            config.grid_lon_spacing * 111.0,  # Convert degrees to km
            T(0.8),                           # Default quality threshold
            Dict{String, Int}(),              # Platform priorities
            config.quality_based_thinning
        )
    end

    superob = nothing
    if config.superob_enabled
        superob = SuperObservation{T}(
            config.superob_radius_km,
            config.min_obs_per_superob,
            config.max_obs_per_superob,
            :exponential,                     # Default correlation model
            config.superob_radius_km / 3.0,   # Default correlation length
            true                              # Quality weighting enabled
        )
    end

    return SpatialProcessor{T}(
        config, thinning, superob, nothing, nothing,
        0, Dict{String, Any}()
    )
end

"""
    thin_observations(processor, locations, values, quality_flags) -> Vector{Int}

Perform observation thinning to reduce spatial density.

Assigns observations to the processor's spatial grid (building it on first
use) and keeps at most one observation per grid cell: the highest-quality one
when `preserve_best_quality` is set, otherwise the first qualified one. Cells
without any observation meeting `quality_threshold` contribute nothing.
Returns sorted indices of retained observations; returns all indices when
thinning is disabled.
"""
function thin_observations(
    processor::SpatialProcessor{T},
    locations::Matrix{T},  # [n_obs × 2] (lat, lon)
    values::Vector{T},
    quality_flags::Vector{Int}
) where {T}

    thinning = processor.thinning_processor
    if isnothing(thinning)
        # Thinning disabled: keep every observation.
        return collect(1:length(values))
    end

    # Build the spatial grid lazily on first use.
    if isnothing(processor.spatial_grid)
        processor.spatial_grid = create_spatial_grid(processor.config, locations)
    end
    grid = processor.spatial_grid

    kept = Int[]

    # Visit every grid cell and retain at most one observation per cell.
    for cell in grid.cell_indices
        isempty(cell) && continue

        # Discard observations below the quality threshold.
        qualified = [idx for idx in cell if quality_flags[idx] >= thinning.quality_threshold]
        isempty(qualified) && continue

        if thinning.preserve_best_quality
            # Keep the highest-quality observation in the cell.
            _, pos = findmax(quality_flags[qualified])
            push!(kept, qualified[pos])
        else
            # Keep first qualified observation (could implement other strategies)
            push!(kept, qualified[1])
        end
    end

    return sort(kept)
end

"""
    create_super_observations(processor, locations, values, errors) -> SpatialProcessingResult

Create super-observations by combining nearby observations.

Greedy single pass: each not-yet-processed observation gathers its neighbors
within `combination_radius`; groups of at least `minimum_obs_count` are merged
into an error/weight-averaged super-observation, otherwise the observation is
passed through unchanged (count 1). Every observation is marked processed
exactly once. If super-observation processing is disabled, all observations
are returned unchanged.
"""
function create_super_observations(
    processor::SpatialProcessor{T},
    locations::Matrix{T},  # [n_obs × 2] (lat, lon)
    values::Vector{T},
    errors::Vector{T}
) where {T}
    
    if isnothing(processor.superob_processor)
        # Return unchanged observations if super-ob disabled
        return SpatialProcessingResult{T}(
            collect(1:length(values)),
            [(locations[i, 1], locations[i, 2]) for i in 1:size(locations, 1)],
            values,
            errors,
            ones(Int, length(values)),
            zeros(Int, length(values)),
            fill(true, length(values)),
            length(values), length(values), length(values),
            0.0, 0
        )
    end
    
    superob = processor.superob_processor
    start_time = time()
    
    # Build spatial tree for efficient neighbor searching.
    # NOTE(review): `Haversine` is defined in Distances.jl (not visibly
    # imported here) and expects points ordered (lon, lat), while rows of
    # `locations` are documented as (lat, lon) — confirm both the import and
    # the coordinate order.
    if processor.config.use_spatial_index
        tree = BallTree(locations', Haversine(6371.0))  # Earth radius in km
        processor.spatial_tree = tree
    end
    
    n_obs = length(values)
    processed_mask = falses(n_obs)
    
    superob_locations = Tuple{T, T}[]
    superob_values = T[]
    superob_errors = T[]
    superob_counts = Int[]
    
    for i in 1:n_obs
        if processed_mask[i]
            continue
        end
        
        # Find nearby observations. Note the range search includes the query
        # observation itself, so `neighbors` counts obs `i` as a member.
        if !isnothing(processor.spatial_tree)
            neighbors = inrange(processor.spatial_tree, locations[i, :], superob.combination_radius)
        else
            neighbors = find_neighbors_bruteforce(locations, i, superob.combination_radius)
        end
        
        # Filter already processed neighbors
        neighbors = filter(idx -> !processed_mask[idx], neighbors)
        
        if length(neighbors) < superob.minimum_obs_count
            # Not enough observations for super-ob, keep original
            push!(superob_locations, (locations[i, 1], locations[i, 2]))
            push!(superob_values, values[i])
            push!(superob_errors, errors[i])
            push!(superob_counts, 1)
            processed_mask[i] = true
        else
            # Create super-observation
            if length(neighbors) > superob.maximum_obs_count
                # Limit to maximum count, keeping closest observations
                distances = [haversine_distance(locations[i, :], locations[j, :]) for j in neighbors]
                perm = sortperm(distances)
                neighbors = neighbors[perm[1:superob.maximum_obs_count]]
            end
            
            # Compute weighted super-observation
            weights = compute_superob_weights(errors[neighbors], superob)
            weighted_location = compute_weighted_location(locations[neighbors, :], weights)
            weighted_value = sum(weights .* values[neighbors]) / sum(weights)
            weighted_error = compute_superob_error(errors[neighbors], weights, superob)
            
            push!(superob_locations, (weighted_location[1], weighted_location[2]))
            push!(superob_values, weighted_value)
            push!(superob_errors, weighted_error)
            push!(superob_counts, length(neighbors))
            
            processed_mask[neighbors] .= true
        end
    end
    
    processing_time = time() - start_time
    
    # NOTE(review): every observation is marked processed in the loop above,
    # so `findall(.!processed_mask)` is always empty and `thinned_count` is
    # always zero — confirm whether pass-through (count-1) observations were
    # intended to stay unmarked here.
    return SpatialProcessingResult{T}(
        findall(.!processed_mask),  # Indices of individual observations kept
        superob_locations,
        superob_values,
        superob_errors,
        superob_counts,
        zeros(Int, length(values)),
        fill(true, length(values)),
        n_obs,
        length(findall(.!processed_mask)),
        length(superob_locations),
        processing_time,
        0  # Memory usage tracking would be implemented here
    )
end

"""
    spatial_quality_control(processor, locations, values) -> Vector{Int}

Perform spatial quality control checks on observations.

Flag meanings: `0` = passed, `1` = too few neighbors for a spatial check,
`2` = spatial outlier (more than 3 standard deviations from the neighbor
mean). Returns all zeros when `processor.config.spatial_qc_enabled` is false.
"""
function spatial_quality_control(
    processor::SpatialProcessor{T},
    locations::Matrix{T},  # [n_obs × 2] (lat, lon)
    values::Vector{T}
) where {T}

    n_obs = length(values)
    qc_flags = zeros(Int, n_obs)

    if !processor.config.spatial_qc_enabled
        return qc_flags
    end

    # Build (and cache on the processor) a spatial tree for neighbor searches.
    # NOTE(review): `Haversine` comes from Distances.jl and expects (lon, lat)
    # points, while rows of `locations` are (lat, lon) — confirm the import
    # and the coordinate order.
    if isnothing(processor.spatial_tree)
        tree = BallTree(locations', Haversine(6371.0))
        processor.spatial_tree = tree
    end

    for i in 1:n_obs
        # Find nearby observations; drop the query point itself.
        neighbors = inrange(processor.spatial_tree, locations[i, :], processor.config.neighbor_distance_km)
        neighbors = filter(j -> j != i, neighbors)

        if length(neighbors) < 3
            qc_flags[i] = 1  # Insufficient neighbors for spatial check
            continue
        end

        # Spatial consistency: flag the observation if it departs from its
        # neighbors by more than 3 standard deviations.
        neighbor_values = values[neighbors]
        obs_value = values[i]

        mean_neighbors = mean(neighbor_values)
        std_neighbors = std(neighbor_values)

        # Guard against zero spread: previously identical neighbors gave
        # std == 0, so ANY nonzero departure satisfied `> 3 * 0` and was
        # flagged as an outlier.
        if std_neighbors > 0 && abs(obs_value - mean_neighbors) > 3.0 * std_neighbors
            qc_flags[i] = 2  # Spatial outlier detected
        end
    end

    return qc_flags
end

"""
    compute_distances(locations1, locations2) -> Matrix{T}

Compute great circle distances between observation locations.

Rows of each input are (lat, lon) pairs in degrees; entry `(i, j)` of the
result is the haversine distance in km between `locations1[i, :]` and
`locations2[j, :]`.
"""
function compute_distances(
    locations1::Matrix{T},
    locations2::Matrix{T}
) where {T}

    n1 = size(locations1, 1)
    n2 = size(locations2, 1)

    # Pairwise distance matrix via a 2-D comprehension.
    return T[haversine_distance(locations1[i, :], locations2[j, :])
             for i in 1:n1, j in 1:n2]
end

# Helper functions
"""
    create_spatial_grid(config, locations) -> SpatialGrid

Build a regular lat/lon grid padded by one cell spacing around the bounding
box of `locations` (rows are (lat, lon) in degrees) and bin every observation
into its cell.
"""
function create_spatial_grid(config::SpatialProcessingConfig{T}, locations::Matrix{T}) where {T}
    dlon = config.grid_lon_spacing
    dlat = config.grid_lat_spacing

    # Pad the observation bounding box by one grid cell on every side.
    lon_min = minimum(locations[:, 2]) - dlon
    lon_max = maximum(locations[:, 2]) + dlon
    lat_min = minimum(locations[:, 1]) - dlat
    lat_max = maximum(locations[:, 1]) + dlat

    nlon = ceil(Int, (lon_max - lon_min) / dlon)
    nlat = ceil(Int, (lat_max - lat_min) / dlat)

    # Initialize every cell with its own empty index list.
    cell_indices = [Int[] for _ in 1:nlat, _ in 1:nlon]
    observation_counts = zeros(Int, nlat, nlon)

    # Assign each observation to its cell (indices clamped to grid bounds).
    for k in 1:size(locations, 1)
        lat = locations[k, 1]
        lon = locations[k, 2]

        j = clamp(ceil(Int, (lon - lon_min) / dlon), 1, nlon)
        i = clamp(ceil(Int, (lat - lat_min) / dlat), 1, nlat)

        push!(cell_indices[i, j], k)
        observation_counts[i, j] += 1
    end

    return SpatialGrid{T}(
        lon_min, lon_max, lat_min, lat_max,
        dlon, dlat,
        nlon, nlat, cell_indices, observation_counts
    )
end

"""
    haversine_distance(loc1, loc2) -> distance

Great circle distance in kilometres between two points given as
(latitude, longitude) pairs in degrees.

Accepts any 2-element indexable collection — `Vector`, `Tuple`, array slice —
generalizing the previous `Vector{T}`-only method so call sites that pass
coordinate tuples also dispatch here. The returned value has the element type
of the (promoted) inputs, as before.
"""
function haversine_distance(loc1, loc2)
    lat1, lon1 = deg2rad(loc1[1]), deg2rad(loc1[2])
    lat2, lon2 = deg2rad(loc2[1]), deg2rad(loc2[2])

    dlat = lat2 - lat1
    dlon = lon2 - lon1

    # Haversine formula; clamp the root argument so floating-point round-off
    # near antipodal points cannot push `asin` outside its [-1, 1] domain.
    a = sin(dlat / 2)^2 + cos(lat1) * cos(lat2) * sin(dlon / 2)^2
    c = 2 * asin(min(one(a), sqrt(a)))

    return oftype(c, 6371.0) * c  # Mean Earth radius in km
end

"""
    find_neighbors_bruteforce(locations, obs_idx, radius) -> Vector{Int}

Linear-scan neighbor search: return indices of all observations (rows of
`locations`, (lat, lon) degrees) within `radius` km of observation `obs_idx`,
excluding `obs_idx` itself. Fallback used when no spatial index is available.
"""
function find_neighbors_bruteforce(locations::Matrix{T}, obs_idx::Int, radius::T) where {T}
    reference = locations[obs_idx, :]

    # Collect every other row whose great-circle distance is within radius.
    return [k for k in 1:size(locations, 1)
            if k != obs_idx && haversine_distance(reference, locations[k, :]) <= radius]
end

"""
    temporal_thinning(observations::Vector{AbstractObservation{T}},
                     config::ThinningConfig{T})

Perform temporal thinning of observations with regular time intervals.

Observations are bucketed into intervals of `config.temporal_interval` hours
(measured from the earliest observation time) and the highest-quality
observation in each bucket is retained. Observations below
`config.quality_threshold` are discarded up front, and at most
`config.max_observations` results are returned (highest quality first,
earliest time breaking ties).

# Arguments
- `observations`: Vector of observations to thin temporally
- `config::ThinningConfig{T}`: Thinning configuration with temporal_interval

# Returns
- `Vector{AbstractObservation{T}}`: Temporally thinned observations
- `Vector{Int}`: Indices of retained observations
"""
function temporal_thinning(observations::Vector{AbstractObservation{T}},
                          config::ThinningConfig{T}) where T

    if isempty(observations) || config.temporal_interval <= 0
        return observations, collect(1:length(observations))
    end

    # Group observation (index, obs) pairs by time-interval bucket.
    time_groups = Dict{Int,Vector{Tuple{Int,AbstractObservation{T}}}}()

    min_time = minimum(obs.time for obs in observations)
    interval_hours = config.temporal_interval

    for (i, obs) in enumerate(observations)
        # Skip low quality observations
        if obs.quality_metrics.overall_quality < config.quality_threshold
            continue
        end

        # Bucket index: elapsed milliseconds -> hours -> interval number.
        time_diff = Dates.value(obs.time - min_time) / (1000 * 3600)
        interval_idx = floor(Int, time_diff / interval_hours)

        bucket = get!(time_groups, interval_idx) do
            Tuple{Int,AbstractObservation{T}}[]
        end
        push!(bucket, (i, obs))
    end

    # Select the best observation from each time interval. Iterate buckets in
    # sorted key order so the output ordering is deterministic (Dict iteration
    # order is not).
    selected_obs = AbstractObservation{T}[]
    selected_indices = Int[]

    for key in sort!(collect(keys(time_groups)))
        group_obs = time_groups[key]

        # Highest quality observation in the group (first wins ties).
        best_idx = 1
        best_quality = group_obs[1][2].quality_metrics.overall_quality
        for (j, (_, obs)) in enumerate(group_obs)
            quality = obs.quality_metrics.overall_quality
            if quality > best_quality
                best_quality = quality
                best_idx = j
            end
        end

        idx, obs = group_obs[best_idx]
        push!(selected_obs, obs)
        push!(selected_indices, idx)
    end

    # Apply maximum observation limit
    if length(selected_obs) > config.max_observations
        # Rank by quality descending, then time ascending.
        # NOTE: the previous `rev=(true, false)` is not a valid `rev` keyword
        # for `Base.sort!` (it must be a Bool) and raised at runtime; negating
        # the quality key achieves the intended mixed-direction ordering.
        quality_time_pairs = [(obs.quality_metrics.overall_quality, obs.time, i)
                             for (i, obs) in enumerate(selected_obs)]
        sort!(quality_time_pairs, by = x -> (-x[1], x[2]))

        keep_indices = [quality_time_pairs[i][3] for i in 1:config.max_observations]
        selected_obs = selected_obs[keep_indices]
        selected_indices = selected_indices[keep_indices]
    end

    return selected_obs, selected_indices
end

# =============================================================================  
# Super-Observation Creation Algorithms
# =============================================================================

"""
    create_super_observations(observations::Vector{AbstractObservation{T}},
                             config::SuperObservationConfig{T})

Create super-observations through spatial aggregation of nearby observations.

Each unused observation seeds a radius search (`config.radius`); when at least
`config.min_observations` unused observations fall inside, they are combined
with the method selected by `config.method`. Observations that never accrue
enough neighbors are dropped from the output (they are not passed through
individually).

# Arguments
- `observations`: Vector of observations to aggregate
- `config::SuperObservationConfig{T}`: Super-observation configuration

# Returns
- `Vector{AbstractObservation{T}}`: Super-observations created
- `Dict{Int,Vector{Int}}`: Mapping from super-obs index to constituent obs indices
"""
function create_super_observations(observations::Vector{AbstractObservation{T}},
                                  config::SuperObservationConfig{T}) where T

    if isempty(observations)
        return AbstractObservation{T}[], Dict{Int,Vector{Int}}()
    end

    # Build spatial index for efficient neighbor finding
    index = build_spatial_index(observations)
    used_mask = falses(length(observations))

    super_obs = AbstractObservation{T}[]
    constituent_map = Dict{Int,Vector{Int}}()
    super_idx = 0

    for (i, obs) in enumerate(observations)
        if used_mask[i]
            continue
        end

        # Find nearby observations within aggregation radius.
        # NOTE(review): assumes `spatial_search` takes (lon, lat) centers —
        # confirm against its definition.
        center = (obs.location.longitude, obs.location.latitude)
        nearby_indices = spatial_search(index, center, config.radius)

        # Filter to unused observations
        candidates = [idx for idx in nearby_indices if !used_mask[idx]]

        # Check minimum observation requirement
        if length(candidates) < config.min_observations
            continue
        end

        # Limit to maximum observations: rank by quality (descending), then
        # distance to the seed observation (ascending), and keep the best.
        if length(candidates) > config.max_observations
            # `haversine_distance` takes (lat, lon) pairs; the previous code
            # passed (lon, lat) tuples, which both swapped the coordinates and
            # failed to match the `Vector{T}` method signature.
            seed_loc = [obs.location.latitude, obs.location.longitude]
            scores = map(candidates) do idx
                cand = observations[idx]
                dist = haversine_distance(seed_loc,
                    [cand.location.latitude, cand.location.longitude])
                (-cand.quality_metrics.overall_quality, dist)
            end
            keep = sortperm(scores)[1:config.max_observations]
            candidates = candidates[keep]
        end

        # Create super-observation with the configured combination method.
        if config.method == SIMPLE_AVERAGE
            super_ob = create_simple_average_super_obs(observations[candidates], config)
        elseif config.method == QUALITY_WEIGHTED
            super_ob = create_quality_weighted_super_obs(observations[candidates], config)
        elseif config.method == ERROR_WEIGHTED
            super_ob = create_error_weighted_super_obs(observations[candidates], config)
        elseif config.method == OPTIMAL_INTERPOLATION
            super_ob = create_optimal_interpolation_super_obs(observations[candidates], config)
        else
            error("Super-observation method $(config.method) not implemented")
        end

        if super_ob !== nothing
            push!(super_obs, super_ob)
            super_idx += 1
            constituent_map[super_idx] = candidates

            # Mark constituent observations as used
            used_mask[candidates] .= true
        end
    end

    return super_obs, constituent_map
end

"""
    create_simple_average_super_obs(obs_group, config)

Create super-observation using simple unweighted averaging.

Position, pressure and value are unweighted means; the combined error is the
RMS of the member errors, combined with `config.representativeness_error` in
quadrature and scaled by `config.error_inflation`. The first member serves as
the template for type, time and QC flags. Returns `nothing` for an empty group.
"""
function create_simple_average_super_obs(obs_group::Vector{AbstractObservation{T}},
                                        config::SuperObservationConfig{T}) where T

    isempty(obs_group) && return nothing

    # Unweighted mean position.
    mean_lon = mean([obs.location.longitude for obs in obs_group])
    mean_lat = mean([obs.location.latitude for obs in obs_group])
    mean_pressure = mean([obs.location.pressure for obs in obs_group])

    # Unweighted mean value.
    mean_value = mean([obs.value for obs in obs_group])

    # RMS of member errors, then representativeness error and inflation.
    member_errors = [obs.error for obs in obs_group]
    rms_error = sqrt(mean(member_errors.^2))
    combined_error = sqrt(rms_error^2 + config.representativeness_error^2)
    combined_error *= config.error_inflation

    # First observation serves as template (type, reference time, QC flags).
    template = obs_group[1]

    merged_location = typeof(template.location)(mean_lon, mean_lat, mean_pressure)
    merged_metadata = average_observation_metadata(obs_group, config)
    merged_quality = create_consensus_quality_metrics(obs_group, config)

    return typeof(template)(
        template.observation_type,
        merged_location,
        template.time,
        mean_value,
        combined_error,
        merged_quality,
        merged_metadata,
        template.qc_flags
    )
end

"""
    create_quality_weighted_super_obs(obs_group, config)

Create super-observation using quality-weighted averaging.

Weights are the squared overall quality scores, normalized to sum to one; if
every quality is zero, equal weights are used. The highest-quality member
serves as the template for type, time and QC flags. Returns `nothing` for an
empty group.
"""
function create_quality_weighted_super_obs(obs_group::Vector{AbstractObservation{T}},
                                          config::SuperObservationConfig{T}) where T

    isempty(obs_group) && return nothing

    n = length(obs_group)
    qualities = [obs.quality_metrics.overall_quality for obs in obs_group]

    # Quality weights, squared to emphasize high quality; fall back to equal
    # weights when every quality is zero.
    weights = if all(q -> q ≈ 0, qualities)
        ones(T, n) ./ n
    else
        squared = qualities .^ 2
        squared ./ sum(squared)
    end

    # Weighted mean position and value.
    wavg(vals) = sum(weights .* vals)
    mean_lon = wavg([obs.location.longitude for obs in obs_group])
    mean_lat = wavg([obs.location.latitude for obs in obs_group])
    mean_pressure = wavg([obs.location.pressure for obs in obs_group])
    mean_value = wavg([obs.value for obs in obs_group])

    # Weighted error variance, then representativeness error and inflation.
    member_errors = [obs.error for obs in obs_group]
    weighted_error = sqrt(sum(weights .* member_errors.^2))
    combined_error = sqrt(weighted_error^2 + config.representativeness_error^2)
    combined_error *= config.error_inflation

    # Highest quality member is the template (type, reference time, QC flags).
    template = obs_group[argmax(qualities)]

    merged_location = typeof(template.location)(mean_lon, mean_lat, mean_pressure)
    merged_metadata = average_observation_metadata(obs_group, config)
    merged_quality = create_consensus_quality_metrics(obs_group, config)

    return typeof(template)(
        template.observation_type,
        merged_location,
        template.time,
        mean_value,
        combined_error,
        merged_quality,
        merged_metadata,
        template.qc_flags
    )
end

"""
    create_error_weighted_super_obs(obs_group, config)

Create super-observation using error-weighted averaging (inverse variance
weighting).

Zero errors are floored at `1e-6` before weighting. The combined error is the
optimal inverse-variance error, combined with the representativeness error in
quadrature and scaled by `config.error_inflation`. The lowest-error member
(after flooring) serves as the template. Returns `nothing` for an empty group.
"""
function create_error_weighted_super_obs(obs_group::Vector{AbstractObservation{T}},
                                        config::SuperObservationConfig{T}) where T

    isempty(obs_group) && return nothing

    member_errors = [obs.error for obs in obs_group]

    # Floor zero errors at a small minimum so inverse-variance weights stay finite.
    if any(e -> e ≈ 0, member_errors)
        member_errors = max.(member_errors, T(1e-6))
    end

    inv_variances = 1 ./ member_errors.^2
    weights = inv_variances ./ sum(inv_variances)

    # Weighted mean position and value.
    wavg(vals) = sum(weights .* vals)
    mean_lon = wavg([obs.location.longitude for obs in obs_group])
    mean_lat = wavg([obs.location.latitude for obs in obs_group])
    mean_pressure = wavg([obs.location.pressure for obs in obs_group])
    mean_value = wavg([obs.value for obs in obs_group])

    # Optimal error for inverse-variance weighting.
    optimal_error = 1 / sqrt(sum(inv_variances))

    # Add representativeness error and inflation.
    combined_error = sqrt(optimal_error^2 + config.representativeness_error^2)
    combined_error *= config.error_inflation

    # Lowest-error member is the template (type, reference time, QC flags).
    template = obs_group[argmin(member_errors)]

    merged_location = typeof(template.location)(mean_lon, mean_lat, mean_pressure)
    merged_metadata = average_observation_metadata(obs_group, config)
    merged_quality = create_consensus_quality_metrics(obs_group, config)

    return typeof(template)(
        template.observation_type,
        merged_location,
        template.time,
        mean_value,
        combined_error,
        merged_quality,
        merged_metadata,
        template.qc_flags
    )
end

"""
    create_optimal_interpolation_super_obs(obs_group, config)

Create super-observation using optimal interpolation principles.

Falls back to simple unweighted averaging for groups of fewer than two
observations. Weights come from a Kalman-gain-style expression built from the
observation and background covariance helpers (defined elsewhere).
"""
function create_optimal_interpolation_super_obs(obs_group::Vector{AbstractObservation{T}},
                                               config::SuperObservationConfig{T}) where T
    
    if length(obs_group) < 2
        return create_simple_average_super_obs(obs_group, config)
    end
    
    n_obs = length(obs_group)
    
    # Calculate observation covariance matrix
    R = create_observation_covariance_matrix(obs_group, config)
    
    # Calculate background covariance (simplified)
    B = create_background_covariance_matrix(obs_group, config)
    
    # Optimal interpolation weights.
    # NOTE(review): `H` is a length-n vector, so `B * H'` multiplies an
    # (n×n)-shaped `B` by a 1×n row — these products only make sense for
    # particular shapes of `B` and `R` returned by the helpers above (not
    # visible here). Confirm the shapes; as written this looks dimensionally
    # inconsistent for an n×n background covariance.
    H = ones(T, n_obs)  # Observation operator (identity for same variable)
    K = B * H' * inv(H * B * H' + R)
    
    weights = K / sum(K)  # Normalize weights
    
    # Weighted averages using OI weights
    avg_lon = sum(weights .* [obs.location.longitude for obs in obs_group])
    avg_lat = sum(weights .* [obs.location.latitude for obs in obs_group]) 
    avg_pressure = sum(weights .* [obs.location.pressure for obs in obs_group])
    
    avg_value = sum(weights .* [obs.value for obs in obs_group])
    
    # Optimal interpolation (analysis) error.
    # NOTE(review): mixes `inv(R)` and `inv(B)` with a scalar-style `H' * ... * H`
    # contraction — same shape concern as above; verify against the covariance
    # helper definitions.
    analysis_error_var = inv(H' * inv(R) * H + inv(B))
    avg_error = sqrt(analysis_error_var[1,1])
    
    # Add representativeness error and inflation
    total_error = sqrt(avg_error^2 + config.representativeness_error^2)
    total_error *= config.error_inflation
    
    # Use first observation as template
    template_obs = obs_group[1]
    reference_time = template_obs.time
    
    # Create super-observation components
    super_location = typeof(template_obs.location)(
        avg_lon, avg_lat, avg_pressure  
    )
    
    avg_metadata = average_observation_metadata(obs_group, config)
    super_quality = create_consensus_quality_metrics(obs_group, config)
    
    # Create super-observation
    super_obs = typeof(template_obs)(
        template_obs.observation_type,
        super_location,
        reference_time,
        avg_value, 
        total_error,
        super_quality,
        avg_metadata,
        template_obs.qc_flags
    )
    
    return super_obs
end

# =============================================================================
# Helper Functions for Super-Observation Creation  
# =============================================================================

"""
    average_observation_metadata(obs_group, config)

Create averaged metadata from constituent observations.

Currently returns the first observation's metadata as a representative value;
a full implementation would aggregate numeric metadata fields.

# Throws
- `ArgumentError` if `obs_group` is empty.
"""
function average_observation_metadata(obs_group::Vector{AbstractObservation{T}},
                                     config::SuperObservationConfig{T}) where T

    # The previous empty-group branch returned `obs_group[1].metadata`, which
    # indexed into an empty vector and raised a BoundsError; fail with a clear
    # error instead.
    if isempty(obs_group)
        throw(ArgumentError("obs_group must contain at least one observation"))
    end

    # Use first observation metadata as template.
    # In full implementation, would aggregate numeric metadata fields.
    return obs_group[1].metadata
end

"""
    create_consensus_quality_metrics(obs_group, config)  

Create consensus quality metrics from constituent observations.
"""
function create_consensus_quality_metrics(obs_group::Vector{AbstractObservation{T}},
                                         config::SuperObservationConfig{T}) where T
    
    if isempty(obs_group)
        return obs_group[1].quality_metrics
    end
    
    n_obs = length(obs_group)
    qualities = [obs.quality_metrics.overall_quality for obs in obs_group]
    
    # Consensus quality based on threshold
    consensus_count = count(q -> q >= config.consensus_threshold, qualities)
    consensus_fraction = consensus_count / n_obs
    
    if consensus_fraction >= 0.5
        # Majority consensus - use average of good quality observations
        good_qualities = filter(q -> q >= config.consensus_threshold, qualities)
        overall_quality = mean(good_qualities)
    else
        # No consensus - use median quality
        overall_quality = median(qualities)
    end
    
    # Use first observation quality metrics as template
    template_quality = obs_group[1].quality_metrics
    
    # Create new quality metrics with consensus overall quality
    # In full implementation, would aggregate all quality components
    super_quality = typeof(template_quality)(
        overall_quality,
        template_quality.background_check,
        template_quality.spatial_consistency,
        template_quality.temporal_consistency, 
        template_quality.instrument_quality
    )
    
    return super_quality
end

"""
    create_observation_covariance_matrix(obs_group, config)

Create observation error covariance matrix for optimal interpolation.
"""
function create_observation_covariance_matrix(obs_group::Vector{AbstractObservation{T}},
                                             config::SuperObservationConfig{T}) where T
    
    n_obs = length(obs_group)
    R = Matrix{T}(undef, n_obs, n_obs)
    
    for i in 1:n_obs
        for j in 1:n_obs
            if i == j
                # Diagonal: observation error variance
                R[i,j] = obs_group[i].error^2
            else
                # Off-diagonal: correlation based on distance
                obs_i = obs_group[i]
                obs_j = obs_group[j]
                
                dist = haversine_distance(
                    (obs_i.location.longitude, obs_i.location.latitude),
                    (obs_j.location.longitude, obs_j.location.latitude)
                )
                
                # Simple exponential correlation
                correlation_length = T(10.0)  # km
                correlation = exp(-dist / correlation_length)
                
                R[i,j] = correlation * sqrt(obs_group[i].error * obs_group[j].error)
            end
        end
    end
    
    return R
end

"""
    create_background_covariance_matrix(obs_group, config)

Create background error covariance matrix for optimal interpolation.
"""
function create_background_covariance_matrix(obs_group::Vector{AbstractObservation{T}},
                                            config::SuperObservationConfig{T}) where T
    
    n_obs = length(obs_group)
    B = Matrix{T}(undef, n_obs, n_obs)
    
    # Simplified background covariance
    background_variance = T(1.0)  # Placeholder value
    correlation_length = T(50.0)  # km
    
    for i in 1:n_obs
        for j in 1:n_obs
            obs_i = obs_group[i]
            obs_j = obs_group[j]
            
            dist = haversine_distance(
                (obs_i.location.longitude, obs_i.location.latitude),
                (obs_j.location.longitude, obs_j.location.latitude)
            )
            
            # Exponential correlation function
            correlation = exp(-dist / correlation_length)
            B[i,j] = background_variance * correlation
        end
    end
    
    return B
end

# =============================================================================
# Processing Pipeline and Optimization
# =============================================================================

"""
    optimize_observation_distribution(observations::Vector{AbstractObservation{T}},
                                    target_count::Int,
                                    quality_weight::T = 0.7,
                                    spatial_weight::T = 0.3)

Optimize observation distribution for analysis quality using combined criteria.

# Arguments
- `observations`: Input observation vector
- `target_count::Int`: Target number of observations
- `quality_weight::T`: Weight for quality-based selection
- `spatial_weight::T`: Weight for spatial distribution

# Returns
- `Vector{AbstractObservation{T}}`: Optimized observation set
- `Dict{String,Any}`: Optimization statistics and metrics
"""
function optimize_observation_distribution(observations::Vector{AbstractObservation{T}},
                                          target_count::Int,
                                          quality_weight::T = T(0.7),
                                          spatial_weight::T = T(0.3)) where T
    
    if isempty(observations) || target_count <= 0
        return observations, Dict{String,Any}()
    end
    
    if length(observations) <= target_count
        return observations, Dict{String,Any}("optimization_needed" => false)
    end
    
    # Calculate observation scores combining quality and spatial distribution
    scores = calculate_combined_scores(observations, quality_weight, spatial_weight)
    
    # Sort by combined score
    score_pairs = [(scores[i], i) for i in 1:length(observations)]
    sort!(score_pairs, by=x->x[1], rev=true)
    
    # Select top observations
    selected_indices = [pair[2] for pair in score_pairs[1:target_count]]
    selected_obs = observations[selected_indices]
    
    # Calculate optimization statistics
    stats = Dict{String,Any}(
        "initial_count" => length(observations),
        "final_count" => length(selected_obs),
        "reduction_ratio" => length(selected_obs) / length(observations),
        "average_quality" => mean([obs.quality_metrics.overall_quality for obs in selected_obs]),
        "quality_range" => extrema([obs.quality_metrics.overall_quality for obs in selected_obs]),
        "spatial_coverage" => calculate_spatial_coverage(selected_obs),
        "optimization_needed" => true
    )
    
    return selected_obs, stats
end

"""
    calculate_combined_scores(observations, quality_weight, spatial_weight)

Calculate combined scores for observation optimization.
"""
function calculate_combined_scores(observations::Vector{AbstractObservation{T}},
                                  quality_weight::T,
                                  spatial_weight::T) where T
    
    n_obs = length(observations)
    scores = zeros(T, n_obs)
    
    # Quality scores (normalized)
    qualities = [obs.quality_metrics.overall_quality for obs in observations]
    max_quality = maximum(qualities)
    if max_quality > 0
        quality_scores = qualities ./ max_quality
    else
        quality_scores = ones(T, n_obs)
    end
    
    # Spatial distribution scores
    spatial_scores = calculate_spatial_importance_scores(observations)
    
    # Combined scores
    for i in 1:n_obs
        scores[i] = quality_weight * quality_scores[i] + spatial_weight * spatial_scores[i]
    end
    
    return scores
end

"""
    calculate_spatial_importance_scores(observations)

Calculate spatial importance scores based on observation distribution.
"""
function calculate_spatial_importance_scores(observations::Vector{AbstractObservation{T}}) where T
    
    n_obs = length(observations)
    spatial_scores = ones(T, n_obs)
    
    if n_obs <= 1
        return spatial_scores
    end
    
    # Build spatial index
    index = build_spatial_index(observations)
    
    # Calculate local density for each observation
    search_radius = T(100.0)  # km
    
    for i in 1:n_obs
        obs = observations[i]
        center = (obs.location.longitude, obs.location.latitude)
        
        # Count nearby observations
        nearby_indices = spatial_search(index, center, search_radius)
        local_density = length(nearby_indices)
        
        # Higher score for lower density (more unique locations)
        spatial_scores[i] = 1.0 / max(1.0, T(local_density))
    end
    
    # Normalize spatial scores
    max_score = maximum(spatial_scores)
    if max_score > 0
        spatial_scores ./= max_score
    end
    
    return spatial_scores
end

"""
    calculate_spatial_coverage(observations)

Calculate spatial coverage metrics for observation set.
"""
function calculate_spatial_coverage(observations::Vector{AbstractObservation{T}}) where T
    
    if isempty(observations)
        return Dict{String,T}()
    end
    
    lons = [obs.location.longitude for obs in observations]
    lats = [obs.location.latitude for obs in observations]
    
    lon_range = maximum(lons) - minimum(lons)
    lat_range = maximum(lats) - minimum(lats)
    
    # Calculate average nearest neighbor distance
    if length(observations) > 1
        avg_spacing = calculate_average_nearest_neighbor_distance(observations)
    else
        avg_spacing = T(0.0)
    end
    
    coverage = Dict{String,T}(
        "longitude_range" => lon_range,
        "latitude_range" => lat_range,
        "total_area" => lon_range * lat_range,
        "average_spacing_km" => avg_spacing,
        "observation_density" => T(length(observations)) / max(T(1.0), lon_range * lat_range)
    )
    
    return coverage
end

"""
    calculate_average_nearest_neighbor_distance(observations)

Calculate average distance to nearest neighbor for spatial distribution analysis.
"""
function calculate_average_nearest_neighbor_distance(observations::Vector{AbstractObservation{T}}) where T
    
    n_obs = length(observations)
    if n_obs <= 1
        return T(0.0)
    end
    
    total_distance = T(0.0)
    
    for i in 1:n_obs
        obs_i = observations[i]
        min_distance = T(Inf)
        
        for j in 1:n_obs
            if i != j
                obs_j = observations[j]
                dist = haversine_distance(
                    (obs_i.location.longitude, obs_i.location.latitude),
                    (obs_j.location.longitude, obs_j.location.latitude)
                )
                min_distance = min(min_distance, dist)
            end
        end
        
        total_distance += min_distance
    end
    
    return total_distance / T(n_obs)
end

"""
    process_observations(observations::Vector{AbstractObservation{T}},
                        pipeline::ProcessingPipeline{T})

Process observations through sequential pipeline of spatial processing operations.

# Arguments
- `observations`: Input observation vector
- `pipeline::ProcessingPipeline{T}`: Processing pipeline configuration

# Returns  
- `Vector{AbstractObservation{T}}`: Final processed observations
- `Dict{String,Any}`: Processing statistics and diagnostics
"""
function process_observations(observations::Vector{AbstractObservation{T}},
                             pipeline::ProcessingPipeline{T}) where T
    
    current_obs = copy(observations)
    processing_stats = Dict{String,Any}()
    
    processing_stats["initial_count"] = length(current_obs)
    processing_stats["stage_results"] = []
    
    # Process through each pipeline stage
    for (stage_idx, (stage_func, stage_config)) in enumerate(zip(pipeline.stages, pipeline.configs))
        
        stage_start_time = time()
        stage_input_count = length(current_obs)
        
        try
            if pipeline.parallel && nprocs() > 1
                # Parallel processing
                current_obs = stage_func(current_obs, stage_config)
            else
                # Sequential processing
                if isa(stage_config, ThinningConfig)
                    current_obs, _ = stage_func(current_obs, stage_config)
                elseif isa(stage_config, SuperObservationConfig)
                    current_obs, _ = stage_func(current_obs, stage_config)
                else
                    current_obs = stage_func(current_obs, stage_config)
                end
            end
            
            stage_duration = time() - stage_start_time
            stage_output_count = length(current_obs)
            
            stage_result = Dict{String,Any}(
                "stage" => stage_idx,
                "function" => string(stage_func),
                "input_count" => stage_input_count,
                "output_count" => stage_output_count,
                "reduction_ratio" => stage_output_count / max(1, stage_input_count),
                "duration_seconds" => stage_duration,
                "success" => true
            )
            
            push!(processing_stats["stage_results"], stage_result)
            
        catch e
            # Handle stage errors
            stage_result = Dict{String,Any}(
                "stage" => stage_idx,
                "function" => string(stage_func),
                "input_count" => stage_input_count,
                "output_count" => 0,
                "error" => string(e),
                "success" => false
            )
            
            push!(processing_stats["stage_results"], stage_result)
            
            if !pipeline.validation
                rethrow(e)
            end
            
            @warn "Stage $stage_idx failed: $e"
            break
        end
        
        # Validation after each stage
        if pipeline.validation
            validation_result = validate_spatial_processing(current_obs, observations)
            stage_result["validation"] = validation_result
        end
        
        if isempty(current_obs)
            @warn "Stage $stage_idx produced empty observation set"
            break
        end
    end
    
    processing_stats["final_count"] = length(current_obs)
    processing_stats["overall_reduction"] = length(current_obs) / max(1, length(observations))
    
    return current_obs, processing_stats
end

"""
    validate_spatial_processing(processed_obs, original_obs)

Validate spatial processing results for quality and consistency.

# Arguments  
- `processed_obs`: Processed observation vector
- `original_obs`: Original observation vector

# Returns
- `Dict{String,Any}`: Validation results and metrics
"""
function validate_spatial_processing(processed_obs::Vector{AbstractObservation{T}},
                                    original_obs::Vector{AbstractObservation{T}}) where T
    
    validation_result = Dict{String,Any}("valid" => true, "warnings" => String[], "errors" => String[])
    
    if isempty(processed_obs)
        push!(validation_result["errors"], "Processed observation set is empty")
        validation_result["valid"] = false
        return validation_result
    end
    
    if isempty(original_obs)
        push!(validation_result["warnings"], "Original observation set is empty")
        return validation_result
    end
    
    # Basic count validation
    reduction_ratio = length(processed_obs) / length(original_obs)
    validation_result["reduction_ratio"] = reduction_ratio
    
    if reduction_ratio > 1.0
        push!(validation_result["errors"], "Processed set larger than original set")
        validation_result["valid"] = false
    end
    
    # Quality preservation check
    orig_avg_quality = mean([obs.quality_metrics.overall_quality for obs in original_obs])
    proc_avg_quality = mean([obs.quality_metrics.overall_quality for obs in processed_obs])
    
    validation_result["original_avg_quality"] = orig_avg_quality
    validation_result["processed_avg_quality"] = proc_avg_quality
    
    if proc_avg_quality < orig_avg_quality * T(0.8)
        push!(validation_result["warnings"], "Significant quality degradation detected")
    end
    
    # Spatial coverage check
    orig_coverage = calculate_spatial_coverage(original_obs)
    proc_coverage = calculate_spatial_coverage(processed_obs)
    
    validation_result["coverage_change"] = Dict(
        "longitude_range_ratio" => proc_coverage["longitude_range"] / max(T(1e-10), orig_coverage["longitude_range"]),
        "latitude_range_ratio" => proc_coverage["latitude_range"] / max(T(1e-10), orig_coverage["latitude_range"])
    )
    
    # Check for extreme spatial reduction
    if proc_coverage["longitude_range"] < orig_coverage["longitude_range"] * T(0.5) ||
       proc_coverage["latitude_range"] < orig_coverage["latitude_range"] * T(0.5)
        push!(validation_result["warnings"], "Significant spatial coverage reduction")
    end
    
    return validation_result
end

"""
    sample(collection, n::Int; replace::Bool=false)

Draw `n` random elements from `collection` (simple utility implementation).

With `replace=true`, elements may repeat. With `replace=false` (default), the
returned elements are distinct; requesting more distinct elements than the
collection holds throws an `ArgumentError`.

NOTE(review): the original fell back to sampling WITH replacement whenever
`n >= length(collection)`, returning duplicates even when `replace=false` was
requested, and it called `Random.randperm` without `Random` being imported.
"""
function sample(collection, n::Int; replace::Bool=false)
    m = length(collection)
    if replace
        return collection[rand(1:m, n)]
    end
    n <= m || throw(ArgumentError("cannot draw $n distinct elements from a collection of length $m"))
    # The indices of the n smallest of m iid uniforms form a uniform random
    # n-subset in random order — equivalent to randperm(m)[1:n] but using only
    # Base (`rand`, `partialsortperm`), avoiding the missing Random dependency.
    return collection[partialsortperm(rand(m), 1:n)]
end

end # module SpatialProcessing