"""
    ProcessingPipeline

Complete workflow orchestration module for GSI observation processing.
This module integrates all observation processing components (DataFormats, QualityControl,
ForwardOperators, BiasCorrection, SpatialProcessing) into a unified, configurable 
processing pipeline with advanced features for production-grade atmospheric data assimilation.

# Key Features

- **Complete Workflow Orchestration**: End-to-end observation processing
- **Flexible Configuration System**: Configurable processing stages and parameters
- **Error Handling & Recovery**: Robust error management with recovery mechanisms
- **Progress Monitoring**: Real-time progress tracking and performance profiling
- **Parallel Processing**: Multi-threaded processing coordination
- **Checkpointing**: Save/restore capability for long processing runs
- **Memory Management**: Optimized memory usage and garbage collection
- **Quality Assurance**: Comprehensive validation and quality metrics

# Processing Stages

## 1. Data Ingestion & Format Conversion
- **Format Detection**: Automatic detection of BUFR, PrepBUFR, NetCDF, ASCII formats
- **Decoder Selection**: Optimal decoder selection based on format and content
- **Metadata Extraction**: Complete extraction of observation metadata
- **Error Recovery**: Robust handling of corrupted or malformed data

## 2. Quality Control Processing  
- **Multi-Stage QC**: Gross check, background check, buddy check, spatial QC
- **Adaptive Thresholds**: Dynamic quality control thresholds
- **Platform-Specific QC**: Tailored quality control for different observation types
- **Quality Flag Management**: Comprehensive quality flag tracking and propagation

## 3. Forward Operator Computation
- **H(x) Calculation**: Model equivalent computation using forward operators
- **CRTM Integration**: Advanced radiative transfer modeling for satellite observations
- **Tangent Linear/Adjoint**: Support for variational data assimilation
- **Innovation Computation**: Observation-minus-background calculations

## 4. Bias Correction
- **Variational Bias Correction (VarBC)**: Adaptive satellite radiance bias correction
- **Conventional Bias Correction**: Air mass and systematic bias corrections
- **Coefficient Updates**: Real-time bias coefficient estimation and updates
- **Quality Integration**: Bias correction integrated with quality control

## 5. Spatial Processing
- **Observation Thinning**: Optimal spatial distribution of observations
- **Super-Observations**: Combine nearby observations with error weighting
- **Spatial Quality Control**: Location-based consistency checking
- **Grid Optimization**: Optimize observation distribution for analysis grid

"""
module ProcessingPipeline

using LinearAlgebra
using Statistics
using Dates
using Base.Threads
using Printf
using Logging

# Import required modules
using ..DataFormats
using ..QualityControl  
using ..ForwardOperators
using ..CRTMInterface
using ..BiasCorrection
using ..SpatialProcessing
using ..ObservationTypes
using ..GSICoreAnalysis: AbstractAnalysisConfig

# Export main types and functions
export ObservationProcessingConfig, ProcessingResult, ProcessingPipeline
export ProcessingStage, ProcessingState, ProcessingStatistics
export process_observations, preprocess_observations, apply_quality_control
export compute_innovations, optimize_observation_usage
export validate_processing_chain, generate_processing_report
export save_checkpoint, restore_checkpoint, cleanup_processing

"""
    ProcessingStage

Enumeration of processing stages in the observation processing pipeline.
"""
@enum ProcessingStage begin
    STAGE_INITIALIZATION = 1
    STAGE_DATA_INGESTION = 2  
    STAGE_FORMAT_CONVERSION = 3
    STAGE_PRELIMINARY_QC = 4
    STAGE_FORWARD_OPERATORS = 5
    STAGE_BIAS_CORRECTION = 6
    STAGE_SPATIAL_PROCESSING = 7
    STAGE_FINAL_QC = 8
    STAGE_OUTPUT_GENERATION = 9
    STAGE_COMPLETION = 10
end

"""
    ProcessingState

Current state of the processing pipeline.
"""
@enum ProcessingState begin
    STATE_IDLE = 1
    STATE_RUNNING = 2
    STATE_PAUSED = 3
    STATE_ERROR = 4
    STATE_COMPLETED = 5
    STATE_CANCELLED = 6
end

"""
    ObservationProcessingConfig{T}

Comprehensive configuration for observation processing pipeline.
"""
struct ObservationProcessingConfig{T <: AbstractFloat}
    # General Configuration
    precision_type::Type{T}
    analysis_time::DateTime
    processing_mode::Symbol              # :operational, :research, :development
    parallel_processing::Bool
    max_workers::Int
    memory_limit_gb::Float64
    
    # Input/Output Configuration
    input_formats::Vector{Symbol}        # [:bufr, :prepbufr, :netcdf, :ascii]
    output_format::Symbol
    output_directory::String
    temporary_directory::String
    
    # Data Format Processing
    data_format_config::Any              # DataFormats configuration
    auto_format_detection::Bool
    format_validation_level::Symbol      # :strict, :moderate, :permissive
    
    # Quality Control Configuration
    qc_config::QualityControlConfig{T}
    qc_stages::Vector{Symbol}            # [:gross_check, :background_check, :buddy_check, :spatial_qc]
    qc_parallel_processing::Bool
    
    # Forward Operator Configuration  
    forward_operator_config::ForwardOperatorConfig{T}
    crtm_config::Union{CRTMConfig{T}, Nothing}
    innovation_computation::Bool
    tangent_linear_required::Bool
    
    # Bias Correction Configuration
    bias_correction_config::BiasCorrectionConfig{T}
    bias_correction_stages::Vector{Symbol}  # [:varbc, :conventional, :adaptive]
    
    # Spatial Processing Configuration
    spatial_processing_config::SpatialProcessingConfig{T}
    spatial_stages::Vector{Symbol}       # [:thinning, :superobs, :spatial_qc]
    
    # Performance and Monitoring
    performance_monitoring::Bool
    progress_reporting_interval::Int     # Seconds
    checkpoint_frequency::Int            # Processing cycles
    checkpoint_directory::String
    
    # Error Handling
    error_recovery_enabled::Bool
    max_retry_attempts::Int
    continue_on_errors::Bool
    error_threshold_percent::Float64
    
    # Validation and Quality Assurance
    validation_enabled::Bool
    validation_tolerance::T
    quality_metrics_computation::Bool
    detailed_diagnostics::Bool
end

"""
    ProcessingStatistics{T}

Detailed statistics from observation processing.
"""
mutable struct ProcessingStatistics{T <: AbstractFloat}
    # Processing counts
    total_observations_input::Int
    observations_after_qc::Int
    observations_after_thinning::Int
    observations_final::Int
    
    # Quality control statistics
    qc_reject_counts::Dict{Symbol, Int}
    qc_flag_counts::Dict{Int, Int}
    
    # Bias correction statistics
    bias_correction_applied::Int
    average_bias_magnitude::T
    bias_coefficient_updates::Int
    
    # Spatial processing statistics
    thinning_reduction_percent::Float64
    superobs_created::Int
    spatial_qc_rejections::Int
    
    # Performance statistics
    total_processing_time::Float64
    stage_processing_times::Dict{ProcessingStage, Float64}
    memory_usage_peak_mb::Float64
    parallel_efficiency::Float64
    
    # Error statistics  
    error_count::Int
    warning_count::Int
    recovery_attempts::Int
    
    # I/O statistics
    input_file_count::Int
    input_data_size_mb::Float64
    output_data_size_mb::Float64
    
    function ProcessingStatistics{T}() where {T}
        new{T}(
            0, 0, 0, 0,
            Dict{Symbol, Int}(),
            Dict{Int, Int}(),
            0, zero(T), 0,
            0.0, 0, 0,
            0.0, Dict{ProcessingStage, Float64}(), 0.0, 0.0,
            0, 0, 0,
            0, 0.0, 0.0
        )
    end
end

"""
    ProcessingResult{T}

Complete result from observation processing pipeline.
"""
struct ProcessingResult{T <: AbstractFloat}
    # Processed observations
    observations::Vector{AbstractObservation{T}}
    innovation_vectors::Vector{Vector{T}}
    quality_flags::Vector{Vector{Int}}
    spatial_indices::Vector{Int}
    
    # Processing metadata
    processing_config::ObservationProcessingConfig{T}
    statistics::ProcessingStatistics{T}
    processing_time::DateTime
    
    # Quality assurance results
    validation_passed::Bool
    validation_report::Dict{String, Any}
    quality_metrics::Dict{String, T}
    
    # Error and warning information
    errors_encountered::Vector{String}
    warnings_generated::Vector{String}
    recovery_actions_taken::Vector{String}
    
    # Checkpointing information
    checkpoint_files::Vector{String}
    final_checkpoint::String
end

"""
    ProcessingPipeline{T}

Main orchestration class for complete observation processing pipeline.
"""
mutable struct ProcessingPipeline{T <: AbstractFloat}
    # Configuration
    config::ObservationProcessingConfig{T}
    
    # Processing components
    data_format_processor::Union{Any, Nothing}              # DataFormats processor
    quality_controller::Union{QCProcessor{T}, Nothing}      # Quality control processor
    forward_operator::Union{ForwardOperatorProcessor{T}, Nothing} # Forward operator processor
    bias_corrector::Union{BiasCorrector{T}, Nothing}        # Bias correction processor
    spatial_processor::Union{SpatialProcessor{T}, Nothing} # Spatial processing processor
    crtm_interface::Union{CRTMProcessor{T}, Nothing}        # CRTM interface
    
    # Processing state
    current_stage::ProcessingStage
    current_state::ProcessingState
    processing_start_time::DateTime
    
    # Statistics and monitoring
    statistics::ProcessingStatistics{T}
    progress_reporter::Union{Function, Nothing}
    error_handler::Union{Function, Nothing}
    
    # Checkpointing and recovery
    checkpoint_data::Dict{String, Any}
    last_checkpoint_time::DateTime
    
    # Thread management
    worker_pool::Union{Any, Nothing}
    processing_lock::ReentrantLock
    
    function ProcessingPipeline{T}(config::ObservationProcessingConfig{T}) where {T}
        pipeline = new{T}(
            config,
            nothing, nothing, nothing, nothing, nothing, nothing,  # Processors
            STAGE_INITIALIZATION, STATE_IDLE, now(),              # State
            ProcessingStatistics{T}(), nothing, nothing,          # Statistics
            Dict{String, Any}(), now(),                           # Checkpointing
            nothing, ReentrantLock()                              # Threading
        )
        
        # Initialize processing components
        initialize_processing_components!(pipeline)
        
        return pipeline
    end
end

"""
Initialize all processing components based on configuration
"""
function initialize_processing_components!(pipeline::ProcessingPipeline{T}) where T
    config = pipeline.config
    
    # Initialize data format processor
    if :bufr in config.input_formats || :prepbufr in config.input_formats
        pipeline.data_format_processor = initialize_data_format_processor(config)
    end
    
    # Initialize quality control processor
    pipeline.quality_controller = QCProcessor{T}(config.qc_config)
    
    # Initialize forward operator processor
    pipeline.forward_operator = ForwardOperatorProcessor{T}(config.forward_operator_config)
    
    # Initialize CRTM interface if needed
    if config.crtm_config !== nothing
        pipeline.crtm_interface = CRTMProcessor{T}(config.crtm_config)
    end
    
    # Initialize bias correction processor
    pipeline.bias_corrector = initialize_bias_corrector(config.bias_correction_config)
    
    # Initialize spatial processor
    pipeline.spatial_processor = SpatialProcessor{T}(config.spatial_processing_config)
end

"""
Main processing function for complete observation processing pipeline
"""
function process_observations(pipeline::ProcessingPipeline{T},
                            input_files::Vector{String}) where T
    
    start_time = now()
    pipeline.processing_start_time = start_time
    pipeline.current_state = STATE_RUNNING
    
    try
        # Stage 1: Data Ingestion and Format Conversion
        pipeline.current_stage = STAGE_DATA_INGESTION
        raw_observations = ingest_observation_data(pipeline, input_files)
        update_statistics!(pipeline, :ingestion, length(raw_observations))
        
        # Stage 2: Preliminary Quality Control
        pipeline.current_stage = STAGE_PRELIMINARY_QC
        qc_observations = apply_preliminary_quality_control(pipeline, raw_observations)
        update_statistics!(pipeline, :preliminary_qc, length(qc_observations))
        
        # Stage 3: Forward Operator Computation
        pipeline.current_stage = STAGE_FORWARD_OPERATORS
        innovation_observations = compute_forward_operators(pipeline, qc_observations)
        update_statistics!(pipeline, :forward_operators, length(innovation_observations))
        
        # Stage 4: Bias Correction
        pipeline.current_stage = STAGE_BIAS_CORRECTION
        bias_corrected_observations = apply_bias_correction(pipeline, innovation_observations)
        update_statistics!(pipeline, :bias_correction, length(bias_corrected_observations))
        
        # Stage 5: Spatial Processing
        pipeline.current_stage = STAGE_SPATIAL_PROCESSING
        spatial_processed_observations = apply_spatial_processing(pipeline, bias_corrected_observations)
        update_statistics!(pipeline, :spatial_processing, length(spatial_processed_observations))
        
        # Stage 6: Final Quality Control
        pipeline.current_stage = STAGE_FINAL_QC
        final_observations = apply_final_quality_control(pipeline, spatial_processed_observations)
        update_statistics!(pipeline, :final_qc, length(final_observations))
        
        # Stage 7: Validation and Output
        pipeline.current_stage = STAGE_OUTPUT_GENERATION
        result = generate_processing_result(pipeline, final_observations, start_time)
        
        pipeline.current_state = STATE_COMPLETED
        return result
        
    catch e
        pipeline.current_state = STATE_ERROR
        handle_processing_error!(pipeline, e)
        rethrow(e)
    end
end

"""
Data ingestion and format conversion stage
"""
function ingest_observation_data(pipeline::ProcessingPipeline{T}, 
                               input_files::Vector{String}) where T
    
    all_observations = AbstractObservation{T}[]
    
    for file in input_files
        try
            # Detect format and decode
            file_observations = decode_observation_file(pipeline, file)
            append!(all_observations, file_observations)
            
            pipeline.statistics.input_file_count += 1
            pipeline.statistics.input_data_size_mb += filesize(file) / (1024^2)
            
        catch e
            @warn "Error processing file $file: $e"
            pipeline.statistics.error_count += 1
            
            if !pipeline.config.continue_on_errors
                rethrow(e)
            end
        end
        
        # Progress reporting
        report_progress(pipeline, "Processing file: $file")
    end
    
    pipeline.statistics.total_observations_input = length(all_observations)
    return all_observations
end

"""
Apply preliminary quality control
"""
function apply_preliminary_quality_control(pipeline::ProcessingPipeline{T},
                                          observations::Vector{AbstractObservation{T}}) where T
    
    if isnothing(pipeline.quality_controller)
        return observations
    end
    
    qc_result = QualityControl.apply_quality_control(
        pipeline.quality_controller,
        observations,
        [:gross_check, :range_check, :duplicate_check]
    )
    
    # Update statistics
    for (flag, count) in qc_result.rejection_counts
        pipeline.statistics.qc_reject_counts[flag] = get(pipeline.statistics.qc_reject_counts, flag, 0) + count
    end
    
    pipeline.statistics.observations_after_qc = length(qc_result.accepted_observations)
    
    return qc_result.accepted_observations
end

"""
Compute forward operators and innovations
"""
function compute_forward_operators(pipeline::ProcessingPipeline{T},
                                 observations::Vector{AbstractObservation{T}}) where T
    
    if isnothing(pipeline.forward_operator)
        return observations
    end
    
    # Compute H(x) for all observations
    innovations = ForwardOperators.compute_innovations(
        pipeline.forward_operator,
        observations,
        pipeline.config.innovation_computation
    )
    
    # Add CRTM computations for satellite radiances if needed
    if !isnothing(pipeline.crtm_interface)
        satellite_obs = filter(obs -> obs isa RadianceObservation, observations)
        if !isempty(satellite_obs)
            crtm_innovations = CRTMInterface.compute_radiance_innovations(
                pipeline.crtm_interface,
                satellite_obs
            )
            # Merge CRTM innovations with regular innovations
            merge_innovation_results!(innovations, crtm_innovations)
        end
    end
    
    return observations  # Return observations with updated innovation fields
end

"""
Apply bias correction
"""
function apply_bias_correction(pipeline::ProcessingPipeline{T},
                             observations::Vector{AbstractObservation{T}}) where T
    
    if isnothing(pipeline.bias_corrector)
        return observations
    end
    
    bias_result = BiasCorrection.apply_bias_correction(
        pipeline.bias_corrector,
        observations,
        pipeline.config.bias_correction_config
    )
    
    # Update statistics
    pipeline.statistics.bias_correction_applied = length(bias_result.corrected_observations)
    pipeline.statistics.average_bias_magnitude = mean([abs(obs.bias_correction) 
                                                      for obs in bias_result.corrected_observations])
    
    return [obs.corrected_observation for obs in bias_result.corrected_observations]
end

"""
Apply spatial processing (thinning and super-observations)
"""
function apply_spatial_processing(pipeline::ProcessingPipeline{T},
                                observations::Vector{AbstractObservation{T}}) where T
    
    if isnothing(pipeline.spatial_processor)
        return observations
    end
    
    # Extract locations and values for spatial processing
    locations = Matrix{T}(undef, length(observations), 2)
    values = Vector{T}(undef, length(observations))
    errors = Vector{T}(undef, length(observations))
    
    for (i, obs) in enumerate(observations)
        locations[i, 1] = obs.location.latitude
        locations[i, 2] = obs.location.longitude
        values[i] = obs.value
        errors[i] = obs.error
    end
    
    # Apply thinning if configured
    if pipeline.config.spatial_processing_config.thinning_enabled
        kept_indices = SpatialProcessing.thin_observations(
            pipeline.spatial_processor,
            locations, values, ones(Int, length(observations))
        )
        observations = observations[kept_indices]
        
        # Update statistics
        reduction_percent = (1.0 - length(kept_indices) / length(values)) * 100.0
        pipeline.statistics.thinning_reduction_percent = reduction_percent
    end
    
    # Apply super-observation creation if configured
    if pipeline.config.spatial_processing_config.superob_enabled
        # Update locations after thinning
        current_locations = Matrix{T}(undef, length(observations), 2)
        current_values = Vector{T}(undef, length(observations))
        current_errors = Vector{T}(undef, length(observations))
        
        for (i, obs) in enumerate(observations)
            current_locations[i, 1] = obs.location.latitude
            current_locations[i, 2] = obs.location.longitude
            current_values[i] = obs.value
            current_errors[i] = obs.error
        end
        
        superob_result = SpatialProcessing.create_super_observations(
            pipeline.spatial_processor,
            current_locations, current_values, current_errors
        )
        
        pipeline.statistics.superobs_created = superob_result.superob_count
    end
    
    pipeline.statistics.observations_after_thinning = length(observations)
    
    return observations
end

"""
Apply final quality control
"""
function apply_final_quality_control(pipeline::ProcessingPipeline{T},
                                    observations::Vector{AbstractObservation{T}}) where T
    
    if isnothing(pipeline.quality_controller)
        return observations
    end
    
    # Apply comprehensive quality control including spatial checks
    final_qc_result = QualityControl.apply_quality_control(
        pipeline.quality_controller,
        observations,
        [:background_check, :buddy_check, :spatial_consistency]
    )
    
    # Update final statistics
    pipeline.statistics.observations_final = length(final_qc_result.accepted_observations)
    pipeline.statistics.spatial_qc_rejections = length(observations) - length(final_qc_result.accepted_observations)
    
    return final_qc_result.accepted_observations
end

"""
Generate final processing result
"""
function generate_processing_result(pipeline::ProcessingPipeline{T},
                                  observations::Vector{AbstractObservation{T}},
                                  start_time::DateTime) where T
    
    # Calculate total processing time
    total_time = Dates.value(now() - start_time) / 1000.0  # Convert to seconds
    pipeline.statistics.total_processing_time = total_time
    
    # Validate processing results
    validation_passed, validation_report = validate_processing_results(pipeline, observations)
    
    # Generate quality metrics
    quality_metrics = compute_quality_metrics(pipeline, observations)
    
    # Create result object
    return ProcessingResult{T}(
        observations,
        Vector{Vector{T}}(),      # Innovation vectors (placeholder)
        Vector{Vector{Int}}(),     # Quality flags (placeholder)
        collect(1:length(observations)),  # Spatial indices
        pipeline.config,
        pipeline.statistics,
        now(),
        validation_passed,
        validation_report,
        quality_metrics,
        String[],  # Errors encountered
        String[],  # Warnings generated
        String[],  # Recovery actions taken
        String[],  # Checkpoint files
        ""         # Final checkpoint
    )
end

# Helper functions
"""
    initialize_data_format_processor(config)

Build a data-format processor from `config`.
Placeholder: currently always returns `nothing`.
"""
initialize_data_format_processor(config) = nothing

"""
    initialize_bias_corrector(config)

Build a bias corrector from `config`.
Placeholder: currently always returns `nothing`.
"""
initialize_bias_corrector(config) = nothing

"""
    decode_observation_file(pipeline, filename)

Decode a single observation file into a vector of observations.
Placeholder: format dispatch is not yet implemented, so an empty
`AbstractObservation{Float64}` vector is always returned.
"""
function decode_observation_file(pipeline, filename)
    return AbstractObservation{Float64}[]
end

"""
    update_statistics!(pipeline, stage, count)

Record per-stage progress. Placeholder: currently only logs the observation
count — despite the `!`, no pipeline state is mutated yet.
"""
function update_statistics!(pipeline, stage, count)
    @info "Stage $(stage): processed $(count) observations"
    return nothing
end

"""
    report_progress(pipeline, message)

Forward `message` to the pipeline's progress-reporter callback when one is
installed; otherwise log it at info level.
"""
function report_progress(pipeline, message)
    reporter = pipeline.progress_reporter
    if reporter === nothing
        @info message
    else
        reporter(message)
    end
end

"""
    handle_processing_error!(pipeline, error)

Record a processing failure: increment the pipeline error counter and log
the error. Does not rethrow — callers decide how to propagate.
"""
function handle_processing_error!(pipeline, error)
    # Handle processing errors
    pipeline.statistics.error_count += 1
    @error "Processing error: $(error)"
end

"""
    merge_innovation_results!(innovations, crtm_innovations)

Fold CRTM radiance innovations into the main innovation set.
Placeholder: currently a no-op returning `nothing`.
"""
function merge_innovation_results!(innovations, crtm_innovations)
    return nothing
end

"""
    validate_processing_results(pipeline, observations) -> (passed, report)

Validate the processed observations. Placeholder: always passes, returning
`true` and a report containing the observation count and a timestamp.
"""
function validate_processing_results(pipeline, observations)
    report = Dict{String, Any}(
        "total_observations" => length(observations),
        "validation_time" => now(),
    )
    return true, report
end

"""
    compute_quality_metrics(pipeline, observations) -> Dict{String, Float64}

Compute summary quality metrics for the processed observations:

- `"average_quality"`: mean of each observation's
  `quality_metrics.overall_quality`, or `0.0` when `observations` is empty
  (avoiding `mean` over an empty collection, which would fail)
- `"observation_density"`: placeholder density, observation count / 1000
"""
function compute_quality_metrics(pipeline, observations)
    avg_quality = isempty(observations) ? 0.0 :
        mean(obs.quality_metrics.overall_quality for obs in observations)
    return Dict{String, Float64}(
        "average_quality" => avg_quality,
        "observation_density" => length(observations) / 1000.0
    )
end

end # module ProcessingPipeline
