# Comprehensive posterior analysis and output tools for FLEXINVERT MCMC results
# Provides rich analysis capabilities for atmospheric flux inversion posterior distributions

module Analysis

using LinearAlgebra
using Statistics
using StatsBase
using Distributions
using JSON
using NCDatasets
using Dates
using Printf

using ..MCMCTypes
using ..CoreTypes
# Import existing output infrastructure
using ..OutputWriter

export PosteriorAnalyzer, AnalysisConfig, AnalysisResult
export analyze_posterior, compute_summary_statistics, compute_flux_aggregations
export compute_credible_intervals, compute_correlations, compute_pca_analysis
export generate_netcdf_output, generate_csv_output, generate_json_output
export validate_analysis_quality, compare_prior_posterior

# ============================================================================
# Core Types and Configuration
# ============================================================================

"""
    AnalysisConfig

Configuration for posterior analysis operations.

Built with `Base.@kwdef`, so every field has a default and individual
settings can be overridden by keyword, e.g.
`AnalysisConfig(compute_correlations = false)`.
"""
Base.@kwdef struct AnalysisConfig
    # Statistical summary settings
    credible_levels::Vector{Float64} = [0.50, 0.90, 0.95]  # central credible-interval masses
    quantile_levels::Vector{Float64} = [0.025, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.975]
    compute_mode::Bool = true      # NOTE(review): not consulted by the visible code — confirm before relying on it
    kde_bandwidth::Float64 = 0.1   # passed to `estimate_mode`, which is histogram-based and currently ignores it

    # Regional aggregation settings
    spatial_aggregations::Vector{String} = ["global", "northern_hemisphere", "southern_hemisphere"]
    temporal_aggregations::Vector{String} = ["annual", "seasonal", "monthly"]
    custom_regions::Dict{String, Vector{Int}} = Dict{String, Vector{Int}}()  # region name => state-vector indices

    # Uncertainty analysis
    compute_correlations::Bool = true
    pca_threshold::Float64 = 0.95  # Explained variance threshold
    max_pca_components::Int = 50   # cap on retained PCA components

    # Model validation
    compute_posterior_predictive::Bool = true  # requires a forward model on the analyzer
    validate_constraints::Bool = true
    physical_bounds::Dict{String, Tuple{Float64, Float64}} = Dict{String, Tuple{Float64, Float64}}()  # constraint name => (lower, upper)

    # Output settings
    save_samples::Bool = false  # Save individual samples to output
    thin_samples::Int = 1       # Additional thinning for output
    compression_level::Int = 4  # NetCDF compression

    # Memory management
    batch_size::Int = 1000      # Process samples in batches
    use_streaming::Bool = true  # Stream computation for large datasets
end

"""
    RegionalAggregation

Definition for spatial or temporal flux aggregation: a named subset of the
state vector together with per-element weights. The aggregated value for a
posterior draw is `dot(x[box_indices], weights)`.
"""
struct RegionalAggregation
    name::String
    box_indices::Vector{Int}  # indices into the state vector
    weights::Vector{Float64}  # Area or other weights (same length as box_indices)
    description::String
end

"""
    SummaryStatistics

Summary statistics for a parameter or aggregated quantity, as produced by
`compute_summary_statistics`.
"""
struct SummaryStatistics{T<:AbstractFloat}
    mean::T
    median::T
    std::T
    var::T
    quantiles::Dict{Float64, T}                   # quantile level => value
    credible_intervals::Dict{Float64, Tuple{T, T}}  # interval mass => (lower, upper)
    mode::T           # histogram-based mode estimate
    mode_density::T   # approximate density at the mode
    tail_probabilities::Dict{String, T}  # e.g., "positive", "negative"
    effective_sample_size::T
    rhat::T           # placeholder 1.0 for single-chain runs (see compute_summary_statistics)
end

"""
    FluxAggregation

Results from spatial/temporal flux aggregation: the per-draw aggregated
values plus their summary statistics.
"""
struct FluxAggregation{T<:AbstractFloat}
    name::String
    samples::Vector{T}                 # aggregated value for each posterior draw
    statistics::SummaryStatistics{T}
    units::String
    description::String
end

"""
    PosteriorPredictive

Posterior predictive analysis results comparing the predictive distribution
(from pushing posterior draws through the forward model) against observations.
"""
struct PosteriorPredictive{T<:AbstractFloat}
    observed::Vector{T}
    predicted_mean::Vector{T}
    predicted_std::Vector{T}
    residuals::Vector{T}                # predicted_mean - observed
    standardized_residuals::Vector{T}   # residuals / observation errors
    coverage_50::T  # Fraction of observations in 50% prediction interval
    coverage_90::T  # Fraction of observations in 90% prediction interval
    coverage_95::T  # Fraction of observations in 95% prediction interval
    outlier_indices::Vector{Int}        # observations beyond outlier_threshold sigmas
    outlier_threshold::T
end

"""
    UncertaintyAnalysis

Uncertainty quantification and decomposition results.

`prior_uncertainties` and `uncertainty_reduction` are NaN-filled when no
prior covariance was supplied to the analyzer.
"""
struct UncertaintyAnalysis{T<:AbstractFloat}
    correlation_matrix::Matrix{T}
    covariance_matrix::Matrix{T}
    pca_components::Matrix{T}            # one column per retained component
    pca_eigenvalues::Vector{T}
    pca_explained_variance::Vector{T}    # cumulative explained-variance fractions
    parameter_uncertainties::Vector{T}   # posterior standard deviations
    prior_uncertainties::Vector{T}
    uncertainty_reduction::Vector{T}  # (prior_std - posterior_std) / prior_std, clamped to >= 0
end

"""
    AnalysisResult

Complete results from posterior analysis, as returned by `analyze_posterior`.
Optional analyses are `nothing` when disabled in the config or when their
inputs (forward model, observations) were not provided.
"""
struct AnalysisResult{T<:AbstractFloat}
    # Basic statistics
    parameter_statistics::Vector{SummaryStatistics{T}}

    # Flux aggregations
    flux_aggregations::Vector{FluxAggregation{T}}

    # Model validation
    posterior_predictive::Union{PosteriorPredictive{T}, Nothing}

    # Uncertainty analysis
    uncertainty_analysis::Union{UncertaintyAnalysis{T}, Nothing}

    # Quality metrics
    convergence_summary::Dict{String, Any}
    constraint_violations::Dict{String, Vector{Int}}  # constraint name => offending draw indices

    # Metadata
    n_samples::Int
    n_parameters::Int
    analysis_time::Float64  # wall-clock seconds spent in analyze_posterior
    config::AnalysisConfig
end

"""
    PosteriorAnalyzer

Main analysis engine for MCMC posterior samples.

Bundles the analysis configuration with optional context: the inversion
`Domain`, a forward model plus observations for posterior-predictive checks,
and prior moments for uncertainty-reduction comparisons. Each optional piece
may be `nothing`, in which case the corresponding analysis step is skipped.
"""
struct PosteriorAnalyzer{T<:AbstractFloat}
    config::AnalysisConfig
    domain::Union{Domain, Nothing}

    # Aggregation definitions (standard domain regions plus config.custom_regions)
    regional_aggregations::Vector{RegionalAggregation}

    # Forward model for posterior predictive (optional).
    # Expected signature: x::Vector{T} -> predicted observation vector.
    forward_model::Union{Function, Nothing}
    observations::Union{Vector{T}, Nothing}
    observation_errors::Union{Vector{T}, Nothing}

    # Prior information for comparison
    prior_mean::Union{Vector{T}, Nothing}
    prior_covariance::Union{Matrix{T}, Nothing}

    function PosteriorAnalyzer{T}(
        config::AnalysisConfig;
        domain::Union{Domain, Nothing} = nothing,
        forward_model::Union{Function, Nothing} = nothing,
        observations::Union{Vector{T}, Nothing} = nothing,
        observation_errors::Union{Vector{T}, Nothing} = nothing,
        prior_mean::Union{Vector{T}, Nothing} = nothing,
        prior_covariance::Union{Matrix{T}, Nothing} = nothing
    ) where T<:AbstractFloat

        # Initialize regional aggregations
        regional_aggregations = Vector{RegionalAggregation}()

        # Standard aggregations (e.g. global/hemispheric) need domain geometry.
        if domain !== nothing
            regional_aggregations = create_standard_aggregations(domain, config)
        end

        # User-defined regions; weighted by box area when the domain has one.
        for (name, indices) in config.custom_regions
            weights = ones(T, length(indices))  # Equal weights by default
            if domain !== nothing && !isempty(domain.area_box)
                # NOTE(review): assumes `indices` are valid into domain.area_box — confirm upstream validation.
                weights = domain.area_box[indices]
            end
            push!(regional_aggregations, RegionalAggregation(name, indices, weights, "Custom region"))
        end

        new{T}(config, domain, regional_aggregations, forward_model,
                observations, observation_errors, prior_mean, prior_covariance)
    end
end

"""
    PosteriorAnalyzer(config::AnalysisConfig; kwargs...)

Convenience constructor defaulting the sample element type to `Float64`.
"""
function PosteriorAnalyzer(config::AnalysisConfig; kwargs...)
    return PosteriorAnalyzer{Float64}(config; kwargs...)
end

# ============================================================================
# Main Analysis Functions
# ============================================================================

"""
    analyze_posterior(analyzer::PosteriorAnalyzer, samples::Vector{PosteriorSample})

Run the full posterior analysis pipeline: per-parameter summaries, flux
aggregations, optional posterior-predictive and uncertainty analyses,
convergence diagnostics and physical-constraint checks.

Returns an `AnalysisResult` bundling everything with timing metadata.
"""
function analyze_posterior(analyzer::PosteriorAnalyzer{T},
                         samples::Vector{PosteriorSample{T}}) where T<:AbstractFloat

    @info "Starting posterior analysis with $(length(samples)) samples"
    t_start = time()

    validate_samples(samples)

    # Stack the physical-space draws into an (n_draws x n_params) matrix so
    # downstream statistics can operate column-wise.
    n_draws = length(samples)
    n_params = length(samples[1].x_phys)
    draws = Matrix{T}(undef, n_draws, n_params)
    for (row, s) in enumerate(samples)
        draws[row, :] = s.x_phys
    end

    @info "Computing parameter statistics"
    parameter_statistics = compute_parameter_statistics(analyzer, draws)

    @info "Computing flux aggregations"
    flux_aggregations = compute_flux_aggregations(analyzer, draws)

    # Posterior predictive checks require a forward model.
    posterior_predictive = nothing
    if analyzer.config.compute_posterior_predictive && analyzer.forward_model !== nothing
        @info "Computing posterior predictive analysis"
        posterior_predictive = compute_posterior_predictive(analyzer, draws)
    end

    uncertainty_analysis = nothing
    if analyzer.config.compute_correlations
        @info "Computing uncertainty analysis"
        uncertainty_analysis = compute_uncertainty_analysis(analyzer, draws)
    end

    convergence_summary = compute_convergence_summary(samples)

    constraint_violations = Dict{String, Vector{Int}}()
    if analyzer.config.validate_constraints
        constraint_violations = validate_constraints(analyzer, draws)
    end

    elapsed = time() - t_start

    result = AnalysisResult{T}(
        parameter_statistics,
        flux_aggregations,
        posterior_predictive,
        uncertainty_analysis,
        convergence_summary,
        constraint_violations,
        n_draws,
        n_params,
        elapsed,
        analyzer.config
    )

    @info "Posterior analysis completed in $(round(elapsed, digits=2)) seconds"
    return result
end

"""
    compute_parameter_statistics(analyzer::PosteriorAnalyzer, sample_matrix::Matrix)

Compute summary statistics for each parameter (each column of
`sample_matrix`), returning one `SummaryStatistics` per parameter.
"""
function compute_parameter_statistics(analyzer::PosteriorAnalyzer{T},
                                    sample_matrix::Matrix{T}) where T<:AbstractFloat
    # One summary per column; the typed comprehension pins the eltype.
    return SummaryStatistics{T}[
        compute_summary_statistics(sample_matrix[:, j], analyzer.config)
        for j in 1:size(sample_matrix, 2)
    ]
end

"""
    compute_summary_statistics(samples::Vector, config::AnalysisConfig)

Compute comprehensive summary statistics for a single parameter: moments,
quantiles, central credible intervals, a histogram-based mode estimate,
sign probabilities and an effective sample size.
"""
function compute_summary_statistics(samples::Vector{T}, config::AnalysisConfig) where T<:AbstractFloat
    # Basic moments
    m = mean(samples)
    med = median(samples)
    s = std(samples)
    v = var(samples)

    # Requested quantiles
    quantiles = Dict{Float64, T}(q => quantile(samples, q) for q in config.quantile_levels)

    # Central credible intervals: equal tail mass on each side.
    credible_intervals = Dict{Float64, Tuple{T, T}}()
    for level in config.credible_levels
        tail = (1.0 - level) / 2
        credible_intervals[level] = (quantile(samples, tail), quantile(samples, 1 - tail))
    end

    # Mode estimation (histogram-based; see estimate_mode)
    mode_val, mode_density = estimate_mode(samples, config.kde_bandwidth)

    # Probability mass on each side of zero
    tail_probs = Dict{String, T}(
        "positive" => mean(samples .> 0),
        "negative" => mean(samples .< 0),
    )

    # Effective sample size (simplified autocorrelation-based estimate)
    ess = compute_effective_sample_size(samples)

    # R-hat needs multiple chains; report 1.0 as a single-chain placeholder.
    rhat = T(1.0)

    return SummaryStatistics{T}(
        m, med, s, v,
        quantiles, credible_intervals, mode_val, mode_density,
        tail_probs, ess, rhat
    )
end

"""
    estimate_mode(samples::Vector{T}, bandwidth::Real) where T<:AbstractFloat

Estimate the mode of `samples` as the midpoint of the fullest histogram bin,
together with the approximate density at that point.

Returns `(mode, density)` as a pair of `T`s. With fewer than 10 samples the
histogram is unreliable, so the median is returned with a density of zero.

Note: `bandwidth` is kept for interface compatibility (a KDE estimator would
use it) but the histogram-based estimator ignores it.
"""
function estimate_mode(samples::Vector{T}, bandwidth::Real) where T<:AbstractFloat
    # `bandwidth::Real` (was `::T`) so the Float64 `config.kde_bandwidth`
    # can be passed with Float32 chains without a MethodError.
    if length(samples) < 10
        return median(samples), T(0.0)
    end

    # Bin count scales with sqrt(n), clamped to [10, 50].
    n_bins = min(50, max(10, isqrt(length(samples))))
    hist = fit(Histogram, samples, nbins=n_bins)

    # Mode estimate: midpoint of the bin with the largest count.
    max_idx = argmax(hist.weights)
    edges = hist.edges[1]
    mode_estimate = T((edges[max_idx] + edges[max_idx + 1]) / 2)
    # Normalize count to a density: count / (n * bin_width).
    mode_density = T(hist.weights[max_idx] / (length(samples) * step(edges)))

    return mode_estimate, mode_density
end

"""
    compute_effective_sample_size(samples::Vector{T}) where T<:AbstractFloat

Estimate the effective sample size of an MCMC trace, accounting for
autocorrelation via the integrated autocorrelation time
`tau = 1 + 2 * sum(rho_k for k >= 1)`, truncated at the first non-positive
autocorrelation. Chains shorter than 10 samples are returned at face value.
"""
function compute_effective_sample_size(samples::Vector{T}) where T<:AbstractFloat
    n = length(samples)
    if n < 10
        return T(n)
    end

    # Start at lag 0 so autocorr[k + 1] is the lag-k autocorrelation.
    # (Previously lags started at 1, which silently dropped the lag-1 term
    # from the integrated autocorrelation time below.)
    autocorr = autocor(samples, 0:min(n ÷ 4, 100))

    # Truncate at the first non-positive autocorrelation, or use all lags.
    cutoff = findfirst(x -> x <= 0, autocorr[2:end])
    cutoff = cutoff === nothing ? length(autocorr) : cutoff + 1

    # Integrated autocorrelation time over the retained positive lags.
    tau_int = 1 + 2 * sum(autocorr[2:cutoff])

    # Convert so the return type is T regardless of the promotion above.
    return T(n / max(tau_int, 1.0))
end

"""
    compute_flux_aggregations(analyzer::PosteriorAnalyzer, sample_matrix::Matrix)

Compute spatial and temporal flux aggregations: for each configured region,
form the weighted sum of the region's parameters for every posterior draw
and summarize the resulting distribution.
"""
function compute_flux_aggregations(analyzer::PosteriorAnalyzer{T},
                                 sample_matrix::Matrix{T}) where T<:AbstractFloat

    aggregations = Vector{FluxAggregation{T}}()
    n_draws = size(sample_matrix, 1)

    for region in analyzer.regional_aggregations
        # Weighted sum over the region's parameters, one value per draw.
        # The typed comprehension keeps the eltype at T after promotion
        # against the Float64 weights.
        aggregated_draws = T[
            dot(view(sample_matrix, i, region.box_indices), region.weights)
            for i in 1:n_draws
        ]

        region_stats = compute_summary_statistics(aggregated_draws, analyzer.config)

        # Determine units (placeholder)
        units = "flux_units"
        if analyzer.domain !== nothing
            units = "kg/m²/s"  # Or appropriate units
        end

        push!(aggregations, FluxAggregation{T}(
            region.name,
            aggregated_draws,
            region_stats,
            units,
            region.description
        ))
    end

    return aggregations
end

"""
    compute_posterior_predictive(analyzer::PosteriorAnalyzer, sample_matrix::Matrix)

Compute posterior predictive analysis for model validation.

Pushes every posterior draw through `analyzer.forward_model`, compares the
resulting predictive distribution against `analyzer.observations`, and
returns a `PosteriorPredictive` with residuals, interval coverage fractions
and 3-sigma outliers. Returns `nothing` (with a warning) when the forward
model or the observations are missing.
"""
function compute_posterior_predictive(analyzer::PosteriorAnalyzer{T},
                                    sample_matrix::Matrix{T}) where T<:AbstractFloat

    if analyzer.forward_model === nothing || analyzer.observations === nothing
        @warn "Forward model or observations not provided for posterior predictive analysis"
        return nothing
    end

    n_samples, n_params = size(sample_matrix)
    n_obs = length(analyzer.observations)

    # One forward-model evaluation per draw; row i holds the predicted
    # observation vector for draw i.
    predictions = Matrix{T}(undef, n_samples, n_obs)

    for i in 1:n_samples
        predictions[i, :] = analyzer.forward_model(sample_matrix[i, :])
    end

    # Predictive moments per observation (column-wise over draws).
    predicted_mean = vec(mean(predictions, dims=1))
    predicted_std = vec(std(predictions, dims=1))

    # Residuals of the predictive mean against the data.
    residuals = predicted_mean - analyzer.observations

    # Standardize by observation errors; with no errors supplied, unit errors
    # are used so standardized residuals equal raw residuals.
    obs_errors = analyzer.observation_errors !== nothing ? analyzer.observation_errors : ones(T, n_obs)
    standardized_residuals = residuals ./ obs_errors

    # Empirical coverage of central predictive intervals.
    coverage_50 = compute_coverage(analyzer.observations, predictions, 0.50)
    coverage_90 = compute_coverage(analyzer.observations, predictions, 0.90)
    coverage_95 = compute_coverage(analyzer.observations, predictions, 0.95)

    # Outlier detection (3-sigma rule on standardized residuals)
    outlier_threshold = T(3.0)
    outlier_indices = findall(abs.(standardized_residuals) .> outlier_threshold)

    return PosteriorPredictive{T}(
        analyzer.observations,
        predicted_mean,
        predicted_std,
        residuals,
        standardized_residuals,
        coverage_50,
        coverage_90,
        coverage_95,
        outlier_indices,
        outlier_threshold
    )
end

"""
    compute_coverage(observations, predictions, level)

Compute the fraction of observations falling inside the central `level`
prediction interval formed from the posterior predictive draws.

# Arguments
- `observations`: observed values, length `n_obs`.
- `predictions`: predictive draws, `n_samples × n_obs` (one column per observation).
- `level`: interval probability mass, e.g. `0.90`.
"""
function compute_coverage(observations::Vector{T}, predictions::Matrix{T},
                          level::Real) where T<:AbstractFloat
    # `level::Real` (was `::T`): call sites pass Float64 literals (0.50 etc.),
    # which raised a MethodError for non-Float64 sample types.
    alpha = 1.0 - level

    covered = 0
    for i in eachindex(observations)
        draws = @view predictions[:, i]  # view avoids a per-observation copy
        lower = quantile(draws, alpha / 2)
        upper = quantile(draws, 1 - alpha / 2)

        if lower <= observations[i] <= upper
            covered += 1
        end
    end

    return T(covered) / length(observations)
end

"""
    compute_uncertainty_analysis(analyzer::PosteriorAnalyzer, sample_matrix::Matrix)

Compute uncertainty quantification and decomposition: correlation/covariance
matrices, a PCA of the posterior draws, posterior standard deviations, and —
when a prior covariance is available — the relative uncertainty reduction.
"""
function compute_uncertainty_analysis(analyzer::PosteriorAnalyzer{T},
                                    sample_matrix::Matrix{T}) where T<:AbstractFloat

    # Second-moment structure of the posterior draws.
    correlations = cor(sample_matrix)
    covariances = cov(sample_matrix)

    # Low-rank summary of the posterior covariance.
    components, eigenvalues, explained = compute_pca_analysis(
        sample_matrix, analyzer.config.max_pca_components, analyzer.config.pca_threshold
    )

    # Posterior standard deviation per parameter.
    posterior_std = vec(std(sample_matrix, dims=1))

    # Prior comparison: NaN-filled when no prior covariance was supplied.
    if analyzer.prior_covariance !== nothing
        prior_std = sqrt.(diag(analyzer.prior_covariance))
        reduction = (prior_std - posterior_std) ./ prior_std
        reduction = max.(reduction, zero(T))  # clamp: never report negative reduction
    else
        prior_std = fill(T(NaN), length(posterior_std))
        reduction = fill(T(NaN), length(posterior_std))
    end

    return UncertaintyAnalysis{T}(
        correlations,
        covariances,
        components,
        eigenvalues,
        explained,
        posterior_std,
        prior_std,
        reduction
    )
end

"""
    compute_pca_analysis(sample_matrix, max_components, threshold)

Perform principal component analysis on posterior samples.

# Arguments
- `sample_matrix`: posterior draws, `n_samples × n_params`.
- `max_components`: upper bound on retained components.
- `threshold`: cumulative explained-variance fraction to reach.

Returns `(components, eigenvalues, explained_variance)` where `components`
is `n_params × k` with one principal direction (in parameter space) per
column.
"""
function compute_pca_analysis(sample_matrix::Matrix{T}, max_components::Int,
                            threshold::Real) where T<:AbstractFloat
    # `threshold::Real` (was `::T`): the caller passes the Float64
    # `config.pca_threshold`, which failed for non-Float64 samples.

    # Center the data
    centered_samples = sample_matrix .- mean(sample_matrix, dims=1)

    # SVD of the (n_params × n_samples) transpose: columns of U are the
    # principal directions in parameter space.
    U, S, V = svd(centered_samples')

    # Eigenvalues are proportional to squared singular values
    eigenvalues = (S.^2) ./ (size(sample_matrix, 1) - 1)
    total_variance = sum(eigenvalues)
    explained_variance = cumsum(eigenvalues) ./ total_variance

    # Retain the smallest number of components reaching the threshold,
    # capped by max_components and the available spectrum.
    n_components = findfirst(explained_variance .>= threshold)
    n_components = n_components === nothing ? max_components : min(n_components, max_components)
    n_components = min(n_components, length(eigenvalues))

    # BUG FIX: previously returned V (n_samples × k); the parameter-space
    # eigenvectors are the columns of U (n_params × k), which is what the
    # NetCDF writer dimensions as ("parameter", "pca_component").
    return U[:, 1:n_components], eigenvalues[1:n_components], explained_variance[1:n_components]
end

"""
    validate_constraints(analyzer::PosteriorAnalyzer, sample_matrix::Matrix)

Validate physical constraints on posterior samples.

For each named bound in `config.physical_bounds`, record the indices of the
draws in which any parameter falls outside `(lower, upper)`. Constraints with
no violations are omitted from the returned dictionary.
"""
function validate_constraints(analyzer::PosteriorAnalyzer{T},
                            sample_matrix::Matrix{T}) where T<:AbstractFloat

    violations = Dict{String, Vector{Int}}()
    n_draws = size(sample_matrix, 1)

    for (constraint_name, (lower, upper)) in analyzer.config.physical_bounds
        # A draw violates the bound if any of its parameters leaves the interval.
        offending = findall(1:n_draws) do i
            row = @view sample_matrix[i, :]
            any(<(lower), row) || any(>(upper), row)
        end

        if !isempty(offending)
            violations[constraint_name] = offending
        end
    end

    # Additional FLEXINVERT-specific constraints can be added here
    # e.g., mass balance, sectoral bounds, etc.

    return violations
end

"""
    compute_convergence_summary(samples::Vector{PosteriorSample})

Compute convergence summary from samples: ranges of the log-density
components and a qualitative trend assessment of the running log-posterior
mean.
"""
function compute_convergence_summary(samples::Vector{PosteriorSample{T}}) where T<:AbstractFloat
    summary = Dict{String, Any}()

    summary["n_samples"] = length(samples)
    # extrema returns (min, max) in one pass per component.
    summary["log_posterior_range"] = extrema(s.log_posterior for s in samples)
    summary["log_likelihood_range"] = extrema(s.log_likelihood for s in samples)
    summary["log_prior_range"] = extrema(s.log_prior for s in samples)

    # Trend of the running log-posterior mean as a convergence proxy.
    running_means = compute_running_means(samples)
    summary["running_means_trend"] = assess_trend_stability(running_means)

    return summary
end

"""
    compute_running_means(samples::Vector{PosteriorSample})

Compute running means of the log-posterior for trend analysis: element `i`
is the mean of `log_posterior` over the first `i` samples.
"""
function compute_running_means(samples::Vector{PosteriorSample{T}}) where T<:AbstractFloat
    n_samples = length(samples)
    running_means = Vector{T}(undef, n_samples)

    # Accumulate in T (was a Float64 literal, which forced per-iteration
    # conversions and widened precision for T != Float64).
    cumsum_posterior = zero(T)
    for i in 1:n_samples
        cumsum_posterior += samples[i].log_posterior
        running_means[i] = cumsum_posterior / i
    end

    return running_means
end

"""
    assess_trend_stability(running_means::Vector)

Assess stability of running means for convergence, based on the coefficient
of variation over the final 20% of the chain.

Returns one of `"insufficient_data"` (fewer than 100 points), `"stable"`
(CV < 1%), `"moderately_stable"` (CV < 5%) or `"unstable"`.
"""
function assess_trend_stability(running_means::Vector{T}) where T<:AbstractFloat
    n = length(running_means)
    n >= 100 || return "insufficient_data"

    # Coefficient of variation over the last 20% of the chain.
    tail = @view running_means[max(1, n - n ÷ 5):end]
    cv = std(tail) / abs(mean(tail))

    cv < 0.01 && return "stable"
    cv < 0.05 && return "moderately_stable"
    return "unstable"
end

# ============================================================================
# Output Generation Functions
# ============================================================================

"""
    generate_netcdf_output(filepath::String, result::AnalysisResult, analyzer::PosteriorAnalyzer)

Generate comprehensive NetCDF output file.

Creates (or overwrites) `filepath`, defining dimensions from the result's
sizes and delegating each section (parameter statistics, aggregations,
optional uncertainty and posterior-predictive blocks, metadata) to the
corresponding `add_*_to_netcdf!` helper. Parent directories are created as
needed.
"""
function generate_netcdf_output(filepath::String, result::AnalysisResult{T},
                               analyzer::PosteriorAnalyzer{T}) where T<:AbstractFloat

    @info "Generating NetCDF output: $filepath"
    isdir(dirname(filepath)) || mkpath(dirname(filepath))

    # "c" creates a new file; the do-block guarantees the dataset is closed.
    Dataset(filepath, "c"; deflatelevel=analyzer.config.compression_level) do ds
        # Define dimensions
        defDim(ds, "parameter", result.n_parameters)
        defDim(ds, "sample", result.n_samples)
        defDim(ds, "quantile", length(result.config.quantile_levels))
        defDim(ds, "credible_level", length(result.config.credible_levels))
        defDim(ds, "aggregation", length(result.flux_aggregations))

        # Add spatial dimensions if domain is available
        if analyzer.domain !== nothing
            defDim(ds, "longitude", analyzer.domain.nxregrid)
            defDim(ds, "latitude", analyzer.domain.nyregrid)
            defDim(ds, "nbox", analyzer.domain.nbox)
        end

        # Parameter statistics
        add_parameter_statistics_to_netcdf!(ds, result, T)

        # Flux aggregations
        add_flux_aggregations_to_netcdf!(ds, result, T)

        # Uncertainty analysis (only when it was computed)
        if result.uncertainty_analysis !== nothing
            add_uncertainty_analysis_to_netcdf!(ds, result.uncertainty_analysis, T)
        end

        # Posterior predictive (only when it was computed)
        if result.posterior_predictive !== nothing
            add_posterior_predictive_to_netcdf!(ds, result.posterior_predictive, T)
        end

        # Add metadata
        add_metadata_to_netcdf!(ds, result, analyzer)
    end

    @info "NetCDF output saved: $filepath"
end

"""
    add_parameter_statistics_to_netcdf!(ds::Dataset, result::AnalysisResult, ::Type{T})

Add parameter statistics (mean/median/std, quantiles and credible intervals)
to the NetCDF dataset, along the "parameter", "quantile" and "credible_level"
dimensions defined by `generate_netcdf_output`.
"""
function add_parameter_statistics_to_netcdf!(ds::Dataset, result::AnalysisResult{T},
                                           ::Type{T}) where T<:AbstractFloat
    # (removed unused local `n_params`)
    stats = result.parameter_statistics

    # Basic per-parameter moments.
    mean_var = defVar(ds, "parameter_mean", T, ("parameter",))
    median_var = defVar(ds, "parameter_median", T, ("parameter",))
    std_var = defVar(ds, "parameter_std", T, ("parameter",))

    mean_var[:] = [s.mean for s in stats]
    median_var[:] = [s.median for s in stats]
    std_var[:] = [s.std for s in stats]

    # Quantiles: one row per parameter, one column per requested level.
    quantile_values = defVar(ds, "parameter_quantiles", T, ("parameter", "quantile"))
    quantile_levels = defVar(ds, "quantile_levels", T, ("quantile",))

    quantile_levels[:] = result.config.quantile_levels
    for (i, stat) in enumerate(stats)
        for (j, q) in enumerate(result.config.quantile_levels)
            quantile_values[i, j] = stat.quantiles[q]
        end
    end

    # Credible intervals: lower/upper bound per parameter and interval mass.
    ci_lower = defVar(ds, "credible_interval_lower", T, ("parameter", "credible_level"))
    ci_upper = defVar(ds, "credible_interval_upper", T, ("parameter", "credible_level"))
    ci_levels = defVar(ds, "credible_levels", T, ("credible_level",))

    ci_levels[:] = result.config.credible_levels
    for (i, stat) in enumerate(stats)
        for (j, level) in enumerate(result.config.credible_levels)
            lower, upper = stat.credible_intervals[level]
            ci_lower[i, j] = lower
            ci_upper[i, j] = upper
        end
    end

    # CF-style descriptive attributes.
    mean_var.attrib["long_name"] = "Posterior mean"
    median_var.attrib["long_name"] = "Posterior median"
    std_var.attrib["long_name"] = "Posterior standard deviation"
    quantile_values.attrib["long_name"] = "Posterior quantiles"
    ci_lower.attrib["long_name"] = "Credible interval lower bounds"
    ci_upper.attrib["long_name"] = "Credible interval upper bounds"
end

"""
    add_flux_aggregations_to_netcdf!(ds::Dataset, result::AnalysisResult, ::Type{T})

Add flux aggregation summaries to the NetCDF dataset along the "aggregation"
dimension. Aggregation names and descriptions are stored as comma-joined
attributes on `aggregation_mean`. No-op when there are no aggregations.
"""
function add_flux_aggregations_to_netcdf!(ds::Dataset, result::AnalysisResult{T},
                                        ::Type{T}) where T<:AbstractFloat

    if isempty(result.flux_aggregations)
        return
    end

    # (removed unused local `n_agg`)
    # Aggregation metadata
    agg_names = [agg.name for agg in result.flux_aggregations]
    agg_descriptions = [agg.description for agg in result.flux_aggregations]

    # Summary statistics per aggregation.
    agg_mean = defVar(ds, "aggregation_mean", T, ("aggregation",))
    agg_std = defVar(ds, "aggregation_std", T, ("aggregation",))
    agg_median = defVar(ds, "aggregation_median", T, ("aggregation",))

    for (i, agg) in enumerate(result.flux_aggregations)
        agg_mean[i] = agg.statistics.mean
        agg_std[i] = agg.statistics.std
        agg_median[i] = agg.statistics.median
    end

    # Store aggregation names as attributes (NetCDF string handling).
    # NOTE(review): comma-joined lists break if a description contains a comma.
    agg_mean.attrib["aggregation_names"] = join(agg_names, ",")
    agg_mean.attrib["aggregation_descriptions"] = join(agg_descriptions, ",")
    agg_mean.attrib["long_name"] = "Aggregated flux means"
    agg_std.attrib["long_name"] = "Aggregated flux standard deviations"
    agg_median.attrib["long_name"] = "Aggregated flux medians"
end

"""
    add_uncertainty_analysis_to_netcdf!(ds::Dataset, uncertainty::UncertaintyAnalysis, ::Type{T})

Add uncertainty analysis to NetCDF dataset: correlation matrix, posterior
(and, when available, prior) parameter uncertainties, and PCA results.
Prior-related variables are written only when the prior uncertainties are
not all NaN (the sentinel for "no prior covariance supplied").
"""
function add_uncertainty_analysis_to_netcdf!(ds::Dataset, uncertainty::UncertaintyAnalysis{T},
                                           ::Type{T}) where T<:AbstractFloat

    # Correlation matrix
    corr_var = defVar(ds, "correlation_matrix", T, ("parameter", "parameter"))
    corr_var[:, :] = uncertainty.correlation_matrix
    corr_var.attrib["long_name"] = "Parameter correlation matrix"

    # Posterior parameter uncertainties (standard deviations)
    uncert_var = defVar(ds, "parameter_uncertainties", T, ("parameter",))
    uncert_var[:] = uncertainty.parameter_uncertainties
    uncert_var.attrib["long_name"] = "Posterior parameter uncertainties"

    # Prior uncertainties (if available)
    if !all(isnan.(uncertainty.prior_uncertainties))
        prior_uncert_var = defVar(ds, "prior_uncertainties", T, ("parameter",))
        prior_uncert_var[:] = uncertainty.prior_uncertainties
        prior_uncert_var.attrib["long_name"] = "Prior parameter uncertainties"

        uncert_reduction_var = defVar(ds, "uncertainty_reduction", T, ("parameter",))
        uncert_reduction_var[:] = uncertainty.uncertainty_reduction
        uncert_reduction_var.attrib["long_name"] = "Relative uncertainty reduction"
    end

    # PCA results
    if !isempty(uncertainty.pca_eigenvalues)
        defDim(ds, "pca_component", length(uncertainty.pca_eigenvalues))

        pca_eigenvals = defVar(ds, "pca_eigenvalues", T, ("pca_component",))
        pca_eigenvals[:] = uncertainty.pca_eigenvalues
        pca_eigenvals.attrib["long_name"] = "PCA eigenvalues"

        pca_explained = defVar(ds, "pca_explained_variance", T, ("pca_component",))
        pca_explained[:] = uncertainty.pca_explained_variance
        pca_explained.attrib["long_name"] = "PCA cumulative explained variance"

        # NOTE(review): this write requires pca_components to be
        # n_parameters × n_components — confirm compute_pca_analysis returns
        # parameter-space eigenvectors with that shape.
        pca_components = defVar(ds, "pca_components", T, ("parameter", "pca_component"))
        pca_components[:, :] = uncertainty.pca_components
        pca_components.attrib["long_name"] = "PCA components (eigenvectors)"
    end
end

"""
    add_posterior_predictive_to_netcdf!(ds::Dataset, pp::PosteriorPredictive, ::Type{T})

Write posterior predictive diagnostics into `ds`: per-observation vectors
along a new "observation" dimension, plus scalar coverage/outlier summaries
as global attributes.
"""
function add_posterior_predictive_to_netcdf!(ds::Dataset, pp::PosteriorPredictive{T},
                                           ::Type{T}) where T<:AbstractFloat

    defDim(ds, "observation", length(pp.observed))

    # Observations and predictive moments.
    observed = defVar(ds, "observations", T, ("observation",))
    pred_mean = defVar(ds, "predicted_mean", T, ("observation",))
    pred_std = defVar(ds, "predicted_std", T, ("observation",))

    observed[:] = pp.observed
    pred_mean[:] = pp.predicted_mean
    pred_std[:] = pp.predicted_std

    # Raw and standardized residuals.
    resid = defVar(ds, "residuals", T, ("observation",))
    resid_std = defVar(ds, "standardized_residuals", T, ("observation",))

    resid[:] = pp.residuals
    resid_std[:] = pp.standardized_residuals

    # Scalar diagnostics go on the dataset as global attributes.
    ds.attrib["coverage_50"] = pp.coverage_50
    ds.attrib["coverage_90"] = pp.coverage_90
    ds.attrib["coverage_95"] = pp.coverage_95
    ds.attrib["n_outliers"] = length(pp.outlier_indices)
    ds.attrib["outlier_threshold"] = pp.outlier_threshold

    # Descriptive attributes.
    observed.attrib["long_name"] = "Observed values"
    pred_mean.attrib["long_name"] = "Posterior predictive mean"
    pred_std.attrib["long_name"] = "Posterior predictive standard deviation"
    resid.attrib["long_name"] = "Model-observation residuals"
    resid_std.attrib["long_name"] = "Standardized residuals"
end

"""
    add_metadata_to_netcdf!(ds::Dataset, result::AnalysisResult, analyzer::PosteriorAnalyzer)

Add comprehensive metadata to the NetCDF dataset as global attributes:
provenance, sample sizes, the configuration lists (comma-joined), quality
indicators, and — when a domain is attached — its grid/extent parameters.
"""
function add_metadata_to_netcdf!(ds::Dataset, result::AnalysisResult{T},
                                analyzer::PosteriorAnalyzer{T}) where T<:AbstractFloat

    # General metadata / provenance
    ds.attrib["title"] = "FLEXINVERT MCMC Posterior Analysis"
    ds.attrib["source"] = "FLEXINVERT.jl"
    ds.attrib["creation_date"] = string(now())
    ds.attrib["analysis_time_seconds"] = result.analysis_time

    # Sample information
    ds.attrib["n_samples"] = result.n_samples
    ds.attrib["n_parameters"] = result.n_parameters

    # Configuration lists, comma-joined for NetCDF attribute storage
    ds.attrib["credible_levels"] = join(result.config.credible_levels, ",")
    ds.attrib["quantile_levels"] = join(result.config.quantile_levels, ",")
    ds.attrib["spatial_aggregations"] = join(result.config.spatial_aggregations, ",")
    ds.attrib["temporal_aggregations"] = join(result.config.temporal_aggregations, ",")

    # Quality metrics (count is of violated constraints, not offending draws)
    ds.attrib["convergence_trend"] = get(result.convergence_summary, "running_means_trend", "unknown")
    ds.attrib["n_constraint_violations"] = length(result.constraint_violations)

    # Domain information (if available)
    # NOTE(review): field meanings presumed from names — rllx/rlly lower-left,
    # rurx/rury upper-right, rdx/rdy grid spacing; confirm against CoreTypes.
    if analyzer.domain !== nothing
        ds.attrib["domain_nbox"] = analyzer.domain.nbox
        ds.attrib["domain_rllx"] = analyzer.domain.rllx
        ds.attrib["domain_rlly"] = analyzer.domain.rlly
        ds.attrib["domain_rurx"] = analyzer.domain.rurx
        ds.attrib["domain_rury"] = analyzer.domain.rury
        ds.attrib["domain_rdx"] = analyzer.domain.rdx
        ds.attrib["domain_rdy"] = analyzer.domain.rdy
    end
end

"""
    generate_csv_output(filepath::String, result::AnalysisResult, analyzer::PosteriorAnalyzer)

Generate CSV summary tables.

Writes two sibling files derived from `filepath`: `*_parameters.csv` with
per-parameter posterior statistics, and `*_aggregations.csv` with regional
flux aggregations (only when aggregations are present).
"""
function generate_csv_output(filepath::String, result::AnalysisResult{T},
                           analyzer::PosteriorAnalyzer{T}) where T<:AbstractFloat

    @info "Generating CSV output: $filepath"
    outdir = dirname(filepath)
    isdir(outdir) || mkpath(outdir)

    # Per-parameter posterior statistics table
    open(replace(filepath, ".csv" => "_parameters.csv"), "w") do io
        println(io, "parameter,mean,median,std,q025,q975,mode,ess,rhat")
        for (idx, st) in enumerate(result.parameter_statistics)
            # Quantiles default to NaN when the requested level was not computed
            @printf(io, "%d,%.6e,%.6e,%.6e,%.6e,%.6e,%.6e,%.2f,%.3f\n",
                   idx, st.mean, st.median, st.std,
                   get(st.quantiles, 0.025, NaN), get(st.quantiles, 0.975, NaN),
                   st.mode, st.effective_sample_size, st.rhat)
        end
    end

    # Regional flux aggregations table (skipped when none were computed)
    if !isempty(result.flux_aggregations)
        open(replace(filepath, ".csv" => "_aggregations.csv"), "w") do io
            println(io, "aggregation,mean,median,std,q025,q975,units,description")
            for agg in result.flux_aggregations
                st = agg.statistics
                @printf(io, "%s,%.6e,%.6e,%.6e,%.6e,%.6e,%s,%s\n",
                       agg.name, st.mean, st.median, st.std,
                       get(st.quantiles, 0.025, NaN), get(st.quantiles, 0.975, NaN),
                       agg.units, agg.description)
            end
        end
    end

    @info "CSV output saved: parameter tables"
end

"""
    generate_json_output(filepath::String, result::AnalysisResult, analyzer::PosteriorAnalyzer)

Generate JSON metadata and summary.

Collects the analysis configuration, result summaries, quality metrics,
posterior predictive diagnostics, uncertainty/PCA summaries, and domain
geometry (each section only when available) into a nested dictionary and
pretty-prints it to `filepath`.
"""
function generate_json_output(filepath::String, result::AnalysisResult{T},
                            analyzer::PosteriorAnalyzer{T}) where T<:AbstractFloat

    @info "Generating JSON output: $filepath"
    isdir(dirname(filepath)) || mkpath(dirname(filepath))

    # Build comprehensive metadata dictionary
    metadata = Dict{String, Any}()

    # Analysis configuration
    metadata["config"] = Dict(
        "credible_levels" => result.config.credible_levels,
        "quantile_levels" => result.config.quantile_levels,
        "spatial_aggregations" => result.config.spatial_aggregations,
        "temporal_aggregations" => result.config.temporal_aggregations,
        "compute_mode" => result.config.compute_mode,
        "compute_correlations" => result.config.compute_correlations,
        "pca_threshold" => result.config.pca_threshold,
        "max_pca_components" => result.config.max_pca_components
    )

    # Analysis results summary
    metadata["results"] = Dict(
        "n_samples" => result.n_samples,
        "n_parameters" => result.n_parameters,
        "analysis_time_seconds" => result.analysis_time,
        "convergence_summary" => result.convergence_summary,
        "n_aggregations" => length(result.flux_aggregations)
    )

    # Quality metrics
    metadata["quality"] = Dict(
        "constraint_violations" => Dict(name => length(indices) for (name, indices) in result.constraint_violations),
        "has_posterior_predictive" => result.posterior_predictive !== nothing,
        "has_uncertainty_analysis" => result.uncertainty_analysis !== nothing
    )

    # Posterior predictive summary (if available)
    if result.posterior_predictive !== nothing
        pp = result.posterior_predictive
        metadata["posterior_predictive"] = Dict(
            "n_observations" => length(pp.observed),
            "coverage_50" => pp.coverage_50,
            "coverage_90" => pp.coverage_90,
            "coverage_95" => pp.coverage_95,
            "n_outliers" => length(pp.outlier_indices),
            "outlier_threshold" => pp.outlier_threshold,
            "rmse" => sqrt(mean(abs2, pp.residuals)),
            "mae" => mean(abs, pp.residuals)
        )
    end

    # Uncertainty analysis summary (if available)
    if result.uncertainty_analysis !== nothing
        ua = result.uncertainty_analysis
        # Maximum absolute off-diagonal correlation. The diagonal is excluded
        # because self-correlations are identically 1.
        # NOTE: the previous implementation indexed with `.!LinearAlgebra.I`,
        # which throws — `I` is a UniformScaling and supports neither
        # broadcasting nor logical indexing.
        C = ua.correlation_matrix
        n = size(C, 1)
        max_offdiag_corr = n > 1 ?
            maximum(abs(C[i, j]) for j in 1:n for i in 1:n if i != j) : 0.0
        metadata["uncertainty_analysis"] = Dict(
            "n_pca_components" => length(ua.pca_eigenvalues),
            "pca_explained_variance" => isempty(ua.pca_explained_variance) ? 0.0 : ua.pca_explained_variance[end],
            # Average over finite entries only; NaN when nothing is finite
            "mean_uncertainty_reduction" => any(!isnan, ua.uncertainty_reduction) ? mean(filter(!isnan, ua.uncertainty_reduction)) : NaN,
            "max_correlation" => max_offdiag_corr
        )
    end

    # Domain information (if available)
    if analyzer.domain !== nothing
        metadata["domain"] = Dict(
            "nbox" => analyzer.domain.nbox,
            "nxregrid" => analyzer.domain.nxregrid,
            "nyregrid" => analyzer.domain.nyregrid,
            "rllx" => analyzer.domain.rllx,
            "rlly" => analyzer.domain.rlly,
            "rurx" => analyzer.domain.rurx,
            "rury" => analyzer.domain.rury,
            "rdx" => analyzer.domain.rdx,
            "rdy" => analyzer.domain.rdy
        )
    end

    # Write JSON file
    open(filepath, "w") do io
        JSON.print(io, metadata, 2)  # Pretty print with 2-space indentation
    end

    @info "JSON output saved: $filepath"
end

# ============================================================================
# Utility and Validation Functions
# ============================================================================

"""
    validate_analysis_quality(result::AnalysisResult)

Validate the quality of the posterior analysis.

Returns `(ok::Bool, issues::Vector{String})`, where `ok` is `true` when no
quality problems were found. Checks sample size, effective sample sizes,
convergence trend, constraint violations, and (when available) posterior
predictive coverage and outlier counts.
"""
function validate_analysis_quality(result::AnalysisResult{T}) where T<:AbstractFloat
    problems = String[]

    # Minimum sample count for reliable posterior summaries
    result.n_samples < 1000 &&
        push!(problems, "Low sample size ($(result.n_samples) < 1000)")

    # Effective sample size per parameter
    n_low_ess = count(s -> s.effective_sample_size < 100, result.parameter_statistics)
    n_low_ess > 0 &&
        push!(problems, "$(n_low_ess) parameters have low effective sample size (< 100)")

    # Running-means convergence diagnostic
    if get(result.convergence_summary, "running_means_trend", nothing) == "unstable"
        push!(problems, "Chain appears unstable based on running means")
    end

    # Physical constraint violations
    if !isempty(result.constraint_violations)
        n_violations = sum(length, values(result.constraint_violations))
        push!(problems, "$(n_violations) constraint violations detected")
    end

    # Posterior predictive diagnostics, when available
    pp = result.posterior_predictive
    if pp !== nothing
        # Nominal 95% intervals should cover close to 95% of observations
        pp.coverage_95 < 0.90 &&
            push!(problems, "Poor 95% prediction interval coverage: $(round(pp.coverage_95, digits=3))")

        # Flag when more than 10% of observations are outliers
        length(pp.outlier_indices) > 0.1 * length(pp.observed) &&
            push!(problems, "High number of outliers: $(length(pp.outlier_indices))")
    end

    if isempty(problems)
        @info "Analysis quality validation passed"
        return true, String[]
    end

    @warn "Analysis quality issues detected:\n" * join(problems, "\n")
    return false, problems
end

"""
    compare_prior_posterior(analyzer::PosteriorAnalyzer, result::AnalysisResult)

Compare prior and posterior distributions for information gain assessment.

Returns a `Dict{String, Any}` with prior/posterior moments, per-parameter
uncertainty reduction, parameter shifts in prior-sigma units, and (when a
posterior covariance is available) a Gaussian KL divergence. Returns
`nothing` when the analyzer carries no prior information.
"""
function compare_prior_posterior(analyzer::PosteriorAnalyzer{T},
                                result::AnalysisResult{T}) where T<:AbstractFloat

    if analyzer.prior_mean === nothing || analyzer.prior_covariance === nothing
        @warn "Prior information not available for comparison"
        return nothing
    end

    # Marginal standard deviations of the prior
    σ_prior = sqrt.(diag(analyzer.prior_covariance))

    # Posterior marginal moments from the per-parameter statistics
    μ_post = [s.mean for s in result.parameter_statistics]
    σ_post = [s.std for s in result.parameter_statistics]

    comparison = Dict{String, Any}(
        "prior_mean" => analyzer.prior_mean,
        "posterior_mean" => μ_post,
        "prior_std" => σ_prior,
        "posterior_std" => σ_post,
    )

    # Relative shrinkage of marginal uncertainty
    shrinkage = (σ_prior - σ_post) ./ σ_prior
    comparison["uncertainty_reduction"] = shrinkage
    comparison["mean_uncertainty_reduction"] = mean(shrinkage)

    # Mean shifts measured in units of prior standard deviation
    shifts = abs.(μ_post - analyzer.prior_mean) ./ σ_prior
    comparison["parameter_shifts"] = shifts
    comparison["max_parameter_shift"] = maximum(shifts)

    # Gaussian-approximation KL divergence, prior against posterior
    if result.uncertainty_analysis !== nothing
        try
            comparison["kl_divergence"] = kl_divergence_gaussian(
                analyzer.prior_mean, analyzer.prior_covariance,
                μ_post, result.uncertainty_analysis.covariance_matrix)
        catch e
            @warn "Could not compute KL divergence: $e"
            comparison["kl_divergence"] = NaN
        end
    end

    @info "Prior-posterior comparison completed"
    return comparison
end

"""
    kl_divergence_gaussian(μ1, Σ1, μ2, Σ2)

Compute the Kullback-Leibler divergence `KL(p ‖ q)` between two multivariate
Gaussians `p = N(μ1, Σ1)` and `q = N(μ2, Σ2)` via the closed form

    KL = 0.5 * (tr(Σ2⁻¹ Σ1) + (μ2-μ1)ᵀ Σ2⁻¹ (μ2-μ1) - d + log(det Σ2 / det Σ1))

Linear systems are solved with a single LU factorization of `Σ2` and `\\`
instead of forming an explicit inverse, which is cheaper and numerically
more stable.

# Throws
- `DimensionMismatch` if the means/covariances have inconsistent sizes.
"""
function kl_divergence_gaussian(μ1::Vector{T}, Σ1::Matrix{T},
                              μ2::Vector{T}, Σ2::Matrix{T}) where T<:AbstractFloat

    d = length(μ1)
    # Validate shapes up front so failures are clear ArgumentError-style
    # exceptions rather than cryptic BLAS/LAPACK errors.
    length(μ2) == d || throw(DimensionMismatch("mean vectors differ in length"))
    size(Σ1) == (d, d) || throw(DimensionMismatch("Σ1 must be $(d)×$(d)"))
    size(Σ2) == (d, d) || throw(DimensionMismatch("Σ2 must be $(d)×$(d)"))

    # Factor Σ2 once and reuse it for both solves
    Σ2_fact = lu(Σ2)
    μ_diff = μ2 - μ1

    trace_term = tr(Σ2_fact \ Σ1)
    quadratic_term = dot(μ_diff, Σ2_fact \ μ_diff)
    log_det_term = logdet(Σ2) - logdet(Σ1)

    return 0.5 * (trace_term + quadratic_term - d + log_det_term)
end

"""
    validate_samples(samples::Vector{PosteriorSample})

Validate input samples for analysis.

Throws `ArgumentError` when the sample vector is empty or when any sample's
parameter vector has a different length than the first sample's. Emits a
warning (non-fatal) for samples with a non-finite log-posterior.
"""
function validate_samples(samples::Vector{PosteriorSample{T}}) where T<:AbstractFloat
    isempty(samples) && throw(ArgumentError("No samples provided for analysis"))

    # All samples must share the dimensionality of the first one
    n_params = length(first(samples).x_phys)
    for (idx, s) in enumerate(samples)
        length(s.x_phys) == n_params ||
            throw(ArgumentError("Inconsistent parameter dimensions in sample $idx"))

        # Non-finite posteriors are suspicious but not fatal at this stage
        isfinite(s.log_posterior) || @warn "Non-finite log-posterior in sample $idx"
    end

    @info "Sample validation passed: $(length(samples)) samples with $n_params parameters"
end

"""
    create_standard_aggregations(domain::Domain, config::AnalysisConfig)

Create standard regional aggregations based on domain.

Builds area-weighted `RegionalAggregation`s for each requested spatial
aggregation name ("global", "northern_hemisphere", "southern_hemisphere").
Hemisphere membership is decided by each box's latitude, with `lat >= 0`
counted as Northern Hemisphere.
"""
function create_standard_aggregations(domain::Domain, config::AnalysisConfig)
    aggregations = RegionalAggregation[]
    requested = config.spatial_aggregations

    # Global total over all state-vector boxes
    if "global" in requested
        all_boxes = collect(1:domain.nbox)
        push!(aggregations,
              RegionalAggregation("global", all_boxes, domain.area_box[all_boxes], "Global total"))
    end

    # Split boxes by hemisphere only when at least one hemisphere was requested
    want_nh = "northern_hemisphere" in requested
    want_sh = "southern_hemisphere" in requested
    if want_nh || want_sh
        nh = Int[]
        sh = Int[]

        for box in 1:domain.nbox
            # Latitude of this box's grid cell: lower-left origin plus cell size
            _, jy = find_box_indices(domain, box)
            lat = domain.rlly + (jy - 1) * domain.rdy
            push!(lat >= 0.0 ? nh : sh, box)
        end

        if want_nh && !isempty(nh)
            push!(aggregations,
                  RegionalAggregation("northern_hemisphere", nh, domain.area_box[nh], "Northern Hemisphere"))
        end

        if want_sh && !isempty(sh)
            push!(aggregations,
                  RegionalAggregation("southern_hemisphere", sh, domain.area_box[sh], "Southern Hemisphere"))
        end
    end

    return aggregations
end

"""
    find_box_indices(domain::Domain, box_id::Int)

Find grid indices for a given box ID (imported from OutputWriter logic).

Returns `(ix, jy)` for the first grid cell whose `nbox_xy` entry equals
`box_id`, scanning column-major; falls back to `(1, 1)` when the box ID
does not occur in the mapping.
"""
function find_box_indices(domain::Domain, box_id::Int)
    # findfirst scans column-major (first index fastest), which matches the
    # original nested-loop search order over (ix, jy).
    hit = findfirst(==(box_id), domain.nbox_xy)
    hit === nothing && return 1, 1  # fallback for unmapped box IDs
    return hit[1], hit[2]
end

end # module Analysis