"""
    DataFormats

Module for decoding and processing various atmospheric observation data formats used in GSI.
This module provides comprehensive support for the primary data formats encountered in 
operational meteorological data assimilation:

- **BUFR (Binary Universal Form for the Representation of meteorological data)**: WMO standard for meteorological data exchange
- **PrepBUFR**: NCEP pre-processed observation format with quality control flags
- **NetCDF**: Self-describing array-oriented scientific data format  
- **ASCII/Text**: Human-readable observation formats for research and testing

The module implements efficient decoding algorithms that can handle large operational
observation datasets while maintaining numerical precision and metadata integrity.

# Key Features

- **High-Performance Decoding**: Optimized BUFR and PrepBUFR processing
- **Complete Metadata Support**: Full observation metadata and quality flags
- **Error Handling**: Robust error detection and recovery mechanisms
- **Memory Efficient**: Streaming processing for large observation files
- **Format Validation**: Comprehensive format checking and validation
- **Parallel Processing**: Multi-threaded decoding for improved performance

# Data Format Support

## BUFR (Binary Universal Form for the Representation of meteorological data)
WMO standard format used worldwide for meteorological data exchange:
- Conventional surface and upper-air observations
- Satellite radiance and retrieved products  
- Aircraft observations (ACARS, AMDAR)
- Marine observations (ships, buoys)
- Specialized observations (GPS-RO, lightning, etc.)

## PrepBUFR  
NCEP pre-processed format with enhanced quality control:
- Quality control flags and background check results
- Observation error estimates and usage flags
- Thinned and super-observation datasets
- Bias correction applied observations
"""
module DataFormats

using Dates
using NCDatasets
using HDF5
using ..GSICoreAnalysis: AbstractAnalysisConfig

# Export main types and functions
export BUFRDecoder, PrepBUFRDecoder, NetCDFReader, ASCIIReader
export ObservationFile, decode_bufr, decode_prepbufr, read_netcdf_observations
export BUFRMessage, PrepBUFRSubset, ObservationRecord
export validate_format, get_observation_count, extract_metadata

"""
    ObservationFormat

Enumeration of supported observation data formats.
"""
@enum ObservationFormat begin
    BUFR_FORMAT
    PREPBUFR_FORMAT  
    NETCDF_FORMAT
    ASCII_FORMAT
    HDF5_FORMAT
    GRIB_FORMAT
end

"""
    ObservationRecord{T<:AbstractFloat}

Generic observation record containing all metadata and quality information.

# Fields
- `observation_type::Symbol`: Type of observation (:surface, :radiosonde, :aircraft, :satellite, etc.)
- `platform_id::String`: Platform identifier (station ID, satellite name, etc.)
- `location::Tuple{T,T,T}`: Geographic location (latitude, longitude, elevation/pressure)
- `time::DateTime`: Observation time
- `value::T`: Observed value
- `observation_error::T`: Assigned observation error standard deviation
- `background_value::T`: Background (first guess) value at observation location
- `analysis_usage::Bool`: Flag indicating whether observation is used in analysis
- `quality_flags::Dict{Symbol,Int}`: Comprehensive quality control flags
- `metadata::Dict{String,Any}`: Additional observation-specific metadata
"""
struct ObservationRecord{T<:AbstractFloat}
    observation_type::Symbol
    platform_id::String
    location::Tuple{T,T,T}  # lat, lon, elev/pressure
    time::DateTime
    value::T
    observation_error::T
    background_value::T
    analysis_usage::Bool
    quality_flags::Dict{Symbol,Int}
    metadata::Dict{String,Any}
end

"""
    BUFRMessage

Structure representing a complete BUFR message with header and data sections.

# Fields
- `header::BUFRHeader`: BUFR message header information
- `data_descriptors::Vector{Int}`: BUFR data descriptors defining message structure  
- `data_values::Matrix{Union{Float64,Missing}}`: Decoded numerical data values
- `string_values::Matrix{Union{String,Missing}}`: Decoded string data values
- `subset_count::Int`: Number of data subsets in message
- `parameter_count::Int`: Number of parameters per subset
"""
struct BUFRMessage
    header::Dict{String,Any}
    data_descriptors::Vector{Int}
    data_values::Matrix{Union{Float64,Missing}}
    string_values::Matrix{Union{String,Missing}}
    subset_count::Int
    parameter_count::Int
end

"""
    BUFRDecoder

High-performance BUFR message decoder with comprehensive format support.

# Fields
- `table_directory::String`: Path to BUFR table directory containing format definitions
- `message_buffer::Vector{UInt8}`: Internal buffer for message processing
- `table_cache::Dict{String,Any}`: Cached BUFR tables for performance optimization
- `error_handling::Symbol`: Error handling mode (:strict, :permissive, :recover)
- `validation_level::Symbol`: Validation depth (:minimal, :standard, :comprehensive)
"""
mutable struct BUFRDecoder
    table_directory::String
    message_buffer::Vector{UInt8}
    table_cache::Dict{String,Any}
    error_handling::Symbol
    validation_level::Symbol
    
    function BUFRDecoder(table_directory::String; 
                         error_handling::Symbol = :standard,
                         validation_level::Symbol = :standard)
        decoder = new()
        decoder.table_directory = table_directory
        decoder.message_buffer = Vector{UInt8}()
        decoder.table_cache = Dict{String,Any}()
        decoder.error_handling = error_handling
        decoder.validation_level = validation_level
        
        # Load essential BUFR tables
        load_bufr_tables!(decoder)
        
        return decoder
    end
end

"""
    PrepBUFRDecoder

Specialized decoder for NCEP PrepBUFR format with quality control integration.

# Fields
- `bufr_decoder::BUFRDecoder`: Underlying BUFR decoder
- `qc_flag_mapping::Dict{Int,Symbol}`: Quality control flag interpretation
- `observation_type_mapping::Dict{Int,Symbol}`: Observation type code mapping
- `error_table::Dict{Symbol,Float64}`: Default observation error assignments
- `usage_flags::Dict{Int,Bool}`: Analysis usage flag interpretation
"""
struct PrepBUFRDecoder
    bufr_decoder::BUFRDecoder
    qc_flag_mapping::Dict{Int,Symbol}
    observation_type_mapping::Dict{Int,Symbol}
    error_table::Dict{Symbol,Float64}
    usage_flags::Dict{Int,Bool}
    
    function PrepBUFRDecoder(table_directory::String)
        bufr_decoder = BUFRDecoder(table_directory)
        
        # Initialize PrepBUFR-specific mappings
        qc_flag_mapping = initialize_qc_mappings()
        obs_type_mapping = initialize_observation_type_mappings()
        error_table = initialize_error_table()
        usage_flags = initialize_usage_flags()
        
        return new(bufr_decoder, qc_flag_mapping, obs_type_mapping, error_table, usage_flags)
    end
end

"""
    NetCDFReader

NetCDF observation file reader with metadata preservation.

# Fields
- `compression_support::Bool`: Enable compressed NetCDF reading
- `parallel_io::Bool`: Use parallel I/O for large files
- `metadata_cache::Dict{String,Any}`: Cached file metadata
- `dimension_mapping::Dict{String,Symbol}`: Standard dimension name mapping
"""
struct NetCDFReader
    compression_support::Bool
    parallel_io::Bool
    metadata_cache::Dict{String,Any}
    dimension_mapping::Dict{String,Symbol}
    
    function NetCDFReader(; compression_support::Bool = true, parallel_io::Bool = false)
        dimension_mapping = Dict(
            "time" => :time,
            "lat" => :latitude, 
            "latitude" => :latitude,
            "lon" => :longitude,
            "longitude" => :longitude,
            "lev" => :level,
            "level" => :level,
            "obs" => :observation
        )
        
        return new(compression_support, parallel_io, Dict{String,Any}(), dimension_mapping)
    end
end

"""
    load_bufr_tables!(decoder::BUFRDecoder)

Load and cache essential BUFR tables for message decoding.
"""
function load_bufr_tables!(decoder::BUFRDecoder)
    table_dir = decoder.table_directory
    
    # Load Table A (Data category)
    table_a_path = joinpath(table_dir, "table_a.txt")
    if isfile(table_a_path)
        decoder.table_cache["table_a"] = load_table_a(table_a_path)
    else
        @warn "BUFR Table A not found at $table_a_path, using defaults"
        decoder.table_cache["table_a"] = default_table_a()
    end
    
    # Load Table B (Element descriptors)  
    table_b_path = joinpath(table_dir, "table_b.txt")
    if isfile(table_b_path)
        decoder.table_cache["table_b"] = load_table_b(table_b_path)
    else
        @warn "BUFR Table B not found at $table_b_path, using defaults"
        decoder.table_cache["table_b"] = default_table_b()
    end
    
    # Load Table D (Sequence descriptors)
    table_d_path = joinpath(table_dir, "table_d.txt") 
    if isfile(table_d_path)
        decoder.table_cache["table_d"] = load_table_d(table_d_path)
    else
        @warn "BUFR Table D not found at $table_d_path, using defaults"
        decoder.table_cache["table_d"] = default_table_d()
    end
end

"""
    decode_bufr(decoder::BUFRDecoder, filename::String) -> Vector{BUFRMessage}

Decode all BUFR messages from a file.

# Arguments
- `decoder::BUFRDecoder`: Configured BUFR decoder instance
- `filename::String`: Path to BUFR file to decode

# Returns
- `Vector{BUFRMessage}`: Array of decoded BUFR messages

# Examples
```julia
decoder = BUFRDecoder("/path/to/bufr/tables")
messages = decode_bufr(decoder, "observations.bufr")

for message in messages
    println("Decoded $(message.subset_count) observation subsets")
end
```
"""
function decode_bufr(decoder::BUFRDecoder, filename::String)::Vector{BUFRMessage}
    messages = BUFRMessage[]
    
    open(filename, "r") do file
        file_size = stat(filename).size
        bytes_read = 0
        
        while bytes_read < file_size
            # Read BUFR message header
            header_bytes = read(file, 8)
            if length(header_bytes) < 8
                break
            end
            
            # Validate BUFR indicator
            if String(header_bytes[1:4]) != "BUFR"
                @warn "Invalid BUFR indicator at byte $bytes_read, skipping"
                seek(file, bytes_read + 1)
                bytes_read += 1
                continue
            end
            
            # Extract message length
            message_length = bytes_to_int(header_bytes[5:7])
            
            # Read complete message
            seek(file, bytes_read)
            message_bytes = read(file, message_length)
            
            if length(message_bytes) != message_length
                @warn "Incomplete BUFR message at byte $bytes_read"
                break
            end
            
            # Decode message
            try
                message = decode_bufr_message(decoder, message_bytes)
                push!(messages, message)
            catch e
                if decoder.error_handling == :strict
                    rethrow(e)
                elseif decoder.error_handling == :permissive
                    @warn "Failed to decode BUFR message at byte $bytes_read: $e"
                end
            end
            
            bytes_read += message_length
        end
    end
    
    return messages
end

"""
    decode_bufr_message(decoder::BUFRDecoder, message_bytes::Vector{UInt8}) -> BUFRMessage

Decode a single BUFR message from raw bytes.
"""
function decode_bufr_message(decoder::BUFRDecoder, message_bytes::Vector{UInt8})::BUFRMessage
    # Parse BUFR sections
    header = parse_bufr_header(message_bytes)
    data_descriptors = parse_bufr_section3(message_bytes, header)
    
    # Decode data section
    data_values, string_values = parse_bufr_section4(
        message_bytes, header, data_descriptors, decoder.table_cache
    )
    
    return BUFRMessage(
        header,
        data_descriptors,
        data_values,
        string_values,
        size(data_values, 1),  # subset_count
        size(data_values, 2)   # parameter_count
    )
end

"""
    decode_prepbufr(decoder::PrepBUFRDecoder, filename::String) -> Vector{ObservationRecord}

Decode PrepBUFR file and convert to standardized observation records.

# Arguments
- `decoder::PrepBUFRDecoder`: Configured PrepBUFR decoder
- `filename::String`: Path to PrepBUFR file

# Returns
- `Vector{ObservationRecord}`: Array of processed observation records with QC flags

# Examples
```julia
decoder = PrepBUFRDecoder("/path/to/bufr/tables")
observations = decode_prepbufr(decoder, "prepbufr.2024010100")

# Filter for temperature observations used in analysis
temp_obs = filter(obs -> obs.observation_type == :temperature && obs.analysis_usage, observations)
```
"""
function decode_prepbufr(decoder::PrepBUFRDecoder, filename::String)::Vector{ObservationRecord{Float64}}
    # Decode BUFR messages
    bufr_messages = decode_bufr(decoder.bufr_decoder, filename)
    
    observations = ObservationRecord{Float64}[]
    
    for message in bufr_messages
        # Process each subset in the message
        for subset_idx in 1:message.subset_count
            obs_records = process_prepbufr_subset(
                decoder, message, subset_idx
            )
            append!(observations, obs_records)
        end
    end
    
    return observations
end

"""
    process_prepbufr_subset(decoder::PrepBUFRDecoder, message::BUFRMessage, subset_idx::Int)

Process a single PrepBUFR subset and extract observation records.
"""
function process_prepbufr_subset(decoder::PrepBUFRDecoder, message::BUFRMessage, 
                                subset_idx::Int)::Vector{ObservationRecord{Float64}}
    observations = ObservationRecord{Float64}[]
    
    # Extract station information
    station_id = extract_station_id(message, subset_idx)
    latitude = extract_parameter(message, subset_idx, "LATITUDE") 
    longitude = extract_parameter(message, subset_idx, "LONGITUDE")
    elevation = extract_parameter(message, subset_idx, "ELEVATION")
    obs_time = extract_observation_time(message, subset_idx)
    
    # Process observation levels (surface, mandatory levels, significant levels)
    levels = extract_levels(message, subset_idx)
    
    for level in levels
        # Extract observation parameters at this level
        obs_data = extract_level_observations(message, subset_idx, level)
        
        for (param_name, param_data) in obs_data
            if ismissing(param_data.value)
                continue
            end
            
            # Convert to standardized observation record
            obs_record = create_observation_record(
                decoder,
                param_name,
                station_id,
                (latitude, longitude, level.pressure),
                obs_time,
                param_data
            )
            
            if obs_record !== nothing
                push!(observations, obs_record)
            end
        end
    end
    
    return observations
end

"""
    read_netcdf_observations(reader::NetCDFReader, filename::String) -> Vector{ObservationRecord}

Read observations from NetCDF format file.

# Arguments  
- `reader::NetCDFReader`: Configured NetCDF reader
- `filename::String`: Path to NetCDF observation file

# Returns
- `Vector{ObservationRecord}`: Standardized observation records

# Examples
```julia
reader = NetCDFReader(compression_support=true)
observations = read_netcdf_observations(reader, "satellite_obs.nc")
```
"""
function read_netcdf_observations(reader::NetCDFReader, filename::String)::Vector{ObservationRecord{Float64}}
    observations = ObservationRecord{Float64}[]
    
    Dataset(filename, "r") do ds
        # Extract dimensions
        n_obs = haskey(ds.dim, "obs") ? ds.dim["obs"] : ds.dim["observations"]
        
        # Read coordinate variables
        latitudes = haskey(ds, "latitude") ? ds["latitude"][:] : ds["lat"][:]
        longitudes = haskey(ds, "longitude") ? ds["longitude"][:] : ds["lon"][:]
        
        # Handle time coordinate
        times = extract_netcdf_times(ds)
        
        # Read observation values and metadata
        obs_values = ds["observation_value"][:]
        obs_errors = haskey(ds, "observation_error") ? ds["observation_error"][:] : 
                    fill(1.0, n_obs)  # Default error
        
        # Read quality flags
        quality_flags = haskey(ds, "quality_flag") ? ds["quality_flag"][:] : 
                       fill(0, n_obs)  # Default good quality
        
        # Read observation type
        obs_types = haskey(ds, "observation_type") ? ds["observation_type"][:] : 
                   fill("unknown", n_obs)
        
        # Create observation records
        for i in 1:n_obs
            if !ismissing(obs_values[i]) && !isnan(obs_values[i])
                obs_record = ObservationRecord{Float64}(
                    Symbol(obs_types[i]),                    # observation_type
                    "netcdf_$(i)",                          # platform_id  
                    (latitudes[i], longitudes[i], 0.0),     # location
                    times[i],                               # time
                    Float64(obs_values[i]),                 # value
                    Float64(obs_errors[i]),                 # observation_error
                    0.0,                                    # background_value (to be filled)
                    quality_flags[i] == 0,                  # analysis_usage
                    Dict(:qc_flag => quality_flags[i]),     # quality_flags
                    Dict{String,Any}()                      # metadata
                )
                
                push!(observations, obs_record)
            end
        end
    end
    
    return observations
end

"""
    validate_format(filename::String) -> ObservationFormat

Automatically detect observation file format.

# Arguments
- `filename::String`: Path to observation file

# Returns  
- `ObservationFormat`: Detected file format

# Examples
```julia
format = validate_format("observations.bufr")
@assert format == BUFR_FORMAT
```
"""
function validate_format(filename::String)::ObservationFormat
    if !isfile(filename)
        throw(ArgumentError("File does not exist: $filename"))
    end
    
    # Check file extension first
    ext = lowercase(splitext(filename)[2])
    if ext == ".nc"
        return NETCDF_FORMAT
    elseif ext == ".bufr" || ext == ".bfr"
        return BUFR_FORMAT
    elseif ext == ".h5" || ext == ".hdf5"
        return HDF5_FORMAT
    elseif ext in [".txt", ".ascii", ".dat"]
        return ASCII_FORMAT
    end
    
    # Check file content
    open(filename, "r") do file
        header = read(file, min(8, stat(filename).size))
        
        # Check for BUFR indicator
        if length(header) >= 4 && String(header[1:4]) == "BUFR"
            return BUFR_FORMAT
        end
        
        # Check for NetCDF magic number
        if length(header) >= 4 && (String(header[1:3]) == "CDF" || header[1] == 0x89)
            return NETCDF_FORMAT
        end
        
        # Check for HDF5 signature
        if length(header) >= 8 && header[1:4] == [0x89, 0x48, 0x44, 0x46]
            return HDF5_FORMAT
        end
    end
    
    # Default to ASCII if no other format detected
    return ASCII_FORMAT
end

"""
    get_observation_count(filename::String) -> Int

Get the total number of observations in a file without full decoding.

# Arguments
- `filename::String`: Path to observation file

# Returns
- `Int`: Total observation count

# Examples
```julia
count = get_observation_count("prepbufr.2024010100")
println("File contains $(count) observations")
```
"""
function get_observation_count(filename::String)::Int
    format = validate_format(filename)
    
    if format == BUFR_FORMAT
        return count_bufr_observations(filename)
    elseif format == NETCDF_FORMAT
        return count_netcdf_observations(filename)
    else
        throw(ArgumentError("Observation counting not implemented for format: $format"))
    end
end

# Helper functions for BUFR processing

"""
    bytes_to_int(bytes::Vector{UInt8}) -> Int

Interpret `bytes` as a big-endian unsigned integer (empty input yields 0).
"""
function bytes_to_int(bytes::Vector{UInt8})::Int
    return foldl((acc, b) -> (acc << 8) | Int(b), bytes; init = 0)
end

"""
    parse_bufr_header(message_bytes::Vector{UInt8}) -> Dict{String,Any}

Parse BUFR section 0: the 4-byte "BUFR" indicator, the 3-byte big-endian
total message length, and the 1-byte edition number.
"""
function parse_bufr_header(message_bytes::Vector{UInt8})::Dict{String,Any}
    return Dict{String,Any}(
        "indicator" => String(message_bytes[1:4]),
        "length"    => bytes_to_int(message_bytes[5:7]),
        "edition"   => message_bytes[8],
    )
end

"""
    parse_bufr_section3(message_bytes, header) -> Vector{Int}

Parse the BUFR descriptor section (section 3).

Placeholder: real descriptor parsing is not yet implemented; a fixed set of
common sequence descriptors is returned regardless of input.
"""
function parse_bufr_section3(message_bytes::Vector{UInt8}, header::Dict{String,Any})::Vector{Int}
    return Int[301001, 301002, 301003]
end

"""
    parse_bufr_section4(message_bytes, header, descriptors, tables)

Decode the BUFR data section (section 4).

Placeholder: returns a 1x10 subset-by-parameter grid of random numeric
values and an all-`missing` string grid until real bit-stream decoding is
implemented.
"""
function parse_bufr_section4(message_bytes::Vector{UInt8}, header::Dict{String,Any},
                             descriptors::Vector{Int}, tables::Dict{String,Any})::Tuple{Matrix{Union{Float64,Missing}}, Matrix{Union{String,Missing}}}
    n_subsets, n_params = 1, 10

    numeric = Matrix{Union{Float64,Missing}}(missing, n_subsets, n_params)
    text = Matrix{Union{String,Missing}}(missing, n_subsets, n_params)

    # Placeholder data standing in for decoded elements.
    for idx in eachindex(numeric)
        numeric[idx] = 100 * randn()
    end

    return numeric, text
end

# Initialize mapping tables

"""
    initialize_qc_mappings() -> Dict{Int,Symbol}

Map PrepBUFR quality-control flag codes (0-7) to descriptive symbols.
"""
function initialize_qc_mappings()::Dict{Int,Symbol}
    flags = (:good, :neutral, :probably_bad, :bad,
             :changed, :no_check, :not_used, :purged)
    return Dict{Int,Symbol}(code => flag for (code, flag) in zip(0:7, flags))
end

"""
    initialize_observation_type_mappings() -> Dict{Int,Symbol}

Map NCEP PrepBUFR report-type codes to observation-type symbols.
"""
function initialize_observation_type_mappings()::Dict{Int,Symbol}
    return Dict{Int,Symbol}(
        120 => :radiosonde,
        180 => :surface,
        220 => :aircraft,
        242 => :satwind,
        260 => :surface_marine,
    )
end

"""
    initialize_error_table() -> Dict{Symbol,Float64}

Default observation error standard deviations by variable (SI-adjacent
units noted inline).
"""
function initialize_error_table()::Dict{Symbol,Float64}
    defaults = [
        :temperature => 1.0,        # K
        :pressure => 100.0,         # Pa
        :humidity => 0.1,           # kg/kg
        :wind_u => 2.0,             # m/s
        :wind_v => 2.0,             # m/s
        :surface_pressure => 50.0,  # Pa
    ]
    return Dict{Symbol,Float64}(defaults)
end

"""
    initialize_usage_flags() -> Dict{Int,Bool}

Interpret PrepBUFR analysis-usage codes: only +1 marks an observation as
usable; 0 (monitoring only) and -1 (do not use) are excluded.
"""
function initialize_usage_flags()::Dict{Int,Bool}
    return Dict{Int,Bool}(1 => true, 0 => false, -1 => false)
end

# Default BUFR tables (minimal implementations)

"""
    default_table_a() -> Dict{Int,String}

Minimal built-in BUFR Table A (data category names), used when no table
file is found on disk.
"""
function default_table_a()::Dict{Int,String}
    categories = [
        "Surface data",
        "Sub-surface data",
        "Vertical soundings",
        "Vertical soundings (satellite)",
        "Single level upper-air data",
        "Single level upper-air data (satellite)",
    ]
    # Category codes are zero-based.
    return Dict{Int,String}(i - 1 => categories[i] for i in eachindex(categories))
end

"""
    default_table_b() -> Dict{Int,Dict{String,Any}}

Minimal built-in BUFR Table B (element descriptors: name, unit, scale,
reference value, bit width), used when no table file is found on disk.
"""
function default_table_b()::Dict{Int,Dict{String,Any}}
    # Small closure to avoid repeating the five entry keys per descriptor.
    entry(name, unit, scale, reference, width) = Dict{String,Any}(
        "name" => name, "unit" => unit, "scale" => scale,
        "reference" => reference, "width" => width,
    )
    return Dict{Int,Dict{String,Any}}(
        1001 => entry("WMO block number", "CODE TABLE", 0, 0, 7),
        1002 => entry("WMO station number", "CODE TABLE", 0, 0, 10),
        5001 => entry("Latitude", "DEGREE", 5, -9000000, 25),
        6001 => entry("Longitude", "DEGREE", 5, -18000000, 26),
    )
end

"""
    default_table_d() -> Dict{Int,Vector{Int}}

Minimal built-in BUFR Table D (sequence descriptor expansions), used when
no table file is found on disk.
"""
function default_table_d()::Dict{Int,Vector{Int}}
    return Dict{Int,Vector{Int}}(
        301001 => [1001, 1002],       # station identification
        301002 => [5001, 6001],       # position (lat/lon)
        301003 => [4001, 4002, 4003], # date/time
    )
end

# Placeholder helper functions (would need full implementation)

# Placeholder: real decoding would read the station-identification
# descriptors from the subset.
extract_station_id(message::BUFRMessage, subset_idx::Int)::String = "PLACEHOLDER_STATION"

# Placeholder: always reports 0.0 regardless of the requested parameter.
extract_parameter(message::BUFRMessage, subset_idx::Int, param_name::String)::Float64 = 0.0

# Placeholder: reports the current wall-clock time, not the encoded
# observation time.
extract_observation_time(message::BUFRMessage, subset_idx::Int)::DateTime = now()

# Placeholder: a single surface level at standard atmospheric pressure.
extract_levels(message::BUFRMessage, subset_idx::Int)::Vector{Any} =
    Any[Dict("pressure" => 1013.25)]

# Placeholder: one hard-coded temperature report with a passing QC flag.
function extract_level_observations(message::BUFRMessage, subset_idx::Int, level::Any)::Dict{String,Any}
    return Dict{String,Any}("temperature" => Dict("value" => 20.0, "qc_flag" => 0))
end

"""
    create_observation_record(decoder, param_name, station_id, location, obs_time, param_data)

Build an `ObservationRecord{Float64}` from one decoded PrepBUFR parameter,
or return `nothing` when no record can be constructed.

Placeholder: error, background, and usage values are fixed until the real
PrepBUFR error-table and usage-flag lookups are wired in; `param_data` is
expected to be a Dict carrying "value" and "qc_flag" keys.
"""
function create_observation_record(decoder::PrepBUFRDecoder, param_name::String, 
                                 station_id::String, location::Tuple{Float64,Float64,Float64},
                                 obs_time::DateTime, param_data::Any)::Union{ObservationRecord{Float64},Nothing}
    qc = Dict(:qc_flag => param_data["qc_flag"])
    return ObservationRecord{Float64}(
        Symbol(param_name),
        station_id,
        location,
        obs_time,
        param_data["value"],
        1.0,   # observation_error (placeholder)
        0.0,   # background_value (filled later)
        true,  # analysis_usage
        qc,
        Dict{String,Any}(),
    )
end

"""
    extract_netcdf_times(ds::Dataset) -> Vector{DateTime}

Extract per-observation times from a NetCDF dataset.

NOTE(review): CF time decoding (parsing the "units" attribute such as
"seconds since 1970-01-01") is not yet implemented; this placeholder
returns the current wall-clock time for every observation. The previous
version also read the units attribute into an unused local.
"""
function extract_netcdf_times(ds::Dataset)::Vector{DateTime}
    # Determine how many timestamps are needed: the time variable's length
    # when present, otherwise the observation dimension.
    n_times = if haskey(ds, "time")
        length(ds["time"])
    else
        haskey(ds.dim, "obs") ? ds.dim["obs"] : ds.dim["observations"]
    end

    # fill() evaluates now() once, so all placeholder timestamps are
    # identical instead of drifting across the loop.
    return fill(now(), n_times)
end

"""
    count_bufr_observations(filename::String) -> Int

Count BUFR messages in a file by walking section-0 headers, without
decoding message bodies. Scanning stops at the first byte position that
does not start a valid "BUFR" header.
"""
function count_bufr_observations(filename::String)::Int
    n_messages = 0
    open(filename, "r") do file
        file_size = stat(filename).size
        offset = 0

        while offset < file_size
            seek(file, offset)
            header_bytes = read(file, min(8, file_size - offset))
            if length(header_bytes) < 8 || String(header_bytes[1:4]) != "BUFR"
                break
            end

            message_length = bytes_to_int(header_bytes[5:7])
            # BUG FIX: a declared length shorter than section 0 (notably 0)
            # previously never advanced the offset, spinning forever on a
            # malformed header; stop the scan instead.
            message_length < 8 && break

            offset += message_length
            n_messages += 1
        end
    end
    return n_messages
end

"""
    count_netcdf_observations(filename::String) -> Int

Return the length of the observation dimension ("obs", or falling back to
"observations") of a NetCDF file.
"""
function count_netcdf_observations(filename::String)::Int
    # The do-block's result is the Dataset(...) call's return value, which
    # in turn is this function's return value.
    return Dataset(filename, "r") do ds
        haskey(ds.dim, "obs") ? ds.dim["obs"] : ds.dim["observations"]
    end
end

end # module DataFormats