# Port scaffold for average_fp.f90 and related footprint handling

module Footprints

using NCDatasets
using Printf: @sprintf
using Dates
using Base.Filesystem: mkpath
using ..FLEXINVERT.DatesUtil: juldate, caldate
using ..ObservationsCore: ObservationRecord, Observations
using ..Settings: Files, Config
using ..CoreTypes: Domain

export FootprintData, read_footprint_ncdf, load_observation_footprints, average_footprints!

# Scale factor matching the Fortran NUMSCALE: footprint grids are divided by
# this (times the surface-layer height) in load_observation_footprints.
const FOOTPRINT_NUMSCALE = 1.0e12
# Fallback surface-layer height used when domain.surface_layer_height <= 0.
# Presumably metres (FLEXPART outheight(1)) — TODO confirm units.
const DEFAULT_SURFACE_LAYER_HEIGHT = 100.0

"""
    FootprintData

Footprint of a single observation: the regional grid cube with its time and
coordinate axes, plus an optional global-domain grid and its coordinates.
"""
struct FootprintData
    grid::Array{Float64,3}      # (lon, lat, time) regional domain
    gtime::Vector{Float64}      # Julian dates per time slice
    lon::Vector{Float64}
    lat::Vector{Float64}
    global_grid::Union{Array{Float64,3},Nothing}
    global_lon::Vector{Float64}
    global_lat::Vector{Float64}
end

# Convenience constructor: the global-domain fields default to "absent"
# (`nothing` grid, empty coordinate vectors).
function FootprintData(grid::Array{Float64,3}, gtime::Vector{Float64},
                       lon::Vector{Float64}, lat::Vector{Float64};
                       global_grid::Union{Array{Float64,3},Nothing}=nothing,
                       global_lon::Vector{Float64}=Float64[],
                       global_lat::Vector{Float64}=Float64[])
    return FootprintData(grid, gtime, lon, lat, global_grid, global_lon, global_lat)
end


# Return the index in `dim_names` of the first candidate name found, searching
# the candidates in priority order; `nothing` when no candidate matches.
function find_axis(dim_names::Vector{String}, candidates::Vector{String})
    for name in candidates
        pos = findfirst(isequal(name), dim_names)
        pos === nothing || return pos
    end
    return nothing
end

# Build the ordered list of candidate NetCDF footprint paths for one release:
# both with and without the month subdirectory, the "_001" variant first.
# Returns an empty list when no base path is configured.
function build_nc_candidates(base::AbstractString, station::AbstractString, adate::AbstractString, areldate::AbstractString, areltime::AbstractString)
    isempty(base) && return String[]
    stamp = "$(areldate)$(areltime)"
    candidates = String[]
    for fname in ("grid_time_$(stamp)_001.nc", "grid_time_$(stamp).nc")
        push!(candidates, joinpath(base, station, adate, fname))
        push!(candidates, joinpath(base, station, fname))
    end
    return candidates
end

# Return the first readable NetCDF footprint among `paths` (grid divided by
# `scaling`), or `nothing` when no candidate exists or all reads fail.
# Non-".nc" entries are skipped; read failures are logged and skipped.
function load_first_footprint(paths::Vector{String}, scaling::Float64)
    for candidate in paths
        (endswith(candidate, ".nc") && isfile(candidate)) || continue
        try
            g, t, lons, lats = read_footprint_ncdf(candidate)
            g ./= scaling
            return (grid=g, gtime=t, lon=lons, lat=lats)
        catch err
            @warn "Failed to read footprint file $candidate" exception=err
        end
    end
    return nothing
end

# Build the ordered list of candidate binary footprint paths for one release
# (nested grids use the "grid_time_nest_" prefix).  Returns an empty list when
# no base path is configured.
function build_bin_candidates(base::AbstractString, station::AbstractString, adate::AbstractString, areldate::AbstractString, areltime::AbstractString; nested::Bool=false)
    isempty(base) && return String[]
    fname = string(nested ? "grid_time_nest_" : "grid_time_", areldate, areltime, "_001")
    return String[
        joinpath(base, station, adate, fname),
        joinpath(base, station, fname),
    ]
end

"""
    read_footprint_binary(filename::String, domain::Domain) -> (grid, gtime, lon, lat)

Read a FLEXPART binary footprint file written by Fortran (unformatted sequential records)
and return the regional grid cube and per-slice Julian times.

Each time slice consists of a date record (YYYYMMDD), a time record (HHMMSS),
and FLEXPART's sparse-grid encoding (an index payload and a value payload).
Slices are returned sorted by increasing time.
"""
function read_footprint_binary(filename::String, domain::Domain)
    isfile(filename) || error("read_footprint_binary: cannot find file: $filename")

    # Grid size: prefer the global domain, fall back to the regional one.
    nx = domain.global_nxgrid > 0 ? domain.global_nxgrid : domain.nxregrid
    ny = domain.global_nygrid > 0 ? domain.global_nygrid : domain.nyregrid
    nx == 0 && error("read_footprint_binary: unknown nx (domain not initialized)")
    ny == 0 && error("read_footprint_binary: unknown ny (domain not initialized)")
    xshift = domain.global_nxshift

    grid_slices = Vector{Array{Float64,2}}()
    times = Float64[]

    open(filename, "r") do io
        while !eof(io)
            # A read failure on the leading date record means truncation or
            # trailing garbage: stop decoding and keep what we have.
            rec = try
                read_fortran_record(io)
            catch
                break
            end
            length(rec) == 4 || error("Malformed record for jjjjmmdd in $filename")
            jjjjmmdd = reinterpret(Int32, rec)[1]

            rec = read_fortran_record(io)
            length(rec) == 4 || error("Malformed record for hhmiss in $filename")
            hhmiss = reinterpret(Int32, rec)[1]

            # Sparse payloads: a count record followed by the data record.
            sp_i_rec = read_fortran_record(io)
            n_sp_i = reinterpret(Int32, sp_i_rec)[1]
            sp_i_vals = reinterpret(Int32, read_fortran_record(io))
            length(sp_i_vals) == n_sp_i || error("Index payload length mismatch in $filename")

            sp_r_rec = read_fortran_record(io)
            n_sp_r = reinterpret(Int32, sp_r_rec)[1]
            sp_r_vals = reinterpret(Float32, read_fortran_record(io))
            length(sp_r_vals) == n_sp_r || error("Value payload length mismatch in $filename")

            # Decompress into grid (Fortran 1-based logic replicated):
            # FLEXPART alternates the sign of stored values to mark run starts;
            # `fact` tracks the expected sign and `n` the linear cell index.
            g2d = zeros(Float64, nx, ny)
            # BUG FIX: the original `1e-38f0` is not a Float32 literal — it
            # parses as `1e-38 * f0` (juxtaposition with an undefined variable
            # `f0`), raising UndefVarError at runtime.  `1.0f-38` is correct.
            smallnum = 1.0f-38
            scaleconc = 1.0f12
            fact = 1.0f0
            ii = 0
            n = 0
            @inbounds for ir = 1:n_sp_r
                val = sp_r_vals[ir]
                if val * fact > smallnum
                    ii += 1
                    n = sp_i_vals[ii]
                    fact = -fact
                else
                    n += 1
                end
                # Linear index -> (ix, jy); the nx*ny offset corresponds to the
                # surface layer — assumed kz = 1, TODO confirm against
                # FLEXPART's readgrid sparse-index convention.
                jy = (n - nx*ny) ÷ nx
                ix = n - nx*ny - nx*jy
                if 1 <= ix+1 <= nx && 1 <= jy+1 <= ny
                    g2d[ix+1, jy+1] = abs(Float64(val)) * Float64(scaleconc)
                end
            end

            # cyclic shift along longitude (global grids may be stored shifted)
            if xshift != 0
                g2d = circshift(g2d, (-(xshift), 0))
            end

            # Compose time stamp from date components (YYYYMMDD, HHMMSS);
            # fall back to the whole hour when the seconds field is invalid.
            y = Int(jjjjmmdd ÷ 10000)
            m = Int((jjjjmmdd ÷ 100) % 100)
            d = Int(jjjjmmdd % 100)
            H = Int(hhmiss ÷ 10000)
            Mi = Int((hhmiss ÷ 100) % 100)
            S = Int(hhmiss % 100)
            dt = try
                DateTime(y, m, d, H, Mi, S)
            catch
                DateTime(y, m, d, H)
            end
            push!(times, Dates.datetime2julian(dt))
            push!(grid_slices, g2d)
        end
    end

    ngrid = length(grid_slices)
    ngrid == 0 && error("read_footprint_binary: no slices decoded from $filename")
    grid = Array{Float64, 3}(undef, nx, ny, ngrid)
    for (k, sl) in enumerate(grid_slices)
        grid[:, :, k] = sl
    end

    # Ensure monotonic increasing time ordering
    perm = sortperm(times)
    grid = grid[:, :, perm]
    gtime = collect(times)[perm]

    # Use the domain's coordinate vectors when present; otherwise synthesize
    # regular axes from the lower-left corner and the grid spacing.
    lon = !isempty(domain.global_lon) ? domain.global_lon : range(domain.rllx, step=domain.rdx, length=nx) |> collect
    lat = !isempty(domain.global_lat) ? domain.global_lat : range(domain.rlly, step=domain.rdy, length=ny) |> collect

    return grid, gtime, lon, lat
end

"""
    read_fortran_record(io::IO) -> Vector{UInt8}

Read one Fortran unformatted sequential record: a length marker, the payload
bytes, and a matching trailing length marker.  4-byte markers (the common
compiler default) are tried first; on any failure the stream is rewound and
8-byte markers are tried.

Throws `ArgumentError` for implausible lengths, `EOFError` on truncation, and
an error when the leading/trailing markers disagree (after both attempts).
"""
function read_fortran_record(io::IO)
    start_pos = position(io)
    try
        return read_marked_record(io, UInt32)
    catch
        seek(io, start_pos)
        return read_marked_record(io, UInt64)
    end
end

# Read one record framed by length markers of unsigned type `T`.
# BUG FIX: the original called `read(io, Vector{UInt8}, len_bytes)`, a method
# that does not exist in Base (MethodError at runtime); `read(io, n)` is the
# correct form.  Since `read(io, n)` may return fewer than `n` bytes at EOF,
# short reads are detected explicitly.
function read_marked_record(io::IO, ::Type{T}) where {T<:Unsigned}
    hdr = read(io, T)
    len_bytes = Int(hdr)
    if len_bytes <= 0 || len_bytes > 100_000_000
        throw(ArgumentError("implausible record length ($(sizeof(T))-byte marker)"))
    end
    data = read(io, len_bytes)
    length(data) == len_bytes || throw(EOFError())
    trl = read(io, T)
    trl == hdr || error("Fortran record length mismatch ($hdr != $trl)")
    return data
end

# Permute a raw footprint array so its dimensions are (lon, lat, time).
# Axes are located by conventional dimension name; any axis whose name is
# missing is matched by length against the expected coordinate sizes.
# Returns the input unchanged (no copy) when already in the target order.
function reorder_to_lon_lat_time(grid_raw, dim_names::Vector{String}, lon_len::Int, lat_len::Int, time_len::Int)
    ilon = find_axis(dim_names, ["longitude", "lon", "x"])
    ilat = find_axis(dim_names, ["latitude", "lat", "y"])
    itime = find_axis(dim_names, ["time", "timesteps", "t"])
    if ilon === nothing || ilat === nothing || itime === nothing
        # Fallback: match axes by size when names are absent or unrecognized.
        sz = size(grid_raw)
        ilon === nothing && (ilon = findfirst(==(lon_len), sz))
        ilat === nothing && (ilat = findlast(==(lat_len), sz))
        itime === nothing && (itime = findfirst(==(time_len), sz))
    end
    if ilon === nothing || ilat === nothing || itime === nothing
        error("Unable to determine footprint dimension order (dims=$(size(grid_raw)), names=$(dim_names))")
    end
    order = (ilon, ilat, itime)
    order == (1, 2, 3) && return grid_raw
    return permutedims(grid_raw, order)
end

"""
    read_footprint_ncdf(filename::String) -> (grid::Array{Float64,3}, gtime::Vector{Float64}, lon::Vector{Float64}, lat::Vector{Float64})

Read a footprint NetCDF file and return the grid data, time stamps, and coordinates.
Returns grid with dimensions (longitude, latitude, time) and corresponding coordinate vectors.

Supports two layouts: a pre-averaged `grid` variable, or raw FLEXPART output
(`spec001_mr`), which is summed over its height dimension.  Note that the
`first(filter(...))` lookups below throw when no recognized coordinate/time
variable exists, so unsupported files surface as errors.
"""
function read_footprint_ncdf(filename::String)
    isfile(filename) || error("read_footprint_ncdf: cannot find file: $filename")

    NCDataset(filename) do ds
        # Identify coordinate variables (first matching conventional name wins)
        lon_name = first(filter(n -> haskey(ds, n), ["longitude", "lon", "x"]))
        lat_name = first(filter(n -> haskey(ds, n), ["latitude", "lat", "y"]))
        lon = Float64.(vec(ds[lon_name][:]))
        lat = Float64.(vec(ds[lat_name][:]))

        # Time values may be DateTimes, CF-style numbers, or encoded stamps;
        # convert_time_values normalizes all of them to Julian dates.
        time_var_name = first(filter(n -> haskey(ds, n), ["time", "timesteps", "t"]))
        raw_time = vec(ds[time_var_name][:])
        time_units = haskey(ds[time_var_name].attrib, "units") ? String(ds[time_var_name].attrib["units"]) : nothing
        gtime = convert_time_values(raw_time, time_units)

        if haskey(ds, "grid")
            var = ds["grid"]
            grid_raw = Array(var)
            dim_names = collect(String.(dimnames(var)))
            grid_aligned = reorder_to_lon_lat_time(grid_raw, dim_names, length(lon), length(lat), length(gtime))
            return Float64.(grid_aligned), gtime, lon, lat
        elseif haskey(ds, "spec001_mr")
            var = ds["spec001_mr"]
            # Dimensions ordered as (lon, lat, height, time, pointspec, nageclass)
            # NOTE(review): this ordering is assumed from FLEXPART convention —
            # confirm for the datasets actually in use.
            data = Float64.(var[:, :, :, :, 1, 1])
            data = sum(data, dims=3)  # sum over height
            data = dropdims(data, dims=3)
            dim_names = collect(String.(dimnames(var)))[1:3]  # assume first three correspond to lon/lat/time
            grid_aligned = reorder_to_lon_lat_time(data, dim_names, length(lon), length(lat), length(gtime))
            return grid_aligned, gtime, lon, lat
        else
            available = join(keys(ds), ", ")
            error("No recognized footprint variable in $filename. Available variables: $available")
        end
    end
end

"""
    load_observation_footprints(files, cfg, domain, obs) -> Vector{Union{Nothing,FootprintData}}

For each observation, locate and read its footprint — binary footprints are
tried first when `cfg.average_fp` is false, with NetCDF always kept as a
fallback — apply the composite Fortran scaling (NUMSCALE * surface-layer
height), and return one `FootprintData` per observation, or `nothing` when no
footprint could be found.  When the FLEXINVERT_DEBUG_OUTPUT environment
variable is set, per-footprint statistics are appended to
`<path_output>/footprint_debug/footprint_stats.txt`.
"""
function load_observation_footprints(files::Files, cfg::Config, domain::Domain, obs::Observations)
    n_obs = length(obs.jdates)
    footprints = Vector{Union{Nothing,FootprintData}}(undef, n_obs)
    layer_height = domain.surface_layer_height > 0 ? domain.surface_layer_height : DEFAULT_SURFACE_LAYER_HEIGHT
    # Fortran divides footprints by NUMSCALE and the surface layer height (simulate.f90:
    # grid = grid/outheight(1); grid = grid/numscale).  Apply the same composite scaling so
    # the observation operator matches the reference solver.
    scaling = FOOTPRINT_NUMSCALE * layer_height
    debug_enabled = get(ENV, "FLEXINVERT_DEBUG_OUTPUT", "0") in ("1", "true", "TRUE")
    # NOTE(review): stats_io appears unused — the stats file is opened per
    # append below; consider removing.
    stats_io = nothing
    stats_path = ""
    if debug_enabled && !isempty(files.path_output)
        stats_dir = joinpath(files.path_output, "footprint_debug")
        try
            mkpath(stats_dir)
            stats_path = joinpath(stats_dir, "footprint_stats.txt")
        catch err
            @warn "Failed to create footprint debug directory" stats_dir err
        end
    end

    for i in 1:n_obs
        station = strip(obs.stations[i])
        yyyymmdd, hhmmss = caldate(obs.jdates[i])
        yyyymm = yyyymmdd ÷ 100
        adate = @sprintf("%06d", yyyymm)
        areldate = @sprintf("%08d", yyyymmdd)
        areltime = @sprintf("%06d", hhmmss)

        # Prefer binary footprints when average_fp is false (matches Fortran analytic path)
        global_candidates = String[]
        nested_candidates = String[]
        if !cfg.average_fp
            global_candidates = build_bin_candidates(files.path_flexpart, station, adate, areldate, areltime; nested=false)
            if cfg.nested
                nested_candidates = build_bin_candidates(files.path_flexpart_nest, station, adate, areldate, areltime; nested=true)
            end
        end
        # Always keep NetCDF as fallback
        append!(global_candidates, build_nc_candidates(files.path_flexncdf, station, adate, areldate, areltime))
        if cfg.nested
            append!(nested_candidates, build_nc_candidates(files.path_flexncdf_nest, station, adate, areldate, areltime))
        end

        # Drop duplicate paths that can arise when base directories coincide.
        unique!(global_candidates)
        unique!(nested_candidates)

        # Try binary first (no scaling at read; we apply composite scaling below)
        global_data = nothing
        for path in global_candidates
            if endswith(path, ".nc")
                continue
            elseif isfile(path)
                try
                    grid, gtime, lon, lat = read_footprint_binary(path, domain)
                    # Apply composite scaling to match Fortran simulate.f90
                    grid ./= scaling
                    global_data = (grid=grid, gtime=gtime, lon=lon, lat=lat)
                    break
                catch e
                    @warn "Failed to read binary footprint" path exception=e
                end
            end
        end
        if global_data === nothing
            global_data = load_first_footprint(global_candidates, scaling)
        end

        nested_data = nothing
        if cfg.nested
            for path in nested_candidates
                if endswith(path, ".nc")
                    continue
                elseif isfile(path)
                    try
                        grid, gtime, lon, lat = read_footprint_binary(path, domain)
                        grid ./= scaling
                        nested_data = (grid=grid, gtime=gtime, lon=lon, lat=lat)
                        break
                    catch e
                        @warn "Failed to read binary nested footprint" path exception=e
                    end
                end
            end
            if nested_data === nothing
                nested_data = load_first_footprint(nested_candidates, scaling)
            end
        end

        # Nested footprint takes precedence over the global one when both exist.
        chosen = nested_data !== nothing ? nested_data : global_data

        if chosen === nothing
            # Last resort: scan the likely directories for any NetCDF footprint.
            search_dirs = String[]
            push!(search_dirs, joinpath(files.path_flexpart, station, adate))
            push!(search_dirs, joinpath(files.path_flexncdf, station, adate))
            push!(search_dirs, joinpath(files.path_flexncdf, station))
            if cfg.nested
                push!(search_dirs, joinpath(files.path_flexpart_nest, station, adate))
                push!(search_dirs, joinpath(files.path_flexncdf_nest, station, adate))
                push!(search_dirs, joinpath(files.path_flexncdf_nest, station))
            end
            search_dirs = filter(isdir, search_dirs)
            found = find_alternative_footprint(search_dirs)
            if found !== nothing && endswith(found, ".nc")
                try
                    grid, gtime, lon, lat = read_footprint_ncdf(found)
                    grid ./= scaling
                    chosen = (grid=grid, gtime=gtime, lon=lon, lat=lat)
                    if cfg.nested && occursin("nest", lowercase(found))
                        nested_data = chosen
                    else
                        global_data = chosen
                    end
                catch e
                    @warn "Failed to read fallback footprint $found" exception=e
                end
            end
        end

        if chosen === nothing
            @warn "No footprint available for observation $i ($station $areldate $areltime)"
            footprints[i] = nothing
            continue
        end

        grid = chosen.grid
        gtime = chosen.gtime
        lon = chosen.lon
        lat = chosen.lat

        global_grid = global_data === nothing ? nothing : global_data.grid
        global_lon = global_data === nothing ? Float64[] : global_data.lon
        global_lat = global_data === nothing ? Float64[] : global_data.lat

        footprints[i] = FootprintData(grid, gtime, lon, lat;
                                      global_grid=global_grid,
                                      global_lon=global_lon,
                                      global_lat=global_lat)

        # Optional debug dump of scaled and back-scaled grid statistics.
        if debug_enabled && !isempty(stats_path)
            scaled_min = minimum(grid)
            scaled_max = maximum(grid)
            scaled_sum = sum(grid)
            raw_min = scaled_min * scaling
            raw_max = scaled_max * scaling
            raw_sum = scaled_sum * scaling
            obs_time = i <= length(obs.jdates) ? obs.jdates[i] : NaN
            try
                open(stats_path, "a") do io
                    println(io, "obs=$(i) station=$(station) time=$(obs_time)")
                    println(io, @sprintf("  scaled grid: min=%.6e max=%.6e sum=%.6e", scaled_min, scaled_max, scaled_sum))
                    println(io, @sprintf("  raw grid (~*%.3e): min=%.6e max=%.6e sum=%.6e", scaling, raw_min, raw_max, raw_sum))
                    if global_grid !== nothing
                        g_scaled_min = minimum(global_grid)
                        g_scaled_max = maximum(global_grid)
                        g_scaled_sum = sum(global_grid)
                        println(io, @sprintf("  global scaled: min=%.6e max=%.6e sum=%.6e", g_scaled_min, g_scaled_max, g_scaled_sum))
                    end
                end
            catch err
                @warn "Failed to append footprint stats" stats_path err
            end
        end
    end

    return footprints
end

"""
    convert_time_values(raw_time, units) -> Vector{Float64}

Normalize heterogeneous NetCDF time representations to Julian dates.

Handles, in order: empty input; `DateTime` vectors (already decoded by the
NetCDF reader); numeric values with CF-style `"<unit> since <origin>"` units;
calendar-like encoded integers (see `decode_calendar_like_value`); and finally
a synthetic hourly timeline starting at 0.0 when nothing else applies.
"""
function convert_time_values(raw_time::AbstractVector, units::Union{String, Nothing})
    if isempty(raw_time)
        return Float64[]
    end

    if eltype(raw_time) <: Dates.AbstractDateTime
        return Float64.(Dates.datetime2julian.(raw_time))
    end

    numeric_time = raw_time isa AbstractVector{<:Real} ? Float64.(raw_time) : Float64.(collect(raw_time))

    # Case 1: Units provided in a standard "<unit> since <origin>" format
    if units !== nothing
        lower_units = lowercase(strip(units))
        if occursin("since", lower_units)
            # BUG FIX: Julia arrays are 1-based — the original indexed
            # `parts[0]`, which always threw a BoundsError on this path.
            # `limit=2` keeps the origin intact if "since" appears again.
            parts = split(lower_units, "since"; limit=2)
            unit_part = strip(parts[1])
            origin_str = strip(parts[2])
            origin_dt = parse_time_origin(origin_str)
            base_julian = Dates.datetime2julian(origin_dt)
            factor = unit_to_day_multiplier(unit_part)
            converted = base_julian .+ numeric_time .* factor
            return interpret_calendar_times(converted)
        end
    end

    # Case 2: No usable units; attempt to decode calendar-like integers
    decoded = similar(numeric_time)
    ok = falses(length(numeric_time))
    for i in eachindex(numeric_time)
        val, changed = decode_calendar_like_value(numeric_time[i])
        decoded[i] = val
        ok[i] = changed
    end

    if any(ok)
        # If some entries decoded and others did not, synthesize a regular hourly
        # timeline around the nearest decoded anchor so downstream time-index
        # lookups behave.
        if !all(ok)
            # Choose the last decoded index as anchor (typically the most recent slice)
            anchor = findlast(ok)
            anchor === nothing && (anchor = findfirst(ok))
            anchor === nothing && return decoded  # unreachable: any(ok) holds
            step = 1.0 / 24.0  # one hour in days
            # BUG FIX: back-fill indices 1:anchor-1.  The original iterated
            # `reverse(eachindex(decoded))[1:anchor-1]`, which are the LAST
            # anchor-1 indices of the vector, overwriting the forward-fill
            # region instead of the entries before the anchor.
            for i in anchor-1:-1:1
                decoded[i] = decoded[anchor] - step * (anchor - i)
            end
            # Forward-fill
            for i in anchor+1:length(decoded)
                decoded[i] = decoded[anchor] + step * (i - anchor)
            end
        end
        return decoded
    end

    # Case 3: Nothing decodable; fall back to a minimal monotonic sequence with hourly spacing
    base = 0.0
    step = 1.0 / 24.0
    for i in eachindex(numeric_time)
        decoded[i] = base + step * (i - 1)
    end
    return decoded
end

# Run the calendar-stamp decoder over every element; return the decoded copy
# when at least one value decoded, otherwise hand back the input vector
# untouched (same array, no copy).
function interpret_calendar_times(values::Vector{Float64})
    out = similar(values)
    any_decoded = false
    for idx in eachindex(values)
        out[idx], flag = decode_calendar_like_value(values[idx])
        any_decoded |= flag
    end
    return any_decoded ? out : values
end

"""
    decode_calendar_like_value(val::Float64) -> (Float64, Bool)

Detect numeric time values that actually encode a calendar stamp —
YYYYMMDD, YYYYMMDDHH, YYYYMMDDHHMM, or YYYYMMDDHHMMSS — and convert them to a
Julian date, returning `(julian, true)`.  Any value that does not look like a
valid stamp is returned unchanged as `(val, false)`.
"""
function decode_calendar_like_value(val::Float64)
    if !isfinite(val)
        return val, false
    end

    # BUG FIX: guard against Int64 overflow — `Int(round(val))` threw
    # InexactError for huge magnitudes (e.g. 1e20).  The longest supported
    # stamp (YYYYMMDDHHMMSS) has 14 digits, so >= 1e15 can never be a stamp.
    if abs(val) >= 1.0e15
        return val, false
    end

    rounded = Int(round(val))
    absval = abs(rounded)

    # Skip small magnitudes that clearly are not encoded calendar stamps
    if absval < 1_000_000
        return val, false
    end

    s = string(absval)
    ndig = length(s)
    year = month = day = hour = minute = second = 0
    # `s` contains only digits, so these parses cannot throw.
    if ndig == 8        # YYYYMMDD
        year, month, day = parse(Int, s[1:4]), parse(Int, s[5:6]), parse(Int, s[7:8])
    elseif ndig == 10   # YYYYMMDDHH
        year, month, day = parse(Int, s[1:4]), parse(Int, s[5:6]), parse(Int, s[7:8])
        hour = parse(Int, s[9:10])
    elseif ndig == 12   # YYYYMMDDHHMM
        year, month, day = parse(Int, s[1:4]), parse(Int, s[5:6]), parse(Int, s[7:8])
        hour, minute = parse(Int, s[9:10]), parse(Int, s[11:12])
    elseif ndig == 14   # YYYYMMDDHHMMSS (generalization; previously undecoded)
        year, month, day = parse(Int, s[1:4]), parse(Int, s[5:6]), parse(Int, s[7:8])
        hour, minute, second = parse(Int, s[9:10]), parse(Int, s[11:12]), parse(Int, s[13:14])
    else
        return val, false
    end

    # Basic range checks; if invalid, keep original value
    if !(1 <= month <= 12 && 1 <= day <= 31 && 0 <= hour <= 23 &&
         0 <= minute <= 59 && 0 <= second <= 59)
        return val, false
    end

    dt = try
        DateTime(year, month, day, hour, minute, second)
    catch
        # e.g. Feb 30: digits looked plausible but the date is invalid.
        return val, false
    end

    return Dates.datetime2julian(dt), true
end

# Scan each directory in order for NetCDF files; return the lexicographically
# first ".nc" path from the first directory that has any, else `nothing`.
function find_alternative_footprint(directories::Vector{String})
    for dir in directories
        isdir(dir) || continue
        candidates = filter(p -> endswith(p, ".nc"), readdir(dir; join=true))
        if !isempty(candidates)
            sort!(candidates)
            return first(candidates)
        end
    end
    return nothing
end

# Parse the origin of a CF-style time unit ("... since <origin>").
# ISO "T" separators are normalized to spaces, then progressively shorter
# date formats are tried; errors if no format matches.
function parse_time_origin(origin_str::AbstractString)
    normalized = replace(String(origin_str), "T" => " ")
    candidate_formats = (
        dateformat"yyyy-mm-dd HH:MM:SS",
        dateformat"yyyy-mm-dd HH:MM",
        dateformat"yyyy-mm-dd",
    )
    for fmt in candidate_formats
        parsed = try
            DateTime(normalized, fmt)
        catch
            nothing
        end
        parsed === nothing || return parsed
    end
    error("Unable to parse time origin '$origin_str'")
end

# Convert a CF-style time-unit word ("days", "hours", ...) into its length in
# days.  Unrecognized units are warned about and treated as seconds.
function unit_to_day_multiplier(unit_str::AbstractString)
    u = lowercase(String(unit_str))
    startswith(u, "day") && return 1.0
    startswith(u, "hour") && return 1.0 / 24.0
    startswith(u, "minute") && return 1.0 / 1440.0
    startswith(u, "second") && return 1.0 / 86400.0
    @warn "Unrecognized time unit '$unit_str' for footprint; assuming seconds"
    return 1.0 / 86400.0
end

"""
    find_release_window(obs_jdate::Float64, obs_avetime::Float64, releases::Matrix{Float64}) -> (start_idx::Int, end_idx::Int)

Find the range of releases that fall within the observation averaging window.
Returns indices into the releases array; `start_idx > end_idx` signals that no
release matched.
"""
function find_release_window(obs_jdate::Float64, obs_avetime::Float64, releases::Matrix{Float64})
    nrel = size(releases, 1)

    # First release whose start time is at or after the observation time.
    first_idx = findfirst(k -> releases[k, 1] >= obs_jdate, 1:nrel)
    first_idx === nothing && (first_idx = nrel + 1)

    # Walk forward while release end times stay inside the averaging window.
    window_end = obs_jdate + obs_avetime
    last_idx = first_idx
    while last_idx <= nrel && releases[last_idx, 2] <= window_end
        last_idx += 1
    end

    return first_idx, min(last_idx - 1, nrel)
end

"""
    average_footprints!(obs::Observations, cfg, files)

Average footprints over multiple releases to match observation time windows.
This is the main function that replicates the Fortran average_fp.f90 functionality.

For each observation, this function:
1. Finds all footprint files within the observation averaging window
2. Reads and sums the footprint grids
3. Averages by the number of footprints
4. Saves the averaged footprint as NetCDF file

The averaged footprints are saved in files.path_flexncdf with naming convention:
`{station}/{YYYYMM}/grid_time_{YYYYMMDDHHMMSS}_001.nc`
"""
function average_footprints!(obs::Observations, cfg, files)
    if !cfg.average_fp
        @info "average_fp disabled in configuration, skipping footprint averaging"
        return
    end

    @info "Starting footprint averaging for $(length(obs.jdates)) observations"

    # Storage for release information (similar to Fortran's releases array)
    releases = zeros(Float64, 1000, 2)  # max 1000 releases, [start_time, end_time]
    current_station = ""
    current_month = 0
    numpoint = 0

    for i in 1:length(obs.jdates)
        @info "Processing observation $i: $(obs.stations[i]) at $(obs.jdates[i])"

        # Extract date components
        yyyymmdd, hhmmss = caldate(obs.jdates[i])
        month = yyyymmdd ÷ 100  # YYYYMM

        # Build paths
        adate = @sprintf("%06d", month)
        path_flexrec = joinpath(files.path_flexpart, obs.stations[i], adate)
        header_file = joinpath(path_flexrec, "header")

        # Check if averaged footprint already exists
        areldate = @sprintf("%08d", yyyymmdd)
        areltime = @sprintf("%06d", hhmmss)
        output_dir = joinpath(files.path_flexncdf, obs.stations[i], adate)
        output_file = joinpath(output_dir, "grid_time_$(areldate)$(areltime)_001.nc")

        if isfile(output_file)
            @info "Averaged footprint already exists: $output_file"
            continue
        end

        # Read header if needed (new station or month); header data is cached
        # across loop iterations for observations of the same station/month.
        if obs.stations[i] != current_station || month != current_month
            @info "Reading header: $header_file"
            if !isfile(header_file)
                @warn "Cannot find header file: $header_file"
                continue
            end

            try
                releases, numpoint = read_flexpart_header(header_file)
                current_station = obs.stations[i]
                current_month = month
            catch e
                @warn "Failed to read header $header_file: $e"
                continue
            end
        end

        # Find releases within observation window
        start_idx, end_idx = find_release_window(obs.jdates[i], obs.avetime[i], releases)

        if start_idx > end_idx
            @warn "No releases found for observation $i"
            continue
        end

        @info "Found $(end_idx - start_idx + 1) releases for averaging"

        # Initialize averaging variables; the first readable footprint supplies
        # the reference time/coordinate axes that later reads are matched to.
        grid_sum = nothing
        gtime_ref = nothing
        lon_ref = nothing
        lat_ref = nothing
        nr_footprints = 0

        # Process each release in the window
        for rel_idx in start_idx:end_idx
            rel_jdate = releases[rel_idx, 1]
            rel_yyyymmdd, rel_hhmmss = caldate(rel_jdate)
            rel_hhmmss = (rel_hhmmss ÷ 100) * 100  # Truncate seconds (rounds down to the minute)

            rel_month = rel_yyyymmdd ÷ 100
            rel_adate = @sprintf("%06d", rel_month)
            rel_areldate = @sprintf("%08d", rel_yyyymmdd)
            rel_areltime = @sprintf("%06d", rel_hhmmss)

            rel_path_flexrec = joinpath(files.path_flexpart, obs.stations[i], rel_adate)
            footprint_file = joinpath(rel_path_flexrec, "grid_time_$(rel_areldate)$(rel_areltime)_001")

            # Try to read NetCDF file first (preferred), then binary
            ncdf_file = footprint_file * ".nc"
            if isfile(ncdf_file)
                try
                    grid, gtime, lon, lat = read_footprint_ncdf(ncdf_file)
                    @info "Read NetCDF footprint: $ncdf_file"

                    if grid_sum === nothing
                        # Initialize on first successful read
                        grid_sum = copy(grid)
                        gtime_ref = copy(gtime)
                        lon_ref = copy(lon)
                        lat_ref = copy(lat)
                        nr_footprints = 1
                    else
                        # Add to sum, matching time indices.  O(n_t^2) pairwise
                        # matching, but footprint time axes are short; slices
                        # with no matching reference time are silently dropped.
                        for (new_t, new_time) in enumerate(gtime)
                            for (ref_t, ref_time) in enumerate(gtime_ref)
                                if abs(new_time - ref_time) < 1e-6  # Match times
                                    grid_sum[:, :, ref_t] .+= grid[:, :, new_t]
                                    break
                                end
                            end
                        end
                        nr_footprints += 1
                    end
                catch e
                    @warn "Failed to read $ncdf_file: $e"
                end
            elseif isfile(footprint_file)
                @warn "Binary footprint reading not yet implemented: $footprint_file"
            else
                @warn "Cannot find footprint file: $footprint_file"
            end
        end

        if grid_sum === nothing || nr_footprints == 0
            @warn "No footprints could be read for observation $i"
            continue
        end

        # Compute average
        # NOTE(review): grid_sum comes from read_footprint_ncdf as Float64, so
        # grid_avg is a Float64 array — verify save_averaged_footprint accepts
        # that element type.
        grid_avg = grid_sum ./ Float32(nr_footprints)
        @info "Averaged $nr_footprints footprints"

        # Create output directory
        mkpath(output_dir)

        # Save averaged footprint
        save_averaged_footprint(output_file, grid_avg, gtime_ref, lon_ref, lat_ref)
        @info "Saved averaged footprint: $output_file"
    end

    @info "Footprint averaging completed"
end

"""
    read_flexpart_header(filename::String) -> (releases::Matrix{Float64}, numpoint::Int)

Read FLEXPART header file to extract release information.
Returns matrix of [start_time, end_time] in Julian days and number of release points.

NOTE: placeholder implementation — the Fortran binary header format is not
parsed yet; a fixed table of 24 hourly releases is synthesized instead.
"""
function read_flexpart_header(filename::String)
    @warn "read_flexpart_header: Using mock implementation - binary header reading not fully implemented"

    # Synthesize 24 consecutive hourly releases from a fixed example date.
    nrel = 24
    table = zeros(Float64, 1000, 2)

    start_jd = juldate(20200312, 50000)  # Example base date
    for k in 1:nrel
        table[k, 1] = start_jd + (k - 1) / 24.0  # release start
        table[k, 2] = start_jd + k / 24.0        # release end
    end

    return table, nrel
end

"""
    save_averaged_footprint(filename::String, grid::AbstractArray{<:Real,3}, gtime::Vector{Float64},
                            lon::Vector{Float64}, lat::Vector{Float64})

Save averaged footprint data to NetCDF file with the same format as input footprints.
`grid` is expected in (longitude, latitude, time) order and is written
transposed to (time, latitude, longitude), matching the reader's layout.

The `grid` argument accepts any real element type (BUG FIX: the previous
`Array{Float32,3}` annotation rejected the `Float64` grids produced by
`average_footprints!`, causing a MethodError).  Values are stored as Float32
in the file regardless.
"""
function save_averaged_footprint(filename::String, grid::AbstractArray{<:Real,3},
                                 gtime::Vector{Float64}, lon::Vector{Float64}, lat::Vector{Float64})

    # Grid dimensions: (longitude, latitude, time)
    nlon, nlat, ntime = size(grid)

    NCDataset(filename, "c") do ds
        # Define dimensions
        defDim(ds, "longitude", nlon)
        defDim(ds, "latitude", nlat)
        defDim(ds, "time", ntime)

        # Define coordinate variables
        lon_var = defVar(ds, "longitude", Float32, ("longitude",))
        lat_var = defVar(ds, "latitude", Float32, ("latitude",))
        time_var = defVar(ds, "time", Float64, ("time",))

        # Define data variable - save as (time, latitude, longitude) to match input format
        grid_var = defVar(ds, "grid", Float32, ("time", "latitude", "longitude"))

        # Add attributes
        lon_var.attrib["title"] = "longitude"
        lat_var.attrib["title"] = "latitude"
        time_var.attrib["title"] = "time"
        grid_var.attrib["title"] = "grid"
        grid_var.attrib["unit"] = "sm3/kg"

        # Write coordinate data
        lon_var[:] = lon
        lat_var[:] = lat
        time_var[:] = gtime

        # Transpose (lon, lat, time) -> (time, lat, lon) before writing
        grid_transposed = permutedims(grid, (3, 2, 1))
        grid_var[:, :, :] = grid_transposed
    end
end

end # module
