"""
    test_types.jl

Test suite for core types and data structures in GSICoreAnalysis.jl.
"""

using Test
using LinearAlgebra
using GSICoreAnalysis

@testset "Core Types Tests" begin
    
    @testset "AnalysisConfig Construction and Properties" begin
        # Default construction yields a Float64-parameterized config with
        # the standard global grid.
        cfg = AnalysisConfig()
        @test cfg isa AnalysisConfig{Float64}
        @test cfg.precision == Float64
        @test cfg.grid_size == (360, 180, 64)  # Default

        # Keyword construction must round-trip every supplied value.
        cfg_kw = AnalysisConfig{Float64}(
            grid_size = (48, 24, 16),
            ensemble_size = 25,
            hybrid_coeff = 0.75,
            use_hybrid = true,
            max_iterations = 100,
            convergence_tol = 1e-6
        )

        @test cfg_kw.grid_size == (48, 24, 16)
        @test cfg_kw.ensemble_size == 25
        @test cfg_kw.hybrid_coeff == 0.75
        @test cfg_kw.use_hybrid == true
        @test cfg_kw.max_iterations == 100
        @test cfg_kw.convergence_tol == 1e-6

        # Single-precision parameterization should propagate to the
        # floating-point fields.
        cfg_f32 = AnalysisConfig{Float32}(grid_size = (32, 16, 8))
        @test cfg_f32 isa AnalysisConfig{Float32}
        @test cfg_f32.precision == Float32
        @test cfg_f32.hybrid_coeff isa Float32
        @test cfg_f32.convergence_tol isa Float32
    end
    
    @testset "GridDimensions" begin
        # Positional constructor: total cell count is nx * ny * nz.
        dims_pos = GridDimensions(10, 8, 5)
        for (field, expected) in zip((:nx, :ny, :nz, :total), (10, 8, 5, 400))
            @test getfield(dims_pos, field) == expected
        end

        # The tuple constructor must produce the same field layout.
        dims_tup = GridDimensions((12, 6, 4))
        for (field, expected) in zip((:nx, :ny, :nz, :total), (12, 6, 4, 288))
            @test getfield(dims_tup, field) == expected
        end
    end
    
    @testset "AnalysisVariables" begin
        # The stock variable set carries nine analysis fields.
        defaults = default_analysis_variables()
        @test defaults isa AnalysisVariables
        @test length(defaults.variables) == 9  # u, v, t, q, ps, oz, cw, stl, sti
        @test defaults.total_size > 0

        # Each expected analysis variable must be present by name.
        present = [v.name for v in defaults.variables]
        for expected in ("u", "v", "t", "q", "ps", "oz", "cw", "stl", "sti")
            @test expected in present
        end

        # Custom variable list: (name, levels, has_bias) triples.
        specs = [
            ("temp", 10, false),
            ("wind_u", 10, true),
            ("pressure", 1, false)
        ]
        custom = AnalysisVariables(specs)
        @test length(custom.variables) == 3
        @test custom.total_size == 21  # 10 + 10 + 1

        # Index bookkeeping: variables occupy contiguous, adjacent ranges.
        first_var = custom.variables[1]
        @test first_var.name == "temp"
        @test first_var.levels == 10
        @test first_var.has_bias == false
        @test first_var.start_index == 1
        @test first_var.end_index == 10

        second_var = custom.variables[2]
        @test second_var.name == "wind_u"
        @test second_var.has_bias == true
        @test second_var.start_index == 11
        @test second_var.end_index == 20
    end
    
    @testset "VariableInfo" begin
        # Positional constructor order: name, levels, start_index,
        # end_index, has_bias.
        info = VariableInfo("temperature", 64, 1, 64, false)
        @test info.name == "temperature"
        @test info.levels == 64
        @test info.start_index == 1
        @test info.end_index == 64
        @test info.has_bias == false
    end
    
    @testset "Abstract Types" begin
        # The abstract supertypes exported by the package must all exist
        # as bona fide Types.
        for T in (AbstractAnalysisConfig, AbstractControlVector, AbstractStateVector)
            @test T isa Type
        end

        # Concrete configs sit under the abstract config supertype.
        cfg = AnalysisConfig{Float64}()
        @test cfg isa AbstractAnalysisConfig
    end

    @testset "AnalysisConfig EnKF Smoke" begin
        # Symbol-keyed Dicts and NamedTuples supplied for params/model_params
        # are expected to be normalized into Dict{String,Any}.
        enkf_cfg = AnalysisConfig(
            grid_size = (10, 10, 5),
            ensemble_size = 20,
            analysis_method = "EnKF",
            model_type = "GFS",
            model_params = Dict(:grid_type => "gaussian"),
            params = (localization_radius = 150.0, optimizer = "lbfgs"),
            log_level = 1,
        )

        @test enkf_cfg.analysis_method == "EnKF"
        @test enkf_cfg.ensemble_size == 20
        @test enkf_cfg.model_params isa Dict{String,Any}
        @test enkf_cfg.model_params["grid_type"] == "gaussian"
        @test enkf_cfg.params isa Dict{String,Any}
        @test enkf_cfg.params["localization_radius"] == 150.0
        @test enkf_cfg.params["optimizer"] == "lbfgs"
        # The log_level keyword is expected to land in the params dict too.
        @test enkf_cfg.params["log_level"] == 1

        # NamedTuple model_params and Symbol-keyed params go through the
        # same normalization path.
        hybrid_cfg = AnalysisConfig(
            grid_size = (4, 4, 2),
            analysis_method = "Hybrid",
            model_params = (model_variant = "regional",),
            params = Dict(:hybrid_weight => 0.6, :generate_plots => true),
        )

        @test hybrid_cfg.analysis_method == "Hybrid"
        @test hybrid_cfg.model_params["model_variant"] == "regional"
        @test hybrid_cfg.params["hybrid_weight"] == 0.6
        @test hybrid_cfg.params["generate_plots"] == true
        # Key normalization: every key in both dicts must be a String.
        @test all(k -> k isa String, keys(hybrid_cfg.params))
        @test all(k -> k isa String, keys(hybrid_cfg.model_params))
    end
end

# Progress marker for the "Core Types Tests" testset above; the driver-level
# testsets below still run after this prints.
println("Core types tests completed successfully.")

@testset "EnKF Driver Minimal Assimilation" begin
    # Tiny 2-point state with a 3-member ensemble: just enough to drive the
    # EnKF analysis end to end without heavy fixtures.
    nx, ny, nz = 2, 1, 1
    members = Array{Float64,4}(undef, nx, ny, nz, 3)
    members[:, 1, 1, 1] = [1.0, 0.0]
    members[:, 1, 1, 2] = [0.5, -0.4]
    members[:, 1, 1, 3] = [1.2, 0.3]

    ensemble_data = Dict("members" => members)

    # Two direct observations addressed by state index.
    observations = Dict(
        "values" => [0.9, 0.1],
        "errors" => [0.2, 0.3],
        "state_indices" => [1, 2],
    )

    # mktempdir(f) removes the directory when the block exits, replacing the
    # manual try/finally + rm cleanup.
    mktempdir() do output_dir
        config = AnalysisConfig(
            grid_size = (nx, ny, nz),
            ensemble_size = 3,
            analysis_method = "EnKF",
            output_path = output_dir,
            params = Dict("variable_names" => ["state"]),
        )

        result = GSICoreAnalysis.run_enkf_analysis(ensemble_data, observations, config)

        # Shape and sanity of the core outputs.
        @test haskey(result, "analysis_ensemble")
        @test size(result["analysis_ensemble"]) == (nx, ny, nz, 3)
        @test result["analysis_mean"] isa Array{Float64,3}
        @test length(result["innovation_vector"]) == 2
        @test length(result["innovation_fits"]) == 2
        @test all(isfinite, result["innovation_fits"])
        @test all(isfinite, result["ensemble_spread"])

        # The driver is expected to emit its diagnostic report in output_path.
        report_path = joinpath(output_dir, "enkf_diagnostic_report.txt")
        @test isfile(report_path)
    end
end

@testset "Hybrid Driver Smoke" begin
    # Tiny 2-point state with a 3-member ensemble and a static background.
    nx, ny, nz = 2, 1, 1
    members = Array{Float64,4}(undef, nx, ny, nz, 3)
    members[:, 1, 1, 1] = [0.6, -0.1]
    members[:, 1, 1, 2] = [0.9, 0.2]
    members[:, 1, 1, 3] = [0.3, -0.4]

    ensemble_data = Dict("members" => members)

    observations = Dict(
        "values" => [0.8, 0.05],
        "errors" => [0.1, 0.2],
        "state_indices" => [1, 2],
    )

    control_vector = Dict(
        "background_state" => [0.5, -0.2],
    )

    # mktempdir(f) removes the directory when the block exits, replacing the
    # manual try/finally + rm cleanup.
    mktempdir() do output_dir
        config = AnalysisConfig(
            grid_size = (nx, ny, nz),
            ensemble_size = 3,
            analysis_method = "Hybrid",
            output_path = output_dir,
            params = Dict(
                "variable_names" => ["state"],
                "static_covar_weight" => 0.4,
                "ensemble_covar_weight" => 0.6,
            ),
        )

        result = GSICoreAnalysis.run_hybrid_analysis(control_vector, ensemble_data, observations, config)

        @test haskey(result, "analysis_state")
        @test length(result["analysis_state"]) == 2
        @test haskey(result, "hybrid_covariance")
        @test haskey(result, "enkf_background")
        # Weights are expected to be renormalized to sum to one.
        @test result["hybrid_weights"].static ≈ 0.4 / (0.4 + 0.6)
        @test result["hybrid_weights"].ensemble ≈ 0.6 / (0.4 + 0.6)

        # Independently reproduce the optimal analysis:
        #   xa = xb + B H' (H B H' + R)^{-1} (y - H xb)
        B = result["hybrid_covariance"]["covariance"]
        H = result["observation_operator"]
        y = observations["values"]
        xb = result["background_state"]
        R = Diagonal(observations["errors"].^2)
        innovation = y - H * xb
        S = H * B * H' + R
        # Solve S w = innovation with backslash instead of forming inv(S)
        # explicitly: idiomatic and numerically stabler.
        expected_analysis = xb + (B * H') * (Matrix(S) \ innovation)

        @test isapprox(result["analysis_state"], expected_analysis; atol = 1e-6)
        @test all(isfinite, result["analysis_increment"])

        # NOTE(review): the Hybrid driver is checked against the EnKF report
        # filename — confirm this name is intentional for the Hybrid method.
        report_path = joinpath(output_dir, "enkf_diagnostic_report.txt")
        @test isfile(report_path)
    end
end

@testset "Hybrid 4DVar Path" begin
    # Tiny 2-point state with a 3-member ensemble driving the 4DVar branch.
    nx, ny, nz = 2, 1, 1
    members = Array{Float64,4}(undef, nx, ny, nz, 3)
    members[:, 1, 1, 1] = [0.4, -0.1]
    members[:, 1, 1, 2] = [0.6, 0.0]
    members[:, 1, 1, 3] = [0.8, 0.2]

    ensemble_data = Dict("members" => members)

    # Single-time (t = 0) observation window.
    obs_values = [0.7, 0.05]
    obs_errors = [0.1, 0.15]
    observations = Dict(
        "values" => obs_values,
        "errors" => obs_errors,
        "state_indices" => [1, 2],
        "time_series" => Dict(0 => Dict("values" => obs_values, "errors" => obs_errors)),
    )

    # Identity observation operator at time 0.
    control_vector = Dict(
        "background_state" => [0.5, -0.2],
        "observation_operators" => Dict(0 => (x -> x)),
    )

    # mktempdir(f) removes the directory on exit — no manual cleanup needed.
    mktempdir() do output_dir
        config = AnalysisConfig(
            grid_size = (nx, ny, nz),
            ensemble_size = 3,
            analysis_method = "Hybrid",
            output_path = output_dir,
            params = Dict(
                "variable_names" => ["state"],
                "static_covar_weight" => 0.3,
                "ensemble_covar_weight" => 0.7,
                "variational_method" => "4DVar",
                "use_localization" => true,
                "localization_radius" => 250.0,
                "ensemble_size" => 3,
                "time_window" => 1,
                "max_outer_loops" => 1,
                "max_inner_loops" => 10,
            ),
        )

        result = GSICoreAnalysis.run_hybrid_analysis(control_vector, ensemble_data, observations, config)

        @test haskey(result, "analysis_state")
        @test length(result["analysis_state"]) == length(obs_values)
        @test haskey(result, "drp4dvar_statistics")
        @test haskey(result, "hybrid_covariance")
        @test size(result["hybrid_covariance"]["covariance"]) == (length(obs_values), length(obs_values))
        @test all(isfinite, result["analysis_increment"])
        # Covariance weights are expected to be renormalized to sum to one.
        @test result["hybrid_covariance"]["static_weight"] ≈ 0.3 / (0.3 + 0.7)
        @test result["hybrid_covariance"]["ensemble_weight"] ≈ 0.7 / (0.3 + 0.7)
        @test result["background_covariance"] == result["hybrid_covariance"]["covariance"]
        @test haskey(result, "enkf_background")
        @test haskey(result["drp4dvar_statistics"], "observation_errors")
    end
end

@testset "Implicit Background Localization" begin
    # Minimal 2x2 single-level grid for the GSI background-error interface.
    grid_spec = (nx = 2, ny = 2, nsig = 1, dx = 10_000.0, dy = 10_000.0)
    be_config = GSICoreAnalysis.BackgroundError.BackgroundErrorConfig()
    be_interface = GSICoreAnalysis.FourDVar.GSIIntegration.create_gsi_background_error(grid_spec, be_config)

    # State length per the formula 4*nx*ny*nsig + nx*ny (four volume fields
    # plus one surface field).
    n_state = 4 * grid_spec.nx * grid_spec.ny * grid_spec.nsig + grid_spec.nx * grid_spec.ny
    xb = zeros(Float64, n_state)

    projection = GSICoreAnalysis.FourDVar.ensemble_perturbations(
        xb,
        be_interface,
        3;
        localization_radius = 250.0,
        inflation = 1.0
    )

    # Perturbation matrix: full state length per column, one column per member.
    @test size(projection.P_x, 1) == n_state
    @test size(projection.P_x, 2) == 3
    @test all(isfinite, projection.P_x)
    @test projection.explained_variance >= 0
end

@testset "EnKF Multi-variable Linear Operator" begin
    # Three-point state, four members: exercises the explicit linear
    # observation-operator path ("operator" key instead of state indices).
    nx, ny, nz = 3, 1, 1
    n_members = 4
    members = Array{Float64,4}(undef, nx, ny, nz, n_members)
    members[:, 1, 1, 1] = [1.0, -0.5, 0.3]
    members[:, 1, 1, 2] = [0.8, -0.6, 0.4]
    members[:, 1, 1, 3] = [1.2, -0.4, 0.2]
    members[:, 1, 1, 4] = [0.9, -0.3, 0.1]

    ensemble_data = Dict("members" => members)

    # Dense operator: two direct observations plus one linear combination.
    H = [
        1.0  0.0  0.0;
        0.0  1.0  0.0;
        0.4  0.4 -0.1;
    ]

    observations = Dict(
        "values" => [1.05, -0.35, 0.18],
        "errors" => [0.2, 0.3, 0.25],
        "operator" => H,
    )

    # mktempdir(f) deletes the directory on exit, replacing the manual
    # try/finally + rm cleanup.
    mktempdir() do output_dir
        config = AnalysisConfig(
            grid_size = (nx, ny, nz),
            ensemble_size = n_members,
            analysis_method = "EnKF",
            output_path = output_dir,
            params = Dict("variable_names" => ["state"]),
        )

        result = GSICoreAnalysis.run_enkf_analysis(ensemble_data, observations, config)

        @test size(result["analysis_ensemble"]) == (nx, ny, nz, n_members)
        @test size(result["kalman_gain"]) == (nx, 3)
        # The analysis must pull the state toward the observations, so the
        # post-fit residual norm shrinks relative to the innovation norm.
        @test norm(result["innovation_fits"]) < norm(result["innovation_vector"])
        @test all(isfinite, result["kalman_gain"])
    end
end
