#!/usr/bin/env julia
"""
Simple test to verify cost function and gradient computation work correctly.
"""

using GSICoreAnalysis
using LinearAlgebra
using Printf
using Random

# Fix the RNG seed so the synthetic background/observations below are reproducible.
Random.seed!(42)

banner = repeat("=", 80)
println(banner)
println("Simple Cost Function Test")
println(banner)

# Deliberately tiny configuration so the test runs fast: a 5x5x3 grid with a
# pure-variational setup (no ensemble members, hybrid covariance disabled).
config = AnalysisConfig(
    grid_size = (5, 5, 3),
    max_iterations = 10,
    convergence_tol = 1e-6,
    ensemble_size = 0,
    use_hybrid = false,
)

# --- Step 1: build the control (state) vector and choose observation locations ---
println("\n1. Creating state vector...")
state_vector = GSICoreAnalysis.StateVectors.StateVector(config)
state_array = GSICoreAnalysis.CostFunctions.state_vector_to_array(state_vector)
n_state = length(state_array)
println(@sprintf("   State size: %d", n_state))

# Sample n_obs locations from the state vector (:uniform spacing strategy).
n_obs = 20
obs_indices = GSICoreAnalysis.CostFunctions.create_observation_indices(n_state, n_obs, :uniform)
println(@sprintf("   Number of observations: %d", n_obs))
# Log at most the first five indices so the output stays compact.
index_preview = first(obs_indices, min(5, n_obs))
println(@sprintf("   Observation indices: %s", join(index_preview, ", ")))

# --- Step 2: synthetic data and cost-function assembly ---
# Random background and observations with unit observation errors keep this
# test free of any real-data dependency.
background = randn(n_state)
observations = randn(n_obs)
obs_errors = ones(n_obs)

println("\n2. Creating cost function...")
cost_func = GSICoreAnalysis.CostFunctions.CostFunction(config)
cost_func.background_state = background
cost_func.observations = observations
cost_func.observation_errors = obs_errors
cost_func.observation_indices = obs_indices

# Forward/adjoint pair for a simple sampling observation operator H.
sampling_op, sampling_op_adj =
    GSICoreAnalysis.CostFunctions.create_sampling_operator(obs_indices, n_state)
cost_func.observation_operator = sampling_op
cost_func.observation_operator_adjoint = sampling_op_adj

# Diagonal covariances: unit background variances, R built from squared
# observation errors.
unit_bg_variances = ones(n_state)
squared_obs_errors = abs2.(obs_errors)
GSICoreAnalysis.CostFunctions.setup_diagonal_covariances!(cost_func, unit_bg_variances, squared_obs_errors)

# --- Step 3: sanity-check the cost value and its gradient ---

println("\n3. Evaluating cost at background...")
# At x = x_b the background term is expected to vanish, leaving only the
# observation term — confirmed separately in step 6 below.
J_bg = GSICoreAnalysis.CostFunctions.evaluate_cost(cost_func, background)
println(@sprintf("   Cost: %.6e", J_bg))

println("\n4. Computing gradient...")
grad = GSICoreAnalysis.CostFunctions.compute_gradient(cost_func, background)
println(@sprintf("   Gradient norm: %.6e", norm(grad)))
println(@sprintf("   Gradient size: %d", length(grad)))

println("\n5. Testing gradient descent step...")
# A small step along -grad must reduce J if the gradient is consistent with
# the cost function.
alpha = 0.01
# Fully fused broadcast: the original `background - alpha .* grad` was only
# partially dotted, allocating a temporary for `alpha .* grad` before a
# non-fused subtraction. `@.` fuses both operations into a single pass.
new_state = @. background - alpha * grad
J_new = GSICoreAnalysis.CostFunctions.evaluate_cost(cost_func, new_state)
println(@sprintf("   New cost: %.6e", J_new))
println(@sprintf("   Cost change: %.6e", J_new - J_bg))

if J_new < J_bg
    println("   ✓ Cost decreased in gradient direction")
else
    println("   ✗ WARNING: Cost did not decrease!")
end

# --- Step 4: verify the J = J_b + J_o decomposition, then wrap up ---
println("\n6. Testing background and observation terms separately...")
J_b = GSICoreAnalysis.CostFunctions.evaluate_background_term(cost_func, background)
J_o = GSICoreAnalysis.CostFunctions.evaluate_observation_term(cost_func, background)
term_sum = J_b + J_o
decomposition_gap = abs(term_sum - J_bg)
println(@sprintf("   Background term: %.6e", J_b))
println(@sprintf("   Observation term: %.6e", J_o))
println(@sprintf("   Sum: %.6e", term_sum))
println(@sprintf("   Total cost: %.6e", J_bg))
println(@sprintf("   Difference: %.6e", decomposition_gap))

# The two terms must reconstruct the total cost to round-off level.
if decomposition_gap < 1e-10
    println("   ✓ Cost decomposition is correct")
else
    println("   ✗ WARNING: Cost decomposition has errors!")
end

closing_rule = repeat("=", 80)
println("\n" * closing_rule)
println("Simple test completed!")
println(closing_rule)
