"""
    test_controlvectors.jl

Test suite for the ControlVectors module of GSICoreAnalysis.jl.
This module tests all control vector operations including allocation,
vector operations, I/O, and numerical properties.
"""

using Test
using LinearAlgebra
using Random
using Statistics

using GSICoreAnalysis
using GSICoreAnalysis.ControlVectors

# Set reproducible seed
Random.seed!(123)

@testset "ControlVectors Module Tests" begin
    
    @testset "ControlVector Construction" begin
        # Default-precision (Float64) construction from a config.
        config = AnalysisConfig{Float64}(grid_size = (8, 4, 2))
        cv = ControlVector(config)

        @test cv isa ControlVector{Float64}
        @test cv.config === config
        @test cv.is_allocated == true
        @test length(cv.values) > 0

        # Explicit Float32 precision must carry through to the storage eltype.
        config32 = AnalysisConfig{Float32}(grid_size = (4, 4, 2))
        cv32 = ControlVector{Float32}(config32)
        @test cv32 isa ControlVector{Float32}
        @test eltype(cv32.values) == Float32

        # A larger grid must yield a longer control vector.
        cfg_lo = AnalysisConfig{Float64}(grid_size = (2, 2, 1))
        cfg_hi = AnalysisConfig{Float64}(grid_size = (16, 8, 4))

        cv_lo = ControlVector(cfg_lo)
        cv_hi = ControlVector(cfg_hi)

        @test length(cv_lo.values) < length(cv_hi.values)
        @test cv_lo.is_allocated == true
        @test cv_hi.is_allocated == true
    end
    
    @testset "Memory Management" begin
        config = AnalysisConfig{Float64}(grid_size = (6, 6, 3))
        ctl = ControlVector(config)

        # A fresh vector comes back allocated with non-empty storage.
        @test ctl.is_allocated == true
        n0 = length(ctl.values)
        @test n0 > 0

        # Deallocation must empty every component array.
        deallocate_cv(ctl)
        @test ctl.is_allocated == false
        for component in (ctl.values, ctl.bias_predictors,
                          ctl.ensemble_control, ctl.time_derivatives)
            @test length(component) == 0
        end

        # Re-allocation restores the original storage size.
        allocate_cv(ctl)
        @test ctl.is_allocated == true
        @test length(ctl.values) == n0

        # Allocating an already-allocated vector must be a safe no-op.
        allocate_cv(ctl)
        @test ctl.is_allocated == true
        @test length(ctl.values) == n0

        # Deallocating twice must likewise be safe.
        deallocate_cv(ctl)
        deallocate_cv(ctl)
        @test ctl.is_allocated == false
    end
    
    @testset "Vector Operations" begin
        # Hybrid configuration so the ensemble component is exercised too.
        config = AnalysisConfig{Float64}(
            grid_size = (4, 4, 2),
            ensemble_size = 10,
            use_hybrid = true
        )

        cv1 = ControlVector(config)
        cv2 = ControlVector(config)
        cv3 = ControlVector(config)

        # Fill every component of cv1 and cv2 with Gaussian noise.
        for cv in (cv1, cv2)
            randn!(cv.values)
            randn!(cv.bias_predictors)
            randn!(cv.ensemble_control)
            randn!(cv.time_derivatives)
        end

        @testset "Dot Product" begin
            dot1 = dot_product(cv1, cv2)
            dot2 = dot_product(cv2, cv1)  # inner products are symmetric

            @test dot1 isa Float64
            @test dot1 ≈ dot2 atol=1e-12  # Symmetry

            # (x, x) >= 0 and (x, x) == ||x||^2.
            self_dot = dot_product(cv1, cv1)
            @test self_dot >= 0.0
            @test self_dot ≈ norm_cv(cv1)^2 atol=1e-10

            # Linearity in the first argument: (α x, y) = α (x, y).
            α = 2.5
            assign!(cv3, cv1)
            # Broadcasting replaces the former hand-written element loops.
            cv3.values .*= α
            cv3.bias_predictors .*= α
            cv3.ensemble_control .*= α
            cv3.time_derivatives .*= α

            dot_scaled = dot_product(cv3, cv2)
            dot_expected = α * dot_product(cv1, cv2)
            @test dot_scaled ≈ dot_expected atol=1e-10
        end

        # Test AXPY operation: y = y + α*x
        @testset "AXPY Operation" begin
            # Snapshot cv2 so the in-place update can be verified component-wise.
            cv2_orig = ControlVector(config)
            assign!(cv2_orig, cv2)

            α = 1.5
            axpy!(α, cv1, cv2)

            @test cv2.values ≈ cv2_orig.values .+ α .* cv1.values atol=1e-12
            @test cv2.bias_predictors ≈ cv2_orig.bias_predictors .+ α .* cv1.bias_predictors atol=1e-12

            if !isempty(cv1.ensemble_control)
                @test cv2.ensemble_control ≈ cv2_orig.ensemble_control .+ α .* cv1.ensemble_control atol=1e-12
            end

            @test cv2.time_derivatives ≈ cv2_orig.time_derivatives .+ α .* cv1.time_derivatives atol=1e-12

            # α = 0 must leave y untouched (no-op).
            cv_before = ControlVector(config)
            assign!(cv_before, cv2)
            axpy!(0.0, cv1, cv2)
            @test cv2.values ≈ cv_before.values atol=1e-15
        end

        @testset "Assignment Operation" begin
            assign!(cv3, cv1)

            @test cv3.values ≈ cv1.values atol=1e-15
            @test cv3.bias_predictors ≈ cv1.bias_predictors atol=1e-15
            @test cv3.ensemble_control ≈ cv1.ensemble_control atol=1e-15
            @test cv3.time_derivatives ≈ cv1.time_derivatives atol=1e-15

            # assign! must deep-copy: mutating the source may not leak through.
            original_cv3_vals = copy(cv3.values)
            cv1.values .+= 1.0
            @test cv3.values ≈ original_cv3_vals atol=1e-15
        end
    end
    
    @testset "Random Control Vector" begin
        config = AnalysisConfig{Float64}(grid_size = (6, 4, 2))
        cv = ControlVector(config)

        # Zero out the components inspected below so randomness is detectable.
        for component in (cv.values, cv.bias_predictors, cv.time_derivatives)
            fill!(component, 0.0)
        end

        @test all(cv.values .== 0.0)
        @test all(cv.bias_predictors .== 0.0)
        @test all(cv.time_derivatives .== 0.0)

        # Overwrite with random draws; no component should remain all-zero.
        random_cv!(cv)

        @test !all(cv.values .== 0.0)
        @test !all(cv.bias_predictors .== 0.0)
        @test !all(cv.time_derivatives .== 0.0)

        # Loose sanity bounds on the sample moments (draws should be ~N(0,1)).
        # `mean`/`std` are provided by the Statistics stdlib.
        @test abs(mean(cv.values)) < 0.5  # Mean near zero
        @test 0.5 < std(cv.values) < 2.0  # Standard deviation near 1
    end
    
    @testset "Norms and Statistics" begin
        config = AnalysisConfig{Float64}(grid_size = (4, 4, 2))
        cv = ControlVector(config)

        # Constant fills give a closed-form Euclidean norm.
        fill!(cv.values, 2.0)
        fill!(cv.bias_predictors, 1.0)
        fill!(cv.ensemble_control, 0.5)
        fill!(cv.time_derivatives, 0.0)

        # ||cv||^2 = Σ over components of (count * value^2).
        norm_val = norm_cv(cv)
        expected_norm = sqrt(
            4.0  * length(cv.values) +           # 2.0^2 per entry
            1.0  * length(cv.bias_predictors) +  # 1.0^2 per entry
            0.25 * length(cv.ensemble_control) + # 0.5^2 per entry
            0.0  * length(cv.time_derivatives)   # 0.0^2 per entry
        )
        @test norm_val ≈ expected_norm atol=1e-10

        # Largest magnitude across all components is 2.0 here.
        @test maxval_cv(cv) == 2.0

        # maxval_cv reports the absolute value, so -3.0 must dominate.
        cv.values[1] = -3.0
        @test maxval_cv(cv) == 3.0
    end
    
    @testset "Ensemble Control Vectors" begin
        # Hybrid configuration: ensemble component sized by ensemble_size.
        hybrid_cfg = AnalysisConfig{Float64}(
            grid_size = (4, 4, 2),
            ensemble_size = 20,
            use_hybrid = true
        )
        cv_hybrid = ControlVector(hybrid_cfg)
        @test length(cv_hybrid.ensemble_control) == 20

        # Pure-variational configuration: no ensemble component at all.
        plain_cfg = AnalysisConfig{Float64}(
            grid_size = (4, 4, 2),
            ensemble_size = 0,
            use_hybrid = false
        )
        cv_plain = ControlVector(plain_cfg)
        @test length(cv_plain.ensemble_control) == 0

        # Populate both vectors (the plain one has no ensemble part to fill).
        randn!(cv_hybrid.values)
        randn!(cv_hybrid.bias_predictors)
        randn!(cv_hybrid.ensemble_control)
        randn!(cv_hybrid.time_derivatives)

        randn!(cv_plain.values)
        randn!(cv_plain.bias_predictors)
        randn!(cv_plain.time_derivatives)

        # Mixed ensemble sizes: by design only overlapping components are
        # compared, so this must complete without warning or error.
        @test_nowarn dot_product(cv_hybrid, cv_plain)
    end
    
    @testset "File I/O" begin
        config = AnalysisConfig{Float64}(grid_size = (3, 3, 2))
        cv_orig = ControlVector(config)

        # Distinctive, component-specific fill patterns so a faulty
        # round-trip cannot accidentally look correct.
        cv_orig.values .= 1.0:length(cv_orig.values)
        cv_orig.bias_predictors .= 100.0:100.0+length(cv_orig.bias_predictors)-1
        cv_orig.time_derivatives .= -10.0:-1.0:-10.0-length(cv_orig.time_derivatives)+1

        test_file = tempname()
        try
            # Write to file
            write_cv(cv_orig, test_file)
            @test isfile(test_file)

            # Read back into a fresh vector.
            cv_read = ControlVector(config)
            read_cv(cv_read, test_file)

            # read_cv is simplified upstream, so only allocation is checked.
            @test cv_read.is_allocated == true
        finally
            # Remove the temp file even if write_cv/read_cv throws;
            # force=true makes cleanup safe when the file was never created.
            rm(test_file; force = true)
        end
    end
    
    @testset "Error Handling" begin
        config = AnalysisConfig{Float64}(grid_size = (4, 4, 2))
        cv1 = ControlVector(config)
        cv2 = ControlVector(config)

        # Every operation must refuse to run on deallocated vectors.
        deallocate_cv(cv1)
        deallocate_cv(cv2)

        # Each thunk invokes one API entry point on the dead vectors.
        for failing_call in (
            () -> dot_product(cv1, cv2),
            () -> axpy!(1.0, cv1, cv2),
            () -> assign!(cv1, cv2),
            () -> norm_cv(cv1),
            () -> maxval_cv(cv1),
            () -> random_cv!(cv1),
        )
            @test_throws AssertionError failing_call()
        end

        # I/O must also reject an unallocated vector before touching disk.
        @test_throws AssertionError write_cv(cv1, "dummy.txt")
        @test_throws AssertionError read_cv(cv1, "nonexistent.txt")
    end
    
    @testset "Numerical Stability" begin
        config = AnalysisConfig{Float64}(grid_size = (4, 4, 2))

        # Tiny magnitudes: the norm must stay finite and non-negative.
        cv_tiny = ControlVector(config)
        for component in (cv_tiny.values, cv_tiny.bias_predictors,
                          cv_tiny.time_derivatives)
            fill!(component, 1e-15)
        end

        tiny_norm = norm_cv(cv_tiny)
        @test tiny_norm >= 0.0
        @test isfinite(tiny_norm)

        # Huge magnitudes: no overflow to Inf in the norm...
        cv_big = ControlVector(config)
        for component in (cv_big.values, cv_big.bias_predictors,
                          cv_big.time_derivatives)
            fill!(component, 1e10)
        end

        @test isfinite(norm_cv(cv_big))

        # ...nor in the self dot product (per-element products are ~1e20).
        big_dot = dot_product(cv_big, cv_big)
        @test isfinite(big_dot)
        @test big_dot >= 0.0
    end
    
    @testset "Performance Considerations" begin
        # Timing sanity checks on a larger problem. Thresholds are generous
        # (1 s) so they only catch gross regressions, not normal variance.
        config = AnalysisConfig{Float64}(grid_size = (20, 20, 10))
        cv1 = ControlVector(config)
        cv2 = ControlVector(config)

        for cv in (cv1, cv2)
            randn!(cv.values)
            randn!(cv.bias_predictors)
            randn!(cv.time_derivatives)
        end

        # Warm up each operation once so @elapsed measures execution time,
        # not first-call JIT compilation (relevant if this set runs alone).
        scratch = ControlVector(config)
        dot_product(cv1, cv2)
        norm_cv(cv1)
        axpy!(0.0, cv1, cv2)    # α = 0 leaves cv2 unchanged during warm-up
        assign!(scratch, cv1)   # warm up assign! without touching cv1/cv2

        # These operations should complete quickly.
        @test (@elapsed dot_product(cv1, cv2)) < 1.0  # Less than 1 second
        @test (@elapsed axpy!(1.5, cv1, cv2)) < 1.0
        @test (@elapsed assign!(cv2, cv1)) < 1.0
        @test (@elapsed norm_cv(cv1)) < 1.0
    end
end

# Final confirmation message, reached only if the test suite ran to the end.
println("ControlVectors module tests completed successfully.")