# Test suite for MALA implementation
using Test
using LinearAlgebra
using Random
using Statistics

# Add the src directory to the load path for testing
push!(LOAD_PATH, joinpath(@__DIR__, "..", "src"))

using MCMCTypes
using Posterior
using MALA

"""
Simple test posterior for validating MALA implementation.
"""
struct TestPosterior{T<:AbstractFloat}
    μ::Vector{T}           # Mean
    Σ_inv::Matrix{T}       # Inverse covariance
    log_det_Σ::T           # Log determinant of covariance

    function TestPosterior{T}(μ::Vector{T}, Σ::Matrix{T}) where T
        Σ_inv = inv(Σ)
        log_det_Σ = logdet(Σ)
        new{T}(μ, Σ_inv, log_det_Σ)
    end
end

# Convenience constructors. The typed method preserves the element type for any
# float inputs (Float32, BigFloat, ...); the untyped fallback converts to
# Float64, matching the original default behavior for non-float inputs.
TestPosterior(μ::Vector{T}, Σ::Matrix{T}) where {T<:AbstractFloat} = TestPosterior{T}(μ, Σ)
TestPosterior(μ, Σ) = TestPosterior{Float64}(convert(Vector{Float64}, μ), convert(Matrix{Float64}, Σ))

"""
    evaluate_test_posterior!(posterior, state)

Evaluate the multivariate-normal log density of `posterior` at `state.x_chi`,
store it in `state.log_posterior` and `state.log_likelihood` (flat prior of
0.0), and return the log-posterior value.
"""
function evaluate_test_posterior!(posterior::TestPosterior, state::MCMCState)
    residual = state.x_chi - posterior.μ
    quad_form = dot(residual, posterior.Σ_inv * residual)
    k = length(residual)
    logp = -0.5 * (quad_form + posterior.log_det_Σ + k * log(2π))
    state.log_posterior = logp
    state.log_likelihood = logp  # likelihood == posterior under a flat prior
    state.log_prior = 0.0
    return state.log_posterior
end

"""
    compute_test_gradient!(posterior, state)

Fill `state.gradient` with the analytic Gaussian score
`∇ log p(x) = -Σ⁻¹ (x - μ)` evaluated at `state.x_chi` and return it.
"""
function compute_test_gradient!(posterior::TestPosterior, state::MCMCState)
    residual = state.x_chi - posterior.μ
    # Write Σ⁻¹ * residual into the preallocated gradient buffer, then negate.
    mul!(state.gradient, posterior.Σ_inv, residual)
    state.gradient .*= -1
    return state.gradient
end

@testset "MALA Implementation Tests" begin

    @testset "MALA Options and State" begin
        # Test MALAOptions construction
        options = MALAOptions(
            step_size = 0.05,
            target_acceptance = 0.6,
            adapt_step_size = true
        )

        @test options.step_size == 0.05
        @test options.target_acceptance == 0.6
        @test options.adapt_step_size == true
        @test options.fallback_to_random_walk == true

        # Test MALAState construction
        state = MALAState(options)
        @test state.current_step_size == 0.05
        @test state.total_proposals == 0
        @test state.total_accepted == 0
    end

    @testset "MALA Proposal Generation" begin
        Random.seed!(42)

        # Set up simple 2D Gaussian test case
        n_dim = 2
        μ = [1.0, -0.5]
        Σ = [2.0 0.3; 0.3 1.0]
        posterior = TestPosterior(μ, Σ)

        # Create MALA proposal
        options = MALAOptions(step_size = 0.1, adapt_step_size = false)
        mala = MALAProposal(options)

        # Initialize states
        current_state = MCMCState(n_dim)
        proposal_state = MCMCState(n_dim)

        # Set initial state
        current_state.x_chi .= [0.0, 0.0]
        evaluate_test_posterior!(posterior, current_state)
        compute_test_gradient!(posterior, current_state)

        # Test proposal generation
        rng = MersenneTwister(123)

        # Create mock evaluator (we'll use our test functions directly)
        forward_model = x -> x  # Identity for testing
        gradient_func = x -> -posterior.Σ_inv * (x - posterior.μ)

        success = MALA.propose_mala!(
            proposal_state, current_state, nothing, mala, rng
        )

        # Verify proposal was generated (manual evaluation needed)
        @test success || !options.fallback_to_random_walk
        @test length(proposal_state.x_chi) == n_dim
    end

    @testset "MALA Proposal Density" begin
        # Test proposal density computation
        x_curr = [0.0, 0.0]
        x_prop = [0.1, -0.05]
        grad_curr = [1.0, -0.5]
        h = 0.1

        log_q = MALA.mala_proposal_density(x_prop, x_curr, grad_curr, h)

        # Verify it's a real number
        @test isfinite(log_q)

        # Test symmetry properties for zero gradient
        grad_zero = [0.0, 0.0]
        log_q1 = MALA.mala_proposal_density(x_prop, x_curr, grad_zero, h)
        log_q2 = MALA.mala_proposal_density(x_curr, x_prop, grad_zero, h)

        @test abs(log_q1 - log_q2) < 1e-12  # Should be symmetric when gradient is zero
    end

    @testset "Step Size Adaptation" begin
        # Test step size adaptation
        options = MALAOptions(
            step_size = 0.1,
            target_acceptance = 0.6,
            adapt_step_size = true,
            adaptation_window = 10
        )
        mala = MALAProposal(options)

        # Simulate high acceptance rate (should increase step size)
        mala.state.n_proposals_window = 10
        mala.state.n_accepted_window = 9  # 90% acceptance

        old_step_size = mala.state.current_step_size
        MALA.adapt_step_size!(mala)

        @test mala.state.current_step_size > old_step_size
        @test mala.state.n_proposals_window == 0  # Should reset
        @test mala.state.n_accepted_window == 0

        # Simulate low acceptance rate (should decrease step size)
        mala.state.n_proposals_window = 10
        mala.state.n_accepted_window = 1  # 10% acceptance

        old_step_size = mala.state.current_step_size
        MALA.adapt_step_size!(mala)

        @test mala.state.current_step_size < old_step_size
    end

    @testset "Block Structure Creation" begin
        # Test block creation
        n_params = 15
        block_size = 4
        blocks = MALA.create_mala_blocks(n_params, block_size)

        @test length(blocks) == 4  # ceil(15/4) = 4 blocks
        @test blocks[1] == [1, 2, 3, 4]
        @test blocks[2] == [5, 6, 7, 8]
        @test blocks[3] == [9, 10, 11, 12]
        @test blocks[4] == [13, 14, 15]  # Last block may be smaller

        # Verify all parameters are covered exactly once
        all_indices = vcat(blocks...)
        @test sort(all_indices) == collect(1:n_params)
    end

    @testset "Numerical Stability" begin
        # Test gradient clipping
        options = MALAOptions(
            gradient_clip_threshold = 10.0,
            step_size = 0.1
        )

        # Create state with large gradient
        state = MCMCState(2)
        state.gradient .= [100.0, -200.0]  # Large gradient

        grad_norm_before = norm(state.gradient)
        @test grad_norm_before > options.gradient_clip_threshold

        # Test the clipping logic manually
        if grad_norm_before > options.gradient_clip_threshold
            state.gradient .*= options.gradient_clip_threshold / grad_norm_before
        end

        grad_norm_after = norm(state.gradient)
        @test abs(grad_norm_after - options.gradient_clip_threshold) < 1e-12
    end

    @testset "MALA Diagnostics" begin
        options = MALAOptions(step_size = 0.05)
        mala = MALAProposal(options)

        # Simulate some proposals and acceptances
        mala.state.total_proposals = 100
        mala.state.total_accepted = 57
        mala.state.n_clipped_gradients = 5
        mala.state.n_fallback_steps = 3

        diagnostics = MALA.mala_diagnostics(mala)

        @test diagnostics.total_acceptance_rate == 0.57
        @test diagnostics.current_step_size == 0.05
        @test diagnostics.clipped_gradients == 5
        @test diagnostics.fallback_steps == 3
    end

    @testset "State Copying" begin
        # Test state copying utility
        state1 = MCMCState(3)
        state1.x_chi .= [1.0, 2.0, 3.0]
        state1.x_phys .= [1.1, 2.1, 3.1]
        state1.log_posterior = -5.0
        state1.log_likelihood = -3.0
        state1.log_prior = -2.0
        state1.gradient .= [0.1, 0.2, 0.3]

        state2 = MCMCState(3)
        MALA.copy_state!(state2, state1)

        @test state2.x_chi == state1.x_chi
        @test state2.x_phys == state1.x_phys
        @test state2.log_posterior == state1.log_posterior
        @test state2.log_likelihood == state1.log_likelihood
        @test state2.log_prior == state1.log_prior
        @test state2.gradient == state1.gradient
    end

    @testset "Integration with MCMCTypes" begin
        # Test that MALA integrates properly with existing MCMC infrastructure
        @test MALA ∈ instances(ProposalType)

        # Test that MALA can be specified in MCMCConfiguration
        config = MCMCConfiguration(
            proposal_type = MALA,
            mala_step_size = 0.02,
            adapt_step_size = true
        )

        @test config.proposal_type == MALA
        @test config.mala_step_size == 0.02
        @test config.adapt_step_size == true
    end

end

# Performance benchmark (optional - can be commented out for faster testing)
@testset "MALA Performance Benchmark" begin
    @testset "High-Dimensional Gaussian" begin
        Random.seed!(12345)

        # Large-scale test
        n_dim = 1000
        μ = randn(n_dim)

        # Create a well-conditioned covariance matrix
        A = randn(n_dim, n_dim)
        Σ = A' * A + I
        Σ ./= tr(Σ) / n_dim  # Normalize trace

        posterior = TestPosterior(μ, Σ)

        # Create MALA with reasonable settings for high dimensions
        options = MALAOptions(
            step_size = 0.01,
            adapt_step_size = true,
            target_acceptance = 0.4,  # Lower target for high dimensions
            gradient_clip_threshold = 50.0
        )
        mala = MALAProposal(options)

        # Initialize state
        state = MCMCState(n_dim)
        state.x_chi .= randn(n_dim) * 0.1  # Start near origin

        # Time a few proposal generations
        n_proposals = 10
        elapsed_time = @elapsed begin
            rng = MersenneTwister(999)
            for i in 1:n_proposals
                proposal_state = MCMCState(n_dim)
                evaluate_test_posterior!(posterior, state)
                compute_test_gradient!(posterior, state)

                # This is a simplified test since we don't have full evaluator
                # In practice, the gradient computation would be the bottleneck
                success = norm(state.gradient) < options.gradient_clip_threshold
                @test success
            end
        end

        avg_time_per_proposal = elapsed_time / n_proposals
        @test avg_time_per_proposal < 0.1  # Should be reasonably fast

        @info "MALA Performance" n_dim avg_time_per_proposal
    end
end