# Metropolis-adjusted Langevin algorithm (MALA) for FLEXINVERT Bayesian inversion
# Implements gradient-based MCMC proposals with detailed balance

module MALA

using LinearAlgebra
using Random
using Statistics
using ..MCMCTypes
using ..Posterior: LogPosteriorEvaluator, evaluate_log_posterior!, compute_gradient!

export MALAProposal, MALAOptions, MALAState, MALABlockState
export propose_mala!, mala_proposal_density, mala_step!, adapt_step_size!
export create_mala_blocks, mala_block_step!, propose_mala_block!

"""
    MALAOptions{T<:AbstractFloat}

Configuration options for MALA proposals.

Constructed with keyword arguments; every field has a default. The
`block_structure` and `block_step_sizes` vectors must have equal lengths,
since each block carries its own step size.
"""
struct MALAOptions{T<:AbstractFloat}
    # Step size parameters
    step_size::T                        # Base step size h
    min_step_size::T                    # Minimum allowed step size
    max_step_size::T                    # Maximum allowed step size

    # Adaptation parameters
    adapt_step_size::Bool               # Whether to adapt step size
    target_acceptance::T                # Target acceptance rate (0.574 for MALA)
    adaptation_window::Int              # Window size for adaptation
    adaptation_rate::T                  # Adaptation learning rate

    # Numerical stability
    gradient_clip_threshold::T          # Clip gradients above this norm
    min_gradient_norm::T                # Minimum gradient norm (for stability)
    max_proposal_norm::T                # Maximum proposal step norm

    # Block-wise options
    block_structure::Vector{Vector{Int}} # Block definitions for parameters
    block_step_sizes::Vector{T}         # Per-block step sizes

    # Robust handling
    fallback_to_random_walk::Bool       # Fallback when gradient fails
    random_walk_scale::T                # Scale for random walk fallback

    function MALAOptions{T}(;
        step_size::T = T(0.01),
        min_step_size::T = T(1e-6),
        max_step_size::T = T(1.0),
        adapt_step_size::Bool = true,
        target_acceptance::T = T(0.574),  # Optimal for MALA
        adaptation_window::Int = 50,
        adaptation_rate::T = T(0.1),
        gradient_clip_threshold::T = T(100.0),
        min_gradient_norm::T = T(1e-12),
        max_proposal_norm::T = T(10.0),
        block_structure::Vector{Vector{Int}} = Vector{Int}[],
        block_step_sizes::Vector{T} = T[],
        fallback_to_random_walk::Bool = true,
        random_walk_scale::T = T(0.1)
    ) where T
        # Fail fast: a mismatch would otherwise only surface later as a
        # BoundsError when MALAState initializes the per-block states.
        length(block_structure) == length(block_step_sizes) ||
            throw(ArgumentError("block_structure has $(length(block_structure)) blocks " *
                                "but block_step_sizes has $(length(block_step_sizes)) entries"))
        new{T}(step_size, min_step_size, max_step_size, adapt_step_size,
               target_acceptance, adaptation_window, adaptation_rate,
               gradient_clip_threshold, min_gradient_norm, max_proposal_norm,
               block_structure, block_step_sizes, fallback_to_random_walk,
               random_walk_scale)
    end
end

# Convenience constructor defaulting to Float64 precision.
MALAOptions(args...; kwargs...) = MALAOptions{Float64}(args...; kwargs...)

"""
    MALABlockState{T}

Per-block bookkeeping for block-wise MALA: the block's own step size plus
proposal and acceptance counters, both of which start at zero.
"""
mutable struct MALABlockState{T<:AbstractFloat}
    step_size::T
    n_proposals::Int
    n_accepted::Int

    function MALABlockState{T}(step_size::T) where T
        return new{T}(step_size, 0, 0)
    end
end

"""
    MALAState{T}

State tracking for MALA proposals including adaptation statistics.

All counters start at zero; `current_step_size` is initialized from
`options.step_size` and one `MALABlockState` is created per entry of
`options.block_structure`.
"""
mutable struct MALAState{T<:AbstractFloat}
    # Current step size (may be adapted)
    current_step_size::T

    # Adaptation tracking
    n_proposals_window::Int             # Proposals in current adaptation window
    n_accepted_window::Int              # Acceptances in current adaptation window
    adaptation_iteration::Int           # Current adaptation iteration

    # Statistics
    total_proposals::Int                # Total MALA proposals made
    total_accepted::Int                 # Total MALA proposals accepted
    gradient_failures::Int              # Number of gradient computation failures

    # Numerical stability tracking
    n_clipped_gradients::Int            # Number of times gradients were clipped
    n_fallback_steps::Int               # Number of fallback random walk steps

    # Block-wise state (if using blocks)
    block_states::Vector{MALABlockState{T}}

    function MALAState{T}(options::MALAOptions{T}) where T
        n_blocks = length(options.block_structure)
        # Explicit check instead of an opaque BoundsError on mismatch
        n_blocks == length(options.block_step_sizes) ||
            throw(ArgumentError("block_structure and block_step_sizes must have equal lengths"))
        block_states = [MALABlockState{T}(options.block_step_sizes[i]) for i in 1:n_blocks]
        new{T}(options.step_size, 0, 0, 0, 0, 0, 0, 0, 0, block_states)
    end
end

# Preserve the precision of `options`. The previous definition hard-coded
# Float64 and raised a MethodError for MALAOptions{T} with T != Float64;
# the Float64 path is unchanged.
MALAState(options::MALAOptions{T}) where T = MALAState{T}(options)

"""
    MALAProposal{T}

Main MALA proposal structure combining immutable options with mutable
adaptation state.
"""
struct MALAProposal{T<:AbstractFloat}
    options::MALAOptions{T}
    state::MALAState{T}

    function MALAProposal{T}(options::MALAOptions{T}) where T
        return new{T}(options, MALAState{T}(options))
    end
end

# Preserve the precision of `options`. The previous definition hard-coded
# Float64 and raised a MethodError for MALAOptions{T} with T != Float64;
# the Float64 path is unchanged.
MALAProposal(options::MALAOptions{T}) where T = MALAProposal{T}(options)

"""
    propose_mala!(proposal_state::MCMCState, current_state::MCMCState,
                  evaluator::LogPosteriorEvaluator, mala::MALAProposal, rng::AbstractRNG)

Generate a MALA proposal step, writing the result into `proposal_state`.

The MALA proposal is

    x' = x + (h²/2)∇log π(x) + h ξ

where `h` is the current step size, ∇log π(x) is the gradient of the
log-posterior at the current state, and ξ ~ N(0, I) is standard Gaussian noise.

Returns `true` when a proposal was produced (MALA, or the random-walk fallback
when the gradient is unavailable and `fallback_to_random_walk` is enabled),
`false` when the gradient failed and no fallback is enabled (in that case
`proposal_state` becomes a copy of `current_state`).

Side effects: may rescale `current_state.gradient` in place (clipping), and
increments the `gradient_failures` / `n_clipped_gradients` counters.
"""
function propose_mala!(
    proposal_state::MCMCState{T},
    current_state::MCMCState{T},
    evaluator::LogPosteriorEvaluator,
    mala::MALAProposal{T},
    rng::AbstractRNG
) where T

    # Compute gradient at current state (guarded: exception, NaN/Inf, tiny norm)
    gradient_success = compute_gradient_safe!(current_state, evaluator, mala.options)

    if !gradient_success
        # Fix: this counter was declared and reported by mala_diagnostics but
        # was never incremented anywhere.
        mala.state.gradient_failures += 1

        if mala.options.fallback_to_random_walk
            ok = propose_random_walk_fallback!(proposal_state, current_state, mala, rng)
            # Fix: the fallback copies the *current* posterior values into the
            # proposal, so without this call the Metropolis test compared a
            # point against itself. Evaluate the posterior at the proposed x'.
            # NOTE(review): the acceptance step still applies the MALA forward
            # density (with a stale current gradient) to fallback proposals;
            # confirm whether a symmetric random-walk correction is intended.
            evaluate_log_posterior!(evaluator, proposal_state)
            return ok
        end

        # No fallback available: signal rejection by copying the current state
        copy_state!(proposal_state, current_state)
        return false
    end

    # Current (possibly adapted) step size
    h = mala.state.current_step_size

    # Gradient clipping for numerical stability. The in-place mutation is
    # deliberate: the acceptance ratio later reuses this clipped gradient so
    # the forward proposal density matches the proposal actually made.
    grad_norm = norm(current_state.gradient)
    if grad_norm > mala.options.gradient_clip_threshold
        current_state.gradient .*= mala.options.gradient_clip_threshold / grad_norm
        mala.state.n_clipped_gradients += 1
    end

    # Drift (h²/2)∇log π(x) plus noise h ξ, fused into a single allocation
    proposal_step = (h^2 / 2) .* current_state.gradient .+
                    h .* randn(rng, T, length(current_state.x_chi))

    # Cap the overall step norm for stability.
    # NOTE(review): this rescaling is not reflected in the proposal densities,
    # so detailed balance is only approximate whenever the cap engages.
    step_norm = norm(proposal_step)
    if step_norm > mala.options.max_proposal_norm
        proposal_step .*= mala.options.max_proposal_norm / step_norm
    end

    # x' = x + drift + noise (in-place broadcast, no extra temporary)
    proposal_state.x_chi .= current_state.x_chi .+ proposal_step

    # Evaluate log-posterior at the proposal point
    evaluate_log_posterior!(evaluator, proposal_state)

    return true
end

"""
    compute_gradient_safe!(state::MCMCState, evaluator::LogPosteriorEvaluator,
                          options::MALAOptions)

Compute the gradient at `state` in place, guarding against failures.

Returns `false` (without rethrowing) when the gradient computation throws,
produces any non-finite entry, or yields a norm below
`options.min_gradient_norm` (treated as a near-stationary point); otherwise
returns `true` with `state.gradient` updated.
"""
function compute_gradient_safe!(
    state::MCMCState{T},
    evaluator::LogPosteriorEvaluator,
    options::MALAOptions{T}
) where T
    computed = try
        compute_gradient!(evaluator, state)
        true
    catch e
        @debug "Gradient computation failed: $e"
        false
    end
    computed || return false

    g = state.gradient

    # Reject non-finite gradients outright (NaN or ±Inf entries)
    all(isfinite, g) || return false

    # A vanishing gradient norm is also treated as a failure
    return norm(g) >= options.min_gradient_norm
end

"""
    propose_random_walk_fallback!(proposal_state::MCMCState, current_state::MCMCState,
                                  mala::MALAProposal, rng::AbstractRNG)

Random-walk proposal x' = x + σξ used when gradient computation fails.

Copies the remaining bookkeeping fields (posterior values and gradient) from
`current_state`, increments the fallback counter, and always returns `true`.
NOTE(review): the posterior fields copied here are the *current* state's —
the caller is responsible for re-evaluating the posterior at x' before any
acceptance test.
"""
function propose_random_walk_fallback!(
    proposal_state::MCMCState{T},
    current_state::MCMCState{T},
    mala::MALAProposal{T},
    rng::AbstractRNG
) where T
    # x' = x + σ ξ with ξ ~ N(0, I), fused in-place broadcast
    σ = mala.options.random_walk_scale
    dim = length(current_state.x_chi)
    proposal_state.x_chi .= current_state.x_chi .+ σ .* randn(rng, T, dim)

    # Carry over posterior/gradient bookkeeping from the current state
    proposal_state.log_posterior = current_state.log_posterior
    proposal_state.log_likelihood = current_state.log_likelihood
    proposal_state.log_prior = current_state.log_prior
    proposal_state.gradient .= current_state.gradient

    mala.state.n_fallback_steps += 1
    return true
end

"""
    mala_proposal_density(x_prop::AbstractVector{T}, x_curr::AbstractVector{T},
                          grad_curr::AbstractVector{T}, h::T) where T

Compute the log proposal density log q(x_prop | x_curr) for the Metropolis
correction.

For MALA the proposal kernel is Gaussian,

    q(x' | x) = N(x' | x + (h²/2)∇log π(x), h² I),

so the log-density is -‖x' - μ‖²/(2h²) - (n/2)·log(2π h²) with
μ = x + (h²/2)∇log π(x).

Generalized from `Vector{T}` to `AbstractVector{T}` so views and slices work
without copying; allocates nothing. Throws `DimensionMismatch` when the three
vectors' axes disagree.
"""
function mala_proposal_density(
    x_prop::AbstractVector{T},
    x_curr::AbstractVector{T},
    grad_curr::AbstractVector{T},
    h::T
) where T
    n = length(x_prop)
    half_h2 = h^2 / 2

    # Accumulate ‖x' - μ‖² without materializing μ or the residual vector
    sq_residual = zero(T)
    for i in eachindex(x_prop, x_curr, grad_curr)  # throws on axis mismatch
        r = x_prop[i] - (x_curr[i] + half_h2 * grad_curr[i])
        sq_residual += r * r
    end

    # log N(x' | μ, h² I); T(2π) keeps the result type-stable for T != Float64
    return -(sq_residual / h^2 + n * log(T(2π) * h^2)) / 2
end

"""
    mala_step!(current_state::MCMCState, evaluator::LogPosteriorEvaluator,
               mala::MALAProposal, rng::AbstractRNG)

Complete MALA step with Metropolis acceptance.

Generates a proposal, applies the Metropolis-Hastings test with the MALA
detailed-balance correction, updates proposal/acceptance statistics, and —
when `adapt_step_size` is enabled — adapts the step size once the adaptation
window fills. Returns `true` when the proposal was accepted (the accepted
proposal is copied into `current_state`).
"""
function mala_step!(
    current_state::MCMCState{T},
    evaluator::LogPosteriorEvaluator,
    mala::MALAProposal{T},
    rng::AbstractRNG
) where T
    # Fresh proposal state of matching dimension
    proposal_state = MCMCState{T}(length(current_state.x_chi))

    # Generate proposal (false = gradient failed with no fallback)
    proposal_success = propose_mala!(proposal_state, current_state, evaluator, mala, rng)

    accepted = false
    if proposal_success
        # Acceptance probability under detailed balance
        accept_prob = compute_mala_acceptance_probability(
            current_state, proposal_state, evaluator, mala
        )

        # Metropolis acceptance test
        accepted = rand(rng) < accept_prob
        if accepted
            copy_state!(current_state, proposal_state)
            mala.state.total_accepted += 1
            mala.state.n_accepted_window += 1
        end
    end
    update_acceptance!(current_state, accepted)

    # Update statistics (failed proposals count as rejections)
    mala.state.total_proposals += 1
    mala.state.n_proposals_window += 1

    # Fix: run the adaptation check on every step, including failed-proposal
    # steps. Previously the failure path returned before this check, so under
    # persistent gradient failures the window counter overran and the step
    # size was never adapted downward.
    if mala.options.adapt_step_size &&
       mala.state.n_proposals_window >= mala.options.adaptation_window
        adapt_step_size!(mala)
    end

    return accepted
end

"""
    compute_mala_acceptance_probability(current_state::MCMCState, proposal_state::MCMCState,
                                       evaluator::LogPosteriorEvaluator, mala::MALAProposal)

Compute the MALA acceptance probability with detailed balance:

    α(x, x') = min(1, π(x') q(x | x') / (π(x) q(x' | x)))

Computes the gradient at the proposal (in place, via `compute_gradient_safe!`)
for the reverse proposal density; when that fails, degrades to the plain
Metropolis ratio π(x')/π(x). May rescale `proposal_state.gradient` in place
(clipping, see below).
"""
function compute_mala_acceptance_probability(
    current_state::MCMCState{T},
    proposal_state::MCMCState{T},
    evaluator::LogPosteriorEvaluator,
    mala::MALAProposal{T}
) where T
    # Gradient at the proposal state, needed for the reverse proposal density
    grad_success = compute_gradient_safe!(proposal_state, evaluator, mala.options)

    if !grad_success
        # Reverse density unavailable: fall back to standard Metropolis
        log_alpha = proposal_state.log_posterior - current_state.log_posterior
        return min(one(T), exp(log_alpha))
    end

    # Fix: apply the same clipping rule used when proposing. propose_mala!
    # clips current_state.gradient in place, so the forward density below uses
    # a clipped gradient; leaving the proposal gradient unclipped made
    # q(x | x') inconsistent with what the sampler would actually do at x',
    # violating detailed balance whenever clipping engaged.
    grad_norm = norm(proposal_state.gradient)
    if grad_norm > mala.options.gradient_clip_threshold
        proposal_state.gradient .*= mala.options.gradient_clip_threshold / grad_norm
    end

    h = mala.state.current_step_size

    # Forward proposal density: q(x' | x)
    log_q_forward = mala_proposal_density(
        proposal_state.x_chi, current_state.x_chi, current_state.gradient, h
    )

    # Reverse proposal density: q(x | x')
    log_q_reverse = mala_proposal_density(
        current_state.x_chi, proposal_state.x_chi, proposal_state.gradient, h
    )

    # Log acceptance probability under detailed balance
    log_alpha = (proposal_state.log_posterior + log_q_reverse) -
                (current_state.log_posterior + log_q_forward)

    return min(one(T), exp(log_alpha))
end

"""
    adapt_step_size!(mala::MALAProposal)

Adapt the MALA step size from the acceptance rate of the current window.

Performs a Robbins-Monro-style update on the log step size with a gain that
decays as 1/√(iteration + 1): the step size grows when the window acceptance
rate exceeds `target_acceptance` and shrinks otherwise, clamped to
[`min_step_size`, `max_step_size`]. Resets the window counters and advances
the adaptation iteration. No-op when the window contains no proposals.
"""
function adapt_step_size!(mala::MALAProposal{T}) where T
    st = mala.state
    st.n_proposals_window == 0 && return

    acceptance_rate = st.n_accepted_window / st.n_proposals_window
    target_rate = mala.options.target_acceptance

    # Decaying gain ensures the adaptation settles over time
    gain = mala.options.adaptation_rate / sqrt(st.adaptation_iteration + 1)

    # Multiplicative update via log-space, then clamp to the allowed range
    new_step_size = clamp(
        exp(log(st.current_step_size) + gain * (acceptance_rate - target_rate)),
        mala.options.min_step_size,
        mala.options.max_step_size
    )
    st.current_step_size = new_step_size

    # Start a fresh adaptation window
    st.n_proposals_window = 0
    st.n_accepted_window = 0
    st.adaptation_iteration += 1

    @debug "MALA step size adapted" new_step_size acceptance_rate target_rate
end

"""
    create_mala_blocks(n_params::Int, block_size::Int) -> Vector{Vector{Int}}

Create a default contiguous block structure for block-wise MALA.

Partitions `1:n_params` into consecutive blocks of `block_size` indices; the
final block may be shorter. Returns an empty vector when `n_params <= 0`.

# Throws
- `ArgumentError`: when `block_size < 1` (previously `block_size == 0` raised
  an opaque range error and a negative value silently produced no blocks).
"""
function create_mala_blocks(n_params::Int, block_size::Int)
    block_size >= 1 ||
        throw(ArgumentError("block_size must be positive, got $block_size"))

    return [collect(start:min(start + block_size - 1, n_params))
            for start in 1:block_size:n_params]
end

"""
    mala_block_step!(current_state::MCMCState, evaluator::LogPosteriorEvaluator,
                     mala::MALAProposal, block_idx::Int, rng::AbstractRNG)

Perform a MALA update restricted to the parameter block `block_idx`.

Errors for an out-of-range block index. On acceptance the proposal is copied
back into `current_state`; the block's proposal/acceptance counters are
updated either way, and `current_state.current_block` records the block just
touched. Returns whether the block proposal was accepted.
"""
function mala_block_step!(
    current_state::MCMCState{T},
    evaluator::LogPosteriorEvaluator,
    mala::MALAProposal{T},
    block_idx::Int,
    rng::AbstractRNG
) where T
    if block_idx > length(mala.options.block_structure)
        error("Block index $block_idx exceeds number of blocks $(length(mala.options.block_structure))")
    end

    block_indices = mala.options.block_structure[block_idx]
    block_state = mala.state.block_states[block_idx]

    # Work on a full copy of the state; only the block entries will move
    proposal_state = MCMCState{T}(length(current_state.x_chi))
    copy_state!(proposal_state, current_state)

    # Propose and run the Metropolis test for this block
    accepted = propose_mala_block!(
        proposal_state, current_state, evaluator, mala,
        block_indices, block_state, rng
    )

    block_state.n_proposals += 1
    if accepted
        block_state.n_accepted += 1
        copy_state!(current_state, proposal_state)
    end
    current_state.current_block = block_idx

    return accepted
end

"""
    propose_mala_block!(proposal_state::MCMCState, current_state::MCMCState,
                        evaluator::LogPosteriorEvaluator, mala::MALAProposal,
                        block_indices::Vector{Int}, block_state::MALABlockState,
                        rng::AbstractRNG)

Generate and Metropolis-test a MALA proposal restricted to `block_indices`.

Only the block entries of `proposal_state.x_chi` are moved; the drift uses the
block slice of the full gradient and the block-specific step size from
`block_state`. Returns the Metropolis test result (`true` = accept), or
`false` immediately when the gradient at the current state cannot be computed.
"""
function propose_mala_block!(
    proposal_state::MCMCState{T},
    current_state::MCMCState{T},
    evaluator::LogPosteriorEvaluator,
    mala::MALAProposal{T},
    block_indices::Vector{Int},
    block_state::MALABlockState{T},
    rng::AbstractRNG
) where T

    # Full gradient at the current state (block slice extracted below)
    gradient_success = compute_gradient_safe!(current_state, evaluator, mala.options)

    if !gradient_success
        return false
    end

    # Indexing copies, so clipping below does not alter current_state.gradient
    block_gradient = current_state.gradient[block_indices]
    h = block_state.step_size

    # Clip the block gradient for numerical stability
    grad_norm = norm(block_gradient)
    if grad_norm > mala.options.gradient_clip_threshold
        block_gradient .*= mala.options.gradient_clip_threshold / grad_norm
    end

    # Block proposal: x'_b = x_b + (h²/2)∇_b log π(x) + h ξ
    n_block = length(block_indices)
    drift = (h^2 / 2) * block_gradient
    noise = h * randn(rng, T, n_block)

    # Update only the block parameters
    proposal_state.x_chi[block_indices] .= current_state.x_chi[block_indices] + drift + noise

    # Evaluate log-posterior at the proposal
    evaluate_log_posterior!(evaluator, proposal_state)

    # Gradient at the proposal, needed for the reverse proposal density
    gradient_success_prop = compute_gradient_safe!(proposal_state, evaluator, mala.options)

    if !gradient_success_prop
        # Reverse density unavailable: degrade to standard Metropolis
        log_alpha = proposal_state.log_posterior - current_state.log_posterior
        accept_prob = min(one(T), exp(log_alpha))
    else
        block_gradient_prop = proposal_state.gradient[block_indices]

        # Fix: apply the same clipping rule to the reverse gradient. The
        # forward density uses the clipped gradient, so leaving the reverse
        # one unclipped made q(x | x') inconsistent with what the sampler
        # would actually do at x', violating detailed balance whenever
        # clipping engaged.
        grad_norm_prop = norm(block_gradient_prop)
        if grad_norm_prop > mala.options.gradient_clip_threshold
            block_gradient_prop .*= mala.options.gradient_clip_threshold / grad_norm_prop
        end

        # Forward density q(x'_b | x_b)
        log_q_forward = mala_proposal_density(
            proposal_state.x_chi[block_indices],
            current_state.x_chi[block_indices],
            block_gradient, h
        )

        # Reverse density q(x_b | x'_b)
        log_q_reverse = mala_proposal_density(
            current_state.x_chi[block_indices],
            proposal_state.x_chi[block_indices],
            block_gradient_prop, h
        )

        log_alpha = (proposal_state.log_posterior + log_q_reverse) -
                    (current_state.log_posterior + log_q_forward)
        accept_prob = min(one(T), exp(log_alpha))
    end

    # Metropolis test
    return rand(rng) < accept_prob
end

"""
    copy_state!(dest::MCMCState, src::MCMCState)

Copy every field of `src` into `dest` in place.

Array fields are copied element-wise into `dest`'s existing buffers;
`hyperparams` is deep-copied so later mutation of one state cannot leak into
the other.
"""
function copy_state!(dest::MCMCState, src::MCMCState)
    copyto!(dest.x_chi, src.x_chi)
    copyto!(dest.x_phys, src.x_phys)
    copyto!(dest.gradient, src.gradient)
    dest.log_posterior = src.log_posterior
    dest.log_likelihood = src.log_likelihood
    dest.log_prior = src.log_prior
    dest.hyperparams = deepcopy(src.hyperparams)
    dest.current_block = src.current_block
end

"""
    mala_diagnostics(mala::MALAProposal)

Return a named tuple summarizing MALA performance: the overall acceptance
rate (0.0 before any proposal has been made), the current step size, and the
counters for gradient failures, clipped gradients, fallback steps, and
completed adaptation iterations.
"""
function mala_diagnostics(mala::MALAProposal)
    st = mala.state
    rate = iszero(st.total_proposals) ? 0.0 : st.total_accepted / st.total_proposals

    return (
        total_acceptance_rate = rate,
        current_step_size = st.current_step_size,
        gradient_failures = st.gradient_failures,
        clipped_gradients = st.n_clipped_gradients,
        fallback_steps = st.n_fallback_steps,
        adaptation_iteration = st.adaptation_iteration
    )
end

"""
    reset_mala_adaptation!(mala::MALAProposal)

Zero the adaptation-window counters and the adaptation iteration (useful when
switching from a tuning phase to production sampling). The current step size
itself is left untouched.
"""
function reset_mala_adaptation!(mala::MALAProposal)
    st = mala.state
    st.n_proposals_window = 0
    st.n_accepted_window = 0
    st.adaptation_iteration = 0
end

end # module MALA