/* Copyright (c) 2025 Beijing Semidrive Technology Corporation
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef INCLUDE_ARMV8R_SPINLOCK_H
#define INCLUDE_ARMV8R_SPINLOCK_H

#include <Compiler.h>

#define SPIN_LOCK_INITIAL_VALUE (0)

#ifndef ASSEMBLY


/* spin lock type */
typedef unsigned long spin_lock_t;

#ifdef CFG_MULTI_CORE_SMP
/*
 * Atomic test-and-set of the spinlock using LDREX/STREX exclusives.
 *
 * Returns 0 when the lock was free and *spinlock has been set to
 * `value`; returns `value` (non-zero) when the lock already held
 * `value`.
 *
 * NOTE(review): the STREX status flag (0 = success, 1 = failure) is
 * detected by comparing the result against `value`, so the retry
 * branch is only correct when `value` == 1 - true at every call site
 * in this file; confirm before passing any other value.
 *
 * NOTE(review): the early-exit path (lock already held) leaves the
 * exclusive monitor open (no CLREX) - presumably harmless on this
 * platform; verify against the context-switch code.
 */
/* PRQA S 0602 1 */
static inline int __arch_spin_lock_testset(spin_lock_t *spinlock, int value)
{
    int result = 0;
    ASM volatile
    (
        "1:\n"
        "\tldrex %0, [%1]\n"     /* exclusive-load the current lock word     */
        "\tcmp %0, %2\n"         /* does it already hold `value`?            */
        "\tbeq 2f\n"             /* yes: bail out, result == value (!= 0)    */
        "\tstrex %0, %2, [%1]\n" /* try to store; %0 = 0 on ok, 1 on fail    */
        "\tcmp %0, %2\n"         /* strex failed? (relies on value == 1)     */
        "\tbeq 1b\n"             /* lost the exclusive reservation: retry    */
        "2:\n"
        "\tdmb ish\n"            /* barrier before entering critical section */
        : "=&r" (result)
        : "r" (spinlock), "r" (value)
        : "cc", "memory"
    );

    return result;
}
#else
/*
 * Uniprocessor variant: no exclusive accesses required, a plain
 * read-modify-write is sufficient.  Returns 0 when the lock was free
 * and has been taken, 1 when it was already held.
 *
 * NOTE(review): not atomic against interrupts by itself - presumably
 * callers run with interrupts masked; confirm.
 */
/* PRQA S 0602 1 */
static inline int __arch_spin_lock_testset(spin_lock_t *spinlock, int value)
{
    int already_held = 1;

    ASM volatile("dmb" ::: "memory");

    if (*spinlock == SPIN_LOCK_INITIAL_VALUE) {
        /* Lock was free: take it. */
        *spinlock = 1U;
        already_held = 0;
    }

    ASM volatile("dsb" ::: "memory");

    return already_held;
}
#endif

/*
 * arch spin lock init.
 *
 * Put the spinlock into the released (unlocked) state.  Must be
 * called before the lock is used by any of the other primitives.
 *
 * @spinlock    spinlock address.
 */
static inline void arch_spin_lock_init(spin_lock_t *spinlock)
{
    *spinlock = SPIN_LOCK_INITIAL_VALUE;
}

/*
 * arch spin lock.
 *
 * Busy-wait until the lock is acquired.  Between failed attempts the
 * core executes WFE, sleeping until an event is signalled (the owner
 * executes SEV in arch_spin_unlock()) rather than spinning at full
 * power.
 *
 * @spinlock    spinlock address.
 */
static inline void arch_spin_lock(spin_lock_t *spinlock)
{
    /* Spin until the test-and-set reports the lock was free (0). */
    while (__arch_spin_lock_testset(spinlock, 1) != 0) {
        ASM volatile("wfe");
    }
}

/*
 * arch spin trylock.
 *
 * Single acquisition attempt; never blocks.
 *
 * @spinlock    spinlock address.
 *
 * Returns 1 when the lock was acquired, 0 when it is already held.
 */
static inline int arch_spin_trylock(spin_lock_t *spinlock)
{
    int busy = __arch_spin_lock_testset(spinlock, 1);

    return (busy == 0) ? 1 : 0;
}

/*
 * arch spin unlock.
 *
 * Release the lock and signal an event so cores parked in WFE
 * (inside arch_spin_lock()) re-attempt acquisition.
 *
 * @spinlock    spinlock address.
 */
static inline void arch_spin_unlock(spin_lock_t *spinlock)
{
    /* Barrier: order the critical section's accesses before the
     * lock word is cleared. */
    ASM volatile("dmb" ::: "memory");
    *spinlock = 0;
    /* Ensure the store to the lock word completes before signalling. */
    ASM volatile("dsb" ::: "memory");
    /* Send-event: wakes cores waiting in WFE. */
    ASM volatile("sev");
}


#endif

#endif
