/** @file
 * IPRT - Assembly Functions.
 */

/*
 * Copyright (C) 2006-2020 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
 * VirtualBox OSE distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 */

#ifndef _INCLUDE_ASM_H_
#define _INCLUDE_ASM_H_

#include <wdm.h>
#include "CommonDef.h"

#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable:4668) /* Several incorrect __cplusplus uses. */
# pragma warning(disable:4255) /* Incorrect __slwpcb prototype. */
# include <intrin.h>
# pragma warning(pop)
/* Emit the intrinsics at all optimization levels. */
# pragma intrinsic(_InterlockedExchange)
# pragma intrinsic(_InterlockedCompareExchange)
# pragma intrinsic(_InterlockedIncrement)
# pragma intrinsic(_InterlockedDecrement)
# pragma intrinsic(_InterlockedExchangeAdd)
# pragma intrinsic(_bittest)
# pragma intrinsic(_bittestandset)
# pragma intrinsic(_bittestandreset)
# pragma intrinsic(_interlockedbittestandset)
# pragma intrinsic(_BitScanReverse)
#endif

 /**
  * Atomically Exchange an unsigned 32-bit value, ordered.
  *
  * @returns Current *pu32 value
  * @param   pu32    Pointer to the 32-bit variable to update.
  * @param   u32     The 32-bit value to assign to *pu32.
  *
  * @remarks Does not work on 286 and earlier.
  */
DECLINLINE(UINT32) ASMAtomicXchgU32(volatile UINT32 *pu32, UINT32 u32)
{
    return _InterlockedExchange((long *)pu32, u32);
}

/**
 * Full memory fence, ordered.
 *
 * Issues a hardware memory barrier (plus compiler barrier) via the kernel's
 * KeMemoryBarrier so that all preceding loads/stores complete before any
 * subsequent ones.
 */
DECLINLINE(void) ASMMemoryFence(void)
{
    KeMemoryBarrier();
}

/**
 * Atomically writes a boolean value, ordered.
 *
 * @param   pf  Pointer to the boolean variable to write.
 * @param   f   The value to assign to *pf.
 */
DECLINLINE(void) ASMAtomicWriteBool(volatile bool *pf, bool f)
{
    /* An exchange whose result is discarded gives the store full ordering. */
    _InterlockedExchange8((volatile CHAR *)pf, (CHAR)f);
}

/**
 * Atomically compares and exchanges a boolean value, ordered.
 *
 * @returns true if *pf equalled fOld and was replaced with fNew.
 * @returns false if *pf did not equal fOld (*pf is left unchanged).
 * @param   pf      Pointer to the boolean variable to update.
 * @param   fNew    The value to assign to *pf when the comparison succeeds.
 * @param   fOld    The value *pf is expected to hold.
 */
DECLINLINE(bool) ASMAtomicCmpXchgBool(volatile bool *pf, const bool fNew, const bool fOld)
{
    /* _InterlockedCompareExchange8 returns the ORIGINAL value of *pf, not a
       success indicator.  The previous code returned that value directly,
       which reported "success" whenever *pf happened to be true; compare
       against fOld to report whether the exchange actually took place. */
    return _InterlockedCompareExchange8((volatile CHAR *)pf, (CHAR)fNew, (CHAR)fOld) == (CHAR)fOld;
}

/**
 * Atomically reads an unsigned 32-bit value, ordered.
 *
 * @returns Current *pu32 value
 * @param   pu32    Pointer to the 32-bit variable to read.
 */
DECLINLINE(UINT32) ASMAtomicReadU32(volatile UINT32 *pu32)
{
    /* Adding zero yields the current value with full ordering. */
    long lVal = _InterlockedExchangeAdd((volatile long *)pu32, 0);
    return (UINT32)lVal;
}

/**
 * Atomically reads an unsigned 64-bit value, ordered.
 *
 * @returns Current *pu64 value.
 * @param   pu64    Pointer to the 64-bit variable to read.
 */
DECLINLINE(UINT64) ASMAtomicReadU64(volatile UINT64 *pu64)
{
    /* Add-zero is a full-barrier read.  The previous UINT32 return type
       silently truncated the upper 32 bits of the value read. */
    return (UINT64)InterlockedExchangeAdd64((volatile long long *)pu64, 0);
}

/**
 * Atomically compares and exchanges an unsigned 32-bit value, ordered.
 *
 * *pu32 is set to newVal only if it currently equals oldVal.
 *
 * @returns The value *pu32 held before the operation.
 * @param   pu32    Pointer to the 32-bit variable to update.
 * @param   newVal  The value to assign on a successful compare.
 * @param   oldVal  The value *pu32 is expected to hold.
 */
DECLINLINE(UINT32) ASMAtomicCmpXchgU32(volatile UINT32 *pu32, UINT32 newVal, UINT32 oldVal)
{
    long lPrev = _InterlockedCompareExchange((volatile long *)pu32, (long)newVal, (long)oldVal);
    return (UINT32)lPrev;
}

/**
 * Atomically writes an unsigned 32-bit value, ordered.
 *
 * @param   pu32    Pointer to the 32-bit variable to write.
 * @param   u32     The 32-bit value to assign to *pu32.
 */
DECLINLINE(void) ASMAtomicWriteU32(volatile UINT32 *pu32, UINT32 u32)
{
    /* Implemented as an exchange; the previous value is discarded. */
    (void)ASMAtomicXchgU32(pu32, u32);
}

/**
 * Atomically writes an unsigned 32-bit value, unordered.
 *
 * @param   pu32    Pointer to the 32-bit variable to write.
 * @param   u32     The 32-bit value to assign to *pu32.
 *
 * @remarks Currently shares the ordered exchange implementation; an ordered
 *          write trivially satisfies the unordered contract.
 */
DECLINLINE(void) ASMAtomicUoWriteU32(volatile UINT32 *pu32, UINT32 u32)
{
    (void)ASMAtomicXchgU32(pu32, u32);
}

/**
 * Atomically increment a 32-bit value, ordered.
 *
 * @returns The new value.
 * @param   pu32        Pointer to the value to increment.
 *
 * @remarks x86: Requires a 486 or later.
 */
DECLINLINE(UINT32) ASMAtomicIncU32(UINT32 volatile *pu32)
{
    long lNew = _InterlockedIncrement((long *)pu32);
    return (UINT32)lNew;
}

/**
 * Atomically decrement an unsigned 32-bit value, ordered.
 *
 * @returns The new value.
 * @param   pu32        Pointer to the value to decrement.
 *
 * @remarks x86: Requires a 486 or later.
 */
DECLINLINE(UINT32) ASMAtomicDecU32(UINT32 volatile *pu32)
{
    long lNew = _InterlockedDecrement((long *)pu32);
    return (UINT32)lNew;
}

/**
 * Atomically adds to an unsigned 32-bit value, ordered.
 *
 * @returns The new (post-add) value.
 * @param   pu32    Pointer to the 32-bit variable to update.
 * @param   u32     The value to add to *pu32.
 */
DECLINLINE(UINT32) ASMAtomicAddU32(volatile UINT32 *pu32, UINT32 u32)
{
    /* _InterlockedAdd is an intrinsic on ARM/ARM64 targets only and is not in
       this file's #pragma intrinsic list.  Use _InterlockedExchangeAdd (which
       is forced intrinsic above and exists on x86/x64) and add the operand to
       its old-value result to keep the new-value return semantics. */
    return (UINT32)_InterlockedExchangeAdd((volatile long *)pu32, (long)u32) + u32;
}

/**
 * Atomically subtracts from an unsigned 32-bit value, ordered.
 *
 * @returns The value ASMAtomicAddU32 returns for the negated operand.
 * @param   pu32    Pointer to the 32-bit variable to update.
 * @param   u32     The value to subtract from *pu32.
 */
DECLINLINE(UINT32) ASMAtomicSubU32(volatile UINT32 *pu32, UINT32 u32)
{
    /* Subtraction expressed as two's-complement addition. */
    UINT32 uNegated = (UINT32)0 - u32;
    return ASMAtomicAddU32(pu32, uNegated);
}

/**
 * Tests if a bit in a bitmap is set.
 *
 * @returns true if the bit is set.
 * @returns false if the bit is clear.
 *
 * @param   pvBitmap    Pointer to the bitmap.
 * @param   iBit        The bit to test.
 *
 * @remarks The 32-bit aligning of pvBitmap is not a strict requirement.
 *          However, doing so will yield better performance as well as avoiding
 *          traps accessing the last bits in the bitmap.
 */
DECLINLINE(bool) ASMBitTest(const volatile void *pvBitmap, INT32 iBit)
{
    /* _bittest returns 0 or 1; the old union-based type pun (store into the
       u32 member, read back the bool member) was unnecessary. */
    return _bittest((long *)pvBitmap, iBit) != 0;
}

/**
 * Sets a bit in a bitmap (non-atomically).
 *
 * @param   pvBitmap    Pointer to the bitmap. This should be 32-bit aligned.
 * @param   iBit        The bit to set.
 *
 * @remarks The 32-bit aligning of pvBitmap is not a strict requirement.
 *          However, doing so will yield better performance as well as avoiding
 *          traps accessing the last bits in the bitmap.
 */
DECLINLINE(void) ASMBitSet(volatile void *pvBitmap, INT32 iBit)
{
    long *pBits = (long *)pvBitmap;
    _bittestandset(pBits, iBit);    /* previous bit value is discarded */
}

/**
 * Clears a bit in a bitmap (non-atomically).
 *
 * @param   pvBitmap    Pointer to the bitmap.
 * @param   iBit        The bit to clear.
 *
 * @remarks The 32-bit aligning of pvBitmap is not a strict requirement.
 *          However, doing so will yield better performance as well as avoiding
 *          traps accessing the last bits in the bitmap.
 */
DECLINLINE(void) ASMBitClear(volatile void *pvBitmap, INT32 iBit)
{
    long *pBits = (long *)pvBitmap;
    _bittestandreset(pBits, iBit);  /* previous bit value is discarded */
}

/**
 * Atomically sets a bit in a bitmap, ordered.
 *
 * @param   pvBitmap    Pointer to the bitmap. Must be 32-bit aligned, otherwise
 *                      the memory access isn't atomic!
 * @param   iBit        The bit to set.
 *
 * @remarks x86: Requires a 386 or later.
 */
DECLINLINE(void) ASMAtomicBitSet(volatile void *pvBitmap, INT32 iBit)
{
    long *pBits = (long *)pvBitmap;
    _interlockedbittestandset(pBits, iBit);
}

/**
 * Finds the first set bit in a bitmap.
 *
 * @returns Index of the first set bit.
 * @returns -1 if no set bit was found.
 * @param   pvBitmap    Pointer to the bitmap.
 * @param   cBits       The number of bits in the bitmap. Multiple of 32.
 */
DECLINLINE(INT32) ASMBitFirstSet(const volatile void *pvBitmap, UINT32 cBits)
{
    const volatile UINT8 *pb = (const volatile UINT8 *)pvBitmap;
    UINT32 iBit;

    /* Byte-wise scan; the first non-zero byte contains the answer. */
    for (iBit = 0; iBit < cBits; iBit += 8, pb++)
    {
        UINT8 bByte = *pb;
        if (bByte)
        {
            /* Walk to the lowest set bit within the byte. */
            while (!(bByte & 1))
            {
                bByte >>= 1;
                iBit++;
            }
            /* Guard against a set bit past the nominal bitmap length. */
            return iBit < cBits ? (INT32)iBit : -1;
        }
    }
    return -1;
}

/**
 * Finds the next set bit in a bitmap.
 *
 * @returns Index of the next set bit.
 * @returns -1 if no set bit was found.
 * @param   pvBitmap    Pointer to the bitmap.
 * @param   cBits       The number of bits in the bitmap. Multiple of 32.
 * @param   iBitPrev    The bit returned from the last search.
 *                      The search will start at iBitPrev + 1.
 */
DECLINLINE(int) ASMBitNextSet(const volatile void *pvBitmap, UINT32 cBits, UINT32 iBitPrev)
{
    const volatile UINT32 *pau32Bitmap = (const volatile UINT32 *)pvBitmap;
    int                             iBit = ++iBitPrev & 31;
    if (iBit) {
        /*
         * Inspect the 32-bit word containing the unaligned bit.
         */
        UINT32  u32 = pau32Bitmap[iBitPrev / 32] >> iBit;

        unsigned long ulBit = 0;
        if (_BitScanForward(&ulBit, u32))
            return ulBit + iBitPrev;

        /* No set bit in the rest of this word.  (The old code had an
           always-true "if (iBit >= 0)" here that returned a bogus index
           whenever the scan above found nothing.)
         *
         * Skip ahead and see if there is anything left to search.
         */
        iBitPrev |= 31;
        iBitPrev++;
        if (cBits <= (UINT32)iBitPrev)
            return -1;
    }

    /*
     * 32-bit aligned search, let ASMBitFirstSet do the dirty work.
     */
    iBit = ASMBitFirstSet(&pau32Bitmap[iBitPrev / 32], cBits - iBitPrev);
    if (iBit >= 0)
        iBit += iBitPrev;
    return iBit;
}

/**
 * Finds the first clear bit in a bitmap.
 *
 * @returns Index of the first zero bit.
 * @returns -1 if no clear bit was found.
 * @param   pvBitmap    Pointer to the bitmap.
 * @param   cBits       The number of bits in the bitmap. Multiple of 32.
 */
DECLINLINE(INT32) ASMBitFirstClear(const volatile void *pvBitmap, UINT32 cBits)
{
    const volatile UINT8 *pb = (const volatile UINT8 *)pvBitmap;
    UINT32 iBit;

    /* Byte-wise scan; the first byte that is not all-ones has a clear bit. */
    for (iBit = 0; iBit < cBits; iBit += 8, pb++)
    {
        UINT8 bByte = *pb;
        if (bByte != 0xFF)
        {
            /* Walk to the lowest clear bit within the byte. */
            while (bByte & 1)
            {
                bByte >>= 1;
                iBit++;
            }
            /* Guard against a clear bit past the nominal bitmap length. */
            return iBit < cBits ? (INT32)iBit : -1;
        }
    }
    return -1;
}

/**
 * Finds the most significant set bit in a 32-bit value.
 *
 * @returns 1-based index of the highest set bit (1..32).
 * @returns 0 if no bit is set (u32 == 0).
 * @param   u32     The value to scan.
 */
DECLINLINE(unsigned) ASMBitLastSetU32(UINT32 u32)
{
    unsigned long ulIdx;
    /* _BitScanReverse yields a 0-based index of the highest set bit; convert
       to the 1-based convention where 0 means "no bit set". */
    return _BitScanReverse(&ulIdx, u32) ? (unsigned)(ulIdx + 1) : 0;
}

/**
 * Atomically exchanges an unsigned 64-bit value, ordered.
 *
 * @returns The value *pu64 held before the exchange.
 * @param   pu64    Pointer to the 64-bit variable to update.
 * @param   u64     The 64-bit value to assign to *pu64.
 */
DECLINLINE(UINT64) ASMAtomicXchgU64(volatile UINT64 *pu64, UINT64 u64)
{
    __int64 iOld = _InterlockedExchange64((__int64 *)pu64, (__int64)u64);
    return (UINT64)iOld;
}

/**
 * Atomically writes an unsigned 64-bit value, ordered.
 *
 * @param   pu64    Pointer to the 64-bit variable to write.
 * @param   u64     The 64-bit value to assign to *pu64.
 */
DECLINLINE(void) ASMAtomicWriteU64(volatile UINT64 *pu64, UINT64 u64)
{
    /* Implemented as an exchange; the previous value is discarded. */
    (void)ASMAtomicXchgU64(pu64, u64);
}

/**
 * Atomically increments an unsigned 64-bit value, ordered.
 *
 * @returns The new (post-increment) value.
 * @param   pu64    Pointer to the 64-bit variable to increment.
 */
DECLINLINE(UINT64) ASMAtomicIncU64(volatile UINT64 *pu64)
{
    __int64 iNew = InterlockedIncrement64((__int64 *)pu64);
    return (UINT64)iNew;
}

/**
 * Fills a memory block with 32-bit copies of a value.
 *
 * @param   pv      Pointer to the block to fill.
 * @param   cb      Size of the block in bytes; must be a multiple of 4.
 * @param   u32     The 32-bit pattern to store.
 */
DECLINLINE(void) ASMMemFill32(volatile void *pv, size_t cb, UINT32 u32)
{
    Assert(!(cb & 3));  /* byte count must be 32-bit aligned */
    UINT32 *pu32 = (UINT32 *)pv;
    size_t  cLeft = cb / sizeof(UINT32);

    /* Eight-at-a-time unrolled fill for the bulk of the buffer. */
    for (; cLeft >= 8; cLeft -= 8, pu32 += 8)
    {
        pu32[0] = u32;
        pu32[1] = u32;
        pu32[2] = u32;
        pu32[3] = u32;
        pu32[4] = u32;
        pu32[5] = u32;
        pu32[6] = u32;
        pu32[7] = u32;
    }

    /* Remaining 0-7 entries. */
    for (; cLeft > 0; cLeft--)
        *pu32++ = u32;
}

#endif
