#include "HeapAllocator.h"
#include "Atomic.h"
#include "Assertions.h"

namespace CEngine
{

// A tracking unit is a 64-bit usage bitmap: one bit per page, 1 = in use.
// Pages map to bits MSB-first (the unit's first page is its highest bit),
// as established by the mask construction in alloc()/alignedAlloc().
static const TRACKING_UNIT TU_ALL_PAGES_USED = 0xFFFFFFFFFFFFFFFF;
// Single-bit probe mask; shifted into position to test one page (see free()).
static const TRACKING_UNIT TU_IN_USE = 0x01;
// Pages tracked per TRACKING_UNIT = bits per unit.
static const U32 PAGES_PER_TU = sizeof(TRACKING_UNIT) * 8;
/*
HeapAllocator::HeapAllocator(U64 heapSize)
    :   pageSize(4096),
        heapPointer(NULL),
        trackingUnitArray(NULL),
        headerArray(NULL)
{
    ASSERT(heapSize % pageSize == 0);

    freePages = totalPages = heapSize / pageSize;

    U64 requireTU = totalPages / PAGES_PER_TU;
    if (totalPages % PAGES_PER_TU != 0)
        ++requireTU;

    heapPointer = new U8[heapSize];
    trackingUnitArray = new TRACKING_UNIT[requireTU];
    headerArray = new AllocationHeader[totalPages];

    for (U64 i = 0; i < requireTU; ++i) {
        trackingUnitArray[i] = 0;
    }

    AllocationHeader temp;
    temp.size = 0;
    for (U64 i = 0; i < totalPages; ++i) {
        headerArray[i] = temp;
    }
}*/

// Constructs an allocator managing `heapSize` bytes split into pages of
// `ps` bytes. `heapSize` must be a whole number of pages. Allocates the
// backing heap, one tracking bit per page, and one header slot per page.
HeapAllocator::HeapAllocator(U64 heapSize, U64 ps)
    :   pageSize(ps),
        heapPointer(NULL),
        trackingUnitArray(NULL),
        headerArray(NULL)
{
    ASSERT(heapSize % pageSize == 0);

    freePages = totalPages = heapSize / pageSize;

    // One tracking bit per page; round up to cover a partial final unit.
    U32 requireTU = totalPages / PAGES_PER_TU;
    if (totalPages % PAGES_PER_TU != 0)
        ++requireTU;

    heapPointer = new U8[heapSize];
    trackingUnitArray = new TRACKING_UNIT[requireTU];
    headerArray = new AllocationHeader[totalPages];

    for (U32 i = 0; i < requireTU; ++i) {
        trackingUnitArray[i] = 0;
    }

    // Pages map to bits MSB-first, so when the page count does not fill
    // the final tracking unit, the non-existent trailing pages occupy its
    // LOW (PAGES_PER_TU - remainder) bits. Mark them permanently "in use"
    // so alloc() can never hand them out. This mirrors the mask that
    // clear() computes for the same unit; the previous code set the low
    // `remainder` bits instead, blocking real pages and exposing phantom
    // ones.
    U64 remainder = totalPages % PAGES_PER_TU;
    if (remainder != 0) {
        trackingUnitArray[requireTU - 1] = TU_ALL_PAGES_USED >> remainder;
    }

    AllocationHeader temp;
    temp.size = 0;
    for (U64 i = 0; i < totalPages; ++i) {
        headerArray[i] = temp;
    }
}

// Releases the heap, tracking bitmap, and header array.
// All three were created with new[], so they must be destroyed with
// delete[] -- scalar delete on an array is undefined behavior.
HeapAllocator::~HeapAllocator(void)
{
    if(heapPointer) {
        delete[] (U8*)heapPointer;
        heapPointer = NULL;
    }
    if(trackingUnitArray) {
        delete[] trackingUnitArray;
        trackingUnitArray = NULL;
    }
    if(headerArray) {
        delete[] headerArray;
        headerArray = NULL;
    }
}

// First-fit page allocator: scans the tracking bitmaps for a run of free
// pages large enough for `bytes`, marks the run used, records the byte
// size in the first page's header, and returns the run's start address.
// Returns NULL when no run fits.
void*
HeapAllocator::alloc(U64 bytes) {
    U32 totalTU = totalPages / PAGES_PER_TU;
    if (totalPages % PAGES_PER_TU != 0)
        ++totalTU;

    // Round the request up to whole pages.
    U64 requiredPages = bytes / pageSize;
    if (bytes % pageSize != 0)
        ++requiredPages;

    if (requiredPages > totalPages)
        return NULL;

    U32 beginningTU = 0;
    while (beginningTU < totalTU) {
        // Mask with min(requiredPages, PAGES_PER_TU) high bits set; it is
        // shifted right one position per candidate start offset.
        TRACKING_UNIT preBitMask = 0;
        U64 preOffset = 0;
        if(requiredPages < PAGES_PER_TU) {
            preBitMask = TU_ALL_PAGES_USED << (PAGES_PER_TU - requiredPages);
        }
        else {
            preBitMask = TU_ALL_PAGES_USED;
        }

        // A page is free when its bit is 0, so the run is free when the
        // complement of the unit covers the whole mask.
        for (; preOffset < PAGES_PER_TU; ++preOffset) {
            if (( ~(trackingUnitArray[beginningTU]) & preBitMask) == preBitMask)
                break;
            preBitMask = preBitMask >> 1;
        }

        if (preOffset == PAGES_PER_TU) {
            // No usable run starts in this unit; skip ahead over fully
            // used units. The scan is bounded by totalTU so a fully-used
            // tail cannot read past the end of trackingUnitArray.
            ++beginningTU;
            while (beginningTU < totalTU && trackingUnitArray[beginningTU] == TU_ALL_PAGES_USED)
                ++beginningTU;
            continue;
        }

        // Pages still needed beyond what this unit supplies from preOffset.
        I32 remainingPagesNeeded = requiredPages - (PAGES_PER_TU - preOffset);

        if (remainingPagesNeeded <= 0) {
            // The whole run fits inside this single tracking unit.
            trackingUnitArray[beginningTU] |= preBitMask;

            U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);

            AllocationHeader header;
            header.size = bytes;

            headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

            freePages -= requiredPages;

            return (void*)address;
        }

        // The run must continue into the next unit; wrapping back to (or
        // onto) the current unit means there is no room at the end of the
        // heap. `<=` (matching the check after the multi-unit scan below)
        // also rejects the degenerate totalTU == 1 wrap, which would
        // otherwise mark a non-contiguous run inside the same unit.
        U64 nextTU = (beginningTU + 1) % totalTU;
        if (nextTU <= beginningTU)
            return NULL;

        if (remainingPagesNeeded <= PAGES_PER_TU) {
            // The tail fits in one more unit: it occupies the high
            // remainingPagesNeeded bits of nextTU.
            TRACKING_UNIT postBitMask = TU_ALL_PAGES_USED << (PAGES_PER_TU - remainingPagesNeeded);

            if ((~(trackingUnitArray[nextTU]) & postBitMask) == postBitMask) {
                trackingUnitArray[beginningTU] |= preBitMask;
                trackingUnitArray[nextTU] |= postBitMask;

                U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);

                AllocationHeader header;
                header.size = bytes;

                headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

                freePages -= requiredPages;

                return (void*)address;
            }
            beginningTU = nextTU;
            continue;
        }

        // Need one or more completely free units between head and tail.
        U32 sequencialTUAvailable = 0;
        U32 sequencialTUNeeded = remainingPagesNeeded / PAGES_PER_TU;

        for (U32 i = nextTU; i < totalTU && sequencialTUAvailable < sequencialTUNeeded; ++i) {
            if (trackingUnitArray[i] == 0) {
                ++sequencialTUAvailable;
            }
            else
                break;
        }

        if (sequencialTUAvailable != sequencialTUNeeded) {
            // Not enough contiguous free units; resume the search after them.
            beginningTU = nextTU + sequencialTUAvailable;
            continue;
        }

        remainingPagesNeeded -= sequencialTUAvailable * PAGES_PER_TU;

        if (remainingPagesNeeded <= 0) {
            // Head plus whole units cover the request exactly.
            trackingUnitArray[beginningTU] |= preBitMask;
            for (U32 i = nextTU; i < (nextTU + sequencialTUAvailable); ++i) {
                trackingUnitArray[i] |= TU_ALL_PAGES_USED;
            }

            U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);

            AllocationHeader header;
            header.size = bytes;

            headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

            freePages -= requiredPages;

            return (void*)address;
        }

        // A final partial unit is still needed after the fully-used ones.
        nextTU = (beginningTU + sequencialTUAvailable + 1) % totalTU;
        if (nextTU <= beginningTU)
            return NULL;

        TRACKING_UNIT postBitMask = TU_ALL_PAGES_USED << (PAGES_PER_TU - remainingPagesNeeded);

        if ((~(trackingUnitArray[nextTU]) & postBitMask) == postBitMask) {
            trackingUnitArray[beginningTU] |= preBitMask;
            for (U32 i = beginningTU + 1; i < nextTU; ++i) {
                trackingUnitArray[i] |= TU_ALL_PAGES_USED;
            }
            trackingUnitArray[nextTU] |= postBitMask;

            U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);

            AllocationHeader header;
            header.size = bytes;

            headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

            freePages -= requiredPages;

            return (void*)address;
        }

        beginningTU = nextTU;
    }

    return NULL;
}

// Like alloc(), but shifts the returned address forward so it is aligned
// to `alignment` (must be non-zero; the bit-mask math below expects a
// power of two). The padding bytes are folded into the allocation size.
// NOTE(review): the adjustment is computed from heapPointer's base
// misalignment and then applied to whichever page the run starts at;
// that is only correct when pageSize is a multiple of alignment --
// confirm callers guarantee this.
void*
HeapAllocator::alignedAlloc(U64 bytes, U64 alignment) {
    ASSERT(alignment != 0);

    U64 mask = alignment - 1;
    U64 misalignment = ((U64)heapPointer & mask);
    U64 adjustment = alignment - misalignment;

    if (misalignment == 0)
        adjustment = 0;

    // Reserve room for the alignment padding as part of the request.
    bytes += adjustment;

    U32 totalTU = totalPages / PAGES_PER_TU;
    if (totalPages % PAGES_PER_TU != 0)
        ++totalTU;

    // Round the (padded) request up to whole pages.
    U64 requiredPages = bytes / pageSize;
    if (bytes % pageSize != 0)
        ++requiredPages;

    if (requiredPages > totalPages)
        return NULL;

    U32 beginningTU = 0;
    while (beginningTU < totalTU) {
        // Mask with min(requiredPages, PAGES_PER_TU) high bits set; it is
        // shifted right one position per candidate start offset.
        TRACKING_UNIT preBitMask = 0;
        U64 preOffset = 0;
        if(requiredPages < PAGES_PER_TU) {
            preBitMask = TU_ALL_PAGES_USED << (PAGES_PER_TU - requiredPages);
        }
        else {
            preBitMask = TU_ALL_PAGES_USED;
        }

        // A page is free when its bit is 0, so the run is free when the
        // complement of the unit covers the whole mask.
        for (; preOffset < PAGES_PER_TU; ++preOffset) {
            if (( ~(trackingUnitArray[beginningTU]) & preBitMask) == preBitMask)
                break;
            preBitMask = preBitMask >> 1;
        }

        if (preOffset == PAGES_PER_TU) {
            // No usable run starts in this unit; skip ahead over fully
            // used units. The scan is bounded by totalTU so a fully-used
            // tail cannot read past the end of trackingUnitArray.
            ++beginningTU;
            while (beginningTU < totalTU && trackingUnitArray[beginningTU] == TU_ALL_PAGES_USED)
                ++beginningTU;
            continue;
        }

        // Pages still needed beyond what this unit supplies from preOffset.
        I32 remainingPagesNeeded = requiredPages - (PAGES_PER_TU - preOffset);

        if (remainingPagesNeeded <= 0) {
            // The whole run fits inside this single tracking unit.
            trackingUnitArray[beginningTU] |= preBitMask;

            U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);
            address += adjustment;

            AllocationHeader header;
            header.size = bytes;

            headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

            freePages -= requiredPages;

            return (void*)address;
        }

        // The run must continue into the next unit; wrapping back to (or
        // onto) the current unit means there is no room at the end of the
        // heap. `<=` (matching the check after the multi-unit scan below)
        // also rejects the degenerate totalTU == 1 wrap, which would
        // otherwise mark a non-contiguous run inside the same unit.
        U32 nextTU = (beginningTU + 1) % totalTU;
        if (nextTU <= beginningTU)
            return NULL;

        if (remainingPagesNeeded <= PAGES_PER_TU) {
            // The tail fits in one more unit: it occupies the high
            // remainingPagesNeeded bits of nextTU.
            TRACKING_UNIT postBitMask = TU_ALL_PAGES_USED << (PAGES_PER_TU - remainingPagesNeeded);

            if ((~(trackingUnitArray[nextTU]) & postBitMask) == postBitMask) {
                trackingUnitArray[beginningTU] |= preBitMask;
                trackingUnitArray[nextTU] |= postBitMask;

                U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);
                address += adjustment;

                AllocationHeader header;
                header.size = bytes;

                headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

                freePages -= requiredPages;

                return (void*)address;
            }
            beginningTU = nextTU;
            continue;
        }

        // Need one or more completely free units between head and tail.
        U32 sequencialTUAvailable = 0;
        U32 sequencialTUNeeded = remainingPagesNeeded / PAGES_PER_TU;

        for (U32 i = nextTU; i < totalTU && sequencialTUAvailable < sequencialTUNeeded; ++i) {
            if (trackingUnitArray[i] == 0) {
                ++sequencialTUAvailable;
            }
            else
                break;
        }

        if (sequencialTUAvailable != sequencialTUNeeded) {
            // Not enough contiguous free units; resume the search after them.
            beginningTU = nextTU + sequencialTUAvailable;
            continue;
        }

        remainingPagesNeeded -= sequencialTUAvailable * PAGES_PER_TU;

        if (remainingPagesNeeded <= 0) {
            // Head plus whole units cover the request exactly.
            trackingUnitArray[beginningTU] |= preBitMask;
            for (U32 i = nextTU; i < (nextTU + sequencialTUAvailable); ++i) {
                trackingUnitArray[i] |= TU_ALL_PAGES_USED;
            }

            U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);
            address += adjustment;

            AllocationHeader header;
            header.size = bytes;

            headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

            freePages -= requiredPages;

            return (void*)address;
        }

        // A final partial unit is still needed after the fully-used ones.
        nextTU = (beginningTU + sequencialTUAvailable + 1) % totalTU;
        if (nextTU <= beginningTU)
            return NULL;

        TRACKING_UNIT postBitMask = TU_ALL_PAGES_USED << (PAGES_PER_TU - remainingPagesNeeded);

        if ((~(trackingUnitArray[nextTU]) & postBitMask) == postBitMask) {
            trackingUnitArray[beginningTU] |= preBitMask;
            for (U32 i = beginningTU + 1; i < nextTU; ++i) {
                trackingUnitArray[i] |= TU_ALL_PAGES_USED;
            }
            trackingUnitArray[nextTU] |= postBitMask;

            U64 address = (U64)heapPointer + (((beginningTU * PAGES_PER_TU) + preOffset) * pageSize);
            address += adjustment;

            AllocationHeader header;
            header.size = bytes;

            headerArray[(beginningTU * PAGES_PER_TU) + preOffset] = header;

            freePages -= requiredPages;

            return (void*)address;
        }

        beginningTU = nextTU;
    }

    return NULL;
}

// Releases the allocation that starts at `pointer`: clears its pages'
// bits in the tracking bitmaps and zeroes the stored header.
// NOTE(review): assumes `pointer` is exactly an address this allocator
// returned and lies within the heap -- no range check is performed.
void
HeapAllocator::free(void* pointer) {
    if (pointer == NULL) {
        return;
    }

    // Map the address back to its first page and that page's tracking unit.
    U64 pageIndice = ((U64)pointer - (U64)heapPointer) / (pageSize);
    U32 TUIndice = pageIndice / PAGES_PER_TU;

    AllocationHeader header = headerArray[pageIndice];

    // Pages map MSB-first within a unit: build a one-bit mask for the
    // first page and return early if that page is already free
    // (double-free guard -- only the first page is verified).
    TRACKING_UNIT firstPage;
    firstPage = TU_IN_USE << ((PAGES_PER_TU - (pageIndice % PAGES_PER_TU)) - 1);
    if (( ~(trackingUnitArray[TUIndice]) & firstPage) == firstPage)
        return;

    // Number of tracking units the allocation spans, measured from the
    // start of TUIndice's unit (hence the in-unit byte offset is added
    // before dividing).
    I32 usedTU = (header.size + ((pageIndice % PAGES_PER_TU) * pageSize)) / (pageSize * PAGES_PER_TU);
    if ((header.size + ((pageIndice % PAGES_PER_TU) * pageSize)) % (pageSize * PAGES_PER_TU) != 0)
        ++usedTU;

    // Page count of the allocation, rounded up from its recorded byte size.
    I32 usedPages = header.size / pageSize;
    if (header.size % pageSize != 0)
        ++usedPages;

    freePages += usedPages;

    TRACKING_UNIT firstTUMask = 1;

    if (usedTU == 1) {
        // Whole allocation sits inside one unit: grow a mask covering the
        // allocation's pages within this unit, then XOR them off.
        // NOTE(review): XOR assumes these bits are currently set; the
        // guard above only verified the first page.
        --usedPages;
        for (U32 i = 1; i < (PAGES_PER_TU - (pageIndice % PAGES_PER_TU)); ++i) {
            firstTUMask = firstTUMask << 1;
            if (usedPages > 0)
                ++firstTUMask;
            --usedPages;
        }

        trackingUnitArray[TUIndice] ^= firstTUMask;

        header.size = 0;
        headerArray[pageIndice] = header;

        return;
    }

    // Multi-unit allocation: mask covering every page of the starting
    // unit from the run's first page down to the unit's low end.
    for (U32 i = 1; i < (PAGES_PER_TU - (pageIndice % PAGES_PER_TU)); ++i) {
        firstTUMask = firstTUMask << 1;
        ++firstTUMask;
    }

    // Pages that live in units after the first one.
    TRACKING_UNIT remainingPages = usedPages - (PAGES_PER_TU - (pageIndice % PAGES_PER_TU));

    /*if (remainingPages <= PAGES_PER_TU) {
        TRACKING_UNIT lastTUMask = TU_ALL_PAGES_USED;
        lastTUMask = lastTUMask << remainingPages;

        trackingUnitArray[TUIndice] ^= firstTUMask;
        trackingUnitArray[TUIndice + 1] ^= lastTUMask;

        header.size = 0;
        headerArray[pageIndice] = header;

        return;
    }*/

    trackingUnitArray[TUIndice] ^= firstTUMask;

    // Units covered after the first one, rounding up for a partial tail.
    U32 remainingTU = remainingPages / PAGES_PER_TU;
    if (remainingPages % PAGES_PER_TU != 0)
        ++remainingTU;

    // Fully covered middle units are simply zeroed.
    for (U32 i = 1; i < remainingTU; ++i) {
        trackingUnitArray[TUIndice + i] = 0;
    }

    // Clear the leftover high bits of the final unit.
    // NOTE(review): this XOR indexes TUIndice + remainingTU, one past the
    // last unit zeroed above -- confirm this cannot exceed the tracking
    // array for an allocation ending exactly at the heap's last unit.
    remainingPages -= (remainingTU - 1) * PAGES_PER_TU;
    TRACKING_UNIT lastTUMask = TU_ALL_PAGES_USED;
    lastTUMask = lastTUMask << (PAGES_PER_TU - remainingPages);

    trackingUnitArray[TUIndice + remainingTU] ^= lastTUMask;

    header.size = 0;
    headerArray[pageIndice] = header;

    return;
}

void
HeapAllocator::clear(void) {
    freePages = totalPages;

    U32 totalTU = totalPages / PAGES_PER_TU;
    if (totalPages % PAGES_PER_TU != 0)
        ++totalTU;

    for (U32 i = 0; i < totalTU; ++i) {
        trackingUnitArray[i] = 0;
    }

    if (totalPages % PAGES_PER_TU == 1) {
        U64 mask = TU_ALL_PAGES_USED / 2;
        trackingUnitArray[totalTU - 1] = mask;
    }
    else if (totalPages % PAGES_PER_TU > 0) {
        U64 mask = TU_ALL_PAGES_USED / 2;
        mask = mask >> ((totalPages % PAGES_PER_TU) - 1);
        trackingUnitArray[totalTU - 1] = mask;
    }

    AllocationHeader temp;
    temp.size = 0;
    for (U64 i = 0; i < totalPages; ++i) {
        headerArray[i] = temp;
    }
}

// Debug accessor: returns the tracking bitmap and reports its element
// count through `length`. The array holds ceil(totalPages / PAGES_PER_TU)
// tracking units -- NOT totalPages entries; reporting the page count here
// (as the old code did) invites out-of-bounds reads by the caller.
TRACKING_UNIT*
HeapAllocator::debugReturnTUArray(U64& length) {
    length = totalPages / PAGES_PER_TU;
    if (totalPages % PAGES_PER_TU != 0)
        ++length;

    return trackingUnitArray;
}

// Debug accessor: returns a copy of the allocation header recorded for
// the page containing `pointer`. A NULL pointer yields an empty
// (size == 0) header.
AllocationHeader
HeapAllocator::debugReturnHeader(void* pointer) {
    if (pointer == NULL) {
        AllocationHeader empty;
        empty.size = 0;
        return empty;
    }

    // Translate the address back into its page index within the heap.
    const U64 byteOffset = (U64)pointer - (U64)heapPointer;
    const U64 pageIndex = byteOffset / pageSize;

    return headerArray[pageIndex];
}

}
