#include "NightMemory.h"

#ifdef _WIN32_WINNT
#define _HAS_WIN32_WINNT
#pragma push_macro("_WIN32_WINNT")
#undef _WIN32_WINNT
#endif

#include "nedmalloc/nedmalloc.c"

#ifdef _HAS_WIN32_WINNT
#pragma pop_macro("_WIN32_WINNT")
#undef _HAS_WIN32_WINNT
#endif


namespace NE
{

	namespace MemInternal
	{

		// Number of dedicated size-class pools; requests whose class index
		// reaches kPoolCount are routed to nedalloc's default pool instead.
		// Needs to be greater than 4: the >16-byte branch of
		// getPoolIndexFromSize starts producing indices at 4.
		const size_t kPoolCount = 14;
		// Arbitrary marker value stamped onto every pool we create (via
		// nedpsetvalue) so deallocBytes can recognise memory that came from
		// one of gPools as opposed to the default pool.
		void* gPoolFootprint = reinterpret_cast<void*>(0x004E0045);
		// Lazily-created size-class pools; a null slot means "not created yet".
		// NOTE(review): sized kPoolCount + 1 although allocateBytes only ever
		// populates indices 0..kPoolCount-1 — presumably defensive slack; confirm.
		nedalloc::nedpool* gPools[kPoolCount + 1] = { 0 };

		// Maps a request size to a size-class pool index:
		//   1..16   -> indices 0..3   (4-byte granularity)
		//   17..176 -> indices 4..13  (16-byte granularity)
		//   >= 177  -> kPoolCount     (routed to the default pool)
		// A request of 0 also maps to index 0.
		size_t getPoolIndexFromSize(size_t nReqSize)
		{
			if (nReqSize == 0)
				return 0;

			// Small sizes: one class per 4 bytes.
			if (nReqSize <= 16)
				return (nReqSize - 1) / 4;

			// Larger sizes: one class per 16 bytes, offset past the small
			// classes, clamped so oversized requests fall to the default pool.
			const size_t nIndex = ((nReqSize - 1) / 16) + 3;
			return (nIndex > kPoolCount) ? kPoolCount : nIndex;
		}

	}


	void* MemoryAllocator::allocateBytes(size_t sz)
	{
		size_t nPoolIndex = MemInternal::getPoolIndexFromSize(sz);
		nedalloc::nedpool* pool(0);		// A pool pointer of 0 means the default pool.

		if (nPoolIndex < MemInternal::kPoolCount)
		{
			if (MemInternal::gPools[nPoolIndex] == 0)
			{
				// Init pool if first use
				MemInternal::gPools[nPoolIndex] = nedalloc::nedcreatepool(0, 8);
				nedalloc::nedpsetvalue(MemInternal::gPools[nPoolIndex], MemInternal::gPoolFootprint);	// All pools are stamped with a footprint
			}

			pool = MemInternal::gPools[nPoolIndex];
		}

		return nedalloc::nedpmalloc(pool, sz);
	}


	void MemoryAllocator::deallocBytes(void* ptr)
	{
		if (!ptr)
			return;
		nedalloc::nedpool* pool(0);

		// nedalloc lets us get the pool pointer from the memory pointer
		void* footprint = nedalloc::nedgetvalue(&pool, ptr);

		// Check footprint
		if (footprint == MemInternal::gPoolFootprint)
		{
			// If we allocated the pool, deallocate from this pool...
			nedalloc::nedpfree(pool, ptr);
		}
		else
		{
			// ...otherwise let nedalloc handle it.
			nedalloc::nedfree(ptr);
		}
	}



	// Creates a private nedalloc pool sized for `capacity` bytes (0 lets
	// nedalloc pick a default); the capacity is remembered so clear() can
	// rebuild an equivalent pool.
	MemoryPool::MemoryPool(size_t capacity /*= 0*/)
		: mPool(nedalloc::nedcreatepool(capacity, 8))
		, mCapacity(capacity)
	{
	}


	// Destroys the pool, releasing every allocation still made from it.
	MemoryPool::~MemoryPool(void)
	{
		nedalloc::neddestroypool(static_cast<nedalloc::nedpool*>(mPool));
	}


	void* MemoryPool::allocateBytes(size_t sz)
	{
		return nedalloc::nedpmalloc(reinterpret_cast<nedalloc::nedpool*>(mPool), sz);
	}


	void MemoryPool::deallocBytes(void* ptr)
	{
		nedalloc::nedpfree(reinterpret_cast<nedalloc::nedpool*>(mPool), ptr);
	}


	// Frees every allocation at once by tearing the whole pool down and
	// replacing it with a fresh one of the original capacity. Any pointers
	// previously handed out by this pool become invalid.
	void MemoryPool::clear()
	{
		nedalloc::neddestroypool(static_cast<nedalloc::nedpool*>(mPool));
		mPool = nedalloc::nedcreatepool(mCapacity, 8);
	}

}