#include "StdAfx.h"
#include "PageMappingHeap.h"





namespace
{
	// Counts the number of trailing zero bits in v.  ntz(0) returns the full
	// bit width of T.
	//
	// ~v & (v - 1) produces a mask containing exactly the trailing-zero bits
	// of v; the population count of that mask is the trailing-zero count.
	// The count uses Kernighan's clear-lowest-set-bit step instead of a
	// right-shift loop: for a signed T the mask can be negative (v == 0), and
	// an arithmetic right shift would then never reach zero, hanging the loop.
	template <typename T>
	size_t ntz(T v)
	{
		T mask = ~v & (v - 1);

		size_t n = 0;
		for (; mask; mask &= mask - 1)
			++ n;

		return n;
	}

	// Scans a bitmap (32 bits per word, LSB-first) and reports each maximal
	// run of zero bits as yield(startBitIndex, runLength).  Runs spanning a
	// word boundary are reported once, with their combined length.
	//
	// The functor returns false to stop the scan early (DLMMapFindBest does
	// this on an exact fit).  The original code ignored that return value, so
	// an exact fit could be discarded in favour of a later, larger run; the
	// early-out is now honoured.  When stopped early, a pending run that has
	// not yet been reported is dropped.
	template <typename Func>
	void FindZeroRanges(const uint32* str, size_t strLen, Func& yield)
	{
		size_t carry = 0;    // length of the zero run accumulated so far (may span words)
		size_t bitIdx = 0;   // absolute bit index where the current zero run starts

		for (size_t wordIdx = 0; wordIdx < strLen; ++ wordIdx)
		{
			size_t wordBitIdx = 0;
			int64 word = str[wordIdx];

			// Set up sign extension to insert bits that are the last bit, inverted.
			// This guarantees both ntz(word) and ntz(~word) below stop at or before
			// bit 32, so neither call can run off into the upper half of the word.

			if (!(word & 0x80000000))
				reinterpret_cast<uint64&>(word) |= 0xffffffff00000000ULL;

			do 
			{
				size_t wordZeroRunLen = ntz(word);

				wordBitIdx += wordZeroRunLen;
				carry += wordZeroRunLen;

				if (wordBitIdx == 32)
					break; // run may continue into the next word - keep accumulating

				// The zero run ended inside this word - report it, honouring an
				// early-out request from the functor.
				if (!yield(bitIdx, carry))
					return;

				word >>= wordZeroRunLen;
				bitIdx += carry;
				carry = 0;

				// Skip the following run of one (allocated) bits.
				size_t wordOneRunLen = ntz(~word);
				bitIdx += wordOneRunLen;
				wordBitIdx += wordOneRunLen;

				if (wordBitIdx == 32)
					break;

				word >>= wordOneRunLen;
			}
			while (true);
		}

		// Report a zero run that extended to the end of the bitmap.
		if (carry)
		{
			yield(bitIdx, carry);
		}
	}

	// Functor for FindZeroRanges: picks a run of free pages for an allocation
	// of requiredLength pages.  An exact fit is taken immediately (operator()
	// returns false to ask the scan to stop); otherwise it remembers the run
	// leaving the largest leftover fragment (a worst-fit policy).
	struct DLMMapFindBest
	{
		DLMMapFindBest(size_t requiredLength)
			: requiredLength(requiredLength)
			, bestPosition(-1)
			, bestFragmentLength(0)
		{
		}

		// position/length describe one run of free pages.  Returns false to
		// stop the enclosing scan, true to keep scanning.
		bool operator () (size_t position, size_t length)
		{
			if (length < requiredLength)
				return true; // too small - not a candidate

			size_t fragment = length - requiredLength;

			if (fragment == 0)
			{
				// Perfect fit - take it and request an early out.
				bestPosition = position;
				bestFragmentLength = 0;
				return false;
			}

			if (fragment > bestFragmentLength)
			{
				// Leaves a bigger leftover than anything seen so far.
				bestPosition = position;
				bestFragmentLength = fragment;
			}

			return true;
		}

		size_t requiredLength;      // pages needed
		ptrdiff_t bestPosition;     // start page index of chosen run, -1 if none
		size_t bestFragmentLength;  // pages left over at the chosen run
	};
}



























// Constructs the heap over an externally supplied virtual address range
// [addressSpaceStart, addressSpaceEnd).
// NOTE(review): the destructor releases this range via PlatformReleaseSpace,
// so callers presumably pass a range reserved compatibly - confirm.
CPageMappingHeap::CPageMappingHeap(UINT_PTR addressSpaceStart, UINT_PTR addressSpaceEnd, const char* sName)
{
	Init(addressSpaceStart, addressSpaceEnd, sName);
}

// Reserves addressSpace bytes of virtual address space and constructs the
// heap over the reservation.
// NOTE(review): PlatformReserveSpace returns 0 on failure, in which case
// Init is called with the bogus range [0, addressSpace).  Only the
// destructor's null check accounts for that; confirm callers treat a failed
// reservation as fatal.
CPageMappingHeap::CPageMappingHeap(size_t addressSpace, const char* sName)
{
	UINT_PTR start = PlatformReserveSpace(addressSpace);
	Init(start, start + addressSpace, sName);
}

// Releases the heap's reserved address space.  Skips the release when the
// start address is 0 (e.g. after a failed reservation).
CPageMappingHeap::~CPageMappingHeap()
{
	if (m_addressSpaceStart)
		PlatformReleaseSpace(m_addressSpaceStart, m_addressSpaceEnd);
}

// Destroys the heap (Release()/delete-this ownership pattern).
void CPageMappingHeap::Release()
{
	delete this;
}

// Returns the mapping granularity: the platform page size cached in Init.
size_t CPageMappingHeap::GetGranularity() const
{
	return m_pageSize;
}

bool CPageMappingHeap::IsInAddressRange(void* ptr) const
{
	UINT_PTR u = reinterpret_cast<UINT_PTR>(ptr);
	return m_addressSpaceStart <= u && u < m_addressSpaceEnd;
}

// Commits length bytes of pages out of the reserved address range.
// length must be a multiple of the page size (enforced with a debug break,
// then rounded up).  Returns NULL when no sufficiently large free run
// exists or when committing any page fails (already-committed pages are
// unwound).  Serialized via m_lock.
void* CPageMappingHeap::Map(size_t length)
{
	CryAutoLock<CryCriticalSectionNonRecursive> lock(m_lock);

	const size_t pageBitmapElemBitSize = (sizeof(uint32) * 8);
	const size_t pageSize = m_pageSize;
	const size_t numPages = (m_addressSpaceEnd - m_addressSpaceStart) / pageSize;

	if (length % pageSize)
	{
		__debugbreak();
		length = (length + (pageSize - 1)) & ~(pageSize - 1);
	}

	const size_t numRequestedPages = length / pageSize;

	// Search the occupancy bitmap (0 == free) for a suitable run of pages.
	UINT_PTR mapAddress = 0;
	DLMMapFindBest findBest(numRequestedPages);
	FindZeroRanges(&m_pageBitmap[0], m_pageBitmap.size(), findBest);

	// Compare as ptrdiff_t: the previous (int)numPages cast could truncate
	// on 64-bit targets with a very large address range.
	if ((findBest.bestPosition == -1) || (findBest.bestPosition >= static_cast<ptrdiff_t>(numPages)))
	{
		return NULL;
	}

	mapAddress = m_addressSpaceStart + pageSize * findBest.bestPosition;

	// Commit the pages one at a time, unwinding on failure so a partial
	// allocation never leaks committed pages.
	for (size_t pageIdx = findBest.bestPosition, pageIdxEnd = pageIdx + numRequestedPages; pageIdx != pageIdxEnd; ++ pageIdx)
	{
		UINT_PTR base = m_addressSpaceStart + pageIdx * pageSize;

		if (!PlatformMapPage(reinterpret_cast<void*>(base), pageSize))
		{
			// Unwind the pages we've already mapped.
			for (; pageIdx > static_cast<size_t>(findBest.bestPosition); -- pageIdx)
			{
				UINT_PTR unmapBase = m_addressSpaceStart + (pageIdx - 1) * pageSize;
				PlatformUnMapPage(reinterpret_cast<void*>(unmapBase), pageSize);
			}

			return NULL;
		}
	}

	// All pages committed - mark them as in use in the bitmap.
	for (size_t pageIdx = findBest.bestPosition, pageIdxEnd = pageIdx + numRequestedPages; pageIdx != pageIdxEnd; ++ pageIdx)
	{
		size_t pageSegment = pageIdx / pageBitmapElemBitSize;
		uint32 pageMask = 1U << static_cast<uint32>(pageIdx % pageBitmapElemBitSize);

		m_pageBitmap[pageSegment] |= pageMask;
		
#if CAPTURE_REPLAY_LOG
		UINT_PTR base = m_addressSpaceStart + pageIdx * pageSize;
		CryGetIMemReplay()->MapPage(reinterpret_cast<void*>(base), pageSize);
#endif
	}

	return reinterpret_cast<void*>(mapAddress);
}

void CPageMappingHeap::Unmap(void* mem, size_t length)
{
	CryAutoLock<CryCriticalSectionNonRecursive> lock(m_lock);
	const size_t pageSize = m_pageSize;

	if (length % pageSize)
	{
		__debugbreak();
		length = (length + (pageSize - 1)) & ~(pageSize - 1);
	}

	UINT_PTR mapAddress = reinterpret_cast<UINT_PTR>(mem);
	for (size_t pageIdx = (mapAddress - m_addressSpaceStart) / pageSize, pageIdxEnd = pageIdx + length / pageSize; pageIdx != pageIdxEnd; ++ pageIdx)
	{
		UINT_PTR base = m_addressSpaceStart + pageSize * pageIdx;

		PlatformUnMapPage(reinterpret_cast<void*>(base), pageSize);

#if CAPTURE_REPLAY_LOG
		CryGetIMemReplay()->UnMapPage(reinterpret_cast<void*>(base), pageSize);
#endif

		const size_t pageBitmapElemBitSize = (sizeof(uint32) * 8);

		size_t pageSegment = pageIdx / pageBitmapElemBitSize;
		uint32 pageMask = ~(1U << static_cast<uint32>(pageIdx % pageBitmapElemBitSize));

		m_pageBitmap[pageSegment] &= pageMask;
	}
}

// Initializes the heap over [start, end): caches the platform page size,
// sizes the page occupancy bitmap (one bit per page, 0 == free, zeroed by
// resize) and permanently sets the padding bits in the last bitmap word so
// the free-run search can never hand out pages past the end of the range.
void CPageMappingHeap::Init(UINT_PTR start, UINT_PTR end, const char* sName)
{
	m_addressSpaceStart = start;
	m_addressSpaceEnd = end;

	size_t addressSpace = end - start;
	m_pageSize = PlatformGetPageSize();
	size_t numPages = (addressSpace + m_pageSize - 1) / m_pageSize;
	m_pageBitmap.resize((numPages + 31) / 32);

	size_t pageCapacity = m_pageBitmap.size() * 32;
	size_t numUnavailablePages = pageCapacity - numPages;
	if (numUnavailablePages > 0)
	{
		// Unsigned literal: with numUnavailablePages == 1 the shift below is
		// 1 << 31, which overflows a signed int (undefined behaviour).
		m_pageBitmap.back() = ~((1U << (32 - numUnavailablePages)) - 1);
	}

	if (m_addressSpaceStart && sName && sName[0])
		CryGetIMemReplay()->RegisterFixedAddressRange(reinterpret_cast<void*>(m_addressSpaceStart), addressSpace, sName);
}

#if defined(XENON) || defined(WIN32) || defined(gringo)

// Returns the OS page size, used as the heap's mapping granularity.
size_t CPageMappingHeap::PlatformGetPageSize()
{



	SYSTEM_INFO si;
	GetSystemInfo(&si);
	return si.dwPageSize;

}

// Reserves (without committing) size bytes of virtual address space.
// Returns 0 when the reservation fails (VirtualAlloc returns NULL).
UINT_PTR CPageMappingHeap::PlatformReserveSpace(size_t size)
{
	DWORD flags = MEM_RESERVE;



	LPVOID p = VirtualAlloc(NULL, size, flags, PAGE_READWRITE);
	return reinterpret_cast<UINT_PTR>(p);
}

// Releases the whole reservation that begins at start.  The end parameter
// is unused here: VirtualFree with MEM_RELEASE requires a size of 0 and
// frees the entire region reserved at that base address.
void CPageMappingHeap::PlatformReleaseSpace(UINT_PTR start, UINT_PTR end)
{
	VirtualFree(reinterpret_cast<void*>(start), 0, MEM_RELEASE);
}

// Commits one page at base within the already-reserved range.  Returns the
// committed base address on success, NULL on failure.
void* CPageMappingHeap::PlatformMapPage(void* base, size_t pageSize)
{
	DWORD flags = MEM_COMMIT;



	return VirtualAlloc(base, pageSize, flags, PAGE_READWRITE);
}

// Decommits one page at base while keeping the address range reserved, so
// it can be committed again by a later Map.
void CPageMappingHeap::PlatformUnMapPage(void* base, size_t pageSize)
{
	// Disable warning about only decommitting pages, and not releasing them
#pragma warning( push )
#pragma warning( disable : 6250 )
	VirtualFree(base, pageSize, MEM_DECOMMIT);
#pragma warning( pop )
}

#endif




















































































































#if defined(LINUX)
// Linux stub - not implemented; asserts if ever called.
size_t CPageMappingHeap::PlatformGetPageSize()
{
  assert(FALSE);
  return 0;
}

// Linux stub - not implemented; asserts if ever called.
UINT_PTR CPageMappingHeap::PlatformReserveSpace(size_t size)
{
  assert(FALSE);
  return NULL;
}

// Linux stub - not implemented; asserts if ever called.
void CPageMappingHeap::PlatformReleaseSpace(UINT_PTR start, UINT_PTR end)
{
  assert(FALSE);
}

// Linux stub - not implemented; asserts if ever called.
void* CPageMappingHeap::PlatformMapPage(void* base, size_t pageSize)
{
  assert(FALSE);
  return NULL;
}

// Linux stub - not implemented; asserts if ever called.
void CPageMappingHeap::PlatformUnMapPage(void* base, size_t pageSize)
{
  assert(FALSE);
}
#endif
