#include "VulkanMemory.h"
#include "VulkanResource.h"
#include "VulkanDevice.h"

namespace Alice
{
	#define UE_VK_MEMORY_MAX_SUB_ALLOCATION (64llu << 20llu) // set to 0 to disable
	// Page sizes (in bytes) used when carving device memory into sub-allocatable pages.
	// The first two are desktop defaults; the ANDROID_* values cap page sizes on
	// memory-constrained devices (image/buffer pages capped separately).
	enum
	{
		GPU_ONLY_HEAP_PAGE_SIZE = 128 * 1024 * 1024,
		STAGING_HEAP_PAGE_SIZE = 32 * 1024 * 1024,
		ANDROID_MAX_HEAP_PAGE_SIZE = 16 * 1024 * 1024,
		ANDROID_MAX_HEAP_IMAGE_PAGE_SIZE = 16 * 1024 * 1024,
		ANDROID_MAX_HEAP_BUFFER_PAGE_SIZE = 4 * 1024 * 1024,
	};
	// Returns true when allocations tagged with the given meta type are allowed to
	// be evicted from device-local memory. Currently only "other" images qualify.
	bool MetaTypeCanEvict(EVulkanAllocationMetaType MetaType)
	{
		return MetaType == EVulkanAllocationMetaImageOther;
	}
	// Translates engine-level allocation flags into the VkMemoryPropertyFlags used
	// to select a memory type:
	// - Memoryless overrides everything and maps to lazily-allocated memory.
	// - HostCached wins over HostVisible (cached readback memory).
	// - Plain HostVisible is coherent upload memory; PreferBAR additionally asks
	//   for device-local (BAR) memory and must be combined with HostVisible.
	// - Otherwise device-local; unified-memory hardware also gets host access bits.
	static VkMemoryPropertyFlags GetMemoryPropertyFlags(EVulkanAllocationFlags AllocFlags, bool bHasUnifiedMemory)
	{
		checkf(!(EnumHasAnyFlags(AllocFlags, EVulkanAllocationFlags::PreferBAR) && !EnumHasAnyFlags(AllocFlags, EVulkanAllocationFlags::HostVisible)), TEXT("PreferBAR should always be used with HostVisible."));

		if (EnumHasAnyFlags(AllocFlags, EVulkanAllocationFlags::Memoryless))
		{
			return VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
		}

		if (EnumHasAnyFlags(AllocFlags, EVulkanAllocationFlags::HostCached))
		{
			return VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
		}

		if (EnumHasAnyFlags(AllocFlags, EVulkanAllocationFlags::HostVisible))
		{
			VkMemoryPropertyFlags HostFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
			if (EnumHasAnyFlags(AllocFlags, EVulkanAllocationFlags::PreferBAR))
			{
				HostFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
			}
			return HostFlags;
		}

		VkMemoryPropertyFlags DeviceFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
		if (bHasUnifiedMemory)
		{
			DeviceFlags |= (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
		}
		return DeviceFlags;
	}
	// Constructs an empty staging-buffer wrapper; the VkBuffer handle and backing
	// allocation are filled in later by FStagingManager::AcquireBuffer.
	FStagingBuffer::FStagingBuffer(VulkanDevice* InDevice)
		: mDevice(InDevice)
		, Buffer(VK_NULL_HANDLE)
		, MemoryReadFlags(VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
		, BufferSize(0)
	{
	}
	// Returns the raw VkBuffer handle (VK_NULL_HANDLE until AcquireBuffer creates it,
	// or after Destroy).
	VkBuffer FStagingBuffer::GetHandle() const
	{
		return Buffer;
	}

	// Releases the backing memory allocation on destruction.
	// NOTE(review): Destroy() also calls FreeVulkanAllocation on the same
	// Allocation — presumably the free is idempotent on an already-released
	// allocation; confirm, otherwise Destroy()+destructor double-frees.
	FStagingBuffer::~FStagingBuffer()
	{
		mDevice->GetMemoryManager().FreeVulkanAllocation(Allocation);
	}


	// Returns the CPU-visible pointer for the buffer's allocation (delegates to
	// the allocation; validity depends on the allocation being mapped).
	void* FStagingBuffer::GetMappedPointer()
	{
		return Allocation.GetMappedPointer(mDevice);
	}

	// Size in bytes requested at acquisition (after any nonCoherentAtomSize rounding).
	uint32 FStagingBuffer::GetSize() const
	{
		return BufferSize;
	}

	// Underlying VkDeviceMemory that backs this buffer's sub-allocation.
	VkDeviceMemory FStagingBuffer::GetDeviceMemoryHandle() const
	{
		return Allocation.GetDeviceMemoryHandle(mDevice);
	}

	// Flushes CPU writes so they become visible to the GPU (needed for
	// non-coherent memory; delegates to the allocation).
	void FStagingBuffer::FlushMappedMemory()
	{
		Allocation.FlushMappedMemory(mDevice);
	}

	// Invalidates the mapped range so GPU writes become visible to the CPU
	// (needed for non-coherent readback memory; delegates to the allocation).
	void FStagingBuffer::InvalidateMappedMemory()
	{
		Allocation.InvalidateMappedMemory(mDevice);
	}


	// Immediately destroys the VkBuffer and frees its memory.
	// NOTE(review): the destructor frees Allocation again — verify
	// FreeVulkanAllocation tolerates a second call on the same allocation.
	void FStagingBuffer::Destroy()
	{
		//// Does not need to go in the deferred deletion queue
		vkDestroyBuffer(mDevice->GetDevice(), Buffer, nullptr);
		Buffer = VK_NULL_HANDLE;
		mDevice->GetMemoryManager().FreeVulkanAllocation(Allocation);
	}
	// Creates a host-visible staging buffer of at least Size bytes and registers
	// it as "in use". (The free-list recycling path is still commented out from
	// the port, so every call currently allocates a fresh buffer.)
	// @param Size               requested byte size; rounded up to nonCoherentAtomSize
	//                           for host-cached (readback) buffers
	// @param InUsageFlags       requested VkBufferUsageFlags; transfer src/dst are
	//                           paired so a buffer can serve either direction
	// @param InMemoryReadFlags  VK_MEMORY_PROPERTY_HOST_CACHED_BIT selects cached
	//                           readback memory, anything else coherent upload memory
	// @return the new tracked FStagingBuffer (owned by this manager)
	FStagingBuffer* FStagingManager::AcquireBuffer(uint32 Size, VkBufferUsageFlags InUsageFlags, VkMemoryPropertyFlagBits InMemoryReadFlags)
	{
#if VULKAN_ENABLE_AGGRESSIVE_STATS
		SCOPE_CYCLE_COUNTER(STAT_VulkanStagingBuffer);
#endif
		//LLM_SCOPE_VULKAN(ELLMTagVulkan::VulkanStagingBuffers);

		// Host-cached memory is invalidated in whole atoms, so round the size up.
		const bool IsHostCached = (InMemoryReadFlags == VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
		if (IsHostCached)
		{
			Size = AlignArbitrary(Size, (uint32)Device->GetLimits().nonCoherentAtomSize);
		}

		// Add both source and dest flags (a recycled buffer may be reused in either direction)
		if ((InUsageFlags & (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT)) != 0)
		{
			InUsageFlags |= (VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT);
		}

		// For descriptors buffers
		if (Device->GetOptionalExtensions().HasBufferDeviceAddress)
		{
			InUsageFlags |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
		}

		//#todo-rco: Better locking!
		{
			/*FScopeLock Lock(&StagingLock);
			for (int32 Index = 0; Index < FreeStagingBuffers.Num(); ++Index)
			{
				FFreeEntry& FreeBuffer = FreeStagingBuffers[Index];
				if (FreeBuffer.StagingBuffer->GetSize() == Size && FreeBuffer.StagingBuffer->MemoryReadFlags == InMemoryReadFlags)
				{
					FStagingBuffer* Buffer = FreeBuffer.StagingBuffer;
					FreeStagingBuffers.RemoveAtSwap(Index, EAllowShrinking::No);
					UsedStagingBuffers.Add(Buffer);
					VULKAN_FILL_TRACK_INFO(Buffer->Track, __FILE__, __LINE__);
					return Buffer;
				}
			}*/
		}

		FStagingBuffer* StagingBuffer = new FStagingBuffer(Device);
		StagingBuffer->MemoryReadFlags = InMemoryReadFlags;
		StagingBuffer->BufferSize = Size;

		VkBufferCreateInfo StagingBufferCreateInfo;
		ZeroVulkanStruct(StagingBufferCreateInfo, VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
		StagingBufferCreateInfo.size = Size;
		StagingBufferCreateInfo.usage = InUsageFlags;
		// Fix: the result was previously ignored, silently continuing with a
		// VK_NULL_HANDLE buffer on failure. Surface the error instead.
		VERIFYVULKANRESULT(vkCreateBuffer(Device->GetDevice(), &StagingBufferCreateInfo, nullptr, &StagingBuffer->Buffer));

		// Set minimum alignment to 16 bytes, as some buffers are used with CPU SIMD instructions
		uint32 ForcedMinAlignment = 16u;
		static const bool bIsAmd = false;//(Device->GetDeviceProperties().vendorID == (uint32)EGpuVendorId::Amd);
		if (IsHostCached || bIsAmd)
		{
			ForcedMinAlignment = AlignArbitrary(ForcedMinAlignment, (uint32)Device->GetLimits().nonCoherentAtomSize);
		}

		const EVulkanAllocationFlags AllocFlags = EVulkanAllocationFlags::AutoBind |
			(IsHostCached ? EVulkanAllocationFlags::HostCached : EVulkanAllocationFlags::HostVisible);

		Device->GetMemoryManager().AllocateBufferMemory(StagingBuffer->Allocation, StagingBuffer->Buffer, AllocFlags, TEXT("StagingBuffer"), ForcedMinAlignment);

		// Book-keeping: track the buffer and the peak staging memory footprint.
		{
			//FScopeLock Lock(&StagingLock);
			UsedStagingBuffers.push_back(StagingBuffer);
			UsedMemory += StagingBuffer->GetSize();
			PeakUsedMemory = MathUtils::Max(UsedMemory, PeakUsedMemory);
		}

		//VULKAN_FILL_TRACK_INFO(StagingBuffer->Track, __FILE__, __LINE__);
		return StagingBuffer;
	}
	// Returns a staging buffer to the manager and nulls the caller's pointer.
	// Both recycling paths (deferred release keyed to the command buffer's fence,
	// and the immediate free list) are still commented out from the port, so the
	// buffer currently just stays in UsedStagingBuffers.
	// NOTE(review): that means released buffers are never reused or freed here —
	// presumably intentional while the port is WIP; confirm.
	void FStagingManager::ReleaseBuffer(VulkanCommandBuffer* CmdBuffer, FStagingBuffer*& StagingBuffer)
	{
#if VULKAN_ENABLE_AGGRESSIVE_STATS
		SCOPE_CYCLE_COUNTER(STAT_VulkanStagingBuffer);
#endif

		//FScopeLock Lock(&StagingLock);
		//UsedStagingBuffers.RemoveSingleSwap(StagingBuffer, EAllowShrinking::No);

		if (CmdBuffer)
		{
			//FPendingItemsPerCmdBuffer* ItemsForCmdBuffer = FindOrAdd(CmdBuffer);
			//FPendingItemsPerCmdBuffer::FPendingItems* ItemsForFence = ItemsForCmdBuffer->FindOrAddItemsForFence(CmdBuffer->GetFenceSignaledCounterA());
			//check(StagingBuffer);
			//ItemsForFence->Resources.Add(StagingBuffer);
		}
		else
		{
			//FreeStagingBuffers.Add({StagingBuffer, GFrameNumberRenderThread});
		}
		StagingBuffer = nullptr;
	}
	// Default constructor; all real setup happens in Init().
	FDeviceMemoryManager::FDeviceMemoryManager()
	{
		
	}
    // One-time initialization: caches the device handle, queries memory
    // properties, picks the largest device-local heap as the "primary" heap
    // (used by eviction/defragmentation) and detects memoryless support.
    // Must be called exactly once (asserts if Device is already set).
    void FDeviceMemoryManager::Init(VulkanDevice* InDevice)
    {
        check(Device == nullptr);
        Device = InDevice;
        NumAllocations = 0;
        PeakNumAllocations = 0;

        bHasUnifiedMemory = false;//FVulkanPlatform::HasUnifiedMemory();

        DeviceHandle = Device->GetDevice();
        UpdateMemoryProperties();

        PrimaryHeapIndex = -1;
        uint64 PrimaryHeapSize = 0;
        uint32 NonLocalHeaps = 0;

        for(uint32 i = 0; i < MemoryProperties.memoryHeapCount; ++i)
        {
            // Fix: heap flags are VkMemoryHeapFlags, so the correct enumerant is
            // VK_MEMORY_HEAP_DEVICE_LOCAL_BIT. The previous
            // VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT only worked by coincidence
            // (both enumerants share the value 0x1).
            if (VKHasAllFlags(MemoryProperties.memoryHeaps[i].flags, VK_MEMORY_HEAP_DEVICE_LOCAL_BIT))
            {
                if(MemoryProperties.memoryHeaps[i].size > PrimaryHeapSize)
                {
                    PrimaryHeapIndex = i;
                    PrimaryHeapSize = MemoryProperties.memoryHeaps[i].size;
                }
            }
            else
            {
                NonLocalHeaps++;
            }
        }
        if(0 == NonLocalHeaps)
        {
            PrimaryHeapIndex = -1; // if there are no non-local heaps, disable eviction and defragmentation
        }

        // Update bMemoryless support
        bSupportsMemoryless = false;
        for (uint32 i = 0; i < MemoryProperties.memoryTypeCount && !bSupportsMemoryless; ++i)
        {
            bSupportsMemoryless = VKHasAllFlags(MemoryProperties.memoryTypes[i].propertyFlags, VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
        }

        HeapInfos.resize(MemoryProperties.memoryHeapCount);

        //PrintMemInfo();
    }
    // Refreshes MemoryProperties from the physical device. The budget-aware path
    // (VK_EXT_memory_budget) is disabled pending the extension-support port, so
    // only the plain vkGetPhysicalDeviceMemoryProperties query runs today.
    void FDeviceMemoryManager::UpdateMemoryProperties()
    {
        if (false)//Device->GetOptionalExtensions().HasMemoryBudget)
        {
            /*VkPhysicalDeviceMemoryProperties2 MemoryProperties2;
            ZeroVulkanStruct(MemoryBudget, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT);
            ZeroVulkanStruct(MemoryProperties2, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2);
            MemoryProperties2.pNext = &MemoryBudget;
            VulkanRHI::vkGetPhysicalDeviceMemoryProperties2(Device->GetPhysicalHandle(), &MemoryProperties2);
            FMemory::Memcpy(MemoryProperties, MemoryProperties2.memoryProperties);

            for (uint32 Heap = 0; Heap < VK_MAX_MEMORY_HEAPS; ++Heap)
            {
                MemoryBudget.heapBudget[Heap] = GVulkanBudgetPercentageScale * MemoryBudget.heapBudget[Heap] / 100;
            }

            VkDeviceSize BudgetX = 0;
            for (uint32 Heap = 6; Heap < VK_MAX_MEMORY_HEAPS; ++Heap)
            {
                BudgetX += MemoryBudget.heapBudget[Heap];
            }
            SET_DWORD_STAT(STAT_VulkanMemoryBudget0, MemoryBudget.heapBudget[0]);
            SET_DWORD_STAT(STAT_VulkanMemoryBudget1, MemoryBudget.heapBudget[1]);
            SET_DWORD_STAT(STAT_VulkanMemoryBudget2, MemoryBudget.heapBudget[2]);
            SET_DWORD_STAT(STAT_VulkanMemoryBudget3, MemoryBudget.heapBudget[3]);
            SET_DWORD_STAT(STAT_VulkanMemoryBudget4, MemoryBudget.heapBudget[4]);
            SET_DWORD_STAT(STAT_VulkanMemoryBudget5, MemoryBudget.heapBudget[5]);
            SET_DWORD_STAT(STAT_VulkanMemoryBudgetX, BudgetX);

            SET_DWORD_STAT(STAT_VulkanMemoryUsage0, MemoryBudget.heapUsage[0]);
            SET_DWORD_STAT(STAT_VulkanMemoryUsage1, MemoryBudget.heapUsage[1]);
            SET_DWORD_STAT(STAT_VulkanMemoryUsage2, MemoryBudget.heapUsage[2]);
            SET_DWORD_STAT(STAT_VulkanMemoryUsage3, MemoryBudget.heapUsage[3]);
            SET_DWORD_STAT(STAT_VulkanMemoryUsage4, MemoryBudget.heapUsage[4]);
            SET_DWORD_STAT(STAT_VulkanMemoryUsage5, MemoryBudget.heapUsage[5]);*/
        }
        else
        {
            vkGetPhysicalDeviceMemoryProperties(Device->GetPhysicalHandle(), &MemoryProperties);
        }
    }
	// Finds the lowest-indexed memory type that is both permitted by TypeBits
	// (a vkGetBufferMemoryRequirements-style bitmask) and exposes every flag in
	// Properties. Returns VK_SUCCESS and writes *OutTypeIndex on a match,
	// VK_ERROR_FEATURE_NOT_PRESENT otherwise.
	//#todo-rco: Might need to revisit based on https://gitlab.khronos.org/vulkan/vulkan/merge_requests/1165
	VkResult FDeviceMemoryManager::GetMemoryTypeFromProperties(uint32 TypeBits, VkMemoryPropertyFlags Properties, uint32* OutTypeIndex)
	{
		for (uint32 Index = 0; Index < MemoryProperties.memoryTypeCount; ++Index)
		{
			const bool bIsAllowedType = (TypeBits & (1u << Index)) != 0;
			const bool bHasAllProperties = (MemoryProperties.memoryTypes[Index].propertyFlags & Properties) == Properties;
			if (bIsAllowedType && bHasAllProperties)
			{
				*OutTypeIndex = Index;
				return VK_SUCCESS;
			}
		}

		// No memory types matched, return failure
		return VK_ERROR_FEATURE_NOT_PRESENT;
	}
	// Read-only access to the cached physical-device memory properties
	// (populated by UpdateMemoryProperties during Init).
	const VkPhysicalDeviceMemoryProperties& FDeviceMemoryManager::GetMemoryProperties() const
	{
		return MemoryProperties;
	}
	// Maps a memory type index to the heap it draws from (no bounds checking;
	// caller must pass a valid type index).
	uint32 FDeviceMemoryManager::GetHeapIndex(uint32 MemoryTypeIndex)
	{
		return MemoryProperties.memoryTypes[MemoryTypeIndex].heapIndex;
	}
	// Same as GetMemoryTypeFromProperties, but skips ExcludeTypeIndex — used to
	// retry a search after a particular memory type has failed. Returns
	// VK_SUCCESS and writes *OutTypeIndex on a match, VK_ERROR_FEATURE_NOT_PRESENT
	// when no other type qualifies.
	VkResult FDeviceMemoryManager::GetMemoryTypeFromPropertiesExcluding(uint32 TypeBits, VkMemoryPropertyFlags Properties, uint32 ExcludeTypeIndex, uint32* OutTypeIndex)
	{
		for (uint32 Index = 0; Index < MemoryProperties.memoryTypeCount; ++Index)
		{
			const bool bIsAllowedType = (TypeBits & (1u << Index)) != 0;
			const bool bHasAllProperties = (MemoryProperties.memoryTypes[Index].propertyFlags & Properties) == Properties;
			if (bIsAllowedType && bHasAllProperties && Index != ExcludeTypeIndex)
			{
				*OutTypeIndex = Index;
				return VK_SUCCESS;
			}
		}

		// No memory types matched, return failure
		return VK_ERROR_FEATURE_NOT_PRESENT;
	}
	// Convenience overload: resolves a concrete memory-type index from the
	// TypeBits mask + requested property flags, then forwards to the index-based
	// Alloc. VERIFYVULKANRESULT fires if no memory type satisfies the request.
	FDeviceMemoryAllocation* FDeviceMemoryManager::Alloc(bool bCanFail, VkDeviceSize AllocationSize, uint32 MemoryTypeBits, VkMemoryPropertyFlags MemoryPropertyFlags, void* DedicatedAllocateInfo, float Priority, bool bExternal, const char* File, uint32 Line)
	{
		uint32 MemoryTypeIndex = ~0;
		VERIFYVULKANRESULT(this->GetMemoryTypeFromProperties(MemoryTypeBits, MemoryPropertyFlags, &MemoryTypeIndex));
		return Alloc(bCanFail, AllocationSize, MemoryTypeIndex, DedicatedAllocateInfo, Priority, bExternal, File, Line);
	}

	// Allocates a VkDeviceMemory block of AllocationSize bytes from the given
	// memory type and wraps it in a tracked FDeviceMemoryAllocation.
	// @param bCanFail              when true, OOM returns nullptr instead of dumping
	// @param DedicatedAllocateInfo optional VkMemoryDedicatedAllocateInfoKHR chained
	//                              into the pNext chain for dedicated image/buffer memory
	// @param Priority              forwarded via VK_EXT_memory_priority when available
	// @param bExternal             adds VkExportMemoryAllocateInfoKHR (Win32 handle or fd)
	// @return the new allocation, or nullptr on out-of-memory
	FDeviceMemoryAllocation* FDeviceMemoryManager::Alloc(bool bCanFail, VkDeviceSize AllocationSize, uint32 MemoryTypeIndex, void* DedicatedAllocateInfo, float Priority, bool bExternal, const char* File, uint32 Line)
	{
		//SCOPED_NAMED_EVENT(FDeviceMemoryManager_Alloc, FColor::Cyan);
		//FScopeLock Lock(&DeviceMemLock);

		// Disabled block-reuse cache from the original implementation.
		if(false&&!DedicatedAllocateInfo)
		{
			/*FDeviceMemoryBlockKey Key = {MemoryTypeIndex, AllocationSize};

			FDeviceMemoryBlock& Block= Allocations[Key];
			if(Block.Allocations.size() > 0){
				FDeviceMemoryBlock::FFreeBlock Alloc = Block.Allocations.back();
				Block.Allocations.pop_back();

				switch (MemoryTypeIndex)
				{
				case 0:
					INC_DWORD_STAT_BY(STAT_VulkanMemory0, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemory0Reserved, AllocationSize);
					break;
				case 1:
					INC_DWORD_STAT_BY(STAT_VulkanMemory1, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemory1Reserved, AllocationSize);
					break;
				case 2:
					INC_DWORD_STAT_BY(STAT_VulkanMemory2, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemory2Reserved, AllocationSize);
					break;
				case 3:
					INC_DWORD_STAT_BY(STAT_VulkanMemory3, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemory3Reserved, AllocationSize);
					break;
				case 4:
					INC_DWORD_STAT_BY(STAT_VulkanMemory4, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemory4Reserved, AllocationSize);
					break;
				case 5:
					INC_DWORD_STAT_BY(STAT_VulkanMemory5, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemory5Reserved, AllocationSize);
					break;
				default:
					INC_DWORD_STAT_BY(STAT_VulkanMemoryX, AllocationSize);
					DEC_DWORD_STAT_BY(STAT_VulkanMemoryXReserved, AllocationSize);
					break;
				}
				DEC_DWORD_STAT_BY(STAT_VulkanMemoryReserved, AllocationSize);
				return Alloc.Allocation;
			}*/
		}

		check(AllocationSize > 0);
		check(MemoryTypeIndex < MemoryProperties.memoryTypeCount);

		VkMemoryAllocateInfo Info={};
		Info.sType=VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
		Info.allocationSize = AllocationSize;
		Info.memoryTypeIndex = MemoryTypeIndex;


#if VULKAN_SUPPORTS_MEMORY_PRIORITY
		// Optional priority hint (VK_EXT_memory_priority). Prio must stay alive
		// until vkAllocateMemory, hence declared at function scope.
		VkMemoryPriorityAllocateInfoEXT Prio;
		if (Device->GetOptionalExtensions().HasMemoryPriority)
		{
			ZeroVulkanStruct(Prio, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT);
			Prio.priority = Priority;
			Info.pNext = &Prio;
		}
#endif

#if VULKAN_SUPPORTS_DEDICATED_ALLOCATION
		if (DedicatedAllocateInfo)
		{
			// Splice the dedicated-allocation struct at the head of the pNext chain.
			((VkMemoryDedicatedAllocateInfoKHR*)DedicatedAllocateInfo)->pNext = Info.pNext;
			Info.pNext = DedicatedAllocateInfo;
			INC_DWORD_STAT_BY(STAT_VulkanDedicatedMemory, AllocationSize);
			IncMetaStats(EVulkanAllocationMetaImageRenderTarget, AllocationSize);
		}
#endif

		VkExportMemoryAllocateInfoKHR VulkanExportMemoryAllocateInfoKHR = {};
#if PLATFORM_WINDOWS
		VkExportMemoryWin32HandleInfoKHR VulkanExportMemoryWin32HandleInfoKHR = {};
#endif // PLATFORM_WINDOWS
		if (bExternal)
		{
			// Request an exportable handle (Win32 handle on Windows 8+, KMT handle
			// on older Windows, opaque fd elsewhere).
			ZeroVulkanStruct(VulkanExportMemoryAllocateInfoKHR, VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR);
#if PLATFORM_WINDOWS
			ZeroVulkanStruct(VulkanExportMemoryWin32HandleInfoKHR, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR);
			VulkanExportMemoryWin32HandleInfoKHR.pNext = Info.pNext;
			VulkanExportMemoryWin32HandleInfoKHR.pAttributes = NULL;
			VulkanExportMemoryWin32HandleInfoKHR.dwAccess =	GENERIC_ALL;
			VulkanExportMemoryWin32HandleInfoKHR.name = (LPCWSTR)nullptr;
			VulkanExportMemoryAllocateInfoKHR.pNext = IsWindows8OrGreater() ? &VulkanExportMemoryWin32HandleInfoKHR : nullptr;
			VulkanExportMemoryAllocateInfoKHR.handleTypes = IsWindows8OrGreater() ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT;
#else
			VulkanExportMemoryAllocateInfoKHR.pNext = Info.pNext;
			VulkanExportMemoryAllocateInfoKHR.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;
#endif // PLATFORM_WINDOWS
			Info.pNext = &VulkanExportMemoryAllocateInfoKHR;
		}

		VkMemoryAllocateFlagsInfo MemoryAllocateFlagsInfo;
		if (false)//Device->GetOptionalExtensions().HasBufferDeviceAddress)
		{
			ZeroVulkanStruct(MemoryAllocateFlagsInfo, VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO);
			MemoryAllocateFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
			MemoryAllocateFlagsInfo.pNext = Info.pNext;
			Info.pNext = &MemoryAllocateFlagsInfo;
		}

		VkDeviceMemory Handle;
		VkResult Result;

		{
			//SCOPED_NAMED_EVENT(vkAllocateMemory, FColor::Cyan);
			Result = vkAllocateMemory(DeviceHandle, &Info, nullptr, &Handle);
		}

		if (Result == VK_ERROR_OUT_OF_DEVICE_MEMORY || Result == VK_ERROR_OUT_OF_HOST_MEMORY)
		{
			if (bCanFail)
			{
				//UE_LOG(LogVulkanRHI, Warning, TEXT("Failed to allocate Device Memory, Requested=%.2fKb MemTypeIndex=%d"), (float)Info.allocationSize / 1024.0f, Info.memoryTypeIndex);
				return nullptr;
			}
			const TCHAR* MemoryType = TEXT("?");
			switch (Result)
			{
			case VK_ERROR_OUT_OF_HOST_MEMORY: MemoryType = TEXT("Host"); break;
			case VK_ERROR_OUT_OF_DEVICE_MEMORY: MemoryType = TEXT("Local"); break;
			}
			(void)MemoryType; // retained for the disabled fatal log below
			//DumpRenderTargetPoolMemory(*GLog);
			Device->GetMemoryManager().DumpMemory();
			//GLog->Panic();

			//UE_LOG(LogVulkanRHI, Fatal, TEXT("Out of %s Memory, Requested%.2fKB MemTypeIndex=%d\n"), MemoryType, AllocationSize / 1024.f, MemoryTypeIndex);
			// Fix: the fatal log above is disabled, so control used to fall through
			// and wrap the uninitialized Handle in an allocation. Fail instead.
			return nullptr;
		}
		else
		{
			VERIFYVULKANRESULT(Result);
		}

		// Wrap the raw handle with cached capability bits derived from the memory type.
		FDeviceMemoryAllocation* NewAllocation = new FDeviceMemoryAllocation;
		NewAllocation->DeviceHandle = DeviceHandle;
		NewAllocation->Handle = Handle;
		NewAllocation->Size = AllocationSize;
		NewAllocation->MemoryTypeIndex = MemoryTypeIndex;
		NewAllocation->bCanBeMapped = VKHasAllFlags(MemoryProperties.memoryTypes[MemoryTypeIndex].propertyFlags, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
		NewAllocation->bIsCoherent = VKHasAllFlags(MemoryProperties.memoryTypes[MemoryTypeIndex].propertyFlags, VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
		NewAllocation->bIsCached = VKHasAllFlags(MemoryProperties.memoryTypes[MemoryTypeIndex].propertyFlags, VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
#if VULKAN_SUPPORTS_DEDICATED_ALLOCATION
		NewAllocation->bDedicatedMemory = DedicatedAllocateInfo != 0;
#else
		NewAllocation->bDedicatedMemory = 0;
#endif
		//VULKAN_FILL_TRACK_INFO(NewAllocation->Track, File, Line);
		++NumAllocations;
		PeakNumAllocations = MathUtils::Max(NumAllocations, PeakNumAllocations);

		if (NumAllocations == Device->GetLimits().maxMemoryAllocationCount)// && !GVulkanSingleAllocationPerResource)
		{
			//UE_LOG(LogVulkanRHI, Warning, TEXT("Hit Maximum # of allocations (%d) reported by device!"), NumAllocations);
		}

		// Per-heap tracking for DumpMemory / peak statistics.
		uint32 HeapIndex = MemoryProperties.memoryTypes[MemoryTypeIndex].heapIndex;
		HeapInfos[HeapIndex].Allocations.push_back(NewAllocation);
		HeapInfos[HeapIndex].UsedSize += AllocationSize;
		HeapInfos[HeapIndex].PeakSize = MathUtils::Max(HeapInfos[HeapIndex].PeakSize, HeapInfos[HeapIndex].UsedSize);

#if VULKAN_USE_LLM
		LLM_PLATFORM_SCOPE_VULKAN(ELLMTagVulkan::VulkanDriverMemoryGPU);
		LLM_IF_ENABLED(FLowLevelMemTracker::Get().OnLowLevelAlloc(ELLMTracker::Platform, (void*)NewAllocation->Handle, AllocationSize, ELLMTag::GraphicsPlatform, ELLMAllocType::System));
		LLM_TRACK_VULKAN_SPARE_MEMORY_GPU((int64)AllocationSize);
#else
		//LLM_IF_ENABLED(FLowLevelMemTracker::Get().OnLowLevelAlloc(ELLMTracker::Platform, (void*)NewAllocation->Handle, AllocationSize, ELLMTag::GraphicsPlatform));
		//LLM_IF_ENABLED(FLowLevelMemTracker::Get().OnLowLevelAlloc(ELLMTracker::Default, (void*)NewAllocation->Handle, AllocationSize, ELLMTag::Untagged));
#endif

		/*INC_DWORD_STAT(STAT_VulkanNumPhysicalMemAllocations);
		switch(MemoryTypeIndex)
		{
		case 0:  INC_DWORD_STAT_BY(STAT_VulkanMemory0, AllocationSize); break;
		case 1:  INC_DWORD_STAT_BY(STAT_VulkanMemory1, AllocationSize); break;
		case 2:  INC_DWORD_STAT_BY(STAT_VulkanMemory2, AllocationSize); break;
		case 3:  INC_DWORD_STAT_BY(STAT_VulkanMemory3, AllocationSize); break;
		case 4:  INC_DWORD_STAT_BY(STAT_VulkanMemory4, AllocationSize); break;
		case 5:  INC_DWORD_STAT_BY(STAT_VulkanMemory5, AllocationSize); break;
		default: INC_DWORD_STAT_BY(STAT_VulkanMemoryX, AllocationSize); break;
		}
		INC_DWORD_STAT_BY(STAT_VulkanMemoryTotal, AllocationSize);*/

		return NewAllocation;
	}

	// Frees a device memory allocation and nulls the caller's pointer.
	// The block-reuse path (parking freed blocks for later Alloc calls) is
	// disabled via `false&&`, so every free currently goes straight to
	// FreeInternal / vkFreeMemory.
	void FDeviceMemoryManager::Free(FDeviceMemoryAllocation*& Allocation)
	{
		//SCOPED_NAMED_EVENT(FDeviceMemoryManager_Free, FColor::Cyan);
		//FScopeLock Lock(&DeviceMemLock);

		check(Allocation);
		check(Allocation->Handle != VK_NULL_HANDLE);
		check(!Allocation->bFreedBySystem);
		if (Allocation->bDedicatedMemory)
		{
			//DEC_DWORD_STAT_BY(STAT_VulkanDedicatedMemory, Allocation->Size);
			//DecMetaStats(EVulkanAllocationMetaImageRenderTarget, Allocation->Size);
		}
		/*switch (Allocation->MemoryTypeIndex)
		{
		case 0:  DEC_DWORD_STAT_BY(STAT_VulkanMemory0, Allocation->Size); break;
		case 1:  DEC_DWORD_STAT_BY(STAT_VulkanMemory1, Allocation->Size); break;
		case 2:  DEC_DWORD_STAT_BY(STAT_VulkanMemory2, Allocation->Size); break;
		case 3:  DEC_DWORD_STAT_BY(STAT_VulkanMemory3, Allocation->Size); break;
		case 4:  DEC_DWORD_STAT_BY(STAT_VulkanMemory4, Allocation->Size); break;
		case 5:  DEC_DWORD_STAT_BY(STAT_VulkanMemory5, Allocation->Size); break;
		default: DEC_DWORD_STAT_BY(STAT_VulkanMemoryX, Allocation->Size); break;
		}*/
		// Disabled: park non-dedicated blocks in the reuse cache instead of freeing.
		if(false&&!Allocation->bDedicatedMemory)
		{
			VkDeviceSize AllocationSize = Allocation->Size;
			FDeviceMemoryBlockKey Key = { Allocation->MemoryTypeIndex, AllocationSize };
			//FDeviceMemoryBlock& Block = Allocations[Key];
			//FDeviceMemoryBlock::FFreeBlock FreeBlock = {Allocation, 0};//GFrameNumberRenderThread};
			//Block.Allocations.push_back(FreeBlock);


			/*switch (Allocation->MemoryTypeIndex)
			{
			case 0:	INC_DWORD_STAT_BY(STAT_VulkanMemory0Reserved, AllocationSize); break;
			case 1: INC_DWORD_STAT_BY(STAT_VulkanMemory1Reserved, AllocationSize); break;
			case 2:	INC_DWORD_STAT_BY(STAT_VulkanMemory2Reserved, AllocationSize); break;
			case 3:	INC_DWORD_STAT_BY(STAT_VulkanMemory3Reserved, AllocationSize); break;
			case 4:	INC_DWORD_STAT_BY(STAT_VulkanMemory4Reserved, AllocationSize); break;
			case 5:	INC_DWORD_STAT_BY(STAT_VulkanMemory5Reserved, AllocationSize); break;
			default:
				INC_DWORD_STAT_BY(STAT_VulkanMemoryXReserved, AllocationSize);
				break;
			}
			INC_DWORD_STAT_BY(STAT_VulkanMemoryReserved, AllocationSize);*/
			

			Allocation = nullptr;
			return;
		}

		FreeInternal(Allocation);
		Allocation = nullptr;
	}
	// Releases the VkDeviceMemory back to the driver, updates heap bookkeeping
	// and deletes the wrapper object. Internal helper for Free().
	void FDeviceMemoryManager::FreeInternal(FDeviceMemoryAllocation* Allocation)
	{
		//DEC_DWORD_STAT_BY(STAT_VulkanMemoryTotal, Allocation->Size);
		vkFreeMemory(DeviceHandle, Allocation->Handle, nullptr);

#if VULKAN_USE_LLM
		LLM_IF_ENABLED(FLowLevelMemTracker::Get().OnLowLevelFree(ELLMTracker::Platform, (void*)Allocation->Handle, ELLMAllocType::System));
		LLM_TRACK_VULKAN_SPARE_MEMORY_GPU(-(int64)Allocation->Size);
#else
		//LLM_IF_ENABLED(FLowLevelMemTracker::Get().OnLowLevelFree(ELLMTracker::Platform, (void*)Allocation->Handle));
		//LLM_IF_ENABLED(FLowLevelMemTracker::Get().OnLowLevelFree(ELLMTracker::Default, (void*)Allocation->Handle));
#endif

		--NumAllocations;

		//DEC_DWORD_STAT(STAT_VulkanNumPhysicalMemAllocations);

		uint32 HeapIndex = MemoryProperties.memoryTypes[Allocation->MemoryTypeIndex].heapIndex;

		HeapInfos[HeapIndex].UsedSize -= Allocation->Size;
		//HeapInfos[HeapIndex].Allocations.RemoveSwap(Allocation);
		// Flag consumed so the destructor-side check(!bFreedBySystem) can catch double frees.
		Allocation->bFreedBySystem = true;

		delete Allocation;
		// Nulling the local parameter copy has no effect at the call site; Free()
		// nulls the caller's pointer itself.
		Allocation = nullptr;

	}
	// The VkDeviceMemory itself is released by FDeviceMemoryManager::FreeInternal,
	// not here; the destructor is intentionally empty.
	FDeviceMemoryAllocation::~FDeviceMemoryAllocation()
	{
		
	}

	// Maps the allocation into CPU address space and caches the pointer; repeat
	// calls return the cached pointer without remapping.
	// NOTE(review): on re-entry InSize/Offset are ignored — callers are assumed
	// to always request the same range; confirm.
	// @param InSize requested byte count (VK_WHOLE_SIZE allowed)
	// @param Offset byte offset into the allocation
	// @return the mapped host pointer
	void* FDeviceMemoryAllocation::Map(VkDeviceSize InSize, VkDeviceSize Offset)
	{
		check(bCanBeMapped);
		if(!MappedPointer)
		{
			checkf(InSize == VK_WHOLE_SIZE || InSize + Offset <= Size, TEXT("Failed to Map %llu bytes, Offset %llu, AllocSize %llu bytes"), InSize, Offset, Size);
			// Fix: the vkMapMemory result was ignored (and the redundant
			// check(!MappedPointer) inside this branch removed).
			VERIFYVULKANRESULT(vkMapMemory(DeviceHandle, Handle, Offset, InSize, 0, &MappedPointer));
		}
		return MappedPointer;
	}
	// Binds this resource heap to one Vulkan memory type; the owning heap index
	// is resolved from the type via the device memory manager. OverridePageSize
	// (0 = none) lets platforms force a smaller page size than the defaults.
	FVulkanResourceHeap::FVulkanResourceHeap(FMemoryManager* InOwner, uint32 InMemoryTypeIndex, uint32 InOverridePageSize)
		: Owner(InOwner)
		, MemoryTypeIndex((uint16)InMemoryTypeIndex)
		, HeapIndex((uint16)InOwner->mDevice->GetDeviceMemoryManager().GetHeapIndex(InMemoryTypeIndex))
		, bIsHostCachedSupported(false)
		, bIsLazilyAllocatedSupported(false)
		, OverridePageSize(InOverridePageSize)
		, PeakPageSize(0)
		, UsedMemory(0)
		, PageIDCounter(0)
	{
	}
	// Allocates Size bytes for an image or buffer out of this heap: first tries to
	// sub-allocate from an existing page in the matching size bucket, then falls
	// back to creating a new page (of at least the bucket's page size, shrinking
	// to the exact size if the big allocation fails).
	// @param bForceSeparateAllocation use a dedicated page (no pooling), e.g. for
	//                                 lazily allocated memory
	// @return true and fills OutAllocation on success; false when device memory
	//         could not be obtained
	bool FVulkanResourceHeap::AllocateResource(FVulkanAllocation& OutAllocation, FVulkanEvictable* AllocationOwner, EType Type, uint32 Size, uint32 Alignment, bool bMapAllocation, bool bForceSeparateAllocation, EVulkanAllocationMetaType MetaType, bool bExternal, const char* File, uint32 Line)
	{
		//SCOPED_NAMED_EVENT(FResourceHeap_AllocateResource, FColor::Cyan);
		//FScopeLock ScopeLock(&PagesLock);

		bForceSeparateAllocation = bForceSeparateAllocation ;//|| GVulkanSingleAllocationPerResource != 0;

		FDeviceMemoryManager& DeviceMemoryManager = Owner->mDevice->GetDeviceMemoryManager();
		FVulkanPageSizeBucket MemoryBucket;
		uint32 BucketId = GetPageSizeBucket(MemoryBucket, Type, Size, bForceSeparateAllocation);

		bool bHasUnifiedMemory = DeviceMemoryManager.HasUnifiedMemory();
		std::vector<FVulkanSubresourceAllocator*>& UsedPages = ActivePages[BucketId];
		EVulkanAllocationType AllocationType = (Type == EType::Image) ? EVulkanAllocationImage : EVulkanAllocationBuffer;
		// Evictable allocations are only meaningful on discrete (non-unified) memory.
		uint8 AllocationFlags = (!bHasUnifiedMemory && MetaTypeCanEvict(MetaType)) ? VulkanAllocationFlagsCanEvict : 0;
		if(bMapAllocation)
		{
			AllocationFlags |= VulkanAllocationFlagsMapped;
		}
		
		uint32 AllocationSize;

		if (!bForceSeparateAllocation)
		{
			if(Size < MemoryBucket.PageSize) // Last bucket, for dedicated allocations has max size set to 0, preventing reuse
			{
				// Check Used pages to see if we can fit this in
				for (int32 Index = 0; Index < UsedPages.size(); ++Index)
				{
					FVulkanSubresourceAllocator* Page = UsedPages[Index];
					if(Page->GetSubresourceAllocatorFlags() == AllocationFlags)
					{
						check(Page->MemoryAllocation->IsMapped() == bMapAllocation);
						if(Page->TryAllocate2(OutAllocation, AllocationOwner, Size, Alignment, MetaType, File, Line))
						{
							//IncMetaStats(MetaType, OutAllocation.Size);
							return true;
						}
					}
				}
			}
			AllocationSize = MathUtils::Max(Size, MemoryBucket.PageSize); // for allocations above max, which are forced to be seperate allocations
		}
		else
		{
			// We get here when bForceSeparateAllocation is true, which is used for lazy allocations, since pooling those doesn't make sense.
			AllocationSize = Size;
		}

		FDeviceMemoryAllocation* DeviceMemoryAllocation = DeviceMemoryManager.Alloc(true, AllocationSize, MemoryTypeIndex, nullptr, VULKAN_MEMORY_HIGHEST_PRIORITY, bExternal, File, Line);
		if (!DeviceMemoryAllocation && Size != AllocationSize)
		{
			// Retry with a smaller size
			DeviceMemoryAllocation = DeviceMemoryManager.Alloc(true, Size, MemoryTypeIndex, nullptr, VULKAN_MEMORY_HIGHEST_PRIORITY, bExternal, File, Line);
			if(!DeviceMemoryAllocation)
			{
				return false;
			}
		}
		if (!DeviceMemoryAllocation)
		{
			//UE_LOG(LogVulkanRHI, Fatal, TEXT("Out of memory on Vulkan; MemoryTypeIndex=%d, AllocSize=%0.3fMB"), MemoryTypeIndex, (float)AllocationSize / 1048576.0f);
			// Fix: with the fatal log disabled, falling through dereferenced a null
			// pointer below (Map / page construction). Fail the allocation instead.
			return false;
		}
		if (bMapAllocation)
		{
			DeviceMemoryAllocation->Map(AllocationSize, 0);
		}

		uint32 BufferId = 0;
		/*if (UseVulkanDescriptorCache())
		{
			BufferId = ++GVulkanBufferHandleIdCounter;
		}*/
		
		// Wrap the fresh device memory in a new sub-allocator page and register it.
		++PageIDCounter;
		FVulkanSubresourceAllocator* Page = new FVulkanSubresourceAllocator(AllocationType, Owner, AllocationFlags, DeviceMemoryAllocation, MemoryTypeIndex, BufferId);
		Owner->RegisterSubresourceAllocator(Page);
		Page->BucketId = BucketId;
		ActivePages[BucketId].push_back(Page);

		UsedMemory += AllocationSize;

		PeakPageSize = MathUtils::Max(PeakPageSize, AllocationSize);

		
		//OutAllocation.bHasOwnership = 1;
		//OutAllocation.SetType(Page->Type);
		//OutAllocation.MetaType = MetaType;
		//OutAllocation.Size = Size;
		//OutAllocation.Offset = 0;
		//check(InAllocatorIndex < (uint32)MAX_uint16);
		//OutAllocation.AllocatorIndex = 0xffff;
		//OutAllocation.AllocationIndex = 0xffffffff;
		//OutAllocation.VulkanHandle = (uint64)Page->Buffer;
		//OutAllocation.HandleId = BufferId;

		// A brand-new page must be able to satisfy the request.
		bool bOk = Page->TryAllocate2(OutAllocation, AllocationOwner, Size, Alignment, MetaType, File, Line);
		if(bOk)
		{
			//IncMetaStats(MetaType, OutAllocation.Size);
		}
		return bOk;
	}
	// Selects the page-size bucket for an allocation. Forced single allocations
	// always use the last bucket (the dedicated one); otherwise returns the first
	// bucket whose mask covers the resource type and whose AllocationMax fits the
	// requested size. Writes the chosen bucket to BucketOut and returns its index
	// (0xffffffff — after asserting — if nothing matches).
	uint32 FVulkanResourceHeap::GetPageSizeBucket(FVulkanPageSizeBucket& BucketOut, EType Type, uint32 AllocationSize, bool bForceSingleAllocation)
	{
		if(bForceSingleAllocation)
		{
			const uint32 LastBucket = PageSizeBuckets.size() - 1;
			BucketOut = PageSizeBuckets[LastBucket];
			return LastBucket;
		}

		uint32 RequiredMask = 0;
		if (Type == EType::Image)
		{
			RequiredMask |= FVulkanPageSizeBucket::BUCKET_MASK_IMAGE;
		}
		if (Type == EType::Buffer)
		{
			RequiredMask |= FVulkanPageSizeBucket::BUCKET_MASK_BUFFER;
		}

		for (uint32 Index = 0; Index < PageSizeBuckets.size(); ++Index)
		{
			const FVulkanPageSizeBucket& Bucket = PageSizeBuckets[Index];
			if ((Bucket.BucketMask & RequiredMask) == RequiredMask && AllocationSize <= Bucket.AllocationMax)
			{
				BucketOut = Bucket;
				return Index;
			}
		}
		checkNoEntry();
		return 0xffffffff;
	}
    // Default constructor; members rely on their in-class/default initialization.
    FVulkanAllocation::FVulkanAllocation()
    {
        
    }
    // Intentionally empty: the underlying memory is released through
    // FMemoryManager::FreeVulkanAllocation, not by this destructor.
    FVulkanAllocation::~FVulkanAllocation()
    {
        
    }
	void FRange::AllocateFromEntry(std::vector<FRange>& Ranges, int32 Index, uint32 SizeToAllocate)
	{
		FRange& Entry = Ranges[Index];
		if (SizeToAllocate < Entry.Size)
		{
			// Modify current free entry in-place.
			Entry.Size -= SizeToAllocate;
			Entry.Offset += SizeToAllocate;
		}
		else
		{
			// Remove this free entry.
			//Ranges.RemoveAt(Index, EAllowShrinking::No);
#if UE_VK_MEMORY_KEEP_FREELIST_SORTED_CATCHBUGS
			SanityCheck(Ranges);
#endif
		}
	}
	// Constructor for buffer-backed sub-allocators: the device memory has a
	// VkBuffer bound to it, and sub-allocations are handed out as offsets into
	// that buffer.
	FVulkanSubresourceAllocator::FVulkanSubresourceAllocator(EVulkanAllocationType InType, FMemoryManager* InOwner, uint8 InSubResourceAllocatorFlags, FDeviceMemoryAllocation* InDeviceMemoryAllocation, uint32 InMemoryTypeIndex, VkMemoryPropertyFlags InMemoryPropertyFlags, uint32 InAlignment, VkBuffer InBuffer, uint32 InBufferSize, uint32 InBufferId, VkBufferUsageFlags InBufferUsageFlags, int32 InPoolSizeIndex)
		: Type(InType)
		, Owner(InOwner)
		, MemoryTypeIndex(InMemoryTypeIndex)
		, MemoryPropertyFlags(InMemoryPropertyFlags)
		, MemoryAllocation(InDeviceMemoryAllocation)
		, MaxSize(InBufferSize)
		, Alignment(InAlignment)
		, FrameFreed(0)
		, UsedSize(0)
		, BufferUsageFlags(InBufferUsageFlags)
		, Buffer(InBuffer)
		, BufferId(InBufferId)
		, PoolSizeIndex(InPoolSizeIndex)
		, AllocatorIndex(0xffffffff) // unset until FMemoryManager::RegisterSubresourceAllocator assigns one
		, SubresourceAllocatorFlags(InSubResourceAllocatorFlags)
	{
		// Per-meta-type usage counters start at zero.
		memset(MemoryUsed,0,sizeof(MemoryUsed));

		// Mirror the mapped state of the underlying device memory in our flags.
		if(InDeviceMemoryAllocation->IsMapped())
		{
			SubresourceAllocatorFlags |= VulkanAllocationFlagsMapped;
		}
		else
		{
			SubresourceAllocatorFlags &= ~VulkanAllocationFlagsMapped;
		}

		// The free list initially holds one range spanning the whole buffer.
		FRange FullRange;
		FullRange.Offset = 0;
		FullRange.Size = MaxSize;
		FreeList.push_back(FullRange);
	}
	// Constructor for memory-only (image) sub-allocators: there is no VkBuffer,
	// and the allocator's size comes from the device memory allocation itself.
	FVulkanSubresourceAllocator::FVulkanSubresourceAllocator(EVulkanAllocationType InType, FMemoryManager* InOwner, uint8 InSubResourceAllocatorFlags, FDeviceMemoryAllocation* InDeviceMemoryAllocation, uint32 InMemoryTypeIndex, uint32 BufferId)
		: Type(InType)
		, Owner(InOwner)
		, MemoryTypeIndex(InMemoryTypeIndex)
		, MemoryPropertyFlags(0)
		, MemoryAllocation(InDeviceMemoryAllocation)
		, Alignment(0)
		, FrameFreed(0)
		, UsedSize(0)
		, BufferUsageFlags(0)
		, Buffer(VK_NULL_HANDLE)
		, BufferId(BufferId)
		, PoolSizeIndex(0x7fffffff) // not part of any pooled-buffer size class
		, AllocatorIndex(0xffffffff) // unset until RegisterSubresourceAllocator assigns one
		, SubresourceAllocatorFlags(InSubResourceAllocatorFlags)
	{
		// Per-meta-type usage counters start at zero.
		memset(&MemoryUsed,0,sizeof(MemoryUsed));
		MaxSize = InDeviceMemoryAllocation->GetSize();

		// Mirror the mapped state of the underlying device memory in our flags.
		if (InDeviceMemoryAllocation->IsMapped())
		{
			SubresourceAllocatorFlags |= VulkanAllocationFlagsMapped;
		}
		else
		{
			SubresourceAllocatorFlags &= ~VulkanAllocationFlagsMapped;
		}

		// The free list initially holds one range spanning the whole allocation.
		FRange FullRange;
		FullRange.Offset = 0;
		FullRange.Size = MaxSize;
		FreeList.push_back(FullRange);
	}
	// Attempts to carve a sub-allocation of InSize/InAlignment out of this
	// allocator's free list (first-fit). On success fills OutAllocation and the
	// internal bookkeeping entry and returns true; returns false if the
	// allocator is evicting/locked or no free range fits.
	bool FVulkanSubresourceAllocator::TryAllocate2(FVulkanAllocation& OutAllocation, FVulkanEvictable* AllocationOwner, uint32 InSize, uint32 InAlignment, EVulkanAllocationMetaType InMetaType, const char* File, uint32 Line)
	{
		//FScopeLock ScopeLock(&SubresourceAllocatorCS);

		// Allocators being evicted or defragged must not hand out new memory.
		if (bIsEvicting || bLocked)
		{
			return false;
		}

		// Honor both the caller's alignment and the allocator-wide minimum.
		InAlignment = MathUtils::Max(InAlignment, Alignment);

		for (size_t Index = 0; Index < FreeList.size(); ++Index)
		{
			FRange& Entry = FreeList[Index];
			// The carved range starts at the raw entry offset; alignment padding
			// is counted as part of the allocation so it is returned on free.
			const uint32 AllocatedOffset = Entry.Offset;
			const uint32 AlignedOffset = Align(Entry.Offset, InAlignment);
			const uint32 AlignmentAdjustment = AlignedOffset - Entry.Offset;
			const uint32 AllocatedSize = AlignmentAdjustment + InSize;
			if (AllocatedSize <= Entry.Size)
			{
				FRange::AllocateFromEntry(FreeList, (int32)Index, AllocatedSize);

				UsedSize += AllocatedSize;
				const int32 ExtraOffset = AllocateInternalData();
				OutAllocation.Init(Type, InMetaType, (uint64)Buffer, InSize, AlignedOffset, GetAllocatorIndex(), ExtraOffset, BufferId);
				MemoryUsed[InMetaType] += AllocatedSize;
				// (A dead, never-read static UID counter was removed here.)
				InternalData[ExtraOffset].Init(OutAllocation, AllocationOwner, AllocatedOffset, AllocatedSize, InAlignment);
				//VULKAN_FILL_TRACK_INFO(InternalData[ExtraOffset].Track, File, Line);
				AllocCalls++;
				NumSubAllocations++;

				//LLM_TRACK_VULKAN_HIGH_LEVEL_ALLOC(InternalData[ExtraOffset], OutAllocation.Size);
				//LLM_TRACK_VULKAN_SPARE_MEMORY_GPU(-(int64)OutAllocation.Size);

				// A successful allocation means this allocator is no longer mid-defrag.
				bIsDefragging = false;
				return true;
			}
		}
		return false;
	}
	// Returns the index of a bookkeeping slot in InternalData, recycling a slot
	// from the intrusive free list when possible and growing the array otherwise.
	int32 FVulkanSubresourceAllocator::AllocateInternalData()
	{
		// Prefer a recycled slot from the internal free list.
		const int32 Recycled = InternalFreeList;
		if (Recycled >= 0)
		{
			InternalFreeList = InternalData[Recycled].NextFree;
			InternalData[Recycled].NextFree = -1;
			return Recycled;
		}

		// No free slot: append a zero-initialized entry.
		FVulkanAllocationInternal NewEntry;
		memset(&NewEntry, 0, sizeof(NewEntry));
		const int32 NewIndex = (int32)InternalData.size();
		InternalData.push_back(NewEntry);
		InternalData[NewIndex].NextFree = -1;
		return NewIndex;
	}
	// Records the bookkeeping for a freshly carved sub-allocation.
	// InAllocationOffset/InAllocationSize describe the raw carved range
	// (including any alignment padding), while Alloc.Size is the size the
	// caller actually requested.
	void FVulkanAllocationInternal::Init(const FVulkanAllocation& Alloc, FVulkanEvictable* InAllocationOwner, uint32 InAllocationOffset, uint32 InAllocationSize, uint32 InAlignment)
	{
		check(State == EUNUSED); // slot must come fresh from AllocateInternalData
		State = EALLOCATED;
		Type = Alloc.GetType();
		MetaType = Alloc.MetaType;

		Size = Alloc.Size;
		AllocationSize = InAllocationSize;
		AllocationOffset = InAllocationOffset;
		AllocationOwner = InAllocationOwner;
		Alignment = InAlignment;
	}
	// Initializes this handle to reference a live sub-allocation.
	// Handle is the backing VkBuffer (or 0 for image memory); InAllocatorIndex /
	// InAllocationIndex locate the owning sub-allocator and its bookkeeping slot.
	void FVulkanAllocation::Init(EVulkanAllocationType InType, EVulkanAllocationMetaType InMetaType, uint64 Handle, uint32 InSize, uint32 InAlignedOffset, uint32 InAllocatorIndex, uint32 InAllocationIndex, uint32 BufferId)
	{
		check(!HasAllocation()); // must not overwrite a live allocation
		bHasOwnership = 1;
		SetType(InType);
		MetaType = InMetaType;
		Size = InSize;
		Offset = InAlignedOffset;
		// AllocatorIndex is stored in a narrower field; guard the narrowing.
		check(InAllocatorIndex < (uint32)MAX_uint16);
		AllocatorIndex = InAllocatorIndex;
		AllocationIndex = InAllocationIndex;
		VulkanHandle = Handle;
		HandleId = BufferId;
		// Make sure all allocations have a valid Id on platforms that use "Descriptor Cache"
		ensure(!UseVulkanDescriptorCache() || HandleId != 0);
	}
	// A default-constructed or freed allocation has Size == 0; any live
	// allocation carries a non-zero size (see Init).
	bool FVulkanAllocation::IsValid() const
	{
		return Size != 0;
	}
	// Returns the VkDeviceMemory backing this allocation, resolved through the
	// owning sub-allocator.
	VkDeviceMemory FVulkanAllocation::GetDeviceMemoryHandle(VulkanDevice* Device) const
	{
		return GetSubresourceAllocator(Device)->GetMemoryAllocation()->GetHandle();
	}
	// Returns the CPU pointer to this allocation's bytes. The owning
	// sub-allocator exposes the base mapping; we offset into it. Asserts if the
	// backing memory is not mapped.
	void* FVulkanAllocation::GetMappedPointer(VulkanDevice* Device)
	{
		FVulkanSubresourceAllocator* SubAllocator = GetSubresourceAllocator(Device);
		uint8* BasePointer = (uint8*)SubAllocator->GetMappedPointer();
		check(BasePointer);
		return BasePointer + Offset;
	}

	// Flushes exactly this allocation's mapped range via the owning sub-allocator.
	void FVulkanAllocation::FlushMappedMemory(VulkanDevice* Device)
	{
		GetSubresourceAllocator(Device)->Flush(Offset, Size);
	}

	// Invalidates exactly this allocation's mapped range via the owning sub-allocator.
	void FVulkanAllocation::InvalidateMappedMemory(VulkanDevice* Device)
	{
		GetSubresourceAllocator(Device)->Invalidate(Offset, Size);
	}

	// For buffer-type allocations VulkanHandle stores the VkBuffer; reinterpret
	// the stored 64-bit value back into the handle type.
	VkBuffer FVulkanAllocation::GetBufferHandle() const
	{
		const uint64 RawHandle = VulkanHandle;
		return (VkBuffer)RawHandle;
	}
	// Returns the minimum alignment enforced by the owning sub-allocator.
	uint32 FVulkanAllocation::GetBufferAlignment(VulkanDevice* Device) const
	{
		return GetSubresourceAllocator(Device)->GetAlignment();
	}

	// Binds the given VkBuffer to this allocation's device memory at Offset.
	// On an out-of-memory result the allocator state is dumped before the
	// result check fires, to aid diagnosis.
	void FVulkanAllocation::BindBuffer(VulkanDevice* Device, VkBuffer Buffer)
	{
		const VkDeviceMemory MemoryHandle = GetDeviceMemoryHandle(Device);
		const VkResult Result = vkBindBufferMemory(Device->GetDevice(), Buffer, MemoryHandle, Offset);
		const bool bOutOfMemory = (Result == VK_ERROR_OUT_OF_DEVICE_MEMORY) || (Result == VK_ERROR_OUT_OF_HOST_MEMORY);
		if (bOutOfMemory)
		{
			Device->GetMemoryManager().DumpMemory();
		}
		VERIFYVULKANRESULT(Result);
	}
	// Binds the given VkImage to this allocation's device memory at Offset.
	// On an out-of-memory result the allocator state is dumped before the
	// result check fires, to aid diagnosis.
	void FVulkanAllocation::BindImage(VulkanDevice* Device, VkImage Image)
	{
		const VkDeviceMemory MemoryHandle = GetDeviceMemoryHandle(Device);
		const VkResult Result = vkBindImageMemory(Device->GetDevice(), Image, MemoryHandle, Offset);
		const bool bOutOfMemory = (Result == VK_ERROR_OUT_OF_DEVICE_MEMORY) || (Result == VK_ERROR_OUT_OF_HOST_MEMORY);
		if (bOutOfMemory)
		{
			Device->GetMemoryManager().DumpMemory();
		}
		VERIFYVULKANRESULT(Result);
	}
	// Resolves the sub-allocator that owns this allocation, or nullptr for an
	// empty allocation. Asserts on an unrecognized type.
	FVulkanSubresourceAllocator* FVulkanAllocation::GetSubresourceAllocator(VulkanDevice* Device) const
	{
		switch (Type)
		{
		case EVulkanAllocationEmpty:
			// Empty allocations have no backing allocator.
			return nullptr;

		case EVulkanAllocationPooledBuffer:
		case EVulkanAllocationBuffer:
		case EVulkanAllocationImage:
		case EVulkanAllocationImageDedicated:
			return Device->GetMemoryManager().GetSubresourceAllocator(AllocatorIndex);

		default:
			check(0);
			return nullptr;
		}
	}
	// Flushes a mapped range to make CPU writes visible to the GPU.
	// NOTE(review): the actual flush is not ported yet — for non-coherent
	// memory this currently does nothing, which is only safe while all mapped
	// memory in use is coherent. TODO: port the commented-out implementation.
	void FVulkanSubresourceAllocator::Flush(VkDeviceSize Offset, VkDeviceSize AllocationSize)
	{
		if (!MemoryAllocation->IsCoherent() )//|| GForceCoherent != 0)
		{
			//const VkDeviceSize NonCoherentAtomSize = Owner->GetParent()->GetLimits().nonCoherentAtomSize;
			//MemoryAllocation->FlushMappedMemory(Offset, AdjustToNonCoherentAtomSize(Offset, AllocationSize, MemoryAllocation->GetSize(), NonCoherentAtomSize));
		}
	}
	// Invalidates a mapped range to make GPU writes visible to the CPU.
	// NOTE(review): the actual invalidate is not ported yet — for non-coherent
	// memory this currently does nothing. TODO: port the commented-out
	// implementation alongside Flush above.
	void FVulkanSubresourceAllocator::Invalidate(VkDeviceSize Offset, VkDeviceSize AllocationSize)
	{
		if (!MemoryAllocation->IsCoherent())// || GForceCoherent != 0)
		{
			//const VkDeviceSize NonCoherentAtomSize = Owner->GetParent()->GetLimits().nonCoherentAtomSize;
			//MemoryAllocation->InvalidateMappedMemory(Offset, AdjustToNonCoherentAtomSize(Offset, AllocationSize, MemoryAllocation->GetSize(), NonCoherentAtomSize));
		}
	}
	// Releases a sub-allocation back to this allocator: updates the per-meta
	// usage counters and marks the bookkeeping slot freed.
	// NOTE(review): the FRange::Add that returns the range to the free list is
	// still commented out, so freed ranges are not currently reusable — confirm
	// against the defrag/release path before relying on reuse.
	void FVulkanSubresourceAllocator::Free(FVulkanAllocation& Allocation)
	{
		check(Allocation.Type == Type);
		check(Allocation.AllocatorIndex == GetAllocatorIndex());
		bool bTryFree = false;
		{
			//FScopeLock ScopeLock(&SubresourceAllocatorCS);
			FreeCalls++;
			uint32 AllocationOffset;
			uint32 AllocationSize;
			{
				FVulkanAllocationInternal& Data = InternalData[Allocation.AllocationIndex];
				// EFREEDISCARDED means the payload was dropped during a full defrag
				// while the free was pending; stats were already adjusted then.
				bool bWasDiscarded = Data.State == FVulkanAllocationInternal::EFREEDISCARDED;
				check(Data.State == FVulkanAllocationInternal::EALLOCATED || Data.State == FVulkanAllocationInternal::EFREEPENDING || Data.State == FVulkanAllocationInternal::EFREEDISCARDED);
				AllocationOffset = Data.AllocationOffset;
				AllocationSize = Data.AllocationSize;
				if(!bWasDiscarded)
				{
					MemoryUsed[Allocation.MetaType] -= AllocationSize;
					//LLM_TRACK_VULKAN_HIGH_LEVEL_FREE(Data);
					//LLM_TRACK_VULKAN_SPARE_MEMORY_GPU((int64)Allocation.Size);
					//VULKAN_FREE_TRACK_INFO(Data.Track);
				}
				Data.State = FVulkanAllocationInternal::EFREED;
				//FreeInternalData(Allocation.AllocationIndex);
				Allocation.AllocationIndex = -1;
				if(bWasDiscarded)
				{
					//this occurs if we do full defrag when there are pending frees. in that case the memory is just not moved to the new block.
					return;
				}
			}
			// Build the range being returned and sanity-check it against the
			// allocator bounds before updating the usage accounting.
			FRange NewFree;
			NewFree.Offset = AllocationOffset;
			NewFree.Size = AllocationSize;
			check(NewFree.Offset <= GetMaxSize());
			check(NewFree.Offset + NewFree.Size <= GetMaxSize());
			//FRange::Add(FreeList, NewFree);
			UsedSize -= AllocationSize;
			NumSubAllocations--;
			check(UsedSize >= 0);
			//if (JoinFreeBlocks())
			{
				//bTryFree = true; //cannot free here as it will cause incorrect lock ordering
			}
		}

		// Deferred release of a fully-empty allocator (disabled — see lock
		// ordering note above).
		if (bTryFree)
		{
			//Owner->ReleaseSubresourceAllocator(this);
		}
	}
    // Computes the minimum offset alignment a buffer with the given usage flags
    // requires, based on the device limits. The result is always at least 16.
    static uint32 CalculateBufferAlignmentFromVKUsageFlags(VulkanDevice& InDevice, const VkBufferUsageFlags BufferUsageFlags)
    {
        const VkPhysicalDeviceLimits& Limits = InDevice.GetLimits();

        const bool bIsTexelBuffer = VKHasAnyFlags(BufferUsageFlags, (VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT));
        const bool bIsStorageBuffer = VKHasAnyFlags(BufferUsageFlags, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
        const bool bIsVertexOrIndexBuffer = VKHasAnyFlags(BufferUsageFlags, (VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT));
        const bool bIsAccelerationStructureBuffer = VKHasAnyFlags(BufferUsageFlags, VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR);
        const bool bIsUniformBuffer = VKHasAnyFlags(BufferUsageFlags, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

        // Buffers are sometimes directly cast into classes with 16byte alignment expectations (like FVector3f)
        uint32 Alignment = 16u;

        if (bIsTexelBuffer || bIsStorageBuffer)
        {
            Alignment = MathUtils::Max(Alignment, (uint32)Limits.minTexelBufferOffsetAlignment);
            Alignment = MathUtils::Max(Alignment, (uint32)Limits.minStorageBufferOffsetAlignment);
        }
        else if (bIsVertexOrIndexBuffer)
        {
            // Vertex/index buffers add no device-imposed offset alignment;
            // keep the 16-byte default from above.
        }
        else if (bIsAccelerationStructureBuffer)
        {
            // TODO: use the real acceleration-structure alignment once ported.
            Alignment = MathUtils::Max(Alignment, 0u);//GRHIRayTracingAccelerationStructureAlignment);
        }
        else if (bIsUniformBuffer)
        {
            Alignment = MathUtils::Max(Alignment, (uint32)Limits.minUniformBufferOffsetAlignment);
        }
        else
        {
            // Unknown usage combination: fall back to the 16-byte default.
            //checkf(false, TEXT("Unknown buffer alignment for VkBufferUsageFlags combination: 0x%x (%s)"), BufferUsageFlags, VK_FLAGS_TO_STRING(VkBufferUsageFlags, BufferUsageFlags));
        }

        return Alignment;
    }
	// Caches the owning device and its device-memory manager; the actual heap
	// setup happens later in Init().
	FMemoryManager::FMemoryManager(VulkanDevice* InDevice)
		:mDevice(InDevice)
		,DeviceMemoryManager(&InDevice->GetDeviceMemoryManager())
	{
		
	}

    // Builds one FVulkanResourceHeap per device memory type: a staging upload
    // heap (HOST_VISIBLE|HOST_COHERENT), a download heap (HOST_CACHED if
    // available), and default GPU heaps for every remaining type, each seeded
    // with its page-size bucket table.
    void FMemoryManager::Init()
	{
		// Mask covering every memory type the device reports.
		const uint32 TypeBits = (1 << DeviceMemoryManager->GetNumMemoryTypes()) - 1;

		const VkPhysicalDeviceMemoryProperties& MemoryProperties = DeviceMemoryManager->GetMemoryProperties();

		ResourceTypeHeaps.resize(MemoryProperties.memoryTypeCount);

		// Upload heap. Spec requires this combination to exist.
		{
			uint32 TypeIndex = 0;
			DeviceMemoryManager->GetMemoryTypeFromProperties(TypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, &TypeIndex);
			ResourceTypeHeaps[TypeIndex] = new FVulkanResourceHeap(this, TypeIndex, STAGING_HEAP_PAGE_SIZE);

			// Two buckets: fixed-size staging pages, then a catch-all for larger requests.
			auto& PageSizeBuckets = ResourceTypeHeaps[TypeIndex]->PageSizeBuckets;
			FVulkanPageSizeBucket Bucket0 = {STAGING_HEAP_PAGE_SIZE, STAGING_HEAP_PAGE_SIZE, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE | FVulkanPageSizeBucket::BUCKET_MASK_BUFFER};
			FVulkanPageSizeBucket Bucket1 = { UINT64_MAX, 0, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE | FVulkanPageSizeBucket::BUCKET_MASK_BUFFER};
			PageSizeBuckets.push_back(Bucket0);
			PageSizeBuckets.push_back(Bucket1);
		}

		// Download heap. Optional type per the spec: prefer HOST_CACHED for CPU
		// readback performance, fall back to plain HOST_VISIBLE.
		{
			uint32 TypeIndex = 0;
			{
				uint32 HostVisCachedIndex = 0;
				VkResult HostCachedResult = DeviceMemoryManager->GetMemoryTypeFromProperties(TypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT, &HostVisCachedIndex);
				uint32 HostVisIndex = 0;
				VkResult HostResult = DeviceMemoryManager->GetMemoryTypeFromProperties(TypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &HostVisIndex);
				if (HostCachedResult == VK_SUCCESS)
				{
					TypeIndex = HostVisCachedIndex;
				}
				else if (HostResult == VK_SUCCESS)
				{
					TypeIndex = HostVisIndex;
				}
				else
				{
					// Unreachable: the spec guarantees at least one HOST_VISIBLE type.
					//UE_LOG(LogVulkanRHI, Fatal, TEXT("No Memory Type found supporting VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT!"));
				}
			}
			ResourceTypeHeaps[TypeIndex] = new FVulkanResourceHeap(this, TypeIndex, STAGING_HEAP_PAGE_SIZE);

			auto& PageSizeBuckets = ResourceTypeHeaps[TypeIndex]->PageSizeBuckets;
			FVulkanPageSizeBucket Bucket0 = { STAGING_HEAP_PAGE_SIZE, STAGING_HEAP_PAGE_SIZE, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE | FVulkanPageSizeBucket::BUCKET_MASK_BUFFER };
			FVulkanPageSizeBucket Bucket1 = { UINT64_MAX, 0, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE | FVulkanPageSizeBucket::BUCKET_MASK_BUFFER };
			PageSizeBuckets.push_back(Bucket0);
			PageSizeBuckets.push_back(Bucket1);
		}


		// Setup main GPU heap
		{
			uint32 Index = 0;
			// Keep the lookup outside of check(): check() can compile out in some
			// configurations and this call verifies DEVICE_LOCAL support.
			const VkResult DeviceLocalResult = DeviceMemoryManager->GetMemoryTypeFromProperties(TypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, &Index);
			check(DeviceLocalResult == VK_SUCCESS);
			(void)DeviceLocalResult;

			for (Index = 0; Index < MemoryProperties.memoryTypeCount; ++Index)
			{
				const int32 HeapIndex = MemoryProperties.memoryTypes[Index].heapIndex;
				const VkDeviceSize HeapSize = MemoryProperties.memoryHeaps[HeapIndex].size;
				if(!ResourceTypeHeaps[Index] )
				{
					ResourceTypeHeaps[Index] = new FVulkanResourceHeap(this, Index);
					ResourceTypeHeaps[Index]->bIsHostCachedSupported = VKHasAllFlags(MemoryProperties.memoryTypes[Index].propertyFlags, VK_MEMORY_PROPERTY_HOST_CACHED_BIT);
					ResourceTypeHeaps[Index]->bIsLazilyAllocatedSupported = VKHasAllFlags(MemoryProperties.memoryTypes[Index].propertyFlags, VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT);
					auto& PageSizeBuckets = ResourceTypeHeaps[Index]->PageSizeBuckets;

#if PLATFORM_ANDROID
					FVulkanPageSizeBucket BucketImage= { UINT64_MAX, (uint32)ANDROID_MAX_HEAP_IMAGE_PAGE_SIZE, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE };
					FVulkanPageSizeBucket BucketBuffer = { UINT64_MAX, (uint32)ANDROID_MAX_HEAP_BUFFER_PAGE_SIZE, FVulkanPageSizeBucket::BUCKET_MASK_BUFFER };
					// PageSizeBuckets is a std::vector; the previous TArray-style
					// Add() calls did not compile on this path.
					PageSizeBuckets.push_back(BucketImage);
					PageSizeBuckets.push_back(BucketBuffer);
#else
					// Small requests share modest pages; large ones get big pages
					// (capped at 1/8th of the heap); anything above the
					// sub-allocation limit falls through to the catch-all bucket.
					uint32 SmallAllocationThreshold = 2 << 20;
					uint32 LargeAllocationThreshold = UE_VK_MEMORY_MAX_SUB_ALLOCATION;
					VkDeviceSize SmallPageSize = 8llu << 20;
					VkDeviceSize LargePageSize = MathUtils::Min<VkDeviceSize>(HeapSize / 8, GPU_ONLY_HEAP_PAGE_SIZE);

				
					FVulkanPageSizeBucket BucketSmallImage = { SmallAllocationThreshold, (uint32)SmallPageSize, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE };
					FVulkanPageSizeBucket BucketLargeImage = { LargeAllocationThreshold, (uint32)LargePageSize, FVulkanPageSizeBucket::BUCKET_MASK_IMAGE };
					FVulkanPageSizeBucket BucketSmallBuffer = { SmallAllocationThreshold, (uint32)SmallPageSize, FVulkanPageSizeBucket::BUCKET_MASK_BUFFER };
					FVulkanPageSizeBucket BucketLargeBuffer = { LargeAllocationThreshold, (uint32)LargePageSize, FVulkanPageSizeBucket::BUCKET_MASK_BUFFER };
					FVulkanPageSizeBucket BucketRemainder = { UINT64_MAX, 0, FVulkanPageSizeBucket::BUCKET_MASK_BUFFER|FVulkanPageSizeBucket::BUCKET_MASK_IMAGE };
					PageSizeBuckets.push_back(BucketSmallImage);
					PageSizeBuckets.push_back(BucketLargeImage);
					PageSizeBuckets.push_back(BucketSmallBuffer);
					PageSizeBuckets.push_back(BucketLargeBuffer);
					PageSizeBuckets.push_back(BucketRemainder);
#endif
				}
			}
		}
	}
	// Releases a pooled-buffer allocation back to the sub-allocator that owns it.
	void FMemoryManager::FreeVulkanAllocationPooledBuffer(FVulkanAllocation& Allocation)
	{
		//SCOPED_NAMED_EVENT(FResourceHeapManager_FreeVulkanAllocationPooledBuffer, FColor::Cyan);
		//DecMetaStats(Allocation.MetaType, Allocation.Size);
		GetSubresourceAllocator(Allocation.AllocatorIndex)->Free(Allocation);
	}
	// Releases a buffer allocation back to the sub-allocator that owns it.
	void FMemoryManager::FreeVulkanAllocationBuffer(FVulkanAllocation& Allocation)
	{
		//SCOPED_NAMED_EVENT(FResourceHeapManager_FreeVulkanAllocationBuffer, FColor::Cyan);
		//DecMetaStats(Allocation.MetaType, Allocation.Size);
		GetSubresourceAllocator(Allocation.AllocatorIndex)->Free(Allocation);
	}

	// Releases an image allocation back to the sub-allocator that owns it.
	void FMemoryManager::FreeVulkanAllocationImage(FVulkanAllocation& Allocation)
	{
		//SCOPED_NAMED_EVENT(FResourceHeapManager_FreeVulkanAllocationImage, FColor::Cyan);
		//DecMetaStats(Allocation.MetaType, Allocation.Size);
		GetSubresourceAllocator(Allocation.AllocatorIndex)->Free(Allocation);
	}
	// Releases a dedicated image allocation back to the sub-allocator that owns it.
	void FMemoryManager::FreeVulkanAllocationImageDedicated(FVulkanAllocation& Allocation)
	{
		//SCOPED_NAMED_EVENT(FResourceHeapManager_FreeVulkanAllocationImageDedicated, FColor::Cyan);
		//DecMetaStats(Allocation.MetaType, Allocation.Size);
		GetSubresourceAllocator(Allocation.AllocatorIndex)->Free(Allocation);
	}
	// Allocates buffer-backing memory matching MemoryReqs and the requested
	// property flags. Falls back through progressively relaxed property flags
	// and alternate memory types when the preferred type is unavailable or the
	// heap allocation fails. Returns false only when every fallback fails.
	bool FMemoryManager::AllocateBufferMemory(FVulkanAllocation& OutAllocation, FVulkanEvictable* AllocationOwner, const VkMemoryRequirements& MemoryReqs, VkMemoryPropertyFlags MemoryPropertyFlags, EVulkanAllocationMetaType MetaType, bool bExternal, bool bForceSeparateAllocation, const char* File, uint32 Line)
	{
		//SCOPED_NAMED_EVENT(FResourceHeapManager_AllocateBufferMemory, FColor::Cyan);
		uint32 TypeIndex = 0;
		VkResult Result = DeviceMemoryManager->GetMemoryTypeFromProperties(MemoryReqs.memoryTypeBits, MemoryPropertyFlags, &TypeIndex);
		bool bMapped = VKHasAllFlags(MemoryPropertyFlags, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
		if ((Result != VK_SUCCESS) || !ResourceTypeHeaps[TypeIndex])
		{
			// Relax optional property bits one by one before retrying the lookup.
			if (VKHasAllFlags(MemoryPropertyFlags, VK_MEMORY_PROPERTY_HOST_CACHED_BIT))
			{
				// Try non-cached flag
				MemoryPropertyFlags = MemoryPropertyFlags & ~VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
			}

			if (VKHasAllFlags(MemoryPropertyFlags, VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT))
			{
				// Try non-lazy flag
				MemoryPropertyFlags = MemoryPropertyFlags & ~VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
			}

			if (VKHasAllFlags(MemoryPropertyFlags, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT))
			{
				// Try regular host memory if local+host is not available
				MemoryPropertyFlags = MemoryPropertyFlags & ~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
			}

			// Try another heap type
			uint32 OriginalTypeIndex = TypeIndex;
			if (DeviceMemoryManager->GetMemoryTypeFromPropertiesExcluding(MemoryReqs.memoryTypeBits, MemoryPropertyFlags, (Result == VK_SUCCESS) ? TypeIndex : (uint32)-1, &TypeIndex) != VK_SUCCESS)
			{
				// TODO: this should be fatal once logging is ported.
				//UE_LOG(LogVulkanRHI, Fatal, TEXT("Unable to find alternate type for index %d, MemSize %d, MemPropTypeBits %u, MemPropertyFlags %u, %s(%d)"),
				//	OriginalTypeIndex, (uint32)MemoryReqs.size, (uint32)MemoryReqs.memoryTypeBits, (uint32)MemoryPropertyFlags, ANSI_TO_TCHAR(File), Line);
			}
			if (!ResourceTypeHeaps[TypeIndex])
			{
				DumpMemory();
				// TODO: this should be fatal once logging is ported.
				//UE_LOG(LogVulkanRHI, Fatal, TEXT("Missing memory type index %d (originally requested %d), MemSize %d, MemPropTypeBits %u, MemPropertyFlags %u, %s(%d)"), TypeIndex, OriginalTypeIndex, (uint32)MemoryReqs.size, (uint32)MemoryReqs.memoryTypeBits, (uint32)MemoryPropertyFlags, ANSI_TO_TCHAR(File), Line);
			}
		}

		// Heap bookkeeping uses 32-bit sizes; guard the narrowing.
		check(MemoryReqs.size <= (uint64)MAX_uint32);

		if (!ResourceTypeHeaps[TypeIndex]->AllocateResource(OutAllocation, AllocationOwner, EType::Buffer, MemoryReqs.size, MemoryReqs.alignment, bMapped, bForceSeparateAllocation, MetaType, bExternal, File, Line))
		{
			// Try another memory type if the allocation failed
			VERIFYVULKANRESULT(DeviceMemoryManager->GetMemoryTypeFromPropertiesExcluding(MemoryReqs.memoryTypeBits, MemoryPropertyFlags, TypeIndex, &TypeIndex));
			bMapped = (MemoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
			if (!ResourceTypeHeaps[TypeIndex])
			{
				// TODO: this should be fatal once logging is ported.
				//UE_LOG(LogVulkanRHI, Fatal, TEXT("Missing memory type index %d, MemSize %d, MemPropTypeBits %u, MemPropertyFlags %u, %s(%d)"), TypeIndex, (uint32)MemoryReqs.size, (uint32)MemoryReqs.memoryTypeBits, (uint32)MemoryPropertyFlags, ANSI_TO_TCHAR(File), Line);
			}
			if (!ResourceTypeHeaps[TypeIndex]->AllocateResource(OutAllocation, AllocationOwner, EType::Buffer, MemoryReqs.size, MemoryReqs.alignment, bMapped, bForceSeparateAllocation, MetaType, bExternal, File, Line))
			{
				DumpMemory();
				//UE_LOG(LogVulkanRHI, Fatal, TEXT("Out Of Memory, trying to allocate %d bytes\n"), MemoryReqs.size);
				return false;
			}
		}
		return true;
	}
	// High-level buffer memory allocation: queries the buffer's memory
	// requirements (honoring dedicated-allocation hints and the caller's
	// minimum alignment), translates EVulkanAllocationFlags into memory
	// property flags, allocates, and optionally binds the buffer (AutoBind).
	// Returns true iff OutAllocation is valid.
	bool FMemoryManager::AllocateBufferMemory(FVulkanAllocation& OutAllocation, VkBuffer InBuffer, EVulkanAllocationFlags InAllocFlags, const TCHAR* InDebugName, uint32 InForceMinAlignment)
	{
		//SCOPED_NAMED_EVENT(FMemoryManager_AllocateBufferMemory, FColor::Cyan);

		VkBufferMemoryRequirementsInfo2 BufferMemoryRequirementsInfo;
		ZeroVulkanStruct(BufferMemoryRequirementsInfo, VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2);
		BufferMemoryRequirementsInfo.buffer = InBuffer;

		VkMemoryDedicatedRequirements DedicatedRequirements;
		ZeroVulkanStruct(DedicatedRequirements, VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS);

		VkMemoryRequirements2 MemoryRequirements;
		ZeroVulkanStruct(MemoryRequirements, VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2);
		MemoryRequirements.pNext = &DedicatedRequirements;

		vkGetBufferMemoryRequirements2(mDevice->GetDevice(), &BufferMemoryRequirementsInfo, &MemoryRequirements);

		// Allow caller to force his own alignment requirements
		MemoryRequirements.memoryRequirements.alignment = MathUtils::Max(MemoryRequirements.memoryRequirements.alignment, (VkDeviceSize)InForceMinAlignment);

		// Honor the driver's dedicated-allocation hint/requirement.
		if (DedicatedRequirements.requiresDedicatedAllocation || DedicatedRequirements.prefersDedicatedAllocation)
		{
			InAllocFlags |= EVulkanAllocationFlags::Dedicated;
		}

		// For now, translate all the flags into a call to the legacy AllocateBufferMemory() function
		const VkMemoryPropertyFlags MemoryPropertyFlags = GetMemoryPropertyFlags(InAllocFlags, DeviceMemoryManager->HasUnifiedMemory());
		const bool bExternal = EnumHasAllFlags(InAllocFlags, EVulkanAllocationFlags::External);
		const bool bForceSeparateAllocation = EnumHasAllFlags(InAllocFlags, EVulkanAllocationFlags::Dedicated);
		AllocateBufferMemory(OutAllocation, nullptr, MemoryRequirements.memoryRequirements, MemoryPropertyFlags, EVulkanAllocationMetaBufferOther, bExternal, bForceSeparateAllocation, __FILE__, __LINE__);

		if (OutAllocation.IsValid())
		{
			if (EnumHasAllFlags(InAllocFlags, EVulkanAllocationFlags::AutoBind))
			{
				VkBindBufferMemoryInfo BindBufferMemoryInfo;
				ZeroVulkanStruct(BindBufferMemoryInfo, VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO);
				BindBufferMemoryInfo.buffer = InBuffer;
				BindBufferMemoryInfo.memory = OutAllocation.GetDeviceMemoryHandle(mDevice);
				BindBufferMemoryInfo.memoryOffset = OutAllocation.Offset;

				// The bind result was previously ignored; a failed bind must not
				// go unnoticed (file convention: VERIFYVULKANRESULT).
				VERIFYVULKANRESULT(vkBindBufferMemory2(mDevice->GetDevice(), 1, &BindBufferMemoryInfo));
			}

			if (InDebugName)
			{
				//VULKAN_SET_DEBUG_NAME((*Device), VK_OBJECT_TYPE_BUFFER, InBuffer, TEXT("%s"), InDebugName);
			}
		}
		else
		{
			if (!EnumHasAllFlags(InAllocFlags, EVulkanAllocationFlags::NoError))
			{
				const bool IsHostMemory = EnumHasAnyFlags(InAllocFlags, EVulkanAllocationFlags::HostVisible | EVulkanAllocationFlags::HostCached);
				HandleOOM(false, IsHostMemory ? VK_ERROR_OUT_OF_HOST_MEMORY : VK_ERROR_OUT_OF_DEVICE_MEMORY, MemoryRequirements.memoryRequirements.size);
			}
		}

		return OutAllocation.IsValid();
	}
    // Computes the required buffer alignment for the engine-level usage flags:
    // translate them to Vulkan usage flags, then derive alignment from the
    // device limits.
    uint32 FMemoryManager::CalculateBufferAlignment(VulkanDevice& InDevice, EBufferUsageFlags InUEUsage, bool bZeroSize)
    {
        const VkBufferUsageFlags VulkanUsage = VulkanResourceMultiBuffer::UEToVKBufferUsageFlags(&InDevice, InUEUsage, bZeroSize);
        uint32 Alignment = CalculateBufferAlignmentFromVKUsageFlags(InDevice, VulkanUsage);

        if (EnumHasAllFlags(InUEUsage, EBufferUsageFlags::RayTracingScratch))
        {
            // Ray-tracing scratch alignment override is not ported yet.
            //Alignment = GRHIRayTracingScratchBufferAlignment;
        }

        return Alignment;
    }

    // Maps buffer usage flags to a memory priority hint. Branch order matters:
    // when several usage bits are set, the earliest matching category wins.
    float FMemoryManager::CalculateBufferPriority(const VkBufferUsageFlags BufferUsageFlags)
    {
        if (VKHasAnyFlags(BufferUsageFlags, (VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)))
        {
            return VULKAN_MEMORY_HIGHEST_PRIORITY;
        }

        if (VKHasAnyFlags(BufferUsageFlags, (VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)))
        {
            return VULKAN_MEMORY_MEDIUM_PRIORITY;
        }

        if (VKHasAnyFlags(BufferUsageFlags,
            (VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR |
            VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR |
            VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR)))
        {
            return VULKAN_MEMORY_MEDIUM_PRIORITY;
        }

        if (VKHasAnyFlags(BufferUsageFlags, (VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT)))
        {
            return VULKAN_MEMORY_LOW_PRIORITY;
        }

        if (VKHasAnyFlags(BufferUsageFlags, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT))
        {
            return VULKAN_MEMORY_HIGHER_PRIORITY;
        }

        // Unknown combination: fall back to medium priority.
        //checkf(false, TEXT("Unknown priority for VkBufferUsageFlags combination: 0x%x (%s)"), BufferUsageFlags, VK_FLAGS_TO_STRING(VkBufferUsageFlags, BufferUsageFlags));
        return VULKAN_MEMORY_MEDIUM_PRIORITY;
    }
	// Allocates from a pooled VkBuffer sub-allocator matching the requested
	// usage/property flags, searching used allocators, then free allocators,
	// and finally creating a new pooled buffer. Returns true on success; an
	// unrecoverable failure reports OOM and asserts.
	bool FMemoryManager::AllocateBufferPooled(FVulkanAllocation& OutAllocation, FVulkanEvictable* AllocationOwner, uint32 Size, uint32 MinAlignment, VkBufferUsageFlags BufferUsageFlags, VkMemoryPropertyFlags MemoryPropertyFlags, EVulkanAllocationMetaType MetaType, const char* File, uint32 Line)
	{
		//SCOPED_NAMED_EVENT(FResourceHeapManager_AllocateBufferPooled, FColor::Cyan);
		check(OutAllocation.Type == EVulkanAllocationEmpty);

		uint32 Alignment = MathUtils::Max(MinAlignment, CalculateBufferAlignmentFromVKUsageFlags(*mDevice, BufferUsageFlags));
		const float Priority = CalculateBufferPriority(BufferUsageFlags);

		// Round the request up to its pool size class (unless it exceeds all classes).
		const int32 PoolSize = (int32)GetPoolTypeForAlloc(Size, Alignment);
		if (PoolSize != (int32)EPoolSizes::SizesCount)
		{
			Size = PoolSizes[PoolSize];
		}

		//FScopeLock ScopeLock(&UsedFreeBufferAllocationsLock);

		// 1) Try allocators that already have live sub-allocations.
		for (uint32 Index = 0; Index < UsedBufferAllocations[PoolSize].size(); ++Index)
		{
			FVulkanSubresourceAllocator* SubresourceAllocator = UsedBufferAllocations[PoolSize][Index];
			if ((SubresourceAllocator->BufferUsageFlags & BufferUsageFlags) == BufferUsageFlags &&
				(SubresourceAllocator->MemoryPropertyFlags & MemoryPropertyFlags) == MemoryPropertyFlags)
			{
				if(SubresourceAllocator->TryAllocate2(OutAllocation, AllocationOwner, Size, Alignment, MetaType, File, Line))
				{
					//IncMetaStats(MetaType, OutAllocation.Size);
					return true;
				}
			}
		}

		// 2) Try currently-unused allocators; on success move the allocator to
		// the used list.
		for (uint32 Index = 0; Index < FreeBufferAllocations[PoolSize].size(); ++Index)
		{
			FVulkanSubresourceAllocator* SubresourceAllocator = FreeBufferAllocations[PoolSize][Index];
			if ((SubresourceAllocator->BufferUsageFlags & BufferUsageFlags) == BufferUsageFlags &&
				(SubresourceAllocator->MemoryPropertyFlags & MemoryPropertyFlags) == MemoryPropertyFlags)
			{
				if(SubresourceAllocator->TryAllocate2(OutAllocation, AllocationOwner, Size, Alignment, MetaType, File, Line))
				{
					//IncMetaStats(MetaType, OutAllocation.Size);
					// Swap-remove the entry. (The previous code wrote nullptr into
					// the slot instead, leaving a null entry that later searches
					// would dereference.)
					FreeBufferAllocations[PoolSize][Index] = FreeBufferAllocations[PoolSize].back();
					FreeBufferAllocations[PoolSize].pop_back();
					UsedBufferAllocations[PoolSize].push_back(SubresourceAllocator);
					return true;
				}
			}
		}

		// 3) Create a new pooled buffer sized for this pool class.
		const uint32 BufferSize = MathUtils::Max(Size, BufferSizes[PoolSize]);

		VkBuffer Buffer;
		VkBufferCreateInfo BufferCreateInfo={};
		BufferCreateInfo.sType=VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
		BufferCreateInfo.size = BufferSize;
		BufferCreateInfo.usage = BufferUsageFlags;
		// For descriptors buffers
		if (false)//mDevice->GetOptionalExtensions().HasBufferDeviceAddress)
		{
			BufferCreateInfo.usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
		}
		// Creation result was previously ignored; a failed create must not go
		// unnoticed (file convention: VERIFYVULKANRESULT).
		VERIFYVULKANRESULT(vkCreateBuffer(mDevice->GetDevice(), &BufferCreateInfo, nullptr, &Buffer));

		VkMemoryRequirements MemReqs;
		vkGetBufferMemoryRequirements(mDevice->GetDevice(), Buffer, &MemReqs);
		Alignment = MathUtils::Max((uint32)MemReqs.alignment, Alignment);
		ensure(MemReqs.size >= BufferSize);

		uint32 MemoryTypeIndex;	
		mDevice->GetDeviceMemoryManager().GetMemoryTypeFromProperties(MemReqs.memoryTypeBits, MemoryPropertyFlags, &MemoryTypeIndex);

		bool bHasUnifiedMemory = DeviceMemoryManager->HasUnifiedMemory();
		FDeviceMemoryAllocation* DeviceMemoryAllocation = DeviceMemoryManager->Alloc(true, MemReqs.size, MemoryTypeIndex, nullptr, Priority, false, File, Line);
		if(!DeviceMemoryAllocation)
		{
			// Fall back to a non-device-local memory type.
			MemoryPropertyFlags &= (~VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
			const uint32 ForbiddenMemoryTypeIndex = MemoryTypeIndex;
			if (VK_SUCCESS == mDevice->GetDeviceMemoryManager().GetMemoryTypeFromPropertiesExcluding(MemReqs.memoryTypeBits, MemoryPropertyFlags, ForbiddenMemoryTypeIndex, &MemoryTypeIndex))
			{
				DeviceMemoryAllocation = DeviceMemoryManager->Alloc(false, MemReqs.size, MemoryTypeIndex, nullptr, Priority, false, File, Line);
			}
		}
		if(!DeviceMemoryAllocation)
		{
			HandleOOM(false);
			checkNoEntry();
		}
		VERIFYVULKANRESULT(vkBindBufferMemory(mDevice->GetDevice(), Buffer, DeviceMemoryAllocation->GetHandle(), 0));
		uint8 AllocationFlags = 0;
		if(!bHasUnifiedMemory && MetaTypeCanEvict(MetaType))
		{
			AllocationFlags |= VulkanAllocationFlagsCanEvict;
		}
		if (DeviceMemoryAllocation->CanBeMapped())
		{
			DeviceMemoryAllocation->Map(BufferSize, 0);
		}

		uint32 BufferId = 0;
		//if (UseVulkanDescriptorCache())
		//{
		//	BufferId = ++GVulkanBufferHandleIdCounter;
		//}
		FVulkanSubresourceAllocator* SubresourceAllocator = new FVulkanSubresourceAllocator(EVulkanAllocationPooledBuffer, this, AllocationFlags, DeviceMemoryAllocation, MemoryTypeIndex,
			MemoryPropertyFlags, MemReqs.alignment, Buffer, BufferSize, BufferId, BufferUsageFlags, PoolSize);

		RegisterSubresourceAllocator(SubresourceAllocator);
		UsedBufferAllocations[PoolSize].push_back(SubresourceAllocator);

		if(SubresourceAllocator->TryAllocate2(OutAllocation, AllocationOwner, Size, Alignment, MetaType, File, Line))
		{
			//IncMetaStats(MetaType, OutAllocation.Size);
			return true;
		}

		// A brand-new allocator sized for this request must be able to satisfy it.
		HandleOOM(false);
		checkNoEntry();
		return false;
	}
	// Assigns a stable slot index in AllBufferAllocations to a freshly created allocator.
	// Vacant slots form an intrusive free list: a free slot stores the index of the next
	// free slot punned into its pointer value, terminated by (PTRINT)-1 in the head.
	void FMemoryManager::RegisterSubresourceAllocator(FVulkanSubresourceAllocator* SubresourceAllocator)
	{
		// The allocator must not already be registered (0xffffffff == unassigned).
		check(SubresourceAllocator->AllocatorIndex == 0xffffffff);
		//FRWScopeLock ScopedLock(AllBufferAllocationsLock, SLT_Write);
		if (AllBufferAllocationsFreeListHead == (PTRINT)-1)
		{
			// No recycled slot available: append a new one at the end.
			SubresourceAllocator->AllocatorIndex = AllBufferAllocations.size();
			AllBufferAllocations.push_back(SubresourceAllocator);
		}
		else
		{
			// Pop the head of the free list and reuse that slot.
			const uint32 RecycledIndex = AllBufferAllocationsFreeListHead;
			AllBufferAllocationsFreeListHead = (PTRINT)AllBufferAllocations[RecycledIndex];
			SubresourceAllocator->AllocatorIndex = RecycledIndex;
			AllBufferAllocations[RecycledIndex] = SubresourceAllocator;
		}
	}
	// Debug hook for dumping memory-manager state. Currently a stub: no dump is
	// produced regardless of bFullDump. TODO: implement (e.g. walk
	// UsedBufferAllocations/FreeBufferAllocations and log per-pool usage).
	void FMemoryManager::DumpMemory(bool bFullDump)
	{
		
	}
	// Out-of-memory handler invoked when a device allocation fails (see the
	// allocation paths above, which call HandleOOM(false) before checkNoEntry()).
	// Currently a stub: performs no reporting or recovery; callers rely on the
	// subsequent checkNoEntry() to halt. Parameters beyond bCanResume appear to
	// take defaults declared at the prototype — confirm against the header.
	void FMemoryManager::HandleOOM(bool bCanResume, VkResult Result, uint64 AllocationSize, uint32 MemoryTypeIndex)
	{
	}
	// Releases a tracked allocation. By default frees are implicitly deferred;
	// passing EVulkanFreeFlag_DontDefer releases the backing storage immediately
	// based on the allocation's type, then clears the allocation record.
	void FMemoryManager::FreeVulkanAllocation(FVulkanAllocation& Allocation, EVulkanFreeFlags FreeFlags)
	{
		if ((FreeFlags & EVulkanFreeFlag_DontDefer) == 0)
		{
			// Deferred path — currently disabled:
			//GetSubresourceAllocator(Allocation.AllocatorIndex)->SetFreePending(Allocation);
			//mDevice->GetDeferredDeletionQueue().EnqueueResourceAllocation(Allocation);
			check(!Allocation.HasAllocation());
			return;
		}

		// Immediate release: dispatch on the allocation kind.
		switch (Allocation.Type)
		{
		case EVulkanAllocationEmpty:
			break;
		case EVulkanAllocationPooledBuffer:
			FreeVulkanAllocationPooledBuffer(Allocation);
			break;
		case EVulkanAllocationBuffer:
			FreeVulkanAllocationBuffer(Allocation);
			break;
		case EVulkanAllocationImage:
			FreeVulkanAllocationImage(Allocation);
			break;
		case EVulkanAllocationImageDedicated:
			FreeVulkanAllocationImageDedicated(Allocation);
			break;
		}

		// Wipe the record so it no longer claims any storage.
		memset(&Allocation, 0, sizeof(Allocation));
		Allocation.Type = EVulkanAllocationEmpty;
		check(!Allocation.HasAllocation());
	}
}