#pragma once
#include "Runtime/BattleFirePrefix.h"
#include "ThreadSafeCounter.h"
#include <atomic>
#include <vector>
// Sanity-check hook for the lock-free containers. FIX: this was previously an
// object-like macro (`#define checkLockFreePointerList`), so an invocation like
// `checkLockFreePointerList(expr);` expanded to `(expr);` — the "disabled" check
// still evaluated its argument (atomic loads etc.) at runtime. Defining it as a
// variadic function-like macro discards the argument expression entirely.
#define checkLockFreePointerList(...)

// Instrumentation hook for contended lock-free loops; intentionally a no-op here.
FORCEINLINE void TestCriticalStall()
{
}

// Called when the 38-bit ABA tag counter wraps; implemented out of line.
void LockFreeTagCounterHasOverflowed();
// Called (fatally) when the fixed-size link pool is exhausted; implemented out of line.
void LockFreeLinksExhausted(BattleFireUInt32 TotalNum);
// Raw page allocation/release used by the link allocator; implemented out of line.
void* LockFreeAllocLinks(SIZE_T AllocSize);
void LockFreeFreeLinks(SIZE_T AllocSize, void* Ptr);

// Links are addressed by a 26-bit index, so at most (1 << 26) links can exist.
#define MAX_LOCK_FREE_LINKS_AS_BITS (26)
#define MAX_LOCK_FREE_LINKS (1 << 26)

// Lock-free "allocate once" indexed allocator. Items are addressed by a
// BattleFireUInt32 index (index 0 is reserved as the null index). Backing
// pages of ItemsPerPage items are allocated on demand via LockFreeAllocLinks
// and are never returned, so an index stays valid for the process lifetime.
template<class T, unsigned int MaxTotalItems, unsigned int ItemsPerPage>
class TLockFreeAllocOnceIndexedAllocator
{
	enum
	{
		// Number of pages needed to cover MaxTotalItems, rounding up.
		MaxBlocks = (MaxTotalItems + ItemsPerPage - 1) / ItemsPerPage
	};
public:

	TLockFreeAllocOnceIndexedAllocator()
	{
		NextIndex.Increment(); // skip the null ptr
		for (BattleFireUInt32 Index = 0; Index < MaxBlocks; Index++)
		{
			Pages[Index] = nullptr;
		}
	}

	// Reserve Count consecutive indices, default-constructing a T in each slot,
	// and return the first reserved index. Reports exhaustion through
	// LockFreeLinksExhausted when the reservation would exceed MaxTotalItems.
	FORCEINLINE BattleFireUInt32 Alloc(BattleFireUInt32 Count = 1)
	{
		BattleFireUInt32 FirstItem = NextIndex.Add(Count);
		if (FirstItem + Count > MaxTotalItems)
		{
			LockFreeLinksExhausted(MaxTotalItems);
		}
		for (BattleFireUInt32 CurrentItem = FirstItem; CurrentItem < FirstItem + Count; CurrentItem++)
		{
			new (GetRawItem(CurrentItem)) T();
		}
		return FirstItem;
	}
	// Translate an index to a pointer; the null index 0 maps to nullptr.
	FORCEINLINE T* GetItem(BattleFireUInt32 Index)
	{
		if (!Index)
		{
			return nullptr;
		}
		BattleFireUInt32 BlockIndex = Index / ItemsPerPage;
		BattleFireUInt32 SubIndex = Index % ItemsPerPage;
		checkLockFreePointerList(Index < (BattleFireUInt32)NextIndex.GetValue() && Index < MaxTotalItems&& BlockIndex < MaxBlocks&& Pages[BlockIndex]);
		return Pages[BlockIndex] + SubIndex;
	}

private:
	// Return raw (uninitialized) storage for Index, allocating the backing page
	// on first touch. Concurrent first-touch races are resolved with a CAS on
	// the page slot: the losing thread frees the page it allocated.
	void* GetRawItem(BattleFireUInt32 Index)
	{
		BattleFireUInt32 BlockIndex = Index / ItemsPerPage;
		BattleFireUInt32 SubIndex = Index % ItemsPerPage;
		checkLockFreePointerList(Index && Index < (BattleFireUInt32)NextIndex.GetValue() && Index < MaxTotalItems&& BlockIndex < MaxBlocks);
		if (!Pages[BlockIndex])
		{
			T* NewBlock = (T*)LockFreeAllocLinks(ItemsPerPage * sizeof(T));
			T* ExpectedNullPtr=nullptr;
			if (!Pages[BlockIndex].compare_exchange_strong(ExpectedNullPtr,NewBlock))
			{
				// we lost discard block
				checkLockFreePointerList(Pages[BlockIndex] && Pages[BlockIndex] != NewBlock);
				LockFreeFreeLinks(ItemsPerPage * sizeof(T), NewBlock);
			}
			else
			{
				checkLockFreePointerList(Pages[BlockIndex]);
			}
		}
		return (void*)(Pages[BlockIndex] + SubIndex);
	}
	FThreadSafeCounter NextIndex; // next unallocated index; starts at 1 (0 is the null index)
	std::atomic<T*> Pages[MaxBlocks]; // lazily allocated pages of ItemsPerPage items each
};


#define MAX_TagBitsValue (BattleFireUInt64(1) << (64 - MAX_LOCK_FREE_LINKS_AS_BITS))

MS_ALIGN(8)
// Packed 64-bit "tagged pointer": the low MAX_LOCK_FREE_LINKS_AS_BITS (26) bits
// hold a link index; the upper 38 bits hold a counter/state tag used to defeat
// the ABA problem. (Note: 64 - 26 = 38 tag bits, matching MAX_TagBitsValue.)
struct LockFreeLinkNode//CounterAndState(upper 38 bits) Ptr(MAX_LOCK_FREE_LINKS_AS_BITS 26 Bits)
{
	// Extract the counter/state tag (the upper 38 bits, shifted down).
	static FORCEINLINE BattleFireUInt64 GetCounterAndState(BattleFireUInt64 node)
	{
		return (node >> MAX_LOCK_FREE_LINKS_AS_BITS);
	}
	// Extract the 26-bit link index from the low bits.
	static FORCEINLINE BattleFireUInt32 GetLinkPtr(BattleFireUInt64 node)
	{
		return BattleFireUInt32(node & (MAX_LOCK_FREE_LINKS - 1));
	}
	// Pack a link index and a counter/state tag into one 64-bit word.
	static FORCEINLINE BattleFireUInt64 MakeNode(BattleFireUInt32 LinkPtr, BattleFireUInt64 CounterAndState) {
		return (BattleFireUInt64(LinkPtr) | (CounterAndState << MAX_LOCK_FREE_LINKS_AS_BITS));
	}
	// The low log2(ABAInc) bits of the tag carry user state (e.g. a "closed" flag);
	// ABAInc must be a power of two.
	template<BattleFireUInt64 ABAInc>
	static FORCEINLINE BattleFireUInt64 GetState(BattleFireUInt64 node)
	{
		return (node >> MAX_LOCK_FREE_LINKS_AS_BITS) & (ABAInc - 1);
	}
	
	// Replace only the user-state bits of the tag with Value, keeping the link
	// index and the remaining counter bits.
	template<BattleFireUInt64 TABAInc>
	static FORCEINLINE BattleFireUInt64 SetState(BattleFireUInt64 node,BattleFireUInt64 Value)
	{
		const BattleFireUInt32 Ptr=GetLinkPtr(node);
		const BattleFireUInt64 CounterAndState=(GetCounterAndState(node) & ~(TABAInc - 1)) | Value;
		return MakeNode(Ptr,CounterAndState);
	}
	// Advance the ABA counter by ABAInc (preserving the link index); reports
	// wraparound of the tag via LockFreeTagCounterHasOverflowed.
	template<BattleFireUInt64 ABAInc>
	static FORCEINLINE BattleFireUInt64 AdvanceCounterAndState(BattleFireUInt64 LinkNode) {
		const BattleFireUInt64 LinkNodeCounterAndState = GetCounterAndState(LinkNode);
		const BattleFireUInt64 AdvancedLinkNodeCounterAndState = LinkNodeCounterAndState + ABAInc;
		const BattleFireUInt64 AdvancedLinkNode = MakeNode(GetLinkPtr(LinkNode),AdvancedLinkNodeCounterAndState);
		if(AdvancedLinkNodeCounterAndState<LinkNodeCounterAndState)
		{
			LockFreeTagCounterHasOverflowed();
		}
		return AdvancedLinkNode;
	}
	// As AdvanceCounterAndState, but also substitutes a new link index.
	template<BattleFireUInt64 ABAInc>
	static FORCEINLINE BattleFireUInt64 MakeNodeWithLinkPtrAndAdvanceOldCounterAndState(BattleFireUInt64 OldLinkNode,BattleFireUInt32 LinkPtr) {
		const BattleFireUInt64 LinkNodeCounterAndState = GetCounterAndState(OldLinkNode);
		const BattleFireUInt64 AdvancedLinkNodeCounterAndState = LinkNodeCounterAndState + ABAInc;
		const BattleFireUInt64 AdvancedLinkNode = MakeNode(LinkPtr,AdvancedLinkNodeCounterAndState);
		if(AdvancedLinkNodeCounterAndState<LinkNodeCounterAndState)
		{
			LockFreeTagCounterHasOverflowed();
		}
		return AdvancedLinkNode;
	}
	// Non-atomic reset to the empty node (index 0, tag 0); only safe before the
	// node is visible to other threads.
	void Init()
	{
		mAtomicProxy=0;
	}
	std::atomic<BattleFireUInt64> mAtomicProxy; // the packed word, always accessed atomically
} GCC_ALIGN(8);

// A pool-allocated list link. The FIFO queue links through the tagged
// DoubleNext; the LIFO stack links through the plain-index SingleNext.
struct FIndexedLockFreeLink
{
	LockFreeLinkNode DoubleNext; // tagged (counter + index) next pointer, used by the FIFO queue
	void* Payload;               // user payload carried by this link
	BattleFireUInt32 SingleNext; // untagged next index, used by the LIFO stack
};

// there is a version of this code that uses 128 bit atomics to avoid the indirection, that is why we have this policy class at all.
struct FLockFreeLinkPolicy
{
	enum
	{
		MAX_BITS_IN_TLinkPtr = MAX_LOCK_FREE_LINKS_AS_BITS
	};
	typedef LockFreeLinkNode TDoublePtr;   // tagged (counter + index) pointer word
	typedef FIndexedLockFreeLink TLink;    // the link record itself
	typedef BattleFireUInt32 TLinkPtr;     // plain link index, used as a "pointer"
	typedef TLockFreeAllocOnceIndexedAllocator<FIndexedLockFreeLink, MAX_LOCK_FREE_LINKS, 16384> TAllocator;

	// Resolve a link index to its record; index 0 resolves to nullptr.
	static FORCEINLINE FIndexedLockFreeLink* DerefLink(BattleFireUInt32 Ptr)
	{
		return LinkAllocator.GetItem(Ptr);
	}
	// Same as DerefLink: with indexed links, index and "pointer" coincide.
	static FORCEINLINE FIndexedLockFreeLink* IndexToLink(BattleFireUInt32 Index)
	{
		return LinkAllocator.GetItem(Index);
	}
	// Identity conversion for this (indexed) policy.
	static FORCEINLINE BattleFireUInt32 IndexToPtr(BattleFireUInt32 Index)
	{
		return Index;
	}
	// Allocate / recycle a link from the shared pool; implemented out of line.
	static BattleFireUInt32 AllocLockFreeLink();
	static void FreeLockFreeLink(BattleFireUInt32 Item);
	static TAllocator LinkAllocator;
};

// Root of a lock-free LIFO (Treiber-style) stack of pool-indexed links.
// The head word packs the top link index plus an ABA counter; the low
// log2(TABAInc) counter bits carry optional user state (see GetState/SetState).
template<int TPaddingForCacheContention, BattleFireUInt64 TABAInc = 1>
class FLockFreePointerListLIFORoot : public FNoncopyable
{
	typedef FLockFreeLinkPolicy::TDoublePtr TDoublePtr;
	typedef FLockFreeLinkPolicy::TLink TLink;
	typedef FLockFreeLinkPolicy::TLinkPtr TLinkPtr;

public:
	FORCEINLINE FLockFreePointerListLIFORoot()
	{
		// We want to make sure we have quite a lot of extra counter values to avoid the ABA problem. This could probably be relaxed, but eventually it will be dangerous. 
		// The question is "how many queue operations can a thread starve for".
		static_assert(MAX_TagBitsValue / TABAInc >= (1 << 23), "risk of ABA problem");
		static_assert((TABAInc & (TABAInc - 1)) == 0, "must be power of two");
		Reset();
	}

	// Reinitialize to the empty state (link index 0, counter/state 0).
	// Not safe while other threads are using the list.
	void Reset()
	{
		Head.Init();
	}

	// Push an already-allocated link onto the head of the list.
	void Push(TLinkPtr Item)
	{
		while (true)
		{
			BattleFireUInt64 CurrentHeadNode = Head.mAtomicProxy.load();
			const BattleFireUInt64 NewHeadNode = LockFreeLinkNode::MakeNodeWithLinkPtrAndAdvanceOldCounterAndState<TABAInc>(CurrentHeadNode,Item);
			// Link to the current top before publishing; a failed CAS retries with a fresh snapshot.
			FLockFreeLinkPolicy::DerefLink(Item)->SingleNext = LockFreeLinkNode::GetLinkPtr(CurrentHeadNode);
			if (Head.mAtomicProxy.compare_exchange_weak(CurrentHeadNode, NewHeadNode))
			{
				break;
			}
		}
	}

	// Push InPayload only while OkToPush(user-state) holds. If ItemToPush is 0,
	// a link is allocated on demand; returns false (without pushing) once
	// OkToPush fails. Note ItemToPush is taken BY VALUE: a link this function
	// allocates is owned by this function.
	bool PushIf(void* InPayload, bool(*OkToPush)(BattleFireUInt64),TLinkPtr ItemToPush)
	{
		bool bAllocatedHere = false; // did this call (vs. the caller) allocate ItemToPush?
		while (true)
		{
			BattleFireUInt64 CurrentHeadNode = Head.mAtomicProxy.load();
			const BattleFireUInt32 CurrentNode= LockFreeLinkNode::GetLinkPtr(CurrentHeadNode);
			const BattleFireUInt64 LocalState = LockFreeLinkNode::GetState<TABAInc>(CurrentHeadNode);
			TLinkPtr Item = 0;
			if (OkToPush(LocalState))
			{
				if (!ItemToPush)
				{
					ItemToPush = FLockFreeLinkPolicy::AllocLockFreeLink();
					FLockFreeLinkPolicy::DerefLink(ItemToPush)->Payload = InPayload;
					bAllocatedHere = true;
				}
				Item=ItemToPush;
			}
			if (!Item)
			{
				// FIX: if an earlier iteration allocated a link and the state then
				// changed so that OkToPush now fails, that link leaked — ItemToPush
				// is a by-value parameter, so the caller can never free it.
				if (bAllocatedHere)
				{
					FLockFreeLinkPolicy::FreeLockFreeLink(ItemToPush);
				}
				return false;
			}
			const BattleFireUInt64 NewHeadNode = LockFreeLinkNode::MakeNode(Item,LockFreeLinkNode::GetCounterAndState(CurrentHeadNode)+TABAInc);
			FLockFreeLinkPolicy::DerefLink(Item)->SingleNext = CurrentNode;
			if (Head.mAtomicProxy.compare_exchange_weak(CurrentHeadNode, NewHeadNode))
			{
				break;
			}
		}
		return true;
	}

	// Pop the top link; returns 0 when the list is empty. The returned link's
	// SingleNext is cleared.
	TLinkPtr Pop()
	{
		TLinkPtr Item = 0;
		while (true)
		{
			BattleFireUInt64 threadLocalHead=Head.mAtomicProxy.load();
			Item = LockFreeLinkNode::GetLinkPtr(threadLocalHead);
			if (!Item)
			{
				break;
			}
			BattleFireUInt64 threadLocalNewHead = LockFreeLinkNode::AdvanceCounterAndState<TABAInc>(threadLocalHead);
			TLink* ItemP = FLockFreeLinkPolicy::DerefLink(Item);
			// Swing the head to the popped link's successor, keeping the advanced tag.
			threadLocalNewHead = LockFreeLinkNode::MakeNode(ItemP->SingleNext,  LockFreeLinkNode::GetCounterAndState(threadLocalNewHead));

			if (Head.mAtomicProxy.compare_exchange_weak(threadLocalHead, threadLocalNewHead))
			{
				ItemP->SingleNext = 0;
				break;
			}
		}
		return Item;
	}

	// Atomically detach the whole chain; returns the old top link (0 if empty).
	TLinkPtr PopAll()
	{
		TLinkPtr Item = 0;
		while (true)
		{
			BattleFireUInt64 threadLocalHead = Head.mAtomicProxy.load();
			Item = LockFreeLinkNode::GetLinkPtr(threadLocalHead);
			if (!Item)
			{
				break;
			}
			BattleFireUInt64 threadNewHead = LockFreeLinkNode::AdvanceCounterAndState<TABAInc>(threadLocalHead);
			threadNewHead = LockFreeLinkNode::MakeNode(0, LockFreeLinkNode::GetCounterAndState(threadNewHead));
			if (Head.mAtomicProxy.compare_exchange_weak(threadLocalHead,threadNewHead))
			{
				break;
			}
		}
		return Item;
	}

	// Atomically detach the whole chain AND apply StateChange to the user-state
	// bits (runs even when the list is empty, so the state still transitions).
	TLinkPtr PopAllAndChangeState(BattleFireUInt64(*StateChange)(BattleFireUInt64))
	{
		TLinkPtr Item = 0;
		while (true)
		{
			BattleFireUInt64 LocalHead=Head.mAtomicProxy.load();
			Item = LockFreeLinkNode::GetLinkPtr(LocalHead);
			BattleFireUInt64 NewHead=LockFreeLinkNode::AdvanceCounterAndState<TABAInc>(LocalHead);
			BattleFireUInt64 NewHeadState=StateChange(LockFreeLinkNode::GetState<TABAInc>(LocalHead));
			NewHead=LockFreeLinkNode::SetState<TABAInc>(NewHead,NewHeadState);
			NewHead=LockFreeLinkNode::MakeNode(0,LockFreeLinkNode::GetCounterAndState(NewHead));
			
			if (Head.mAtomicProxy.compare_exchange_weak(LocalHead, NewHead))
			{
				break;
			}
		}
		return Item;
	}

	// True when no link is currently on the stack (a snapshot; may be stale).
	FORCEINLINE bool IsEmpty() const
	{
		return !LockFreeLinkNode::GetLinkPtr(Head.mAtomicProxy.load());
	}

	// Snapshot of the user-state bits.
	FORCEINLINE BattleFireUInt64 GetState() const
	{
		const BattleFireUInt64 threadLocalHead = Head.mAtomicProxy.load();
		return LockFreeLinkNode::GetState<TABAInc>(threadLocalHead);
	}

private:
	TDoublePtr Head;
};

template<class T, int TPaddingForCacheContention, BattleFireUInt64 TABAInc = 1>
class FLockFreePointerListLIFOBase : public FNoncopyable
{
	typedef FLockFreeLinkPolicy::TDoublePtr TDoublePtr;
	typedef FLockFreeLinkPolicy::TLink TLink;
	typedef FLockFreeLinkPolicy::TLinkPtr TLinkPtr;
public:
	void Reset()
	{
		RootList.Reset();
	}

	void Push(T* InPayload)
	{
		TLinkPtr Item = FLockFreeLinkPolicy::AllocLockFreeLink();
		FLockFreeLinkPolicy::DerefLink(Item)->Payload = InPayload;
		RootList.Push(Item);
	}

	bool PushIf(T* InPayload, bool(*OkToPush)(BattleFireUInt64))
	{
		TLinkPtr Item = 0;
		if (!RootList.PushIf(InPayload,OkToPush,Item))
		{
			if (Item)
			{
				FLockFreeLinkPolicy::FreeLockFreeLink(Item);
			}
			return false;
		}
		return true;
	}


	T* Pop()
	{
		TLinkPtr Item = RootList.Pop();
		T* Result = nullptr;
		if (Item)
		{
			Result = (T*)FLockFreeLinkPolicy::DerefLink(Item)->Payload;
			FLockFreeLinkPolicy::FreeLockFreeLink(Item);
		}
		return Result;
	}

	void PopAll(std::vector<T*>&OutArray)
	{
		TLinkPtr Links = RootList.PopAll();
		while (Links)
		{
			TLink* LinksP = FLockFreeLinkPolicy::DerefLink(Links);
			OutArray.push_back((T*)LinksP->Payload);
			TLinkPtr Del = Links;
			Links = LinksP->SingleNext;
			FLockFreeLinkPolicy::FreeLockFreeLink(Del);
		}
	}

	void PopAllAndChangeState(std::vector<T*>& OutArray, BattleFireUInt64(*StateChange)(BattleFireUInt64))
	{
		TLinkPtr Links = RootList.PopAllAndChangeState(StateChange);
		while (Links)
		{
			TLink* LinksP = FLockFreeLinkPolicy::DerefLink(Links);
			OutArray.push_back((T*)LinksP->Payload);
			TLinkPtr Del = Links;
			Links = LinksP->SingleNext;
			FLockFreeLinkPolicy::FreeLockFreeLink(Del);
		}
	}

	FORCEINLINE bool IsEmpty() const
	{
		return RootList.IsEmpty();
	}

	FORCEINLINE BattleFireUInt64 GetState() const
	{
		return RootList.GetState();
	}

private:

	FLockFreePointerListLIFORoot<TPaddingForCacheContention, TABAInc> RootList;
};

// Michael-Scott style lock-free FIFO queue over pool-indexed links. A
// permanently allocated stub link keeps Head/Tail non-null; Head always points
// at a consumed ("dummy") link whose DoubleNext is the first real element.
// NOTE(review): the stub link allocated in the constructor is never returned
// to the pool (there is no destructor) — confirm instances are long-lived.
template<class T, int TPaddingForCacheContention, BattleFireUInt64 TABAInc = 1>
class FLockFreePointerFIFOBase : public FNoncopyable
{
	typedef FLockFreeLinkPolicy::TDoublePtr TDoublePtr;
	typedef FLockFreeLinkPolicy::TLink TLink;
	typedef FLockFreeLinkPolicy::TLinkPtr TLinkPtr;
public:

	FORCEINLINE FLockFreePointerFIFOBase()
	{
		// We want to make sure we have quite a lot of extra counter values to avoid the ABA problem. This could probably be relaxed, but eventually it will be dangerous. 
		// The question is "how many queue operations can a thread starve for".
		static_assert(TABAInc <= 65536, "risk of ABA problem");
		static_assert((TABAInc & (TABAInc - 1)) == 0, "must be power of two");
		
		const TLinkPtr Stub = FLockFreeLinkPolicy::AllocLockFreeLink();
		Head.mAtomicProxy=LockFreeLinkNode::MakeNode(Stub,0);
		Tail.mAtomicProxy=LockFreeLinkNode::MakeNode(Stub,0);
	}

	// Enqueue InPayload at the tail.
	void Push(T* InPayload)
	{
		TLinkPtr Item = FLockFreeLinkPolicy::AllocLockFreeLink();
		FLockFreeLinkPolicy::DerefLink(Item)->Payload = InPayload;
		BattleFireUInt64 LocalTail;
		while (true)
		{
			LocalTail=Tail.mAtomicProxy.load();
			TLink* LocalTailP = FLockFreeLinkPolicy::DerefLink(LockFreeLinkNode::GetLinkPtr(LocalTail));
			BattleFireUInt64 LocalNext=LocalTailP->DoubleNext.mAtomicProxy.load();
			// Re-read Tail to confirm the (tail, tail->next) pair is consistent.
			const BattleFireUInt64 TestLocalTail=Tail.mAtomicProxy.load();
			if (TestLocalTail == LocalTail)
			{
				if (LockFreeLinkNode::GetLinkPtr(LocalNext))
				{
					// Tail is lagging behind; help swing it forward and retry.
					const BattleFireUInt64 NewTail=LockFreeLinkNode::MakeNodeWithLinkPtrAndAdvanceOldCounterAndState<TABAInc>(LocalTail,LockFreeLinkNode::GetLinkPtr(LocalNext));
					Tail.mAtomicProxy.compare_exchange_strong(LocalTail,NewTail);
				}
				else
				{
					// Tail really is last: try to link our item after it.
					const BattleFireUInt64 NewNext=LockFreeLinkNode::MakeNodeWithLinkPtrAndAdvanceOldCounterAndState<TABAInc>(LocalNext,Item);
					if (LocalTailP->DoubleNext.mAtomicProxy.compare_exchange_weak(LocalNext,NewNext))
					{
						break;
					}
				}
			}
		}
		{
			// Best-effort swing of Tail to the new item; if this fails, some
			// other thread has already helped.
			const BattleFireUInt64 NewTail=LockFreeLinkNode::MakeNodeWithLinkPtrAndAdvanceOldCounterAndState<TABAInc>(LocalTail,Item);
			Tail.mAtomicProxy.compare_exchange_strong(LocalTail,NewTail);
		}
	}

	// Dequeue the oldest payload, or nullptr when the queue is empty. The old
	// dummy link is freed; the dequeued link becomes the new dummy.
	T* Pop()
	{
		T* Result = nullptr;
		BattleFireUInt64 LocalHead;
		while (true)
		{
			LocalHead=Head.mAtomicProxy.load();
			BattleFireUInt64 LocalTail=Tail.mAtomicProxy.load();
			BattleFireUInt64 LocalNext=FLockFreeLinkPolicy::DerefLink( LockFreeLinkNode::GetLinkPtr(LocalHead))->DoubleNext.mAtomicProxy.load();
			const BattleFireUInt64 LocalHeadTest=Head.mAtomicProxy.load();
			if (LocalHead == LocalHeadTest)
			{
				if (LockFreeLinkNode::GetLinkPtr(LocalHead) == LockFreeLinkNode::GetLinkPtr(LocalTail))
				{
					if (!LockFreeLinkNode::GetLinkPtr(LocalNext))
					{
						return nullptr; // empty: only the dummy link remains
					}
					// Tail is lagging; help swing it before retrying.
					const BattleFireUInt64 NewTail=LockFreeLinkNode::MakeNodeWithLinkPtrAndAdvanceOldCounterAndState<TABAInc>(LocalTail,LockFreeLinkNode::GetLinkPtr(LocalNext));
					Tail.mAtomicProxy.compare_exchange_strong(LocalTail,NewTail);
				}
				else
				{
					// Read the payload BEFORE the CAS: afterwards another thread may free the link.
					Result = (T*)FLockFreeLinkPolicy::DerefLink(LockFreeLinkNode::GetLinkPtr(LocalNext))->Payload;
					BattleFireUInt64 NewHead=LockFreeLinkNode::MakeNodeWithLinkPtrAndAdvanceOldCounterAndState<TABAInc>(LocalHead,LockFreeLinkNode::GetLinkPtr(LocalNext));
					if (Head.mAtomicProxy.compare_exchange_weak(LocalHead, NewHead))
					{
						break;
					}
				}
			}
		}
		FLockFreeLinkPolicy::FreeLockFreeLink( LockFreeLinkNode::GetLinkPtr(LocalHead));
		return Result;
	}

	// Drain the queue into OutArray in FIFO order. ADDED: TLockFreePointerListFIFO
	// forwards to this method, but it was never implemented here (a latent
	// compile error on first instantiation of that wrapper's PopAll).
	void PopAll(std::vector<T*>& OutArray)
	{
		while (T* Item = Pop())
		{
			OutArray.push_back(Item);
		}
	}

	// FIX: the previous implementation did not compile when instantiated — it
	// called a nonexistent GetItemPtr() and assigned the LockFreeLinkNode struct
	// (not its atomic value) to a BattleFireUInt64. The queue is empty exactly
	// when the dummy head link has no successor.
	FORCEINLINE bool IsEmpty() const
	{
		const BattleFireUInt64 LocalHead = Head.mAtomicProxy.load();
		const TLink* HeadLinkP = FLockFreeLinkPolicy::DerefLink(LockFreeLinkNode::GetLinkPtr(LocalHead));
		const BattleFireUInt64 LocalNext = HeadLinkP->DoubleNext.mAtomicProxy.load();
		return !LockFreeLinkNode::GetLinkPtr(LocalNext);
	}

private:
	TDoublePtr Head;
	TDoublePtr Tail;
};


// Multi-priority task queue where idle threads record themselves in a stall
// bitmask. The link-ptr field of MasterState holds the stalled-thread bitmask;
// the counter field is a version tag so producers and consumers detect races.
template<class T, int TPaddingForCacheContention, int NumPriorities>
class StallingTaskQueue : public FNoncopyable
{
	typedef FLockFreeLinkPolicy::TDoublePtr TDoublePtr;
	typedef FLockFreeLinkPolicy::TLink TLink;
	typedef FLockFreeLinkPolicy::TLinkPtr TLinkPtr;
public:
	StallingTaskQueue()
	{
		MasterState.Init();
	}

	// Enqueue InPayload at Priority. Returns the index of one stalled thread to
	// wake (its stall bit is atomically cleared), or -1 if no thread is stalled.
	int32 Push(T* InPayload, BattleFireUInt32 Priority)
	{
		// Snapshot is taken before the queue push, exactly as before; a stale
		// snapshot simply fails the CAS and is re-read below.
		BattleFireUInt64 currentMasterState = MasterState.mAtomicProxy.load();
		PriorityQueues[Priority].Push(InPayload);

		// Refactor: the state-rebuild sequence used to be written out twice
		// (once before the retry loop, once inside it); it is now a single CAS
		// loop with identical semantics.
		int32 ThreadToWake;
		while (true)
		{
			const BattleFireUInt32 currentMasterPtr = LockFreeLinkNode::GetLinkPtr(currentMasterState);
			BattleFireUInt64 newMasterState = LockFreeLinkNode::AdvanceCounterAndState<1>(currentMasterState);
			ThreadToWake = FindThreadToWake(currentMasterPtr);
			// Clear the chosen thread's stall bit, or keep the mask unchanged.
			const TLinkPtr NewMask = (ThreadToWake >= 0) ? TurnOffBit(currentMasterPtr, ThreadToWake) : currentMasterPtr;
			newMasterState = LockFreeLinkNode::MakeNode(NewMask, LockFreeLinkNode::GetCounterAndState(newMasterState));
			if (MasterState.mAtomicProxy.compare_exchange_weak(currentMasterState, newMasterState))
			{
				break;
			}
			currentMasterState = MasterState.mAtomicProxy.load();
		}
		return ThreadToWake;
	}

	// Try to pop from the highest priority downwards. If everything is empty and
	// bAllowStall is set, publish MyThread's stall bit and return nullptr; the
	// caller is then expected to sleep until woken by a producer.
	T* Pop(int32 MyThread, bool bAllowStall)
	{
		while (true)
		{
			BattleFireUInt64 currentMasterState = MasterState.mAtomicProxy.load();
			for (int32 Index = 0; Index < NumPriorities; Index++)
			{
				T * Result = PriorityQueues[Index].Pop();
				if (Result)
				{
					// Bump the version counter so racing producers re-examine state.
					while (true)
					{
						const BattleFireUInt64 newMasterState = LockFreeLinkNode::AdvanceCounterAndState<1>(currentMasterState);
						if (MasterState.mAtomicProxy.compare_exchange_weak(currentMasterState,newMasterState))
						{
							return Result;
						}
						currentMasterState = MasterState.mAtomicProxy.load();
					}
				}
			}
			if (!bAllowStall)
			{
				break; // if we aren't stalling, we are done, the queues are empty
			}
			{
				// Mark this thread stalled; on CAS failure retry from the top,
				// since a producer may have pushed work in the meantime.
				BattleFireUInt64 newMasterState = LockFreeLinkNode::AdvanceCounterAndState<1>(currentMasterState);
				newMasterState = LockFreeLinkNode::MakeNode(TurnOnBit( LockFreeLinkNode::GetLinkPtr(currentMasterState), MyThread), LockFreeLinkNode::GetCounterAndState(newMasterState));
				if (MasterState.mAtomicProxy.compare_exchange_weak(currentMasterState,newMasterState))
				{
					break;
				}
			}
		}
		return nullptr;
	}

private:

	// Index of the lowest set bit in Ptr (one stalled thread), or -1 if none.
	static int32 FindThreadToWake(TLinkPtr Ptr)
	{
		int32 Result = -1;
		UPTRINT Test = UPTRINT(Ptr);
		if (Test)
		{
			Result = 0;
			while (!(Test & 1))
			{
				Test >>= 1;
				Result++;
			}
		}
		return Result;
	}

	static TLinkPtr TurnOffBit(TLinkPtr Ptr, int32 BitToTurnOff)
	{
		return (TLinkPtr)(UPTRINT(Ptr) & ~(UPTRINT(1) << BitToTurnOff));
	}

	static TLinkPtr TurnOnBit(TLinkPtr Ptr, int32 BitToTurnOn)
	{
		return (TLinkPtr)(UPTRINT(Ptr) | (UPTRINT(1) << BitToTurnOn));
	}

	static bool TestBit(TLinkPtr Ptr, int32 BitToTest)
	{
		return !!(UPTRINT(Ptr) & (UPTRINT(1) << BitToTest));
	}

	FLockFreePointerFIFOBase<T, TPaddingForCacheContention> PriorityQueues[NumPriorities];
	// Link-ptr field = stalled-thread bitmask; counter field = version/ABA tag.
	TDoublePtr MasterState;
};




// Thin public facade over the LIFO base that exposes only the basic list
// operations (padding variant selected by TPaddingForCacheContention).
template<class T, int TPaddingForCacheContention>
class TLockFreePointerListLIFOPad : private FLockFreePointerListLIFOBase<T, TPaddingForCacheContention>
{
	// Shorthand for the privately inherited implementation.
	using Super = FLockFreePointerListLIFOBase<T, TPaddingForCacheContention>;
public:
	// Push one item; it becomes the new head (LIFO order).
	void Push(T* NewItem)
	{
		Super::Push(NewItem);
	}
	// Pop the most recently pushed item, or nullptr when the list is empty.
	T* Pop()
	{
		return Super::Pop();
	}
	// Atomically detach the whole list, appending payloads to Output.
	void PopAll(std::vector<T*>& Output)
	{
		Super::PopAll(Output);
	}
	// Snapshot emptiness test (may be stale under concurrency).
	FORCEINLINE bool IsEmpty() const
	{
		return Super::IsEmpty();
	}
};
// Unpadded LIFO pointer list (no cache-line padding between hot members).
template<class T>
class TLockFreePointerListLIFO : public TLockFreePointerListLIFOPad<T, 0>
{
};
// "Unordered" list alias: callers must not rely on ordering, though the
// underlying implementation is the padded LIFO list.
template<class T, int TPaddingForCacheContention>
class TLockFreePointerListUnordered : public TLockFreePointerListLIFOPad<T, TPaddingForCacheContention>
{
};
// Thin public facade over the FIFO (queue) base.
template<class T, int TPaddingForCacheContention>
class TLockFreePointerListFIFO : private FLockFreePointerFIFOBase<T, TPaddingForCacheContention>
{
	// Shorthand for the privately inherited implementation.
	using Super = FLockFreePointerFIFOBase<T, TPaddingForCacheContention>;
public:
	// Enqueue one item at the tail.
	void Push(T* NewItem)
	{
		Super::Push(NewItem);
	}
	// Dequeue the oldest item, or nullptr when the queue is empty.
	T* Pop()
	{
		return Super::Pop();
	}
	// Drain the queue into Output.
	void PopAll(std::vector<T*>& Output)
	{
		Super::PopAll(Output);
	}
	// Snapshot emptiness test (may be stale under concurrency).
	FORCEINLINE bool IsEmpty() const
	{
		return Super::IsEmpty();
	}
};

// Closable unordered list for a single consumer: bit 0 of the root list's
// user-state encodes "closed". Producers use PushIfNotClosed; the consumer
// drains and closes atomically with PopAllAndClose.
template<class T, int TPaddingForCacheContention>
class TClosableLockFreePointerListUnorderedSingleConsumer : private FLockFreePointerListLIFOBase<T, TPaddingForCacheContention, 2>
{
	// Shorthand for the privately inherited implementation (TABAInc = 2
	// reserves one state bit).
	using Super = FLockFreePointerListLIFOBase<T, TPaddingForCacheContention, 2>;
public:
	// Reopen the list (clears items and the closed bit).
	void Reset()
	{
		Super::Reset();
	}
	// Push NewItem unless the list has been closed; returns false if closed.
	bool PushIfNotClosed(T* NewItem)
	{
		return Super::PushIf(NewItem, [](BattleFireUInt64 State)->bool {return !(State & 1); });
	}
	// Atomically drain every queued item into Output and mark the list closed.
	void PopAllAndClose(std::vector<T*>& Output)
	{
		Super::PopAllAndChangeState(Output, [](BattleFireUInt64 State) -> BattleFireUInt64 { return State | 1; });
	}
	// True once PopAllAndClose has run (until the next Reset).
	bool IsClosed() const
	{
		return !!(Super::GetState() & 1);
	}

};