#include "ThreadCache.h"

#include <assert.h>

#include <algorithm>

#include "CentralCache.h"


//申请内存
// Allocate `size` bytes from this thread's local cache.
// Fast path: pop from the matching free-list bucket with no locking.
// Slow path: the bucket is empty, so batch-fetch objects from the
// central cache (which does take a lock).
void* ThreadCache::Allocate(size_t size)
{
	assert(size <= MAX_BYTES); // thread cache only serves requests up to 256KB

	size_t alignSize = SizeClass::RoupdUp(size);  // actual bytes to hand out (after alignment)
	size_t index = SizeClass::Index(alignSize);   // which free-list bucket this size maps to

	if (!m_freeLists[index].Empty())
	{
		// Fast path: reuse a cached object from this thread's bucket.
		return m_freeLists[index].Pop();
	}

	// Slow path: bucket is empty — fetch a batch from the central cache.
	return FetchFromCentralCache(index, alignSize);
}

//释放内存
// Return `ptr` (an object of `size` bytes) to this thread's local cache.
// If the bucket's list grows to one full batch, a batch is handed back
// to the central cache so memory can migrate between threads.
void ThreadCache::Deallocate(void* ptr, size_t size)
{
	// Freeing a null pointer is a no-op; guard before touching anything else.
	if (!ptr)
	{
		return;
	}

	assert(size <= MAX_BYTES); // sizes above 256KB never come from the thread cache

	// Map the size back to its free-list bucket and push the object on.
	size_t alignSize = SizeClass::RoupdUp(size);
	size_t index = SizeClass::Index(alignSize);
	m_freeLists[index].Push(ptr);

	// Once the list holds at least one full batch (the slow-start batch
	// size), release a batch back to the central cache.
	if (m_freeLists[index].Size() >= m_freeLists[index].GetMaxSize())
	{
		ListTooLong(m_freeLists[index], size);
	}
}


//从中心缓存获取内存
// Refill bucket `index` by fetching a batch of `size`-byte objects from
// the central cache. Returns one object to the caller; any extras are
// stashed in the local free list.
//
// Slow-start feedback: the first requests ask for only a few objects
// (so an idle thread doesn't hoard memory). Each time the cap is hit,
// the cap grows by one, up to NumMoveSize(size) — larger sizes get
// smaller batches, smaller sizes get larger batches.
void* ThreadCache::FetchFromCentralCache(size_t index, size_t size)
{
	// Batch = min(current slow-start cap, upper bound for this size class).
	size_t batchNum = std::min(m_freeLists[index].GetMaxSize(), SizeClass::NumMoveSize(size));
	if (m_freeLists[index].GetMaxSize() == batchNum)
	{
		// The cap was the limiting factor — grow it for next time.
		m_freeLists[index].SetMaxSize(m_freeLists[index].GetMaxSize() + 1);
	}

	// The central cache may return fewer than batchNum, but at least one.
	void* start = nullptr;
	void* end = nullptr;
	size_t actualNum = CentralCache::GetInstance()->FetchRangeObj(start, end, batchNum, size);
	assert(actualNum > 0);

	if (actualNum == 1)
	{
		assert(start == end);
		return start;
	}

	// Hand the first object to the caller; cache the remaining actualNum-1.
	m_freeLists[index].PushRange(NextObj(start), end, actualNum - 1);
	return start;
}


//释放对象时，如果链表过长，则回收一部分内存到central cache
// The free list for this size class has grown past one batch: detach a
// batch of objects and return it to the central cache, which merges them
// back into their owning spans.
void ThreadCache::ListTooLong(FreeList& list, size_t size)
{
	void* first = nullptr;
	void* last = nullptr;

	// Detach MaxSize objects (one slow-start batch) from the list.
	list.PopRange(first, last, list.GetMaxSize());

	// Hand the detached chain down to the central cache.
	CentralCache::GetInstance()->ReleaseListToSpans(first, size);
}
