/*************************************************************************
	> File Name: ./src/ThreadCache.cpp
	> Author: zq
	> Mail: zhouquan1511@163.com 
	> Created Time: Thu 27 Apr 2023 12:09:59 PM CST
 ************************************************************************/

#include "ThreadCache.h"
#include "CentralCache.h"

void* ThreadCache::Allocate(size_t size) {
	assert(size <= MAX_BYTES);
	
	size_t alignSize = SizeClass::RoundUp(size);
	size_t index = SizeClass::Index(size);
	if (!_freeLists[index].Empty()) {
		return _freeLists[index].Pop();
	}
	else {
		return FetchFromCentralCache(index, alignSize);
	}
}

// Return one object to the thread-local free list of its size class.
// If the list then holds at least one full batch, a batch is handed
// back to the central cache so memory does not accumulate in one thread.
void ThreadCache::Deallocate(void *ptr, size_t size) {
	assert(ptr);
	// BUG FIX: the original wrote `assert(MAX_BYTES);`, which tests a
	// nonzero constant and is always true. The intended invariant —
	// matching the check in Allocate — is that the freed size is within
	// the thread-cache range.
	assert(size <= MAX_BYTES);

	size_t index = SizeClass::Index(size);
	_freeLists[index].Push(ptr);

	// When the free list length reaches one batch's worth of objects,
	// release a batch back to the central cache.
	if (_freeLists[index].Size() >= _freeLists[index].MaxSize()) {
		ListTooLong(_freeLists[index], size);
	}
}

// Detach one batch (MaxSize() objects) from the given free list and
// return it to the central cache, which redistributes the objects to
// the span(s) they came from.
void ThreadCache::ListTooLong(FreeList &list, size_t size) {
	void *first = nullptr;
	void *last = nullptr;

	// Pop a whole batch off the thread-local list in one operation.
	list.PopRange(first, last, list.MaxSize());

	// Hand the detached range back to the central cache.
	CentralCache::GetInstance()->ReleaseListToSpans(first, size);
}

// Pull a batch of objects of class `index` (each `alignSize` bytes)
// from the central cache. Returns one object to the caller and stores
// any surplus in the thread-local free list.
void* ThreadCache::FetchFromCentralCache(size_t index, size_t alignSize) {
	// Slow-start feedback tuning:
	// 1. Initially request only a few objects, so a one-off allocation
	//    does not hoard memory in this thread.
	// 2. If this size class keeps being requested, MaxSize() grows by one
	//    each time, ramping batchNum up toward the per-class upper bound.
	// BUG FIX: the original passed `size`, which is not a parameter of
	// this function (the parameter is `alignSize`); fixed here and in the
	// FetchRangeObj call below.
	size_t batchNum = std::min(_freeLists[index].MaxSize(),
							SizeClass::NumMoveSize(alignSize));
	if (batchNum == _freeLists[index].MaxSize()) {
		_freeLists[index].MaxSize() += 1;
	}

	void *start = nullptr;
	void *end = nullptr;

	// Ask the central cache for up to batchNum objects; it may return
	// fewer, but must return at least one.
	size_t actualNum = CentralCache::GetInstance()
					->FetchRangeObj(start, end, batchNum, alignSize);
	assert(actualNum >= 1);

	if (actualNum == 1) {
		// Single object: range must be degenerate (start == end).
		assert(start == end);
		return start;
	}
	else {
		// Give the first object to the caller; push the remaining
		// actualNum - 1 objects onto the thread-local free list.
		_freeLists[index].PushRange(NextObj(start), end, actualNum - 1);
		return start;
	}
}


