
#include "allocator.h"


namespace
{
	// Per-block bookkeeping stored in front of every user allocation.
	struct	memory_detail_t
	{
		size_t	block_size;	// request rounded up to the lookaside granularity
		size_t	heap_idx;	// heap the block was carved from
	};

	// Header placed immediately before the user pointer.  Deriving from
	// SLIST_ENTRY lets a freed block double as a lookaside-list node.
	struct	memory_header_t : public SLIST_ENTRY
	{
		memory_detail_t	detail;
	};

	class	lookaside_util
	{
	public:

		// Round `size` up to the next multiple of `alignment`; a zero
		// request is treated as a one-byte request.
		// NOTE(review): assumes `alignment` is a power of two — confirm
		// at the call sites (lookaside_block_size).
		static	size_t	alignment_size( size_t size, size_t alignment )
		{
			const size_t bytes = (size == 0) ? 1 : size;
			const size_t mask  = alignment - 1;
			return (bytes + mask) & ~mask;
		}

		// Map an aligned block size to its lookaside slot index, or ~0
		// when the block is too large to be cached at all.
		static	size_t	lookaside_idx( size_t max_las_size, size_t shift_count, size_t size )
		{
			static const size_t	no_lookaside_idx = static_cast< size_t >( ~0 );

			if( size > max_las_size )
			{
				return no_lookaside_idx;
			}
			return (size >> shift_count) - 1;
		}
	};
}


namespace	memory
{
	// All members are set up lazily in create(); the constructor itself
	// does nothing.
	allocator::allocator()
	{
	}

	// Tears down whatever create() managed to build; destroy() is safe to
	// call even if create() was never invoked or failed part-way.
	allocator::~allocator()
	{
		destroy();
	}

	// One-time initialization: creates the lookaside caches, the heaps,
	// the allocation hint and the statistics collector.  On any failure
	// the partially constructed members are torn down and false returned.
	//
	// @param statistics          enable statistics collection
	// @param parallelism         number of heap/cache lines; 0 derives a
	//                            value from the processor count (+3 slack)
	// @param max_lookaside_size  largest block size kept in the cache
	bool	allocator::create( bool statistics, size_t parallelism, size_t max_lookaside_size )
	{
		if( parallelism <= 0 )
		{
			SYSTEM_INFO system_info;
			::memset( &system_info, 0, sizeof( system_info ) );
			::GetSystemInfo( &system_info );
			parallelism = system_info.dwNumberOfProcessors + 3;
		}

		size_t lookaside_count = (max_lookaside_size/lookaside_block_size);
		if( caches_.create( this, parallelism, lookaside_count ) == false )
		{
			destroy();	// BUGFIX: clean up partial state (was missing)
			return false;
		}

		// BUGFIX: this failure path previously skipped destroy(), leaving
		// caches_ initialized — a subsequent retry of create() would then
		// re-create caches_ on top of a live instance.  Now every failure
		// path tears down consistently, matching hint_/statistics_ below.
		if( heaps_.create( parallelism ) == false )
		{
			destroy();
			return false;
		}

		if( hint_.create() == false )
		{
			destroy();
			return false;
		}

		if( statistics_.create( statistics, heaps_.count(), caches_.line_count(), caches_.las_count() ) == false )
		{
			destroy();
			return false;
		}

		return true;
	}

	// Releases all members; safe to call repeatedly or on a never-created
	// allocator (each member's destroy is expected to tolerate that).
	// NOTE(review): caches_ is destroyed before heaps_ — cache teardown
	// appears to return cached blocks to the heaps via on_delete(), so
	// this ordering looks load-bearing; confirm before reordering.
	void	allocator::destroy()
	{
		caches_.destroy();
		heaps_.destroy();

		hint_.destroy();
		statistics_.destroy();
	}

	// Allocates `size` bytes.  Fast path pops a recycled block from the
	// lookaside cache; slow path carves header + payload out of one of
	// the heaps, probing round-robin starting at the hinted heap.
	// Returns nullptr only when every heap is exhausted.
	void*	allocator::alloc( size_t size )
	{
		// The hint is a heap/cache-line index smuggled through a void*;
		// clamp out-of-range (e.g. never-set) values to line 0.
		size_t src_hint = reinterpret_cast< size_t >( hint_.get() );
		if( src_hint >= caches_.line_count() )
		{
			src_hint = 0;
		}

		// Round up to the lookaside granularity and map to a size-class
		// slot (~0 when the block is too large to be cached).
		size_t block_size = lookaside_util::alignment_size( size, lookaside_block_size );
		size_t las_idx = lookaside_util::lookaside_idx( lookaside_block_size*caches_.las_count(), lookaside_shift_count, block_size );

		// Fast path: a recycled block still carries its original detail
		// (size class and owning heap), so it is handed out as-is.
		memory_header_t* header = static_cast< memory_header_t* >( caches_.pop( src_hint, las_idx ) );
		if( header != nullptr )
		{
			statistics_.inc_cache_alloc( src_hint, las_idx );
			return (header + 1);
		}

		size_t new_hint = src_hint;

		// BUGFIX: the allocation must cover the whole memory_header_t
		// (SLIST_ENTRY link + detail) because the user pointer is
		// (header + 1).  The previous sizeof( memory_detail_t ) under-
		// allocated by sizeof( SLIST_ENTRY ), letting the caller overrun
		// the end of the block by that many bytes.
		size_t alloc_size = block_size + sizeof( memory_header_t );

		// Probe each heap at most once, round-robin from the hint.
		while( (header = static_cast< memory_header_t* >( heaps_.try_alloc( new_hint, alloc_size ) )) == nullptr )
		{
			++new_hint;
			if( new_hint >= heaps_.count() )
			{
				new_hint = 0;
			}
			if( new_hint == src_hint )
			{
				break;	// visited every heap without success
			}
		}

		if( header == nullptr )
		{
			// new_hint has wrapped back to src_hint; force an allocation
			// on the hinted heap as a last resort.
			header = static_cast< memory_header_t* >( heaps_.alloc( src_hint, alloc_size ) );
		}

		if( header != nullptr )
		{
			// Publish the heap that had room as the next hint.
			if( src_hint != new_hint )
			{
				hint_.set( reinterpret_cast< void* >( new_hint ) );
			}

			statistics_.inc_heap_alloc( new_hint );

			// Stamp the block so free()/must_be_free() can route it back.
			// On the forced path above new_hint == src_hint, so heap_idx
			// is correct in both cases.
			header->detail.block_size = block_size;
			header->detail.heap_idx = new_hint;
			return (header + 1);
		}

		return nullptr;
	}

	bool	allocator::free( void* p )
	{
		if( p != nullptr )
		{
			memory_header_t* temp = reinterpret_cast< memory_header_t* >( p );
			memory_header_t* header = temp - 1;

			assert( header->Next == nullptr );

			size_t hint = reinterpret_cast< size_t >( hint_.get() );
			if( hint >= caches_.line_count() )
			{
				hint = 0;
			}

			size_t las_idx = lookaside_util::lookaside_idx( lookaside_block_size*caches_.las_count(), lookaside_shift_count, header->detail.block_size );
			if( caches_.push( hint, las_idx, header ) == false )
			{
				return must_be_free( header );
			}
			else
			{
				statistics_.inc_cache_free( hint, las_idx );
			}
		}

		return true;
	}

	// Read-only access to the allocation/free counters.
	// NOTE(review): the `statistics` flag passed to create() presumably
	// gates whether these counters are actually collected — confirm.
	const statistics*	allocator::get_statistics()	const
	{
		return &statistics_;
	}

	// Returns a block directly to the heap recorded in its header.  On
	// success the owning heap becomes the new allocation hint and the
	// heap-free counter is bumped; on failure nothing is changed.
	bool	allocator::must_be_free( void* entry )
	{
		memory_header_t* header = reinterpret_cast< memory_header_t* >( entry );
		const size_t heap_idx = header->detail.heap_idx;

		if( heaps_.free( heap_idx, header ) == false )
		{
			return false;
		}

		hint_.set( reinterpret_cast< void* >( heap_idx ) );
		statistics_.inc_heap_free( heap_idx );
		return true;
	}

	// Eviction callback from the lookaside cache: hand the evicted node
	// back to the heap it came from.  The result is deliberately
	// discarded — there is no caller to report a failure to.
	void	allocator::on_delete( SLIST_ENTRY* entry )
	{
		(void)must_be_free( entry );
	}
}
