// objects.h - Copyright (C) 2013 Willow Schlanger. All rights reserved. See univm_license.txt.
// --------------------------------------------------------------------------------------------
// This header file provides support for memory pooling and reference counting. It is
// automatically included when you include <infrared/infrared.h> or <infrared/sysbase.h>.
//
// Sample code:
//    ref_t<int> x1 = ref_t<int>::allocate(true);   // use fast allocator
//    ref_t<int> x2 = ref_t<int>::allocate(false);  // do not use fast allocator
//    *x1 = 1;
//    *x2 = 2;
//
//    weak_ref_t<int> x3 = x1;
//    // It's safe to use *x3 here, as long as the underlying object's reference count
//    // doesn't fall to 0. This won't happen as long as 'x1' remains in scope.
//    //    A weak reference is more like a pointer: it doesn't increase the target's
//    // reference count. This is useful when a cycle would otherwise be introduced,
//    // i.e. for a binary search tree where each node needs a parent pointer.
//
//    // Do not do this:
//    //    weak_ref_t<int> bad = ref_t<int>::allocate(true);
//    // The above would leave 'bad' with a dangling pointer to the now-destroyed
//    // 'int' object.
//
//    ref_t<int> x4 = ref_t<int>::allocate_array(4);  // allocate an array of four int's
//    ref_t<int>::allocate_array(8, 0);  // allocate an array of 8 copies of '0'
//    ref_t<int>::allocate(true, 0);     // allocate an int (fast allocator), set to '0'
//
// You can use ref_t<BaseType, DerivedType> or weak_ref_t<BaseType, DerivedType> to create
// a reference that refers to an object of type BaseType but for construction and
// dereferencing purposes, DerivedType is used. You can then down-cast such a reference if
// so desired (see below sample):
//
//    ref_t<base_t, derived_t> foo = ref_t<base_t, derived_t>::allocate(true);
//    ref_t<base_t> bar = foo;  // down-cast
//
// To cast back up again, use asgn_cast() or the constructor form that takes a second bool
// argument. That bool argument is not used (it can be true or false) but simply indicates
// to the ref_t<> or weak_ref_t<> constructor that we may be up-casting. The C++ compiler
// should generate an error if the types are unrelated or if the given object doesn't
// extend from the 'base_t' type.
//
//    ref_t<base_t, derived_t> tmp1(bar, true); // cast back up again (constructor call)
//    tmp1.asgn_cast(bar);                      // same effect as above (assignment call)
//
// Some additional notes:
//
//    The 'allocate' method rounds up the size being allocated to the nearest multiple of
// 16 bytes. In general, ref_t<int>::allocate() needs 24 bytes of memory; this means
// 32 bytes will really be allocated (these numbers are for a 64-bit platform). This
// overhead includes the reference count value itself (8 bytes); as well as the v-table
// pointer (8 bytes) and the actual object. It appears that some platforms round the size
// of classes like ref_t<> to the nearest multiple of 64 bits (so ref_t<int>::allocate()
// may actually need 24 bytes for itself despite 8 + 8 + 4 being only 20).
//    Also, the true size of allocated memory is slightly greater since we use boost's
// memory pooling which likely has some overhead of its own (it may store allocation sizes
// in a header, etc.) As stated above, ref_t<int>::allocate() might require 24 bytes, but
// when doing the actual allocation we round that up to the nearest multiple of 16 bytes,
// which takes us to 32 bytes being allocated from the boost memory pool for a ref_t<int>
// object.
// ============================================================================================

#ifndef l_objects_h__infrared_isb__included
#define l_objects_h__infrared_isb__included

// This has to be done first.
namespace boost
{

// boost::intrusive_ptr hook: drops one reference from 'src'. When the count
// is currently 1 the object is destroyed through its own static deallocate()
// hook rather than decremented; a count of 0 marks an object whose
// construction never completed, and such objects are left untouched.
template <class value_type>
inline void intrusive_ptr_release(value_type *src)
{
	if(src == NULL || src->reference_count == 0)
		return;
	if(src->reference_count == 1)
		src->deallocate(src);
	else
		--src->reference_count;
}

// boost::intrusive_ptr hook: adds one reference to 'src'. NULL pointers and
// objects whose count is 0 (construction never completed) are ignored.
template <class value_type>
inline void intrusive_ptr_add_ref(value_type *src)
{
	if(src != NULL && src->reference_count != 0)
		++src->reference_count;
}

}	// namespace boost

// Then we can include intrusive_ptr.hpp for internal use.
// Hopefully the user of infrared/objects.h hasn't already
// included it!
#include <boost/intrusive_ptr.hpp>

// Next comes the objects_internal namespace.
namespace infrared_sysbase
{
namespace objects_internal
{

using boost::intrusive_ptr_add_ref;
using boost::intrusive_ptr_release;

// The pool's allocation unit: a pair of U8 values. All pool requests are
// expressed as a count of these units (see bytes_to_dqwords below), which
// produces the 16-byte rounding described in the header comment at the top
// of this file (assuming U8 is a 64-bit unsigned type -- see that comment).
struct qword_pair_t
{
	U8 first;
	U8 second;
};

// Converts a byte count into the number of qword_pair_t units required to
// hold it, rounding up. A request for zero bytes still consumes one unit.
inline usize_t bytes_to_dqwords(usize_t size_bytes)
{
	if(size_bytes != 0)
		return (size_bytes + sizeof(qword_pair_t) - 1) / sizeof(qword_pair_t);
	return 1;
}

inline void *pool_alloc(bool fast, usize_t size_bytes)
{
	if(fast)
		return boost::fast_pool_allocator<qword_pair_t>::allocate(bytes_to_dqwords(size_bytes));
	return boost::pool_allocator<qword_pair_t>::allocate(bytes_to_dqwords(size_bytes));
}

inline void pool_free_dq(bool fast_mode, usize_t num_dqwords, void *ptr)
{
	if(fast_mode)
		boost::fast_pool_allocator<qword_pair_t>::deallocate((qword_pair_t *)(ptr), num_dqwords);
	else
		boost::pool_allocator<qword_pair_t>::deallocate((qword_pair_t *)(ptr), num_dqwords);
}

inline void pool_free(bool fast_mode, usize_t num_bytes, void *ptr)
{
	pool_free_dq(fast_mode, bytes_to_dqwords(num_bytes), ptr);
}

template <class base_value_type>
class reference_counted_object_base_t
{
public:
	typedef boost::intrusive_ptr<reference_counted_object_base_t<base_value_type> > ref_t;
	typedef reference_counted_object_base_t<base_value_type> *weak_ref_t;

	U8 reference_count : 63;
	U8 fast_alloc : 1;

	virtual ~reference_counted_object_base_t()
	{
	}

	virtual base_value_type *get(usize_t n = 0) = 0;

	virtual const base_value_type *cget(usize_t n = 0) const = 0;

	virtual usize_t size_dqwords() = 0;

	virtual qword_pair_t *get_ptr() = 0;

	virtual usize_t get_array_size() const = 0;

	static void deallocate(reference_counted_object_base_t *src)
	{
		if(src == NULL)
			return;

		// Don't call destructor if construction wasn't successful.
		if(src->reference_count == 0)
			return;
		src->reference_count = 0;

		// Get some information from derived object before destructor is called.
		usize_t num_dqwords = src->size_dqwords();
		qword_pair_t *ptr = src->get_ptr();

		// Call destructor.
		src->~reference_counted_object_base_t();

		// "Free" the memory.
		pool_free_dq(src->fast_alloc, num_dqwords, ptr);
	}
};

// Reference-counted array of value_type. The class embeds the first
// element ('values', which must remain the last member); the remaining
// item_count - 1 elements live in over-allocated memory directly after it.
// Arrays are always taken from the regular (non-"fast") pool.
template <class base_value_type, class value_type = base_value_type>
class reference_counted_array_t :
	public reference_counted_object_base_t<base_value_type>
{
	reference_counted_array_t() { }	// allowed for internal use only
	reference_counted_array_t(const value_type &src) : values(src) { }	// allowed for internal use only
	reference_counted_array_t(const reference_counted_array_t &src) { }	// not allowed
	reference_counted_array_t &operator=(const reference_counted_array_t &src) { return *this; }	// not allowed

	// Bytes needed for the whole allocation: the class itself already
	// contains one element, so only item_countT - 1 extra slots are added.
	// NOTE: uses the injected class name (the *current* specialization);
	// the original wrote sizeof(reference_counted_array_t<value_type>),
	// which names a different instantiation whenever base and derived
	// types differ.
	static usize_t alloc_size_bytes(usize_t item_countT)
	{
		return sizeof(reference_counted_array_t) + sizeof(value_type) * (item_countT - 1);
	}

public:
	typedef typename reference_counted_object_base_t<base_value_type>::ref_t ref_type;

	// Number of value_type elements stored.
	usize_t item_count;

	usize_t get_array_size() const { return item_count; }

	// Pointer to element n -- no bounds checking.
	virtual base_value_type *get(usize_t n = 0)
	{
		return ((value_type *)(&values) + n);
	}

	virtual const base_value_type *cget(usize_t n = 0) const
	{
		// Const-correct: no need to cast constness away here.
		return ((const value_type *)(&values) + n);
	}

	// Allocates an array of item_countT default-constructed elements with
	// an initial reference count of 1 (a count of 0 is requested).
	static reference_counted_array_t *raw_allocate(usize_t item_countT)
	{
		if(item_countT == 0)
			item_countT = 1;
		reference_counted_array_t *p =
			(reference_counted_array_t *)pool_alloc(false, alloc_size_bytes(item_countT));

		// Construct the object (and its first element) before touching any
		// members: writing through 'p' before placement-new would access an
		// object whose lifetime has not yet begun.
		(void) new (p) reference_counted_array_t();

		// Construct the remaining elements in the over-allocated tail.
		value_type *q = &p->values;
		for(usize_t n = 1; n < item_countT; ++n)
		{
			++q;
			(void) new (q) value_type();
		}

		p->item_count = item_countT;
		p->fast_alloc = 0;	// arrays always use the regular pool
		p->reference_count = 1;
		return p;
	}

	// As above, but every element is copy-constructed from 'src'.
	static reference_counted_array_t *raw_allocate(usize_t item_countT, const value_type &src)
	{
		if(item_countT == 0)
			item_countT = 1;
		reference_counted_array_t *p =
			(reference_counted_array_t *)pool_alloc(false, alloc_size_bytes(item_countT));

		// Construct before assigning members (see note in the overload above).
		(void) new (p) reference_counted_array_t(src);

		value_type *q = &p->values;
		for(usize_t n = 1; n < item_countT; ++n)
		{
			++q;
			(void) new (q) value_type(src);
		}

		p->item_count = item_countT;
		p->fast_alloc = 0;	// arrays always use the regular pool
		p->reference_count = 1;
		return p;
	}

	// Wraps raw_allocate in an intrusive_ptr. The ptr's add_ref bumps the
	// count to 2, so release once to leave it owned solely by the ptr.
	static ref_type allocate(usize_t item_countT)
	{
		reference_counted_array_t *p = raw_allocate(item_countT);
		boost::intrusive_ptr<reference_counted_object_base_t<base_value_type> > ref = (reference_counted_object_base_t<base_value_type> *)(p);
		intrusive_ptr_release(p);
		return ref;
	}

	static ref_type allocate(usize_t item_countT, const value_type &src)
	{
		reference_counted_array_t *p = raw_allocate(item_countT, src);
		boost::intrusive_ptr<reference_counted_object_base_t<base_value_type> > ref = (reference_counted_object_base_t<base_value_type> *)(p);
		intrusive_ptr_release(p);
		return ref;
	}

	virtual usize_t size_dqwords()
	{
		// Must mirror the size used at allocation time exactly.
		return bytes_to_dqwords(alloc_size_bytes(item_count));
	}

	virtual ~reference_counted_array_t()
	{
		// Call destructors on any items beyond the first one, which will be
		// automatically destructed.
		for(usize_t n = 1; n < item_count; ++n)
			((value_type *)(&values) + n)->~value_type();
	}

	virtual qword_pair_t *get_ptr()
	{
		return (qword_pair_t *)(void *)(this);
	}

	value_type values; // warning: this must be the last item! its true size is dynamic.
};

// Reference-counted holder for a single value_type object (tracked through
// the base-typed interface reference_counted_object_base_t<base_value_type>).
template <class base_value_type, class value_type = base_value_type>
class reference_counted_object_t :
	public reference_counted_object_base_t<base_value_type>
{
	reference_counted_object_t() { }	// allowed for internal use only
	reference_counted_object_t(const value_type &src) : value(src) { }	// allowed for internal use only
	reference_counted_object_t(const reference_counted_object_t &src) { }	// not allowed
	reference_counted_object_t &operator=(const reference_counted_object_t &src) { return *this; }	// not allowed

public:
	value_type value;

	virtual usize_t get_array_size() const { return 1; }

	virtual usize_t size_dqwords()
	{
		// NOTE: uses the injected class name (the *current* specialization);
		// the original wrote sizeof(reference_counted_object_t<value_type>),
		// which names a different instantiation whenever base and derived
		// types differ.
		return bytes_to_dqwords(sizeof(reference_counted_object_t));
	}

	virtual ~reference_counted_object_t()
	{
	}

	// 'n' is ignored: a single object behaves like a one-element array.
	virtual base_value_type *get(usize_t)
	{
		return &value;
	}

	virtual const base_value_type *cget(usize_t) const
	{
		return &value;
	}

	virtual qword_pair_t *get_ptr()
	{
		return (qword_pair_t *)(void *)(this);
	}

	// Allocates a default-constructed object with a reference count of 1.
	static reference_counted_object_t *raw_allocate(bool fast)
	{
		reference_counted_object_t *p =
			(reference_counted_object_t *)pool_alloc(fast, sizeof(reference_counted_object_t));
		// Construct before assigning members: writing through 'p' before
		// placement-new would access an object whose lifetime hasn't begun.
		(void) new (p) reference_counted_object_t();
		p->fast_alloc = (fast) ? 1 : 0;
		p->reference_count = 1;
		return p;
	}

	// As above, but copy-constructs the payload from 'src'.
	static reference_counted_object_t *raw_allocate(bool fast, const value_type &src)
	{
		reference_counted_object_t *p =
			(reference_counted_object_t *)pool_alloc(fast, sizeof(reference_counted_object_t));
		(void) new (p) reference_counted_object_t(src);
		p->fast_alloc = (fast) ? 1 : 0;
		p->reference_count = 1;
		return p;
	}

	typedef typename reference_counted_object_base_t<base_value_type>::ref_t ref_type;

	// Wraps raw_allocate in an intrusive_ptr. The ptr's add_ref bumps the
	// count to 2, so release once to leave it owned solely by the ptr.
	static ref_type allocate(bool fast)
	{
		reference_counted_object_t *p = raw_allocate(fast);
		boost::intrusive_ptr<reference_counted_object_base_t<base_value_type> > ref = (reference_counted_object_base_t<base_value_type> *)(p);
		intrusive_ptr_release(p);
		return ref;
	}

	static ref_type allocate(bool fast, const value_type &src)
	{
		reference_counted_object_t *p = raw_allocate(fast, src);
		boost::intrusive_ptr<reference_counted_object_base_t<base_value_type> > ref = (reference_counted_object_base_t<base_value_type> *)(p);
		intrusive_ptr_release(p);
		return ref;
	}
};

// Non-owning counterpart of ref_base_t: stores a bare pointer to the
// reference-counted object and never touches its reference count. The
// caller must ensure the target outlives every weak reference to it
// (see the "Do not do this" note in the header comment above).
template <class base_value_type>
class weak_ref_base_t
{
public:
	typedef base_value_type base_t;
	typedef typename reference_counted_object_base_t<base_value_type>::weak_ref_t internal_reference;

protected:
	internal_reference ref;

public:
	weak_ref_base_t() : ref(NULL) { }

	weak_ref_base_t(internal_reference refT) : ref(refT) { }

	// True when no object is currently referenced.
	bool is_null() const { return !ref; }

	// Element count of the referenced object (1 unless it is an array).
	usize_t array_size() const { return ref->get_array_size(); }

	// Exposes the raw pointer for internal plumbing.
	internal_reference internal_ref() const { return ref; }

	void set_ref(internal_reference r) { ref = r; }

	// Forgets the target without affecting its reference count.
	void clear() { ref = NULL; }
};

// Common state shared by every strong reference: holds the boost
// intrusive_ptr that keeps the underlying pooled object alive.
template <class base_value_type>
class ref_base_t
{
public:
	typedef base_value_type base_t;
	typedef typename reference_counted_object_base_t<base_value_type>::ref_t internal_reference;

protected:
	internal_reference ref;

public:
	// True when no object is currently referenced.
	bool is_null() const { return !ref; }

	// Element count of the referenced object (1 unless it is an array).
	usize_t array_size() const { return ref->get_array_size(); }

	// Exposes the raw intrusive_ptr for internal plumbing.
	internal_reference internal_ref() const { return ref; }

	void set_ref(internal_reference r) { ref = r; }

	// Drops our share; the target is destroyed when the last strong
	// reference lets go.
	void clear() { ref.reset(); }
};

// ref_t<base, derived> -- a strong (owning) reference to a pooled object
// or array. 'base_value_type' is the type the storage is tracked as;
// 'value_type' (defaulting to the same type) is the type used for
// construction and dereferencing. See the header comment at the top of
// this file for usage examples. The "extends" checks below are done by
// initializing throw-away pointers: the compiler rejects the conversion
// if the types are unrelated.
template <class base_value_type, class value_type = base_value_type>
class ref_t :
	public ref_base_t<base_value_type>
{
public:
	typedef value_type value_t;
	typedef boost::intrusive_ptr<reference_counted_object_t<base_value_type, value_type> > internal_derived_reference;
	typedef base_value_type base_type_t;
	typedef typename reference_counted_object_base_t<base_value_type>::ref_t internal_reference_type;
	typedef ref_t<base_value_type, base_value_type> base_ref_type;

	// Down-cast helper: same object, viewed through the base type only.
	base_ref_type base_ref()
	{
		ref_t<base_value_type, base_value_type> r;
		r.set_ref(ref_base_t<base_value_type>::internal_ref());
		return r;
	}

	static internal_reference_type internal_allocate_array(usize_t count)
	{
		return reference_counted_array_t<base_value_type, value_type>::allocate(count);
	}

	static internal_reference_type internal_allocate_array(usize_t count, const value_type &src)
	{
		return reference_counted_array_t<base_value_type, value_type>::allocate(count, src);
	}

	static internal_reference_type internal_allocate(bool fast)
	{
		return reference_counted_object_t<base_value_type, value_type>::allocate(fast);
	}

	static internal_reference_type internal_allocate(bool fast, const value_type &src)
	{
		return reference_counted_object_t<base_value_type, value_type>::allocate(fast, src);
	}

	// Allocates one default-constructed object; 'fast' selects boost's
	// fast pool allocator.
	static ref_t allocate(bool fast)
	{
		ref_t r;
		r.ref = internal_allocate(fast);
		return r;
	}

	// Allocates one object copy-constructed from 'src'.
	static ref_t allocate(bool fast, const value_type &src)
	{
		ref_t r;
		r.ref = internal_allocate(fast, src);
		return r;
	}

	// Allocates an array of 'count' default-constructed elements (arrays
	// always use the regular, non-"fast" pool).
	static ref_t allocate_array(usize_t count)
	{
		ref_t r;
		r.ref = internal_allocate_array(count);
		return r;
	}

	// Allocates an array of 'count' copies of 'src'.
	static ref_t allocate_array(usize_t count, const value_type &src)
	{
		ref_t r;
		r.ref = internal_allocate_array(count, src);
		return r;
	}

	value_type &operator*()
	{
		return *(value_type *)(ref_base_t<base_value_type>::internal_ref()->get());
	}

	value_type *operator->()
	{
		return (value_type *)(ref_base_t<base_value_type>::internal_ref()->get());
	}

	value_type &operator[](usize_t n)
	{
		// warning: no bounds checking here!
		return *(value_type *)(ref_base_t<base_value_type>::internal_ref()->get(n));
	}

	usize_t size() const
	{
		return this->array_size();
	}

	// Down-cast assignment: 'other_type' must extend 'value_type'.
	// Returns *this by reference; the original returned a copy, which cost
	// an extra intrusive refcount round-trip on every assignment.
	template <class other_type>
	ref_t<base_value_type, value_type> &operator=(const ref_t<base_value_type, other_type> &src)
	{
		// Make sure 'other_type' extends 'value_type'.
		other_type *ignore_other = NULL;
		value_type *ignore_value = ignore_other;

		// Make sure 'value_type' extends 'base_value_type'.
		base_value_type *ignore_base = ignore_value;

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_value;
		(void) ignore_base;

		this->set_ref(src.internal_ref());

		return *this;
	}

	// Up-cast assignment: only requires other_type to extend the base type.
	template <class other_type>
	ref_t<base_value_type, value_type> &asgn_cast(const ref_t<base_value_type, other_type> &src)
	{
		other_type *ignore_other = NULL;
		base_value_type *ignore_base = ignore_other;	// other type must extend base type

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_base;

		this->set_ref(src.internal_ref());

		return *this;
	}

	ref_t()
	{
	}

	ref_t(const ref_t &src) :
		ref_base_t<base_value_type>(src)
	{
	}

	~ref_t()
	{
	}

	// Converting copy: 'other_type' must extend 'value_type'.
	template <class other_type>
	ref_t(const ref_t<base_value_type, other_type> &src)
	{
		// Make sure 'other_type' extends value_type.
		other_type *ignore_other = NULL;
		value_type *ignore_value = ignore_other;
		base_value_type *ignore_base = ignore_value;

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_value;
		(void) ignore_base;

		this->set_ref(src.internal_ref());
	}

	// Allows up-casts; the bool is only a tag (see header comment above).
	template <class other_type>
	ref_t(const ref_t<base_value_type, other_type> &src, bool)
	{
		other_type *ignore_other = NULL;
		base_value_type *ignore_base = ignore_other;	// other type must extend base type

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_base;

		this->set_ref(src.internal_ref());
	}
};

// weak_ref_t<base, derived> -- non-owning counterpart of ref_t<>. It does
// not affect the target's reference count, so the target must be kept
// alive by some ref_t<> for as long as this weak reference is used (see
// the header comment at the top of this file). The "extends" checks below
// work by initializing throw-away pointers: the compiler rejects the
// conversion if the types are unrelated.
template <class base_value_type, class value_type = base_value_type>
class weak_ref_t :
	public weak_ref_base_t<base_value_type>
{
public:
	typedef value_type value_t;
	// NOTE(review): boost::weak_ptr is unrelated to the intrusive-count
	// scheme used here, and <boost/weak_ptr.hpp> is not included by this
	// header; this typedef appears vestigial -- confirm before relying on it.
	typedef boost::weak_ptr<reference_counted_object_t<base_value_type, value_type> > internal_derived_reference;
	typedef base_value_type base_type_t;
	typedef typename reference_counted_object_base_t<base_value_type>::weak_ref_t internal_reference_type;
	typedef weak_ref_t<base_value_type, base_value_type> base_ref_type;

	// Down-cast helper: same object, viewed through the base type only.
	base_ref_type base_ref()
	{
		weak_ref_t<base_value_type, base_value_type> r;
		r.set_ref(weak_ref_base_t<base_value_type>::internal_ref());
		return r;
	}

	value_type &operator*()
	{
		return *this->ref->get();
	}

	value_type *operator->()
	{
		return this->ref->get();
	}

	value_type &operator[](usize_t n)
	{
		// warning: no bounds checking here!
		return *this->ref->get(n);
	}

	usize_t size() const
	{
		return this->array_size();
	}

	// Down-cast assignment from another weak reference. Returns *this by
	// reference; the original returned a copy on every assignment.
	template <class other_type>
	weak_ref_t<base_value_type, value_type> &operator=(const weak_ref_t<base_value_type, other_type> &src)
	{
		// Make sure 'other_type' extends 'value_type'.
		other_type *ignored_other = NULL;
		value_type *ignored_value = ignored_other;

		// Make sure 'value_type' extends 'base_value_type'.
		base_value_type *ignore_base = ignored_value;

		// Get rid of compiler warnings (the original omitted these casts
		// here, unlike every sibling method).
		(void) ignored_other;
		(void) ignored_value;
		(void) ignore_base;

		this->set_ref(src.internal_ref());

		return *this;
	}

	// Up-cast assignment: only requires other_type to extend the base type.
	template <class other_type>
	weak_ref_t<base_value_type, value_type> &asgn_cast(const weak_ref_t<base_value_type, other_type> &src)
	{
		other_type *ignore_other = NULL;
		base_value_type *ignore_base = ignore_other;	// other type must extend base type

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_base;

		this->set_ref(src.internal_ref());

		return *this;
	}

	// Down-cast assignment from a strong reference (does not take a share
	// of ownership -- see the class comment).
	template <class other_type>
	weak_ref_t<base_value_type, value_type> &operator=(const ref_t<base_value_type, other_type> &src)
	{
		// Make sure 'other_type' extends 'value_type'.
		other_type *ignore_other = NULL;
		value_type *ignore_value = ignore_other;

		// Make sure 'value_type' extends 'base_value_type'.
		base_value_type *ignore_base = ignore_value;

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_value;
		(void) ignore_base;

		this->set_ref(src.internal_ref().get());

		return *this;
	}

	// Up-cast assignment from a strong reference.
	template <class other_type>
	weak_ref_t<base_value_type, value_type> &asgn_cast(const ref_t<base_value_type, other_type> &src)
	{
		other_type *ignore_other = NULL;
		base_value_type *ignore_base = ignore_other;	// other type must extend base type

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_base;

		this->set_ref(src.internal_ref().get());

		return *this;
	}

	weak_ref_t()
	{
	}

	weak_ref_t(const weak_ref_t &src) :
		weak_ref_base_t<base_value_type>(src)
	{
	}

	~weak_ref_t()
	{
	}

	// Converting copy: 'other_type' must extend 'value_type'.
	template <class other_type>
	weak_ref_t(const weak_ref_t<base_value_type, other_type> &src)
	{
		// Make sure 'other_type' extends value_type.
		other_type *ignored_other = NULL;
		value_type *ignored_value = ignored_other;
		base_value_type *ignore_base = ignored_value;

		// Get rid of compiler warnings (the original omitted these casts
		// here, unlike every sibling method).
		(void) ignored_other;
		(void) ignored_value;
		(void) ignore_base;

		this->set_ref(src.internal_ref());
	}

	// Allows up-casts; the bool is only a tag (see header comment above).
	template <class other_type>
	weak_ref_t(const weak_ref_t<base_value_type, other_type> &src, bool)
	{
		other_type *ignore_other = NULL;
		base_value_type *ignore_base = ignore_other;	// other type must extend base type

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_base;

		this->set_ref(src.internal_ref());
	}

	// Converting copy from a strong reference.
	template <class other_type>
	weak_ref_t(const ref_t<base_value_type, other_type> &src)
	{
		// Make sure 'other_type' extends value_type.
		other_type *ignore_other = NULL;
		value_type *ignore_value = ignore_other;
		base_value_type *ignore_base = ignore_value;

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_value;
		(void) ignore_base;

		this->set_ref(src.internal_ref().get());
	}

	// Allows up-casts from a strong reference; the bool is only a tag.
	template <class other_type>
	weak_ref_t(const ref_t<base_value_type, other_type> &src, bool)
	{
		other_type *ignore_other = NULL;
		base_value_type *ignore_base = ignore_other;	// other type must extend base type

		// Get rid of compiler warnings.
		(void) ignore_other;
		(void) ignore_base;

		this->set_ref(src.internal_ref().get());
	}
};

}	// namespace objects_internal

// These are the main classes intended for use by the user of infrared/objects.h.
using objects_internal::weak_ref_t;
using objects_internal::ref_t;

// prototype:
//    inline void *pool_alloc(bool fast, usize_t size_bytes);
//
//    This inline function is a suitable replacement for malloc().
// The size of data actually allocated is equal to the requested
// size, rounded up to the nearest multiple of 16 bytes. If you
// request 0 bytes, you really get 16 bytes.
//    You can specify whether or not to use boost's fast allocator
// by specifying true or false for the 'fast' argument.
using objects_internal::pool_alloc;

// prototype:
//    inline void pool_free(bool fast_mode, usize_t num_bytes, void *ptr)
//
//    This inline function frees memory allocated by pool_alloc(). It
// requires the size in bytes and "fast" mode status that were passed
// to pool_alloc(); along with the pointer returned from pool_alloc()
// that you want to free.
using objects_internal::pool_free;

}	// namespace infrared_sysbase

#endif	// l_objects_h__infrared_isb__included
