#include "Tree.h"

#include <fstream>
#include <iostream>
#include <string>

using namespace std;

//////////////////////////////////////////////////////////////////
// TreeClassifier Object										//
//////////////////////////////////////////////////////////////////

// Default constructor. Builds an empty classifier: no tree and a
// zero-sized sub-window.
TreeClassifier::TreeClassifier()
{
	_root = NULL;
	_sub_w = 0;
	_sub_h = 0;
}

// Constructs a classifier for sub-windows of the given width and
// height. The tree itself starts out empty.
TreeClassifier::TreeClassifier(int sub_w, int sub_h)
{
	_root = NULL;
	_sub_w = sub_w;
	_sub_h = sub_h;
}

// Constructor for tree classifier which loads a previously
// serialised tree from the given file.
TreeClassifier::TreeClassifier(char *directory)
{
	// The file format does not store the sub-window dimensions, so
	// default them here; the other constructors always set these and
	// this one previously left them uninitialized.
	_sub_w = 0;
	_sub_h = 0;
	_root = NULL;
	ifstream input;
	input.open(directory);
	// Don't try to unserialise from a stream that failed to open --
	// the classifier is simply left empty.
	if(!input.is_open())
	{
		cout << "Could not open tree file " << directory << endl;
		return;
	}
	UnserialiseNode(_root, input);
	input.close();
}

// TODO: TreeClassifier owns heap-allocated TreeNodes through _root but
// declares no destructor, copy constructor, or copy-assignment
// operator (Rule of Three). Destroying a classifier currently leaks
// the whole tree, and copying one aliases it -- implement all three
// before this class is copied or used with owning semantics.

// This function saves a tree in text format.
void TreeClassifier::SerialiseTree(char *directory)
{
	ofstream output;
	output.open(directory);
	int n = 0;
	SerialiseNode(_root, output, n);
	output.close();
}

// Recursively writes one node (and its subtrees) to the output
// stream. The format per node is: a "b<index>" begin marker, the ten
// node fields one per line, a left-branch marker ("lp" followed by
// the left subtree, or "ln" if absent), a right-branch marker
// ("rp"/"rn") likewise, and finally an "e<index>" end marker.
void TreeClassifier::SerialiseNode(TreeNode* current, ofstream& output, int& n)
{
	// Assign this node its pre-order, 1-based index and emit fields.
	const int index = ++n;
	output << "b" << index << "\n";
	output << current->_tl_x << "\n";
	output << current->_tl_y << "\n";
	output << current->_br_x << "\n";
	output << current->_br_y << "\n";
	output << current->_f << "\n";
	output << current->_o1 << "\n";
	output << current->_o2 << "\n";
	output << current->_thresh << "\n";
	output << current->_pos << "\n";
	output << current->_neg << "\n";

	// Left branch marker and (if present) subtree.
	if(current->_l)
	{
		output << "lp" << "\n";
		SerialiseNode(current->_l, output, n);
	}
	else
	{
		output << "ln";
	}
	output << "\n";

	// Right branch marker and (if present) subtree.
	if(current->_r)
	{
		output << "rp" << "\n";
		SerialiseNode(current->_r, output, n);
	}
	else
	{
		output << "rn";
	}
	output << "\n";

	// End marker carries the same index as the begin marker (note:
	// no trailing newline, matching the original format).
	output << "e" << index;
}

// This function is used to recursively read a tree from a text
// file. The expected token stream (as written by SerialiseNode) is:
// "b<index>", ten node fields, "lp"/"ln" (left subtree inline after
// "lp"), "rp"/"rn" likewise, then the matching "e<index>" marker.
void TreeClassifier::UnserialiseNode(TreeNode *&current, std::ifstream &input)
{
	// The begin/end markers carry the node index ("b12", "e12"), which
	// can have any number of digits -- read markers into a string
	// rather than the previous fixed char[6] buffer, which overflowed
	// once the index grew past five characters.
	string marker;
	int intbuf;
	// Stop cleanly on a truncated or unreadable file (the old code
	// inspected an uninitialized buffer in that case).
	if(!(input >> marker))
		return;
	// Check to see if this is the start of a node
	if(marker[0] == 'b')
	{
		// Create the next node in the tree
		current = new TreeNode;
		input >> current->_tl_x;
		input >> current->_tl_y;
		input >> current->_br_x;
		input >> current->_br_y;
		// The feature type is serialised as an integer: non-zero means
		// DOMINANT_EDGE, zero means EDGE_RATIO.
		input >> intbuf;
		if(intbuf)
			current->_f = DOMINANT_EDGE;
		else
			current->_f = EDGE_RATIO;
		input >> current->_o1;
		input >> current->_o2;
		input >> current->_thresh;
		input >> current->_pos;
		input >> current->_neg;
		if(!(input >> marker))
			marker = "";

		current->_l = NULL;
		current->_r = NULL;

		// Check if there is a left branch ("lp" = present, "ln" = none)
		if(marker.size() > 1 && marker[1] == 'p')
			UnserialiseNode(current->_l, input);

		if(!(input >> marker))
			marker = "";

		// Check if there is a right branch ("rp" = present, "rn" = none)
		if(marker.size() > 1 && marker[1] == 'p')
			UnserialiseNode(current->_r, input);

		// Consume the matching "e<index>" end marker.
		input >> marker;
	}
}

// Prints the entire tree held by this classifier, starting from the
// root node. Useful for debugging.
void TreeClassifier::TraverseTree()
{
	TraverseNode(_root);
}

// This function traverses a tree recursively and prints out the
// contents of the nodes to the screen. It is useful for debugging
// to see the contents of the tree.
void TreeClassifier::TraverseNode(TreeNode* current)
{
	// Bail out on NULL first. The original code recursed via
	// current->_l / current->_r OUTSIDE the null check, so calling it
	// on an empty tree (or reaching a NULL child) dereferenced NULL.
	if(!current)
		return;

	cout << "The top left corner is (" << current->_tl_x << ", " 
		 << current->_tl_y << ")" << endl;
	cout << "The bottom right corner is (" << current->_br_x << ", " 
		 << current->_br_y << ")" << endl;
	cout << "The feature is a ";
	if(current->_f == DOMINANT_EDGE)
	{
		cout << "dominant orientation feature" << endl;
		cout << "Orientation " << current->_o1 << " is used" << endl;	
	}
	else if(current->_f == EDGE_RATIO)
	{
		cout << "ratio of edges feature" << endl;
		cout << "Orientations " << current->_o1 << " and " << current->_o2 << " are used" << endl;
	}
	cout << "The threshold for this node is " << current->_thresh << endl;
	cout << "The positive probability for this node is " << current->_pos << endl;
	cout << "The negative probability for this node is " << current->_neg << endl;

	// Recurse into both children; the guard above handles NULL.
	TraverseNode(current->_l);
	TraverseNode(current->_r);
}

//////////////////////////////////////////////////////////////////
// This function trains a tree classifier. It begins by checking
// if there is enough memory to hold all of the training data.
// NOTE(review): this routine is clearly unfinished -- the small-set
// branch is empty and the block loop does no work yet -- so only the
// resource handling has been corrected, not the missing training.
void TreeClassifier::LearnTree(ExampleSet *input_set)
{
	// Check how much memory we will need to proceed
	if(input_set->GetSizeOfSet() < MAX_RAM)
	{
		// TODO: in-memory training path not yet implemented.
		;
	}
	else
	{
		// Set dimensions for the classifier.
		_sub_w = 64;
		_sub_h = 128;

		// Create a dynamic array to store features.
		Feature* feature_pool[FEATURE_TESTS];
		Feature** f_pool_pointer = feature_pool;
		for(int i=0; i<FEATURE_TESTS; i++)
			*f_pool_pointer++ = new Feature(_sub_w,_sub_h);

		// Create an array of thresholds, THRESHOLD_TESTS per feature.
		f_pool_pointer = feature_pool;
		float threshold_pool[FEATURE_TESTS*THRESHOLD_TESTS];
		float* t_pool_pointer = threshold_pool;
		for(int i=0; i<FEATURE_TESTS; i++)
		{
			for(int j=0; j<THRESHOLD_TESTS; j++)
			{
				*t_pool_pointer++ = (*f_pool_pointer)->GenerateThreshold();
			}
			f_pool_pointer++;
		}

		// Evaluate the feature pool on the training data. This
		// requires loading the training data in blocks. Begin by
		// creating an array of pointers, where each pointer 
		// points to a set of feature values for the training 
		// data.
		float** f_vals_array = new float*[FEATURE_TESTS];
		float** f_vals_pointer = f_vals_array;
		for(int i=0; i<FEATURE_TESTS; i++)
			*f_vals_pointer++ = new float[input_set->_n];

		// Figure out how many blocks will have to be loaded
		int mem_blocks = static_cast<int>((input_set->GetSizeOfSet()/MAX_RAM) + 1.0f);
		cout << mem_blocks << " memory blocks will be needed for this round of training." << endl;
		ExampleNode* e_block_pointer = input_set->_head;
		// Stop at the end of the list: the original loop dereferenced
		// ->next mem_blocks times unconditionally, walking past a
		// short list onto a NULL pointer.
		for(int i=0; i<mem_blocks && e_block_pointer; i++)
		{
			e_block_pointer = e_block_pointer->next;
		}

		// Free the per-feature value arrays (these were previously
		// leaked on every call).
		f_vals_pointer = f_vals_array;
		for(int i=0; i<FEATURE_TESTS; i++)
			delete [] *f_vals_pointer++;
		delete [] f_vals_array;

		// Wipe dynamically allocated features.
		f_pool_pointer = feature_pool;
		for(int i=0; i<FEATURE_TESTS; i++)
			delete *f_pool_pointer++;
	}
}
//////////////////////////////////////////////////////////////////

// This function trains a tree classifier on a set of labelled
// data: it picks the best feature/threshold split for the root and
// then grows the tree recursively through LearnNode.
void TreeClassifier::LearnTree(IntegralHistSet* input_set)
{
	// Set dimensions for the classifier.
	_sub_w = input_set->_head->_hist->_w;
	_sub_h = input_set->_head->_hist->_h;

	// Create a ghost set for later use.
	GhostSet g_set(input_set);

	// Create variables to hold the best feature, threshold,
	// information gain, and split set. best_t is initialised so the
	// root node never stores an indeterminate threshold even if no
	// candidate split improves on the initial gain.
	Feature best_f(_sub_w, _sub_h);
	float best_t = 0.0f;
	float best_gain = -1000000;
	GhostSet best_left;
	GhostSet best_right;

	// Draw random features
	for(int i=0; i<FEATURE_TESTS; i++)
	{
		Feature f(_sub_w, _sub_h);
		// NOTE(review): if EvaluateFeature allocates this array it is
		// never freed here -- confirm ownership and add a delete[] if
		// the caller is responsible.
		float* fvals = f.EvaluateFeature(&g_set);
		// Draw random thresholds. The bound was FEATURE_TESTS, which
		// conflated the two constants; THRESHOLD_TESTS is the number
		// of thresholds tried per feature, as in the threshold pool
		// of the ExampleSet-based LearnTree.
		for(int j=0; j<THRESHOLD_TESTS; j++)
		{
			float t = f.GenerateThreshold();
			SplitSet s_set(&g_set, fvals, t);
			// If this is the best information gain, remember the
			// feature, threshold, information gain, and split 
			// set.
			if(s_set.InfoGain() > best_gain)
			{
				best_gain = s_set.InfoGain();
				best_f = f;
				best_t = t;
				best_left = s_set._left_set;
				best_right = s_set._right_set;
			}
		}
	}

	// Add a new node to the tree
	_root = new TreeNode;
	_root->_tl_x = best_f._tl_x;
	_root->_tl_y = best_f._tl_y;
	_root->_br_x = best_f._br_x;
	_root->_br_y = best_f._br_y;
	_root->_f = best_f._f;
	_root->_o1 = best_f._o1;
	_root->_o2 = best_f._o2;
	_root->_thresh = best_t;
	// Class probabilities at this node: fraction of positive and
	// negative examples reaching it.
	_root->_pos = ((float)g_set._pos)/((float)g_set._n);
	_root->_neg = ((float)g_set._neg)/((float)g_set._n);

	// Set pointers to NULL (to be safe) and learn recursively.
	_root->_l = NULL;
	_root->_r = NULL;
	LearnNode(_root->_l, &best_left, 1);
	LearnNode(_root->_r, &best_right, 1);
}

// This function is called recursively to learn a decision tree.
// It either returns a split node (with children learned on the two
// halves of the split) or a leaf node carrying class probabilities.
void TreeClassifier::LearnNode(TreeNode *&current, GhostSet* g_set, int depth)
{
	cout << "TRAINING AT DEPTH " << depth << endl;
	// Create variables to hold the best feature, threshold,
	// information gain, and split set. best_t is initialised so a
	// node never stores an indeterminate threshold even if no
	// candidate split improves on the initial gain.
	Feature best_f(_sub_w, _sub_h);
	float best_t = 0.0f;
	float best_gain = -1000000;
	GhostSet best_left;
	GhostSet best_right;

	// Draw random features
	for(int i=0; i<FEATURE_TESTS; i++)
	{
		Feature f(_sub_w, _sub_h);
		// NOTE(review): if EvaluateFeature allocates this array it is
		// never freed here -- confirm ownership and add a delete[] if
		// the caller is responsible.
		float* fvals = f.EvaluateFeature(g_set);
		// Draw random thresholds. The bound was FEATURE_TESTS, which
		// conflated the two constants; THRESHOLD_TESTS is the number
		// of thresholds tried per feature, as in the threshold pool
		// of the ExampleSet-based LearnTree.
		for(int j=0; j<THRESHOLD_TESTS; j++)
		{
			float t = f.GenerateThreshold();
			SplitSet s_set(g_set, fvals, t);
			// If this is the best information gain, remember the
			// feature, threshold, information gain, and split 
			// set.
			if(s_set.InfoGain() > best_gain)
			{
				best_gain = s_set.InfoGain();
				best_f = f;
				best_t = t;
				best_left = s_set._left_set;
				best_right = s_set._right_set;
			}
		}
	}

	// Check that the maximum tree depth has not been reached and
	// that there are enough training example at this node, and
	// that a suitable feature has been found.
	if((depth < MAX_TREE_DEPTH) && (g_set->_n > LEAF_THRESHOLD) && best_gain != -1000000)
	{
		// Add a split node and call the functions on the subsets.
		current = new TreeNode;
		current->_tl_x = best_f._tl_x;
		current->_tl_y = best_f._tl_y;
		current->_br_x = best_f._br_x;
		current->_br_y = best_f._br_y;
		current->_f = best_f._f;
		current->_o1 = best_f._o1;
		current->_o2 = best_f._o2;
		current->_thresh = best_t;
		current->_pos = ((float)g_set->_pos)/((float)g_set->_n);
		current->_neg = ((float)g_set->_neg)/((float)g_set->_n);
		current->_l = NULL;
		current->_r = NULL;
		LearnNode(current->_l, &best_left, depth+1);
		LearnNode(current->_r, &best_right, depth+1);
	}
	else
	{
		// Return a leaf node and terminate. Leaf nodes carry only the
		// class probabilities; feature fields are zeroed.
		current = new TreeNode;
		current->_tl_x = 0;
		current->_tl_y = 0;
		current->_br_x = 0;
		current->_br_y = 0;
		current->_f = EDGE_RATIO;
		current->_o1 = 0;
		current->_o2 = 0;
		current->_thresh = 0.0f;
		current->_pos = ((float)g_set->_pos)/((float)g_set->_n);
		current->_neg = ((float)g_set->_neg)/((float)g_set->_n);
		current->_l = NULL;
		current->_r = NULL;	
		cout << "LEAF NODE CREATED, STOPPING TRAINING HERE AT DEPTH " << depth << endl;
	}
}

// Runs the tree classifier over every image in the set and returns,
// for each image, a two-element array holding the positive and
// negative probabilities from the leaf it reached. The caller owns
// the returned outer array and every inner array.
float** TreeClassifier::RunTreeOnSameSizeSet(IntegralHistSet* histset)
{
	float** results = new float*[histset->_n];
	int idx = 0;

	// Walk the linked list of histograms, classifying each one.
	for(IntegralHistNode* node = histset->_head; node; node = node->next)
	{
		TreeNode* leaf = RunTreeOnSameSizeImage(node->_hist);
		float* row = new float[2];
		row[0] = leaf->_pos;
		row[1] = leaf->_neg;
		results[idx++] = row;
	}

	return results;
}

// This function runs the tree classifier on a single image. The
// return value is a pointer to the leaf node, as this allows the
// histogram values to be reached without returning dynamically
// allocated floating point values.
// NOTE(review): assumes _root is non-NULL and that every internal
// node has both children (true for trees built by LearnNode /
// UnserialiseNode) -- confirm before calling on an empty classifier.
TreeNode* TreeClassifier::RunTreeOnSameSizeImage(IntegralHist* hist)
{
	TreeNode* temp = _root;
	// Loop until we reach a leaf node (which has no children)
	while(temp->_l && temp->_r)
	{
		float f_val;
		// Evaluate the current feature on the image. Start by 
		// checking the feature type.
		if(temp->_f == EDGE_RATIO)
		{
			// Sum of the rectangle (tl..br) in orientation bin _o1 via
			// the four-corner integral-histogram lookup; corners that
			// fall outside the image (row or column 0) contribute 0.
			// Compute first edge feature
			float h1 = ((temp->_tl_y == 0)||(temp->_tl_x == 0)) ? 0 : hist->_histdata[((temp->_tl_y-1)*hist->_w*ORIENTATION_BINS)+((temp->_tl_x-1)*ORIENTATION_BINS)+temp->_o1];// Top left
			float h2 = (temp->_tl_y == 0) ? 0 : hist->_histdata[((temp->_tl_y-1)*hist->_w*ORIENTATION_BINS)+(temp->_br_x*ORIENTATION_BINS)+temp->_o1];							// Top right
			float h3 = (temp->_tl_x == 0) ? 0 : hist->_histdata[(temp->_br_y*hist->_w*ORIENTATION_BINS)+((temp->_tl_x-1)*ORIENTATION_BINS)+temp->_o1];							// Bottom left
			float h4 = hist->_histdata[(temp->_br_y*hist->_w*ORIENTATION_BINS)+(temp->_br_x*ORIENTATION_BINS)+temp->_o1];														// Bottom right
			float e1 = h1 - h2 - h3 + h4;
			// Same rectangle sum again, but in orientation bin _o2.
			// Compute second edge feature
			h1 = ((temp->_tl_y == 0)||(temp->_tl_x == 0)) ? 0 : hist->_histdata[((temp->_tl_y-1)*hist->_w*ORIENTATION_BINS)+((temp->_tl_x-1)*ORIENTATION_BINS)+temp->_o2];		// Top left
			h2 = (temp->_tl_y == 0) ? 0 : hist->_histdata[((temp->_tl_y-1)*hist->_w*ORIENTATION_BINS)+(temp->_br_x*ORIENTATION_BINS)+temp->_o2];								// Top right
			h3 = (temp->_tl_x == 0) ? 0 : hist->_histdata[(temp->_br_y*hist->_w*ORIENTATION_BINS)+((temp->_tl_x-1)*ORIENTATION_BINS)+temp->_o2];								// Bottom left
			h4 = hist->_histdata[(temp->_br_y*hist->_w*ORIENTATION_BINS)+(temp->_br_x*ORIENTATION_BINS)+temp->_o2];																// Bottom right
			float e2 = h1 - h2 - h3 + h4;
			// Note that an epsilon variable has been used to
			// prevent division by zero. It is also important to 
			// note that some values of e1 and e2 will be negative 
			// when they are in fact 0 due to floating point 
			// inaccuracies. The epsilon value stops small 
			// negative values from occuring, and if it is not 
			// used, other measures must be taken to compensate
			// for this.
			f_val = (e1 + EPSILON)/(e2 + EPSILON);
		}
		else
		{
			// DOMINANT_EDGE feature: ratio of the energy in bin _o1 to
			// the total energy over all orientation bins in the same
			// rectangle.
			float etotal = 0;
			float e1 = 0;
			// Compute the edge features in sequence
			for(int k=0; k<ORIENTATION_BINS; k++)
			{
				float h1 = ((temp->_tl_y == 0)||(temp->_tl_x == 0)) ? 0 : hist->_histdata[((temp->_tl_y-1)*hist->_w*ORIENTATION_BINS)+((temp->_tl_x-1)*ORIENTATION_BINS)+k];	// Top left
				float h2 = (temp->_tl_y == 0) ? 0 : hist->_histdata[((temp->_tl_y-1)*hist->_w*ORIENTATION_BINS)+(temp->_br_x*ORIENTATION_BINS)+k];						// Top right
				float h3 = (temp->_tl_x == 0) ? 0 : hist->_histdata[(temp->_br_y*hist->_w*ORIENTATION_BINS)+((temp->_tl_x-1)*ORIENTATION_BINS)+k];						// Bottom left
				float h4 = hist->_histdata[(temp->_br_y*hist->_w*ORIENTATION_BINS)+(temp->_br_x*ORIENTATION_BINS)+k];												// Bottom right
				etotal = etotal + h1 - h2 - h3 + h4;
				if(k == temp->_o1)
					e1 = h1 - h2 - h3 + h4;
			}
			f_val = (e1 + EPSILON)/(etotal + EPSILON);
		}

		// Branch left or right depending on feature value
		if(f_val < temp->_thresh)
			temp = temp->_l;
		else
			temp = temp->_r;
	}
	return temp;
}