//----------------------------------------------------------------------------------------------
//	Filename:	ModelTraining.cpp
//	Author:		Sandro Spina
//	Date:		16/01/2012
//----------------------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------
#pragma once

#include "ModelTraining.h"

#include <iterator>

#include <System\EngineKernel.h>
#include <Scene\Environment.h>
#include <Device\BufferedImageDevice.h>
#include <Scene\Scene.h>
#include <Sampler\RandomSampler.h>

using namespace Illumina::Core;

//Constructor: binds the training DB to the given environment and initializes
//the iteration cursors used by GetNext().
ModelTraining::ModelTraining(Environment *p_environment)
{
	environment = p_environment;
	trained_pcs = new std::map<std::string, std::map<std::string, PointCloudSegmentation*>*>();
	dbLocalIndex = 0;	//within obj class
	dbGlobalIndex = 0;	//global object index (was assigned twice in the original)
	crtActiveObjectClass = "NotSet";
}

//Rewind the per-class cursor to the first class and clear the active-class marker.
void ModelTraining::ResetDBGlobalIndex()
{
	crtActiveObjectClass = "NotSet";
	dbGlobalIndex = 0;
}

//Rewind the within-class cursor (used by GetNext/GetNext(name)) to the first model.
void ModelTraining::ResetDBLocalIndex()
{
	dbLocalIndex = 0;
}

//Returns the next FULL (not partial view) PCS in DB.
//Walks class-by-class (dbGlobalIndex) and model-by-model within a class
//(dbLocalIndex), updating crtActiveObjectClass/crtActiveObjectInClass as a
//side effect. Returns NULL once every class has been exhausted.
PointCloudSegmentation * ModelTraining::GetNext() 
{
	if (dbGlobalIndex == trained_pcs->size()) return NULL;
	auto trained_pcs_it = trained_pcs->begin();
	std::advance(trained_pcs_it, dbGlobalIndex);	//O(n) on std::map iterators, but clearer than a hand-rolled loop
	if (dbLocalIndex == trained_pcs_it->second->size()) 
	{
		//Current class exhausted: move on to the next class.
		ResetDBLocalIndex();
		dbGlobalIndex++;
		if (dbGlobalIndex == trained_pcs->size()) return NULL;
		++trained_pcs_it;
	}
	auto trained_pcs_local_it = trained_pcs_it->second->begin();
	std::advance(trained_pcs_local_it, dbLocalIndex);
	crtActiveObjectClass = trained_pcs_it->first;
	crtActiveObjectInClass = trained_pcs_local_it->first;
	dbLocalIndex++;
	return trained_pcs_local_it->second;
}

//Returns the next FULL (not partial view) PCS in DB. Filtered by Name (e.g. Chairs)
//NOTE(review): unlike GetNext(), this overload does NOT advance dbLocalIndex,
//so repeated calls return the same element unless the caller moves the cursor
//itself — confirm whether that is intentional.
PointCloudSegmentation * ModelTraining::GetNext(std::string _name)
{
	auto trained_pcs_global_it = trained_pcs->find(_name);
	if (trained_pcs_global_it == trained_pcs->end()) return NULL; //name not in db
	auto trained_pcs_local_map = trained_pcs_global_it->second;
	if (dbLocalIndex == trained_pcs_local_map->size()) return NULL;  //at the end of the local list
	auto trained_pcs_local_map_it = trained_pcs_local_map->begin();
	std::advance(trained_pcs_local_map_it, dbLocalIndex);
	return trained_pcs_local_map_it->second;
}

//Look up a trained PCS by its class (e.g. "chairs") and model name.
//Returns NULL when either the class or the model is unknown.
PointCloudSegmentation * ModelTraining::GetByName(std::string _class, std::string _name)
{
	auto class_entry = trained_pcs->find(_class);
	if (class_entry == trained_pcs->end()) return NULL;	//unknown class
	auto model_entry = class_entry->second->find(_name);
	return (model_entry == class_entry->second->end()) ? NULL : model_entry->second;
}

//Load from binary trained models. If not already trained re-train and persist to file.
//Reads the shape store file (one "<name>\t<type>\t<location>" record per line) and,
//for each .off entry: parses the mesh, centres/scales it over the origin, scans it
//from several random viewpoints, builds the full and per-view point-cloud
//segmentations and persists the trained result under the "chairs" class.
//Previously trained .pcs entries are loaded straight from their binary files.
//Returns the total number of models listed in the store file.
int ModelTraining::LoadAndTrainModels(Environment* p_environment, KDTreePointCloud* p_scene, MaterialGroup * p_materialGroup) {		
	std::string store_location = p_environment->GetShapeStore();		

	GeometricPrimitive * geometric_primitive = (GeometricPrimitive*)(p_environment->GetScene()->GetSpace()->PrimitiveList[0]);
	DepthIntegrator * depth_integrator = (DepthIntegrator*)(p_environment->GetIntegrator());
	//NOTE(review): null_device is never used below and is leaked — confirm the
	//NullDevice constructor has no registration side effects, then remove it.
	NullDevice * null_device = new NullDevice(1920, 1920);		
	BufferedImageDevice * image_device = (BufferedImageDevice*) (p_environment->GetEngineKernel()->GetDeviceManager()->RequestInstance("BIDevice"));

	std::vector<std::pair<std::string,std::string>> off_models;
	std::vector<std::pair<std::string,std::string>> obj_models;
	std::vector<std::pair<std::string,std::string>> pcs_models;

	//Store-file records are tab-separated; .off payload lines are space-separated.
	typedef boost::tokenizer<boost::char_separator<char>> tokenizer;
	boost::char_separator<char> sep("\t\n");
	tokenizer::iterator beg;

	typedef boost::tokenizer<boost::char_separator<char>> s_tokenizer;
	boost::char_separator<char> s_sep(" \n");
	s_tokenizer::iterator s_beg;

	//Read the store file and bucket its entries by model type.
	FILE *fp;
	bool quit = false;
	if (!(fp = fopen(store_location.c_str(), "r")))
	{
		std::cout << "[Training] - Unable to open store file " << store_location << std::endl;
		quit = true;
	}
	else
	{
		std::cout << "[Training] - Reading from store file " << store_location << std::endl;
		char buffer[1024];
		int line_count = 0;
		std::string model_name, model_type, model_location, line_data;			

		while (fgets(buffer, 512, fp))
		{
			line_count++;
			char *bufferp = buffer;
			while (isspace(*bufferp)) bufferp++;
				
			if (*bufferp == '#') continue;   //skip comments
			if (*bufferp == '\0') continue;	 //and blank lines				
				
			line_data = buffer;
			tokenizer tok(line_data, sep); 
			beg = tok.begin();
			model_name = *beg; ++beg;
			model_type = *beg; ++beg;
			model_location = *beg;
				
			if (model_type.compare("off") == 0) { off_models.push_back(std::pair<std::string, std::string>(model_name,model_location)); continue; }
			if (model_type.compare("obj") == 0) { obj_models.push_back(std::pair<std::string, std::string>(model_name,model_location)); continue; }
			if (model_type.compare("pcs") == 0) { pcs_models.push_back(std::pair<std::string, std::string>(model_name,model_location)); continue; }
		}
		fclose(fp);
	}

	int training_models = off_models.size() + obj_models.size() + pcs_models.size();
	std::cout << "[Training] - Number of Models in ShapeStore = " << training_models << std::endl;

	if (!quit)  //train models
	{
		//off files
		FILE *fp;
		std::string model_location, model_name;
		char buffer[1024];
		int nverts, nfaces, nedges;			
		float x, y, z;
		List<Vector3> vertices;
		List<Vector3> tris;
		float min, max;

		int i1, i2, i3;
		boost::timer load_time;

		for (std::vector<std::pair<std::string,std::string>>::iterator models_iterator = off_models.begin(); models_iterator != off_models.end(); models_iterator++)
		{
			//Load and parse the .off file from the file system
			//Generate the point cloud and scale it depending on size of original mesh
			model_name = models_iterator->first;
			model_location = models_iterator->second;
			KDTreeMesh * mesh = new KDTreeMesh(10, 10);
				
			if (!(fp = fopen(model_location.c_str(), "r")))
			{
				std::cout << "\n[Training] Unable to open .off file at " << model_location << std::endl;				
				delete mesh;	//nothing was loaded into it (previously leaked)
				continue;
			} 

			nverts = 0;	nfaces = 0;	nedges = 0;
			bool parse_error = false;
			vertices.Clear();
			tris.Clear();				
			std::string line_data;
			bool init_file = false;
			int lines_read = 0;

			std::cout << "\n[Training - " << model_name << "] Reading from .off file at " << model_location << std::endl;
			load_time.restart();
			//reading .off file
			while (fgets(buffer, 512, fp))
			{
				lines_read++;
				char *bufferp = buffer;
				while (isspace(*bufferp)) bufferp++;
				if (*bufferp == '#') continue;   //skip comments
				if (*bufferp == '\0') continue;	 //and blank lines

				line_data = buffer;					

				if (!init_file)
				{
					//Expect the "OFF" magic line followed by "<nverts> <nfaces> <nedges>".
					tokenizer tok(line_data, sep);		
					beg = tok.begin();
					if ((*beg).compare("OFF") == 0)
					{
						//OK get the next line from the file — guard against truncated files
						if (!fgets(buffer, 512, fp))
						{
							std::cout << "Unexpected end of file reading header in file " << model_name << std::endl;
							parse_error = true;
							break;
						}
						line_data = buffer;
						s_tokenizer tok(line_data);
						int c = 0;
						for (s_beg = tok.begin(); s_beg != tok.end(); ++s_beg) c++;
						if (c != 3) 
						{
							std::cout << "Syntax error reading header on line " << lines_read << " in file " << model_name << std::endl;
							parse_error = true;
							break;
						} 
						else
						{
							s_beg = tok.begin();
							nverts = boost::lexical_cast<int, std::string>(*s_beg); ++s_beg;
							nfaces = boost::lexical_cast<int, std::string>(*s_beg); ++s_beg;
							nedges = boost::lexical_cast<int, std::string>(*s_beg);
						}
						init_file = true;
						std::cout << "[Training - " << model_name << "] Init Data: V=" << nverts << " F=" << nfaces << std::endl;
						continue; //read the next line
					}
					else {						
						std::cout << "[Training - " << model_name << "] Parse Error Reading Init data. Break!" << std::endl;
						parse_error = true;	//previously left false, so an empty mesh was still trained
						break;
					}
				}					

				if (nverts > 0)  //Read vertices first
				{		
					s_tokenizer tok(line_data, s_sep);
					int c = 0;
					for (s_beg = tok.begin(); s_beg != tok.end(); ++s_beg) c++;					
					if (c != 3)
					{
						std::cout << "Syntax error with vertex coordinates on line " << lines_read << " in file " << model_name << std::endl;
						parse_error = true;
						break;							
					} else {							
						s_beg = tok.begin();
						x = boost::lexical_cast<float, std::string>(*s_beg); ++s_beg;
						y = boost::lexical_cast<float, std::string>(*s_beg); ++s_beg;
						z = boost::lexical_cast<float, std::string>(*s_beg);
						vertices.PushBack(Vector3(x,y,z));							
						Vertex v;
						v.Position.Set(x,y,z);
						mesh->AddVertex(v);
						nverts--;
						continue;
					}						
				}					

				if (nfaces > 0)
				{		
					s_tokenizer tok(line_data);
					int c = 0;
					for (s_beg = tok.begin(); s_beg != tok.end(); ++s_beg) c++;

					if (c != 4)	//triangular faces only: "3 i1 i2 i3"
					{
						std::cout << "Syntax error reading face indices at line " << lines_read << " in file " << model_name << std::endl;
						parse_error = true;
						break;							
					} else {
						//Create Triangle
						s_beg = tok.begin(); s_beg++;
						i1 = boost::lexical_cast<int, std::string> (*s_beg); s_beg++;
						i2 = boost::lexical_cast<int, std::string> (*s_beg); s_beg++;
						i3 = boost::lexical_cast<int, std::string> (*s_beg);

						tris.PushBack(Vector3(vertices.At(i1)));
						tris.PushBack(Vector3(vertices.At(i2)));
						tris.PushBack(Vector3(vertices.At(i3)));
						mesh->AddIndexedTriangle(i1, i2, i3);							
						nfaces--;
						continue;
					}						
				}													
			}

			fclose(fp);	//always close the .off file (previously leaked on parse errors)

			if (parse_error)
			{
				delete mesh;	//discard the partially-loaded mesh
				continue;
			}

			//Build structure graph model.
			//scale and translate mesh over origin.
			KDTreePointCloud * p_cloud = new KDTreePointCloud(model_name);
			p_cloud->SetWorldType(1);
			p_cloud->SetSurfaceCurvature(p_scene->GetSurfaceCurvature());
			p_cloud->SetkNNType(p_scene->GetkNNType());
			p_cloud->SetkNNFlood(p_scene->GetkNNFlood());
			p_cloud->SetkNNFloodRadius(p_scene->GetkNNFloodRadius());
			p_cloud->SetkNNTypeRadius(p_scene->GetkNNTypeRadius());
			p_cloud->SetCornerRatioEpsilon(p_scene->GetCornerRatioEpsilon());
			p_cloud->SetEdgeCornerDetect(p_scene->GetEdgeCornerDetect());
			p_cloud->SetkNNFloodMode(p_scene->GetkNNFloodMode());
			p_cloud->SetkNNTypeMode(p_scene->GetkNNTypeMode());
			p_cloud->SetNoise(p_scene->GetNoise());
			p_cloud->SetRadius(p_scene->GetRadius());
			p_cloud->SetEnvironment(p_environment);
			//Model_name in meshes used for training are always an integer > 9000
			p_cloud->SetPCSId(boost::lexical_cast<int, std::string>(model_name));

			std::cout << "[Training - " << model_name << "] All Vertices and Faces Loaded in " << load_time.elapsed() << "s." << std::endl;
			mesh->ComputeBoundingVolume();
			
			float mext0 = mesh->GetBoundingVolume()->GetExtent().Length();
			float scale = p_environment->GetScalingFactor() / mext0;
			std::cout << "[Training - " << model_name << "] Scale::" << scale << std::endl;
			Vector3 translate = mesh->GetBoundingVolume()->GetCentre();
			//Centre the mesh on the origin and normalise its extent.
			for (int i = 0; i<mesh->VertexList.Size(); i++)	 { mesh->VertexList[i].Position -= translate; mesh->VertexList[i].Position *= scale; } 					
			for (int i = 0; i<mesh->TriangleList.Size(); i++) { mesh->TriangleList[i].ComputeEdges(); }					
			for (int i = 0; i<tris.Size(); i++) { tris[i] -= translate; tris[i] *= scale; }					

			mesh->ComputeBoundingVolume();
			mesh->ComputeRadii(min, max);
			mesh->Compile();
			std::cout << "[Training - " << model_name << "] KD-Tree Computed in " << load_time.elapsed() << std::endl;
			float mext1 = mesh->GetBoundingVolume()->GetExtent().Length();
				
			std::cout << "[Training - " << model_name << "] Bounding Volume Extent Updated from : " << mext0 << " to " << mext1 << std::endl;
			std::cout << "[Scanning - " << model_name << "] Generating points from " << tris.Size()/3 << " Tris. #Scans=" << p_environment->GetNumberOfScans() <<  std::endl;

			//bind this shape to the geometric primitive					
			geometric_primitive->SetShape(mesh);				
				
			std::vector<std::vector<Vector3>> scans;		
			ScanModel(p_environment->GetNumberOfScans(), model_name, scans, depth_integrator, geometric_primitive, image_device, p_environment);				
									
			//Add points from all views to p_cloud
			for (size_t i=0; i<scans.size(); i++)
			{
				p_cloud->AddPointCloudPart(scans[i].size());
				for (size_t j=0; j<scans[i].size(); j++) p_cloud->AddPoint(CloudPoint(scans[i][j]));
			}				

			std::cout << "*********************PCS CONTRUCTOR*************************" << std::endl;
			PointCloudSegmentation * pcs_full = new PointCloudSegmentation(p_environment, geometric_primitive, p_cloud, boost::lexical_cast<int, std::string>(model_name), 0.1f);
			std::cout << "*********************PCS CONTRUCTOR*************************" << std::endl;

			AddPCS("chairs", model_name, pcs_full);	
			//WritePointsToASCIIFile(*p_cloud, "Output/PointClouds/"+model_name+"_FULL.asc");
			//p_cloud->WriteToBKD3File("Output/PCS/"+p_cloud->GetName(), true, p_materialGroup);					

			//Build one (currently unstored) segmentation per partial view.
			for (int i=0; i<p_environment->GetNumberOfScans(); i++)
			{
				//Per-view ids are offset from the model id in steps of 100.
				int mid = boost::lexical_cast<int, std::string>(model_name)+((i+1)*100);
				std::cout << "[Training " << model_name << " From View " << i << "] Starting now ***** " << model_name << " *****" << std::endl;
				KDTreePointCloud * p_cloud_view = new KDTreePointCloud(boost::lexical_cast<std::string, int>(mid));
				p_cloud_view->SetWorldType(1);
				p_cloud_view->SetSurfaceCurvature(p_scene->GetSurfaceCurvature());
				p_cloud_view->SetkNNType(p_scene->GetkNNType());
				p_cloud_view->SetkNNFlood(p_scene->GetkNNFlood());
				p_cloud_view->SetkNNFloodRadius(p_scene->GetkNNFloodRadius());
				p_cloud_view->SetkNNTypeRadius(p_scene->GetkNNTypeRadius());
				p_cloud_view->SetCornerRatioEpsilon(p_scene->GetCornerRatioEpsilon());
				p_cloud_view->SetEdgeCornerDetect(p_scene->GetEdgeCornerDetect());
				p_cloud_view->SetkNNFloodMode(p_scene->GetkNNFloodMode());
				p_cloud_view->SetkNNTypeMode(p_scene->GetkNNTypeMode());
				p_cloud_view->SetNoise(p_scene->GetNoise());
				p_cloud_view->SetRadius(p_scene->GetRadius());
				p_cloud_view->SetEnvironment(p_environment);
				for (size_t j=0; j<scans[i].size(); j++)
				{
					p_cloud_view->AddPoint(CloudPoint(scans[i][j]));
				}

				PointCloudSegmentation * pcs = new PointCloudSegmentation(p_environment, geometric_primitive, p_cloud_view, mid, 0.1f);

				//pcs_full->EnhanceGraphsFromSingleView(pcs, i);
				//WritePointsToASCIIFile(*p_cloud_view, "Output/PointClouds/"+p_cloud_view->GetName()+".asc");
				//p_cloud_view->WriteToBKD3File("Output/PCS/"+p_cloud_view->GetName(), true, p_materialGroup);
				//pcs->WriteToBPCSFile("Output/PCS/"+model_name+"/");
				//AddPCS(model_name, pcs); //Do not store the partial view.					
			}

			pcs_full->m_structureGraph->PromoteInternalNodesToLevel0();
			pcs_full->ComputePointCoveragePerSegment(128);
			bool success = pcs_full->ComputeFeatureMatrix(3, 5, 1.f);  //Create 3 feature structure graphs (init states == 3 -highest coverage * point count segments-), depth=3 for the anchor segments
			pcs_full->WriteToBPCSFile("Output/PCS/"+model_name+"/");
			pcs_full->PrintStatistics();
			
			std::cout << "[Training] ------> Of " << model_name << " Finished <-------" << std::endl;									
			std::cout << "[Training] ------> Most Visible Surface ID = " << pcs_full->m_mostVisibileSurface << std::endl; 
		}

		//obj files
		for (std::vector<std::pair<std::string,std::string>>::iterator models_iterator = obj_models.begin(); models_iterator != obj_models.end(); models_iterator++)
		{
			//TODO: .obj training is not implemented yet.
			//Load and parse the .obj file from the file system 
			//Generate the point cloud and scale it depending on size of original mesh
		}

		//pcs files
		for (std::vector<std::pair<std::string,std::string>>::iterator models_iterator = pcs_models.begin(); models_iterator != pcs_models.end(); models_iterator++)
		{
			//Load and parse the previously trained binary .pcs file from the file system
			model_name = models_iterator->first;
			bool loaded = false;	//out-parameter filled in by the PCS constructor
			PointCloudSegmentation * pcs_loaded = new PointCloudSegmentation(p_environment, "Output/PCS/"+model_name+"/", boost::lexical_cast<int, std::string>(model_name), loaded);			
			pcs_loaded->PrintStatistics();
			AddPCS("chairs", model_name, pcs_loaded);
		}		
	}	//!quit

	return training_models;
}

//Load previously trained binary .pcs models listed in the shape store file.
//Unlike LoadAndTrainModels, no re-training is performed: only "pcs" records are
//honoured; each is deserialized from "Output/PCS/<name>/" and registered under
//the "chairs" class. Returns the number of .pcs entries found in the store.
int ModelTraining::LoadTrainedModels(Environment* p_environment, KDTreePointCloud* p_scene, MaterialGroup * p_materialGroup) {		
	std::string store_location = p_environment->GetShapeStore();		

	//NOTE(review): none of the four objects below is used by this method (it only
	//loads .pcs files), and null_device is leaked — confirm their constructors /
	//RequestInstance have no registration side effects, then remove them.
	GeometricPrimitive * geometric_primitive = (GeometricPrimitive*)(p_environment->GetScene()->GetSpace()->PrimitiveList[0]);
	DepthIntegrator * depth_integrator = (DepthIntegrator*)(p_environment->GetIntegrator());
	NullDevice * null_device = new NullDevice(1920, 1920);		
	BufferedImageDevice * image_device = (BufferedImageDevice*) (p_environment->GetEngineKernel()->GetDeviceManager()->RequestInstance("BIDevice"));
	
	std::vector<std::pair<std::string,std::string>> pcs_models;

	//Store-file records are tab-separated: "<name>\t<type>\t<location>".
	typedef boost::tokenizer<boost::char_separator<char>> tokenizer;
	boost::char_separator<char> sep("\t\n");
	tokenizer::iterator beg;

	//Read file 
	FILE *fp;
	bool quit = false;
	if (!(fp = fopen(store_location.c_str(), "r")))
	{
		std::cout << "[Loading] - Unable to open store file " << store_location << std::endl;
		quit = true;
	}
	else
	{
		std::cout << "[Loading] - Reading from store file " << store_location << std::endl;
		char buffer[1024];
		int line_count = 0;
		std::string model_name, model_type, model_location, line_data;			

		while (fgets(buffer, 512, fp))
		{
			line_count++;
			char *bufferp = buffer;
			while (isspace(*bufferp)) bufferp++;
				
			if (*bufferp == '#') continue;   //skip comments
			if (*bufferp == '\0') continue;	 //and blank lines				
				
			line_data = buffer;
			tokenizer tok(line_data, sep); 
			beg = tok.begin();
			model_name = *beg; ++beg;
			model_type = *beg; ++beg;
			model_location = *beg;
				
			//Only binary point-cloud-segmentation entries are relevant here.
			if (model_type.compare("pcs") == 0) { pcs_models.push_back(std::pair<std::string, std::string>(model_name,model_location)); continue; }
		}
		fclose(fp);
	}

	int training_models = pcs_models.size();
	std::cout << "[Loading] - Number of Models in ShapeStore = " << training_models << std::endl;

	if (!quit)
	{
		std::string model_name;
		
		//pcs files
		for (std::vector<std::pair<std::string,std::string>>::iterator models_iterator = pcs_models.begin(); models_iterator != pcs_models.end(); models_iterator++)
		{
			//Load and parse the previously trained binary .pcs file from the file system
			model_name = models_iterator->first;
			bool loaded = false;	//out-parameter filled in by the PCS constructor
			PointCloudSegmentation * pcs_loaded = new PointCloudSegmentation(p_environment, "Output/PCS/"+model_name+"/", boost::lexical_cast<int, std::string>(model_name), loaded);
			pcs_loaded->PrintStatistics();
			AddPCS("chairs", model_name, pcs_loaded);			
		}		
	}	//!quit

	return training_models;
}

void ModelTraining::ScanModel(int p_views, std::string p_model_name, std::vector<std::vector<Vector3>> &p_scans, DepthIntegrator *p_integrator, GeometricPrimitive *p_geometricPrimitive, BufferedImageDevice *p_imageDevice, Environment * p_environment)
{		
	RandomSampler rs;				
	std::vector<Vector3> scan_points_per_view;
	std::string file_output_path;
	RadianceBuffer rbuffer(p_imageDevice->GetWidth(), p_imageDevice->GetHeight());
	ScanRenderer renderer(p_environment->GetScene(), p_integrator, p_imageDevice, p_environment->GetFilter(), &rbuffer, &scan_points_per_view, 1);		
	Vector2 sample;
	Vector3 camera_position;
	float camera_distance = 20.f;
	for (int i = 0; i<p_views; i++)
	{
		sample = rs.Get2DSample();
		//std::cout << "Sample("<<i<<"):: " << sample.ToString() << std::endl;
		//camera_position = Vector3::Zero + 20.f * Montecarlo::UniformSampleSphere(sample.X, sample.Y);
		camera_position = camera_distance * Montecarlo::CosineSampleHemisphere(sample.X, sample.Y);
		Maths::Swap(camera_position.Y, camera_position.Z);
		if (camera_position.Y > camera_distance * 0.8f) { i -= 1; continue; } // skip this position.
		std::cout << "[Scanning] CameraPosition("<<i<<"):: " << camera_position.ToString() << std::endl;

		renderer.GetScene()->GetCamera()->MoveTo(camera_position);
		renderer.GetScene()->GetCamera()->LookAt(Vector3::Zero);
		scan_points_per_view.clear();
		renderer.Render();
		renderer.Commit();
		//file_output_path = "Output/PointClouds/" + p_model_name +  "_view" + boost::lexical_cast<std::string, int>(i) + ".asc";
		//WritePointsToASCIIFile(&scan_points_per_view, file_output_path, i);
		p_scans.push_back(scan_points_per_view);
	}
}

//Write one "X Y Z" line per point to an ASCII file; p_i is only used for logging.
void ModelTraining::WritePointsToASCIIFile(std::vector<Vector3> * p_points, std::string _filepath, int p_i)	{		
	std::ofstream myfile;		
	std::cout << "[Scanning View " << p_i << "] Writing " << p_points->size() << " points to " << _filepath;
	myfile.open (_filepath);
	if (!myfile.is_open())	//previously wrote nothing but still reported [DONE]
	{
		std::cout << " [FAILED - could not open file]" << std::endl;
		return;
	}
	for (size_t i = 0; i<p_points->size(); i++) myfile << p_points->at(i).X << " " << p_points->at(i).Y << " " << p_points->at(i).Z << "\n";
	myfile.close();	
	std::cout << " [DONE]" << std::endl;
}

//Write every point of the cloud as an "X Y Z" line to an ASCII file.
void ModelTraining::WritePointsToASCIIFile(KDTreePointCloud &p_pointCloud, std::string _filepath)	{		
	std::ofstream myfile;		
	std::cout << "[Training] Writing " << p_pointCloud.GetSize() << " Points to ASCII File " << _filepath;
	myfile.open (_filepath);
	if (!myfile.is_open())	//previously wrote nothing but still reported [DONE]
	{
		std::cout << " [FAILED - could not open file]" << std::endl;
		return;
	}
	for (int i = 0; i<p_pointCloud.GetSize(); i++) {
		myfile << p_pointCloud.m_pointList[i].m_vertex.Position.X << " " << p_pointCloud.m_pointList[i].m_vertex.Position.Y << " " << p_pointCloud.m_pointList[i].m_vertex.Position.Z << "\n";
	}		
	myfile.close();	
	std::cout << " [DONE]" << std::endl;
}

std::pair<bool, KDTreeMesh *> ModelTraining::GetShape(std::string p_name)
{
	if (primitive_shapes.find(p_name) == primitive_shapes.end())
	{
		//try loading it from disk
		if (LoadShape(p_name))			
		{	
			//Create a new instance of this KDTreeMesh and return it.
			//std::cout << "Cloning Shape " << p_name << std::endl;
			KDTreeMesh * source = primitive_shapes.at(p_name);
			KDTreeMesh * n_mesh = new KDTreeMesh(10, 10);		

			for (int i=0; i<source->VertexList.Size(); i++) 
			{
				Vertex v;
				v.Position.Set(source->VertexList[i].Position.X,source->VertexList[i].Position.Y,source->VertexList[i].Position.Z);
				n_mesh->AddVertex(v);						
			}
			n_mesh->AddIndexedTriangleList(source->TriangleList);
		
			for (int i = 0; i<n_mesh->TriangleList.Size(); i++) { n_mesh->TriangleList[i].ComputeEdges(); }

			n_mesh->ComputeBoundingVolume();
			n_mesh->UpdateNormals();
			n_mesh->Compile();
		
			return std::pair<bool, KDTreeMesh*>(true, n_mesh);		
		} 
		else
		{
			return std::pair<bool, KDTreeMesh*>(false,NULL);
		}
	}
	else
	{
		//Create a new instance of this KDTreeMesh and return it.
		//std::cout << "Cloning Shape " << p_name << std::endl;
		KDTreeMesh * source = primitive_shapes.at(p_name);
		KDTreeMesh * n_mesh = new KDTreeMesh(10, 10);		

		for (int i=0; i<source->VertexList.Size(); i++) 
		{
			Vertex v;
			v.Position.Set(source->VertexList[i].Position.X,source->VertexList[i].Position.Y,source->VertexList[i].Position.Z);
			n_mesh->AddVertex(v);						
		}

		n_mesh->AddIndexedTriangleList(source->TriangleList);
		
		for (int i = 0; i<n_mesh->TriangleList.Size(); i++) { n_mesh->TriangleList[i].ComputeEdges(); }							

		n_mesh->ComputeBoundingVolume();							
		n_mesh->Compile();
		
		return std::pair<bool, KDTreeMesh*>(true, n_mesh);		
	}
}

//Load shape and cache it in primitive_shapes
bool ModelTraining::LoadShape(std::string p_name)
{	
	std::string store_location = environment->GetShapeStore();		
	std::string model_loc;
	typedef boost::tokenizer<boost::char_separator<char>> tokenizer;
	boost::char_separator<char> sep("\t\n");
	tokenizer::iterator beg;

	typedef boost::tokenizer<boost::char_separator<char>> s_tokenizer;	
	boost::char_separator<char> s_sep(" \n");
	s_tokenizer::iterator s_beg;		

	//Read file 
	FILE *fp;
	bool quit = false;
	if (!(fp = fopen(store_location.c_str(), "r")))
	{
		std::cout << "[Model Loading] - Unable to open store file " << store_location << std::endl;
		quit = true;
	}
	else
	{
		quit = true;
		//std::cout << "[Model Loading] - Reading from store file " << store_location << std::endl;
		char buffer[1024];
		int line_count = 0;
		std::string model_name, model_type, model_location, line_data;
		bool found = false;

		while ((fgets(buffer, 512, fp)) && (!found))
		{
			line_count++;
			char *bufferp = buffer;
			while (isspace(*bufferp)) bufferp++;
				
			if (*bufferp == '#') continue;   //skip comments
			if (*bufferp == '\0') continue;	 //and blank lines				
				
			line_data = buffer;
			tokenizer tok(line_data, sep); 
			beg = tok.begin();
			model_name = *beg; ++beg;
			model_type = *beg; ++beg;
			model_location = *beg;
			
			//std::cout << "Comparing names : " << model_name << " to " << p_name << std::endl;
			if (model_name.compare(p_name) == 0)
			{				
				if (model_location.find(".off") != std::string::npos)
				{ 
					model_loc = model_location; 
					//std::cout << "[Model Loading] - Setting model location to " << model_loc << std::endl;
					found = true;
					quit = false;
				}
				else
				{
					std::cout << "[Model Loading] - Not an OFF File ... BREAKING" << std::endl;
					quit = true;
				}
			}
			
		}
		fclose(fp);		
	}
	
	if (!quit)  //load model
	{
		//off files
		FILE *fp;
		FILE *obj_fp;
		std::string model_loc_obj;
		std::string model_location, model_name;
		char buffer[1024];
		int nverts, nfaces, nedges, fvertices, line_count;			
		float x, y, z;
		VertexP nxtvrtx;
		List<Vector3> vertices;
		List<Vector3> tris;
		AxisAlignedBoundingBox model_bb;
		model_bb.Invalidate();						
		float min, max;

		int i1, i2, i3;
		boost::timer load_time;
		
		//Load and parse the .off file from the file system
		KDTreeMesh * mesh = new KDTreeMesh(10, 10);						

		if (!(fp = fopen(model_loc.c_str(), "r")))
		{
			std::cout << "\n[Model Loading] Unable to open .off file at " << model_loc << std::endl;				
			return false;
		} 

		nverts = 0;	nfaces = 0;	nedges = 0;	line_count = 0;
		fvertices = 0;
		bool parse_error = false;
		vertices.Clear();
		tris.Clear();				
		std::string line_data;
		bool init_file = false;
		int lines_read = 0;

		//std::cout << "\n[Model Loading - " << model_name << "] Reading from .off file at " << model_loc << std::endl;
		load_time.restart();
		//reading .off file
		while (fgets(buffer, 512, fp))
		{
			lines_read++;
			char *bufferp = buffer;
			while (isspace(*bufferp)) bufferp++;
			if (*bufferp == '#') continue;   //skip comments
			if (*bufferp == '\0') continue;	 //and blank lines

			line_data = buffer;					

			if (!init_file)
			{
				tokenizer tok(line_data, sep);		
				beg = tok.begin();
				//start populating						
				if ((*beg).compare("OFF") == 0)
				{
					//OK get the next line from the file
					fgets(buffer, 512, fp);
					line_data = buffer;
					s_tokenizer tok(line_data);
					int c = 0;
					for (s_beg = tok.begin(); s_beg != tok.end(); ++s_beg) c++;
					if (c != 3) 
					{
						std::cout << "Syntax error reading header on line " << line_count << " in file " << model_name << std::endl;
						parse_error = true;
						break;
					} 
					else
					{
						s_beg = tok.begin();
						nverts = boost::lexical_cast<int, std::string>(*s_beg); ++s_beg;
						nfaces = boost::lexical_cast<int, std::string>(*s_beg); ++s_beg;
						nedges = boost::lexical_cast<int, std::string>(*s_beg);
					}
					init_file = true;
					//std::cout << "[Model Loading - " << model_name << "] Init Data: V=" << nverts << " F=" << nfaces << std::endl;
					continue; //read the next line
				}
				else {						
					std::cout << "[Model Loading - " << model_name << "] Parse Error Reading Init data. Break!" << std::endl;
					break;
				}
			}					

			if (nverts > 0)  //Read vertices first
			{		
				s_tokenizer tok(line_data, s_sep);
				int c = 0;
				for (s_beg = tok.begin(); s_beg != tok.end(); ++s_beg) c++;					
				if (c != 3)
				{
					std::cout << "Syntax error with vertex coordinates on line " << line_count << " in file " << model_name << std::endl;
					parse_error = true;
					break;							
				} else {							
					s_beg = tok.begin();
					x = boost::lexical_cast<float, std::string>(*s_beg); ++s_beg;
					y = boost::lexical_cast<float, std::string>(*s_beg); ++s_beg;
					z = boost::lexical_cast<float, std::string>(*s_beg);
					vertices.PushBack(Vector3(x,y,z));							
					Vertex v;
					v.Position.Set(x,y,z);
					mesh->AddVertex(v);
					nverts--;
					continue;
				}						
			}					

			if (nfaces > 0)
			{		
				s_tokenizer tok(line_data);
				int c = 0;
				for (s_beg = tok.begin(); s_beg != tok.end(); ++s_beg) c++;

				if (c != 4)
				{
					std::cout << "Syntax error reading face indices at line " << line_count << " in file " << model_name << std::endl;
					parse_error = true;
					break;							
				} else {
					//Create Triangle
					s_beg = tok.begin(); s_beg++;
					i1 = boost::lexical_cast<int, std::string> (*s_beg); s_beg++;
					i2 = boost::lexical_cast<int, std::string> (*s_beg); s_beg++;
					i3 = boost::lexical_cast<int, std::string> (*s_beg);

					tris.PushBack(Vector3(vertices.At(i1)));
					tris.PushBack(Vector3(vertices.At(i2)));
					tris.PushBack(Vector3(vertices.At(i3)));
					mesh->AddIndexedTriangle(i1, i2, i3);							
					nfaces--;
					continue;
				}						
			}

			//See if we can get the vertex normals from the 

		}

		//Geometry loaded.		
		mesh->ComputeBoundingVolume();
				
		float mext0 = mesh->GetBoundingVolume()->GetExtent().Length();
		float scale = environment->GetScalingFactor() / mext0;
		Vector3 translate = mesh->GetBoundingVolume()->GetCentre();
		//float x,y,z;
		for (int i = 0; i<mesh->VertexList.Size(); i++)	 { mesh->VertexList[i].Position -= translate; mesh->VertexList[i].Position *= scale; } 					
		for (int i = 0; i<mesh->TriangleList.Size(); i++) { mesh->TriangleList[i].ComputeEdges(); }					
		for (int i = 0; i<tris.Size(); i++) { tris[i] -= translate; tris[i] *= scale; }					

		mesh->ComputeBoundingVolume();					
		mesh->ComputeRadii(min, max);					
		mesh->Compile();
		//std::cout << "[Model Loading - " << model_name << "] KD-Tree Computed in " << load_time.elapsed() << std::endl;
		float mext1 = mesh->GetBoundingVolume()->GetExtent().Length();					
					
		//std::cout << "[Model Loading - " << model_name << "] Bounding Volume Extent Updated from : " << mext0 << " to " << mext1 << std::endl;		

		//Add to list of shapes
		primitive_shapes[p_name] = mesh;
		return true;
	}
	return false;
}

//NOTE(review): stub — intended to load every shape listed in primitive_obj,
//but currently a no-op that always reports success.
bool ModelTraining::LoadAllShapes()
{
	//Load all shapes found in primitive_obj
	return true;
}

//NOTE(review): stub — p_primitive_shape is ignored and nothing is added to the
//environment space yet; always reports success.
bool ModelTraining::CreateAndAddPrimitiveToSpace(std::string p_primitive_shape)
{
	//Create Primitive and add to Environment Space
	return true;
}
//-------------------------------------------
void ModelTraining::AddPCS(std::string _class, std::string p_model_name, PointCloudSegmentation *p_pcs)
{
	auto pcs_iterator = trained_pcs->find(_class);
	if (pcs_iterator == trained_pcs->end()) (*trained_pcs)[_class] = new std::map<std::string, PointCloudSegmentation*>();
	(*(trained_pcs->at(_class)))[p_model_name] = p_pcs;	
	std::cout << "[ScenePrimitivesDB] New PCS " << p_model_name << " added to " << _class << " class" << std::endl;
}
//-------------------------------------------

//Dump every trained object in the DB to stdout: class, name, point count and
//surface-segment count, one line per object.
void ModelTraining::ToString()
{
	ResetDBGlobalIndex(); ResetDBLocalIndex();
	std::cout << "******DB OBJECTS LISTING - BEGIN******" << std::endl;
	for (PointCloudSegmentation * nxt = GetNext(); nxt != NULL; nxt = GetNext())
	{
		std::cout << "Class=" << crtActiveObjectClass << " Name=" << crtActiveObjectInClass << " PointCount= " << nxt->m_pointCloud->m_pointList.size() << " SurfSegmentCount= " << nxt->m_segmentsGraph->m_surfaceSegments->size() << std::endl;
	}
	std::cout << "******DB OBJECTS LISTING - END******" << std::endl;
}