/*! \file	monoslam.cpp
 *  \brief	Definitions for the MONOSLAM node.
*/

#include "monoslam.hpp"

struct timeval cycle_timer;
//double elapsedTime;

int main(int argc, char** argv) {
	
	ROS_INFO("Node launched.");
	
	ros::init(argc, argv, "thermal_slam");
	
	ros::NodeHandle private_node_handle("~");
	
	slamData startupData;
		
	bool inputIsValid = startupData.obtainStartingData(private_node_handle);
	
	startupData.read_addr = argv[0];
	startupData.read_addr = startupData.read_addr.substr(0, startupData.read_addr.size()-12);
	
	if (!inputIsValid) {
		ROS_INFO("Configuration invalid.");
	}
		
	ROS_INFO("Startup data processed.");
	
	//ros::Rate loop_rate(25);
	
	ros::NodeHandle nh;
	
	boost::shared_ptr < slamNode > slam_node (new slamNode (nh, startupData));
	
	globalNodePtr = &slam_node;

	signal(SIGINT, mySigintHandler);
	
	ROS_INFO("Node configured.");
	
	ros::AsyncSpinner spinner(2);
	
	spinner.start();
	
	ros::waitForShutdown();
	
	ROS_INFO("Exiting.");
	
	return 0;
	
}

void slamNode::processScorecard() {
	
	// Open up file
	
	if ((configData.initializationScorecard[0] == '.') && (configData.initializationScorecard[1] == '.')) {
		configData.initializationScorecard = configData.read_addr + "nodes/monoslam/config/" + configData.initializationScorecard;
	}
	
	//ROS_ERROR("Opening scorecard file: (%s)", configData.initializationScorecard.c_str());
	
	ifstream ifs(configData.initializationScorecard.c_str());
	
	for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
		ifs >> scorecardParams[iii][0];
	}
	
	for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
		ifs >> scorecardParams[iii][1];
	}
	
	for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
		ifs >> scorecardParams[iii][2];
	}
	
	ifs.close();
	
	for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
		ROS_INFO("Criteria (%d) = (%f, %f, %f)", iii, scorecardParams[iii][0], scorecardParams[iii][1], scorecardParams[iii][2]);
	}
	
}

bool slamData::obtainStartingData(ros::NodeHandle& nh) {
	
	// Reads the node's launch parameters. The tracker stream is mandatory;
	// every other parameter falls back to a default. Returns false when no
	// stream was specified.
	
	nh.param<std::string>("stream", stream, "null");
	
	if (stream == "null") {
		ROS_ERROR("No tracker stream specified.");
		return false;
	}
	
	ROS_INFO("Tracker stream (%s) selected.", stream.c_str());
	
	nh.param<bool>("logErrors", logErrors, true);
	nh.param<bool>("keyframeEvaluationMode", keyframeEvaluationMode, false);
	
	nh.param<int>("maxTestsPerFrame", maxTestsPerFrame, 10);
	nh.param<int>("maxInitializationFrames", maxInitializationFrames, 30);
	nh.param<int>("minStartingSeparation", minStartingSeparation, 4);
	nh.param<int>("maxStartingSeparation", maxStartingSeparation, 12);
	
	nh.param<std::string>("initializationScorecard", initializationScorecard, "../config/default_scorecard.txt");
	
	return true;
}

bool slamNode::checkForKeyframe() {
	
	// Decides whether the upcoming frame (currentPoseIndex+1) — or the frame
	// before it — should be promoted to a keyframe. When the previous frame
	// is promoted, currentPoseIndex is wound back ("reverted") and true is
	// returned so the caller re-processes it.
	
	unsigned int image_idx_2 = currentPoseIndex + 1;
	unsigned int previousKeyframe = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx;
	bool keyframeAdded = false;
	bool hasReverted = false;
	
	// IF CURRENT FRAME IS NOT A KEYFRAME, CHECK TO SEE IF IT SHOULD BE
	if (keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx != image_idx_2) {
		
		vector<unsigned int> tempIndices, maintainedIndices;

		// Tracks active over the whole span since the previous keyframe
		getActiveTracks(tempIndices, featureTrackVector, previousKeyframe, image_idx_2);
		filterToCompleteTracks(maintainedIndices, tempIndices, featureTrackVector, previousKeyframe, image_idx_2);

		double motionScore = getFeatureMotion(featureTrackVector, maintainedIndices, previousKeyframe, image_idx_2);
		
		vector<unsigned int> untriangulatedIndices;
		unsigned int trackedSinceLast = maintainedIndices.size();
		reduceActiveToTriangulated(featureTrackVector, maintainedIndices, untriangulatedIndices);
		
		// Low feature count only triggers keyframe selection if the tracked
		// PROPORTION is also low
		bool lowProportionTracked = false;
		if (trackedSinceLast < ((unsigned int) (((double) startingTracksCount) * configData.requiredTrackFrac))) {
			lowProportionTracked = true;
		}
		
		if (motionScore > configData.motionThreshold) {
			// Large feature motion: promote the PREVIOUS frame and revert
			keyframe_store.addKeyframe(image_idx_2-1, blank);
			keyframeAdded = true;
			currentPoseIndex--;
			hasReverted = true;
		} else if ((image_idx_2 - previousKeyframe) == configData.maxKeyframeSeparation) {
			// Maximum keyframe spacing reached: promote the current frame
			keyframe_store.addKeyframe(image_idx_2, blank);
			keyframeAdded = true;
		} else if (lowProportionTracked && (((int)trackedSinceLast) < configData.minTrackedFeatures) && (keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx != (image_idx_2-1))) {
			// Too few features and the previous frame is NOT yet a keyframe:
			// promote the previous frame and revert
			keyframe_store.addKeyframe(image_idx_2-1, blank);
			currentPoseIndex--;
			hasReverted = true;
			keyframeAdded = true;
		} else if (lowProportionTracked && (((int)trackedSinceLast) < configData.minTrackedFeatures)) {
			// Too few features but the previous frame IS already a keyframe:
			// promote the current frame
			keyframe_store.addKeyframe(image_idx_2, blank);
			keyframeAdded = true;
		} else if ((((int)maintainedIndices.size()) < configData.min3dPtsForPnP) && (keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx != (image_idx_2-1))) {
			// Literally not enough 3d points to perform PnP for the next frame:
			// promote the previous frame and revert
			keyframe_store.addKeyframe(image_idx_2-1, blank);
			currentPoseIndex--;
			hasReverted = true;
			keyframeAdded = true;
		} else if (((int)maintainedIndices.size()) < configData.min3dPtsForPnP) {
			// Too few 3d points for PnP but the previous frame was already
			// promoted — unrecoverable here; previously only logged
		}
	
	}
	
	if (hasReverted) {
		// Return true if the current frame is a keyframe
		return true;
	}
	
	// Fix: a premature "return hasReverted;" made this logging unreachable;
	// the returned value is unchanged by restoring it
	if (keyframeAdded) {
		ROS_INFO("Added frame (%d) as Keyframe [%d]", keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx, currentPoseIndex);
	}
	
	return false;
}

void slamNode::show_poses() {

	for (int iii = 0; iii < currentPoseIndex; iii++) {
		if (ACM[iii].rows == 4) {
			
			cv::Mat temp;
			
			estimatePoseFromKnownPoints(temp, configData.cameraData, featureTrackVector, iii, eye4);
			
			ROS_INFO("EST[%d] = ", iii);
			cout << temp << endl;
			
			estimatePoseFromKnownPoints(temp, configData.cameraData, featureTrackVector, iii, ACM[iii]);
			
			ROS_INFO("ES2[%d] = ", iii);
			cout << temp << endl;
			
			ROS_INFO("ACM[%d] = ", iii);
			cout << ACM[iii] << endl;
		}
	}
	
}

void slamNode::getGuidingPose(cv::Mat *srcs, cv::Mat& dst, unsigned int idx) {
	
	// Produces a guide pose <dst> for frame <idx> to seed pose estimation:
	//  - if srcs[idx] already holds a pose, reuse it;
	//  - if there is no usable history, fall back to the identity;
	//  - otherwise extrapolate from the previous pose plus a weighted
	//    combination of the motion derivatives of up to <maxGuides>
	//    preceding frame pairs.
	
	cv::Mat eye4 = cv::Mat::eye(4, 4, CV_64FC1);
	
	unsigned int maxGuides = std::min(((int) idx), configData.maxGuides);
	
	if (srcs[idx].rows == 4) {
		// Already has an estimate
		srcs[idx].copyTo(dst);
	} else if (idx == 0) {
		// First frame so use identity
		eye4.copyTo(dst);
	} else if (srcs[idx-1].rows != 4) {
		// No previous frame at all so just use identity as guide
		eye4.copyTo(dst);
	} else {
		// Weighted combo of previous frames
		
		cv::Mat R_dev, t_dev;
		R_dev = cv::Mat::zeros(3, 1, CV_64FC1);
		t_dev = cv::Mat::zeros(3, 1, CV_64FC1);
		
		double total_contrib = 0.0;
		
		for (unsigned int iii = idx-maxGuides; iii < idx-1; iii++) {
			
			// Older pairs get a larger <contrib> but a smaller 0.5^contrib
			// weight, so recent motion dominates the extrapolation
			double contrib = ((double) idx) - ((double) iii);
			total_contrib += contrib;
			
			// For each pair of frames (iii & iii+1)
			cv::Mat R1, R2, t1, t2, rv1, rv2, rd, td;
			
			// Decompose into R-vec and t-vec
			decomposeTransform(srcs[iii], R1, t1);
			Rodrigues(R1, rv1);
			decomposeTransform(srcs[iii+1], R2, t2);
			Rodrigues(R2, rv2);
			
			// Find the derivative
			rd = rv2-rv1;
			
			// Wrap each component into (-pi, pi] so the derivative is the
			// minimum angular change
			for (unsigned int jjj = 0; jjj < 3; jjj++) {
				
				while (rd.at<double>(jjj, 0) > M_PI) {
					rd.at<double>(jjj, 0) -= 2*M_PI;
				}
				
				while (rd.at<double>(jjj, 0) < -M_PI) {
					rd.at<double>(jjj, 0) += 2*M_PI;
				}
			}
			
			td = t2-t1;
			
			// Add a weighted version of the derivative to R_dev & t_dev
			R_dev += pow(0.5, contrib) * rd;
			t_dev += pow(0.5, contrib) * td;
			
		}
		
		// Re-weight R_dev and t_dev.
		// Fix: with maxGuides <= 1 (e.g. idx == 1) the loop above never runs
		// and total_contrib stays 0; the unconditional division previously
		// produced NaN derivatives and a garbage guide pose. With zero
		// derivatives the guide below degrades to the previous pose.
		if (total_contrib > 0.0) {
			R_dev /= total_contrib;
			t_dev /= total_contrib;
		}
		
		// Add R_dev & t_dev to the previous pose
		cv::Mat R_n, t_n, rv_n;
		decomposeTransform(srcs[idx-1], R_n, t_n);
		Rodrigues(R_n, rv_n);
		
		rv_n += R_dev;
		Rodrigues(rv_n, R_n);
		t_n += t_dev;
		
		cv::Mat T_n;
		composeTransform(R_n, t_n, T_n);
		transformationToProjection(T_n, dst);

	}
}

// SIGINT handler: flags the main loop to stop and asks the node to close its
// feeds before ROS shuts the process down.
// NOTE(review): ROS logging and prepareForTermination() are not
// async-signal-safe; this follows a common ROS pattern but is technically
// unsafe inside a signal context — confirm acceptable.
void mySigintHandler(int sig)
{
	wantsToShutdown = true;
	ROS_WARN("Requested shutdown... terminating feeds...");
	
	(*globalNodePtr)->prepareForTermination();
}

void slamNode::estimatePose(vector<unsigned int>& basisNodes, unsigned int idx) {
	
	// Estimates the pose of frame <idx> (if not already estimated) using a
	// guide pose extrapolated from preceding frames, then refines it together
	// with the supplied basis frames via windowed bundle adjustment.
	
	if (ACM[idx].rows != 4) {
		
		if (configData.timeDebug) poseEstimationTime.startRecording();
		
		// Seed PnP with a pose extrapolated from the preceding frames
		cv::Mat guidePose;
		getGuidingPose(ACM, guidePose, idx);
		
		estimatePoseFromKnownPoints(ACM[idx], configData.cameraData, featureTrackVector, idx, guidePose);
		
		if (configData.timeDebug) poseEstimationTime.stopRecording();
		
	}
	
	// Refine via bundle adjustment; the fixed-camera count depends on how
	// many basis nodes are available.
	double avgError;
	
	if (configData.timeDebug) bundleAdjustmentTime.startRecording();
	if (basisNodes.size() <= ((unsigned int) (configData.adjustmentFrames/2))) {
		// Few nodes: fix only the first camera
		avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.poseEstimateIterations, false, false, 1);
	} else if (((int) basisNodes.size()) > configData.flowback) {
		// Fix: the previous test (basisNodes.size()-configData.flowback > 0)
		// mixed unsigned and signed arithmetic, so a flowback larger than the
		// node count wrapped around and passed a huge fixed-camera count to BA
		avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.poseEstimateIterations, false, false, basisNodes.size()-configData.flowback);
	} else {
		avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.poseEstimateIterations, false, false, 1);
	}
	if (configData.timeDebug) bundleAdjustmentTime.stopRecording();

	if (configData.logErrors) {
		error_file << idx << " " << avgError << endl;
	}
	
}

void slamNode::getBasisNodes(vector<unsigned int>& basisNodes, unsigned int idx) {
	
	// Selects the frame indices ("basis nodes") used to anchor bundle
	// adjustment around frame <idx>. If few frames are active, every frame
	// from the first keyframe to <idx> is used; otherwise a sparse run of
	// earlier frames plus a dense run of the most recent frames is chosen.
	
	//printf("%s << ENTERED.\n", __FUNCTION__);
	
	basisNodes.clear();
	
	// <idx> must not precede the first keyframe
	if (idx < keyframe_store.keyframes.at(0).idx) {
		ROS_WARN("ERROR! IDX too low (%d / %d)", idx, keyframe_store.keyframes.at(0).idx);
		return;
	}
	
	// Frames from the first keyframe up to and including <idx>
	int activeCameras = ((int) idx) + 1 - ((int) keyframe_store.keyframes.at(0).idx);
	
	// Need at least two cameras for a meaningful adjustment
	if (activeCameras < 2) {
		return;
	}
	
	//printf("%s << DEBUG [%0d]\n", __FUNCTION__, 0);
	
	if (activeCameras <= configData.adjustmentFrames) {
		
		//printf("%s << DEBUG [%0d]\n", __FUNCTION__, 1);
		
		// Few enough active cameras: use every frame from the first keyframe
		unsigned int firstNode = keyframe_store.keyframes.at(0).idx;
		
		for (unsigned int iii = firstNode; iii <= idx; iii++) {
			basisNodes.push_back(iii);
		}
		
		//printf("%s << DEBUG [%0d]\n", __FUNCTION__, 2);
		
		
	} else {
		// Need to come up with some kind of subsequence to help estimate latest pose
		
		// Want to use same number of cameras, but spread them out a bit
		
		// How many MORE cameras than acceptable are there?
		
		//printf("%s << DEBUG [%0d]\n", __FUNCTION__, 3);
		
		unsigned int surplus = activeCameras - configData.adjustmentFrames;
		unsigned int spread = surplus / configData.keyframeSpacing;
		
		spread = std::min(spread, ((unsigned int) (configData.adjustmentFrames-configData.flowback)));
		
		//printf("%s::%s << idx = %d; activeCameras = %d; adjFrames = %d; surplus = %d; spread = %d", __PROGRAM__, __FUNCTION__, idx, activeCameras, configData.adjustmentFrames, surplus, spread);
		
		// For every 6 in surplus, stretch out the early ones
		unsigned int starting_cam = idx - configData.adjustmentFrames + spread + 2; // this will be first used frame
		starting_cam = starting_cam - (starting_cam % configData.keyframeSpacing); // going down to nearest'5' mult
		// NOTE(review): if spread == 0 the next line subtracts
		// keyframeSpacing*(unsigned)(-1) and wraps starting_cam — confirm
		// spread >= 1 is guaranteed whenever this branch is reached
		starting_cam -= configData.keyframeSpacing*(spread-1);
		
		//printf("%s::%s << starting_cam = %d; spread = %d; absolute limit = %d", __PROGRAM__, __FUNCTION__, starting_cam, spread, keyframe_store.keyframes.at(0).idx);
		
		//printf("%s << DEBUG (%04d)\n", __FUNCTION__, 4);
		
		// Sparse early portion: every <keyframeSpacing>-th frame
		for (unsigned int iii = starting_cam; iii < (idx - configData.adjustmentFrames + spread); iii += configData.keyframeSpacing) {
			//printf("%s << node: %d", __FUNCTION__, iii);
			basisNodes.push_back(iii);
			
		}
		
		//printf("%s << DEBUG (%04d)\n", __FUNCTION__, 5);
		
		// Dense recent portion: every frame up to and including <idx>
		//idx - (configData.adjustmentFrames - spread)
		starting_cam = idx - configData.adjustmentFrames + 1 + spread + 1;
		for (unsigned int iii = starting_cam; iii <= idx; iii++) {
			//printf("%s << node: %d", __FUNCTION__, iii);
			basisNodes.push_back(iii);
		}
		
		//printf("%s << DEBUG (%04d)\n", __FUNCTION__, 6);
		
	}
	
	//printf("%s << EXITING.\n", __FUNCTION__);
	
}

void slamNode::processNextFrame() {
	
	//double avgError;
	
	//ROS_INFO("About to process frame (%d)", currentPoseIndex);
	
	//ROS_INFO("Checking first frame (%d)", keyframe_store.keyframes.at(0).idx);
	
	if (!matricesAreEqual(ACM[keyframe_store.keyframes.at(0).idx], eye4)) {
		//ROS_ERROR("First camera (%d) corrupted...", keyframe_store.keyframes.at(0).idx);
		while (1) {}
	}
	
	vector<unsigned int> basisNodes;
	
	
	// Try triangulating points and fixing new camera at the same time...
	if (1) {
		
		if (configData.timeDebug) triangulationTime.startRecording();
		unsigned int triangulationIndex = currentPoseIndex-configData.flowback;
		
		ROS_INFO("Getting basis nodes...");
		getBasisNodes(basisNodes, triangulationIndex);
		
		if (basisNodes.size() > 0) {
			
			ROS_INFO("Finding relevant indices...");
			
			vector<unsigned int> triangulatedIndices, untriangulatedIndices;
			findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, basisNodes.at(0), triangulationIndex);
					
			// unsigned int points_in_3d;
			if (untriangulatedIndices.size() > 0) {
				
				vector<unsigned int> triangulatableIndices;
				findTriangulatableTracks3(featureTrackVector, triangulatableIndices, triangulationIndex, configData.framesForTriangulation);
				
				if (triangulatableIndices.size() > 0) {
					//ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
					triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, basisNodes.at(0), triangulationIndex);
					//ROS_INFO("Tracks triangulated.");
					//double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size()-3);
					//printf("%s::%s << F(%d) Adjustment error with newly triangulated points = %f (k = %d)", __PROGRAM__, __FUNCTION__, currentPoseIndex, avgError, basisNodes.size());
					
					if (currentPoseIndex > 60) {
						//while (1) {}
					}
					
				}
				
				
			}
			
			if (configData.timeDebug) triangulationTime.stopRecording();
			
			
			ROS_INFO("Performing adjustment... (%d)", ((int)basisNodes.size()));
			
			for (unsigned int iii = 0; iii < basisNodes.size(); iii++) {
				//ROS_INFO("basisNodes.at(%d) = (%d)", iii, basisNodes.at(iii));
			}
			
			for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
				
				if (featureTrackVector.at(iii).locations.size() > 0) {
					//ROS_INFO("featureTrackVector.at(%d).locations.at(%d) = (%d)", iii, featureTrackVector.at(iii).locations.size()-1, featureTrackVector.at(iii).locations.at(featureTrackVector.at(iii).locations.size()-1).imageIndex);
				}
				
			}
			
			if (configData.timeDebug) bundleAdjustmentTime.startRecording();
			double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size());
			ROS_INFO("F(%d) Adjustment error with newly triangulated points = %f (k = %d)", currentPoseIndex, avgError, ((int)basisNodes.size()));
			if (configData.timeDebug) bundleAdjustmentTime.stopRecording();
		}
	
		
	}
	
	
	
	// Make sure previous pose has been estimated
	
	/*
	getBasisNodes(basisNodes, currentPoseIndex-1);
	
	avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, std::max(1, ((int)basisNodes.size())-10));
	printf("%s::%s << F(%d) Free adjustment error with = %f (k = %d)", __PROGRAM__, __FUNCTION__, currentPoseIndex, avgError, basisNodes.size());
	*/
	
	ROS_INFO("Getting more basis nodes...");
	getBasisNodes(basisNodes, currentPoseIndex);
	ROS_INFO("Estimating pose...");
	estimatePose(basisNodes, currentPoseIndex);
	
	if (configData.timeDebug) {
		if ((currentPoseIndex % configData.timeSpacing) == 0) {
			//double factor;
			trackHandlingTime.calcParameters();
			triangulationTime.calcParameters();
			poseEstimationTime.calcParameters();
			bundleAdjustmentTime.calcParameters();							// * ((double) trackHandlingTime.cycles) / ((double) currentPoseIndex)
			ROS_WARN("Showing timing summary:");
			ROS_INFO("Track summary: (%d, %f, %f)", trackHandlingTime.cycles, trackHandlingTime.average * ((double) trackHandlingTime.cycles) / ((double) currentPoseIndex), trackHandlingTime.sigma* ((double) trackHandlingTime.cycles) / ((double) currentPoseIndex));
			ROS_INFO("Triangulation summary: (%d, %f, %f)", triangulationTime.cycles, triangulationTime.average* ((double) triangulationTime.cycles) / ((double) currentPoseIndex), triangulationTime.sigma* ((double) triangulationTime.cycles) / ((double) currentPoseIndex));
			ROS_INFO("Pose summary: (%d, %f, %f)", poseEstimationTime.cycles, poseEstimationTime.average* ((double) poseEstimationTime.cycles) / ((double) currentPoseIndex), poseEstimationTime.sigma* ((double) poseEstimationTime.cycles) / ((double) currentPoseIndex));
			ROS_INFO("Bundle summary: (%d, %f, %f)", bundleAdjustmentTime.cycles, bundleAdjustmentTime.average* ((double) bundleAdjustmentTime.cycles) / ((double) currentPoseIndex), bundleAdjustmentTime.sigma* ((double) bundleAdjustmentTime.cycles) / ((double) currentPoseIndex));
			
		}
	}
	
	
	if (1) {
		update_display();
		currentPoseIndex++;
		return;
	}
	
	if (0) {
		vector<unsigned int> triangulatedIndices, untriangulatedIndices;
		findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, keyframe_store.keyframes.at(0).idx, currentPoseIndex);
				
		//unsigned int points_in_3d;
		if (untriangulatedIndices.size() > 0) {
			
			vector<unsigned int> triangulatableIndices;
			findTriangulatableTracks3(featureTrackVector, triangulatableIndices, currentPoseIndex, configData.framesForTriangulation);
			
			if (triangulatableIndices.size() > 0) {
				//ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
				triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, keyframe_store.keyframes.at(0).idx, currentPoseIndex);
				double avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, basisNodes.size()-3);
				if (configData.verboseMode) { ROS_INFO("F(%d) Adjustment error with newly triangulated points = %f (k = %d)", currentPoseIndex, avgError, ((int)basisNodes.size())); };
			}
			
			
		}
	}
	
	
	
	// Checks next frame to determine keyframe
	if (checkForKeyframe()) {
		
		if ((currentPoseIndex+1) == keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx) {
			// reverted
			//ROS_INFO("Keyframe found (reverted); processing (%d)", currentPoseIndex+1);
		} else {
			// Not a keyframe
			update_display();
			currentPoseIndex++;
			return;
		}
		
		
	} else {
		
		if ((currentPoseIndex+1) == keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx) {
			// not reverted
			//ROS_INFO("Keyframe found (non-reverted); processing (%d)", currentPoseIndex+1);
			basisNodes.clear();
			getBasisNodes(basisNodes, currentPoseIndex+1);
			estimatePose(basisNodes, currentPoseIndex+1);
		} else {
			// Not a keyframe
			update_display();
			currentPoseIndex++;
			return;
		}
		
	}
	
	if (0) {
		update_display();
		currentPoseIndex++;
		return;
	}
	
	// This block is used to get a decent initial estimate of the latest pose
	
	
	unsigned int image_idx_1 = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-2).idx;
	unsigned int image_idx_2 = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx;
	
	//ROS_INFO("Currently processing frame (%d:%d) as Keyframe", image_idx_1, image_idx_2);

	vector<cv::Point2f> pts1, pts2;
	vector<cv::Point3f> objectPoints;
	getTriangulatedFullSpanPoints(featureTrackVector, pts1, pts2, image_idx_1, image_idx_2, objectPoints);

	
	//estimatePoseFromKnownPoints(ACM[image_idx_2], configData.cameraData, featureTrackVector, image_idx_2, eye4);
	
	
	vector<unsigned int> adjustableKeyframeIndices;
	/*
	for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
		adjustableKeyframeIndices.push_back(keyframe_store.keyframes.at(iii).idx);
	}
	
	while (adjustableKeyframeIndices.size() > (MAX_CAMERAS_FOR_POSE_EST_BA / 2)) {
		adjustableKeyframeIndices.erase(adjustableKeyframeIndices.begin());
	}
	* */
	
	vector<unsigned int> keyframe_indices, tmp_indices, subseq_indices;
				
	for (unsigned int iii = 0; iii < keyframe_store.keyframes.size()-1; iii++) {
		keyframe_indices.push_back(keyframe_store.keyframes.at(iii).idx);
	}
	
	// Risky mod
	//while (keyframe_indices.size() > (DEFAULT_ADJUSTMENT_FRAMES / 2)) {
	while (keyframe_indices.size() > (configData.adjustmentFrames / 2)) {
		keyframe_indices.erase(keyframe_indices.begin());
	}
	
	for (int iii = keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx+1; iii < currentPoseIndex+1; iii++) {
		tmp_indices.push_back(iii);
	}
	
	// Risky mod
	//randomSelection(tmp_indices, subseq_indices, (DEFAULT_ADJUSTMENT_FRAMES / 2));
	randomSelection(tmp_indices, subseq_indices, (configData.adjustmentFrames / 2));
	
	
	//unsigned int fixed_cameras = keyframe_indices.size();
	
	adjustableKeyframeIndices.insert(adjustableKeyframeIndices.end(), keyframe_indices.begin(), keyframe_indices.end());
	adjustableKeyframeIndices.insert(adjustableKeyframeIndices.end(), subseq_indices.begin(), subseq_indices.end());
	adjustableKeyframeIndices.push_back(image_idx_2);
	
	//cout << "Estimated pose: " << ACM[image_idx_2] << endl;
	
	//printf("%s::%s << DEBUG [%d]", __PROGRAM__, __FUNCTION__, 5);
	
	//unsigned int startingPoseForFullSys = ((unsigned int) std::max(((int) image_idx_2)-CAMERAS_PER_SYS, ((int) lastBasePose)));
	
	//unsigned int startingPoseForFullSys =  keyframe_store.keyframes.at(0).idx;
	
	//printf("%s::%s << DEBUG [%d]", __PROGRAM__, __FUNCTION__, 51);
	
	// DETERMINE NUMBER OF TRIANGULATED POINTS CURRENTLY BEING TRACKED
	//numTriangulatedPoints = countActiveTriangulatedTracks(fullSpanIndices, featureTrackVector);
	
	// ADJUST THE MINIMUM NUMBER OF FRAMES SO THAT YOU GET A SUFFICIENT AMOUNT
	
	//avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, true);
	//avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, adjustableKeyframeIndices, configData.keyframeIterations, false, false, fixed_cameras);
	
	if (ACM[currentPoseIndex+2].rows == 4) {
		//printf("%s::%s << Pose (%d) somehow estimated at this point [%d]", __PROGRAM__, __FUNCTION__, currentPoseIndex+2, 2);
	}
		
	//ROS_INFO("F(%d) Initial adjustment error = %f (k = %d)", image_idx_2, avgError, adjustableKeyframeIndices.size());
	
	if (0) {
		update_display();
		currentPoseIndex++;
		return;
	}
	
	//cout << "After kF adjustment: " << ACM[image_idx_2] << endl;
	/*
	for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
		
		cout << "(" << iii << ") Actual pose: " << ACM[keyframe_store.keyframes.at(iii).idx] << endl;
		
		Mat tmp;
		estimatePoseFromKnownPoints(tmp, configData.cameraData, featureTrackVector, keyframe_store.keyframes.at(iii).idx, ACM[keyframe_store.keyframes.at(iii).idx]);
		
		cout << "(" << iii << ") Re-estimated pose: " << tmp << endl;
	}
	*/
	
	vector<unsigned int> triangulatedIndices, untriangulatedIndices;
	
	//reduceActiveToTriangulated(featureTrackVector, triangulatedIndices, untriangulatedIndices);
	
	findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, image_idx_1, image_idx_2);
			
	//printf("%s::%s << untriangulatedIndices.size() = %d vs %d", __PROGRAM__, __FUNCTION__, untriangulatedIndices.size(), triangulatedIndices.size());
	
	//unsigned int points_in_3d;
	if (untriangulatedIndices.size() > 0) {

		vector<unsigned int> triangulatableIndices;
		findTriangulatableTracks3(featureTrackVector, triangulatableIndices, image_idx_2, configData.framesForTriangulation); // (image_idx_2-image_idx_1)/2
		
		if (triangulatableIndices.size() > 0) {
			//ROS_INFO("About to triangulate (%d) new tracks...", triangulatableIndices.size());
			triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, image_idx_1, image_idx_2);
			//avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, adjustableKeyframeIndices, configData.keyframeIterations, false, true);
			//ROS_INFO("F(%d) Adjustment error with newly triangulated points = %f (k = %d)", image_idx_2, avgError, adjustableKeyframeIndices.size());
		}
		
		
	}
		
	if (1) {
		update_display();
		currentPoseIndex++;
		return;
	}
	
	ROS_INFO("Establishing new adjustment indices...");
	
	vector<unsigned int> newAdjustmentIndices;
	for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
		
		newAdjustmentIndices.push_back(keyframe_store.keyframes.at(iii).idx);
		
		if (keyframe_store.keyframes.at(iii).idx == image_idx_1) {
			for (unsigned int jjj = image_idx_1+1; jjj < image_idx_2; jjj++) {
				newAdjustmentIndices.push_back(jjj);
			}
		}
	}
	
	update_display();
	currentPoseIndex++;
}

void slamNode::update_display() {
	
	// Publishes the current pose (when it has a valid 4-row transform) and
	// the partial system (all valid poses plus tracks) for visualization.
	// Before any pose exists (currentPoseIndex == -1) an identity pose and
	// an empty system are published instead.
	
	if (currentPoseIndex != -1) {
		if (ACM[currentPoseIndex].rows == 4) {
			assignPose(currentPose, ACM[currentPoseIndex]);
			pose_pub.publish(currentPose);
		}
		
		// Collect every frame up to the current index with a valid pose estimate
		vector<unsigned int> displayIndices;
		for (int iii = 0; iii <= currentPoseIndex; iii++) {
			if (ACM[iii].rows == 4) {
				displayIndices.push_back(iii);
			}
		}
		
		// Fix: removed a loop over displayIndices whose body was entirely
		// commented out (dead code)
		
		assignPartialSystem(display_sys, featureTrackVector, configData.cameraData, ACM, displayIndices, false);
		
	} else {
		assignPose(currentPose, eye4);
		pose_pub.publish(currentPose);
		
		display_sys.nodes.clear();
		display_sys.tracks.clear();
	}
	
	drawGraph2(display_sys, camera_pub, points_pub, path_pub, decimation, bicolor);
	
}

void slamNode::update_cameras_to_pnp() {
	
	// Re-estimates every previously-established camera pose directly from the
	// triangulated feature tracks, processing frames in parallel.
	#pragma omp parallel for
	for (int frame = 0; frame < currentPoseIndex; frame++) {
		if (ACM[frame].rows != 4) continue;
		estimatePoseFromKnownPoints(ACM[frame], configData.cameraData, featureTrackVector, frame, eye4);
	}
	
}

void slamNode::handle_tracks(const thermalvis::feature_tracksConstPtr& msg) {
	
	// Merges the incoming feature-track projections into featureTrackVector,
	// growing the vector so every referenced track index is addressable and
	// skipping (track, camera) pairs that are already recorded. Also advances
	// latestFrame to the highest camera index seen.
	//
	// Fix: removed the empty "if (!infoProcessed)" block, the dead "if (0)"
	// trackMatrix-display block, and the cv::Mat it declared.
	
	if (configData.verboseMode) { ROS_INFO("Handling projections (%d)...", msg->projection_count); }
	
	if (msg->indices.size() == 0) {
		ROS_WARN("No new tracks.");
		return;
	}
	
	if (configData.timeDebug) trackHandlingTime.startRecording();
	
	main_mutex.lock();
	
	// Highest track index referenced by this message
	unsigned int newest_track = 0;
	
	for (unsigned int iii = 0; iii < msg->projection_count; iii++) {
		if (msg->indices[iii] > newest_track) {
			newest_track = msg->indices[iii];
		}
	}
	
	// Grow the track vector so every referenced index is addressable
	if (newest_track >= featureTrackVector.size()) {
		featureTrack blankTrack;
		for (unsigned int iii = featureTrackVector.size(); iii <= newest_track; iii++) {
			featureTrackVector.push_back(blankTrack);
		}
	}
	
	for (unsigned int iii = 0; iii < msg->projection_count; iii++) {
		
		// Track the most recent frame index seen so far
		if (((int) msg->cameras.at(iii)) > latestFrame) {
			latestFrame = msg->cameras.at(iii);
		}
		
		// Skip projections already stored for this (track, camera) pair
		bool alreadyAdded = false;
		
		for (unsigned int jjj = 0; jjj < featureTrackVector.at(msg->indices.at(iii)).locations.size(); jjj++) {
			if (featureTrackVector.at(msg->indices.at(iii)).locations.at(jjj).imageIndex == ((int) msg->cameras.at(iii))) {
				alreadyAdded = true;
				break;
			}
		}
		
		if (!alreadyAdded) {
			cv::Point2f proj(((float) msg->projections_x.at(iii)), ((float) msg->projections_y.at(iii)));
			
			indexedFeature newFeature(msg->cameras.at(iii), proj);
			
			featureTrackVector.at(msg->indices.at(iii)).addFeature(newFeature);
		}
		
	}
	
	if (configData.timeDebug) trackHandlingTime.stopRecording();
	
	main_mutex.unlock();
}

/*! \brief	Timer-driven main processing loop for the SLAM node.
 *
 *	Progresses through a sequence of gated stages, each guarded by a state flag:
 *	  1. one-off display refresh on the very first iteration (firstIteration);
 *	  2. wait until camera intrinsics have been received (infoProcessed);
 *	  3. search for a valid starting keyframe pair (structureValid);
 *	  4. triangulate the initial structure from that pair (structureFormed);
 *	  5. incrementally process every frame up to the newest one received.
 *	All access to shared state is serialized through main_mutex; the callback
 *	returns early whenever a stage is not yet ready so the timer can re-fire.
*/
void slamNode::main_loop(const ros::TimerEvent& event) {
	
	// In keyframe-evaluation mode, stop doing any work once the evaluation
	// summary has been written out (user is expected to Ctrl+C).
	if (configData.keyframeEvaluationMode && evaluationCompleted) {
		return;
	}
	
	// One-time display initialization under the lock
	if (firstIteration) {
		main_mutex.lock();
		update_display();
		main_mutex.unlock();
		firstIteration = false;
	}
	
	// Cannot do anything until camera intrinsics have arrived (see handle_info)
	if (!infoProcessed) {
		//printf("%s << info not processed!", __FUNCTION__);
		return;
	}
	
	// Stage: find the initial keyframe pair. In evaluation mode, wait until
	// enough frames (maxInitializationFrames) have accumulated before testing.
	if (!structureValid) {
		if ((!configData.keyframeEvaluationMode) || (latestFrame >= configData.maxInitializationFrames)) {
			main_mutex.lock();
			structureValid = findStartingFrames();
			main_mutex.unlock();
			return;
		} else if (!f1 && configData.keyframeEvaluationMode) {
			// f1 ensures this notice is only printed once
			f1 = true;
			ROS_INFO("Waiting for sufficient frames for evaluation...");
		}
		
	}
	
	// Stage: form the initial 3D structure from the chosen keyframe pair
	if (!structureValid) {
		return;
	} else if (!structureFormed) {
		main_mutex.lock();
		structureFormed = formInitialStructure();
		// Frames up to (but excluding) the current pose now have pose estimates
		putativelyEstimatedFrames = currentPoseIndex-1;
		main_mutex.unlock();
	}

	if (!structureFormed) {
		return;
	}
	
	
	
		
	// Stage: catch up to the most recently received frame, one pose at a time
	//while ((currentPoseIndex < latestFrame) && (keyframe_store.keyframes.size() <= 7)) {
	while (currentPoseIndex < latestFrame) {
		main_mutex.lock();
		
		processNextFrame();
		
		// NOTE(review): this branch is currently a no-op (refreshPoses() call
		// is commented out); it fires when the current pose is the newest keyframe.
		if (currentPoseIndex == ((int) keyframe_store.keyframes.at(keyframe_store.keyframes.size()-1).idx)) {
			//refreshPoses();
		}
		
		/*
		if (keyframe_store.keyframes.size() == 3) {
			adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 20);
		} else if (keyframe_store.keyframes.size() == 4) {
			adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 10);
		} else if (keyframe_store.keyframes.size() == 5) {
			adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 5);
		} else {
			adjustFullSystem(featureTrackVector, configData.cameraData, ACM, currentPoseIndex, 3);
		}
		*/
		
		//update_display();
		
		//show_poses();
		
		main_mutex.unlock();
	}
	
	// Disabled: global PnP-based camera refresh once enough keyframes exist
	if (keyframe_store.keyframes.size() > 3) {
		
		//update_cameras_to_pnp();
		
	}
	
	//bool doneRefinement = false;
	
	// Disabled: opportunistic full-system bundle adjustment while idle
	// (i.e. when processing has caught up with the latest frame).
	//while (0) {
	//while (currentPoseIndex == latestFrame) {
	//if ((keyframe_store.keyframes.size() > 3) && (!doneRefinement)) {
	//printf("%s << currentPoseIndex = %d (%d)", __FUNCTION__, currentPoseIndex, latestFrame);
	if (0) { // ((elapsedTime > 5000.0) && (currentPoseIndex == latestFrame)) {
		
		//main_mutex.lock();
		
		if (keyframe_store.keyframes.size() > 2) {
			
			unsigned int starting_cam = keyframe_store.keyframes.at(0).idx; //((unsigned int) max(0, ((int) currentPoseIndex-50)));
			
			// Collect every camera index with a valid (4-row) pose estimate
			vector<unsigned int> allIndices;
			for (int iii = starting_cam; iii <= currentPoseIndex; iii++) {
				if (ACM[iii].rows == 4) {
					allIndices.push_back(iii);
				}
			}
			
			double avgError;
			
			ROS_INFO("About to full-system adjust (%d)", currentPoseIndex);
			
			avgError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, allIndices, 5, true);

			ROS_INFO("Full-system (%d) adjustment error = %f (k = %d)", currentPoseIndex, avgError, ((int)allIndices.size()));
	
			//avgError = adjustFullSystem(featureTrackVector, configData.cameraData, ACM, starting_cam, currentPoseIndex, 1);
			
			//retrieveFullSystem(fullsys, ACM, featureTrackVector, currentPoseIndex);
			//printf("%s::%s << Full-sys can be optimized to: %f", __PROGRAM__, __FUNCTION__, avgError);
			
			update_display();
			
			//doneRefinement = true;
			
			/*
			// optimizeFullSystem(sys, featureTrackVector, configData.cameraData, ACM, currentPoseIndex);
			
			//keyframes_sem.lock();
			vector<unsigned int> keyframeIndices;
			for (unsigned int iii = 0; iii < keyframe_store.keyframes.size(); iii++) {
				keyframeIndices.push_back(keyframe_store.keyframes.at(iii).idx);
				
			}
			
			//printf("%s << keyframeIndices.size() = %d", __FUNCTION__, keyframeIndices.size());
			
			//keyframes_sem.unlock();
			double avgError;
			
			
			assignPartialSystem(sys, featureTrackVector, configData.cameraData, ACM, keyframeIndices, true);
			
			printf("%s << sys.nodes() = %d; sys.tracks() = %d", __FUNCTION__, sys.nodes.size(), sys.tracks.size());
			
			avgError = optimizeSystem(sys, 1e-4, 1 );
			retrievePartialSystem(sys, ACM, featureTrackVector, keyframeIndices);
			
			
			//retrieveFullSystem(sys, ACM, featureTrackVector, keyframe_store.keyframes.at(0).idx);
			printf("%s::%s << Optimizing full system while waiting... (%f)", __PROGRAM__, __FUNCTION__, avgError);
			*/
		}
		
		//main_mutex.unlock();

		
	}
	
	/*
	if (currentPoseIndex < latestFrame) {
		printf("%s << DEBUG [%d]", __FUNCTION__, 12);
		repetitionNoted = false;
		processNextFrame();
		printf("%s << DEBUG [%d]", __FUNCTION__, 13);
	} else {
		if (!repetitionNoted) {
			printf("%s << CurrentPose at latestFrame [%d]", __FUNCTION__, latestFrame);
		} else {
			repetitionNoted = true;
		}
		
	}
	*/
	
}

/*! \brief	Camera info callback: republishes the display graph and, on first
 *			receipt, extracts camera intrinsics and distortion parameters.
 *
 *	\param info_msg	Incoming calibration message. K (row-major 3x3), image
 *					dimensions, the distortion model name and coefficients D
 *					are read; subsequent messages only trigger a graph redraw.
 *
 *	Side effects: fills configData.cameraData (K, K_inv, sizes, distCoeffs,
 *	blankCoeffs, newCamMat), sets infoProcessed, registers the fixed origin
 *	camera with the display system and publishes an identity starting pose.
*/
void slamNode::handle_info(const sensor_msgs::CameraInfoConstPtr& info_msg) {
	
	drawGraph2(display_sys, camera_pub, points_pub, path_pub, decimation, bicolor);
	
	if (!infoProcessed) {
		
		ROS_INFO("Handling camera info.");
		
		try	{
			
			// Intrinsic matrix: message stores K row-major as a flat 9-array
			configData.cameraData.K = cv::Mat::eye(3, 3, CV_64FC1);
			
			for (unsigned int mmm = 0; mmm < 3; mmm++) {
				for (unsigned int nnn = 0; nnn < 3; nnn++) {
					configData.cameraData.K.at<double>(mmm, nnn) = info_msg->K[3*mmm + nnn];
				}
			}
			
			cout << configData.cameraData.K << endl;
			
			configData.cameraData.K_inv = configData.cameraData.K.inv();
			
			configData.cameraData.cameraSize.width = info_msg->width;
			configData.cameraData.cameraSize.height = info_msg->height;
			
			// Number of distortion coefficients implied by the model name:
			// plumb_bob uses 5, rational_polynomial uses 8.
			unsigned int maxDistortionIndex;
			if (info_msg->distortion_model == "plumb_bob") {
				maxDistortionIndex = 5;
			} else {
				
				if (info_msg->distortion_model != "rational_polynomial") {
					ROS_ERROR("Unfamiliar with <info_msg->distortion_model> of (%s)", info_msg->distortion_model.c_str());
				}
				
				maxDistortionIndex = 8;
			}
			
			configData.cameraData.distCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
			configData.cameraData.blankCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
			
			// BUGFIX: only copy as many coefficients as the message actually
			// carries. D may be shorter than the model implies (or empty),
			// and reading past its end is undefined behaviour that the
			// surrounding catch(...) cannot intercept. Missing entries stay 0.
			unsigned int availableCoeffs = maxDistortionIndex;
			if (info_msg->D.size() < ((size_t) availableCoeffs)) {
				ROS_WARN("Distortion model implies %u coefficients but message only carries %d.", maxDistortionIndex, ((int) info_msg->D.size()));
				availableCoeffs = (unsigned int) info_msg->D.size();
			}
			
			for (unsigned int iii = 0; iii < availableCoeffs; iii++) {
				configData.cameraData.distCoeffs.at<double>(0, iii) = info_msg->D[iii];
			}
			
			cout << configData.cameraData.distCoeffs << endl;
			
			configData.cameraData.newCamMat = cv::Mat::zeros(3, 3, CV_64FC1);
			
			cv::Rect* validPixROI = 0;
			
			// alpha = 0: all undistorted pixels are valid (crops black borders)
			double alpha = 0.00;
			bool centerPrincipalPoint = true;
			
			configData.cameraData.newCamMat = getOptimalNewCameraMatrix(configData.cameraData.K, configData.cameraData.distCoeffs, configData.cameraData.cameraSize, alpha, configData.cameraData.cameraSize, validPixROI, centerPrincipalPoint);
			
			cout << configData.cameraData.newCamMat << endl;
			
			infoProcessed = true;
			
			// Anchor the display graph with a fixed camera at the origin
			addFixedCamera(display_sys, configData.cameraData, eye4);
			
			drawGraph2(display_sys, camera_pub, points_pub, path_pub, decimation, bicolor);
			
			// Publish an identity pose as the starting pose estimate
			assignPose(currentPose, eye4);
			pose_pub.publish(currentPose);
			
		} catch (...) {
			ROS_ERROR("Some failure in reading in the camera parameters...");
		}
		
		ROS_INFO("Camera information processed.");
		
	}
	
}

/*! \brief	Intended to re-estimate keyframe poses from triangulated points;
 *			every candidate implementation is currently disabled, so this is
 *			effectively a no-op.
 *
 *	A previous revision re-estimated each keyframe camera via
 *	estimatePoseFromKnownPoints(ACM[idx], configData.cameraData,
 *	featureTrackVector, idx, eye4); that call remains disabled pending
 *	validation against the bundle-adjusted poses.
*/
void slamNode::refreshPoses() {
	
	for (unsigned int kfIdx = 0; kfIdx < keyframe_store.keyframes.size(); kfIdx++) {
		// Intentionally empty: per-keyframe pose refresh is disabled.
		//estimatePoseFromKnownPoints(ACM[keyframe_store.keyframes.at(kfIdx).idx], configData.cameraData, featureTrackVector, keyframe_store.keyframes.at(kfIdx).idx, eye4);
	}
	
}

/*! \brief	Shutdown hook invoked before node termination; no cleanup is
 *			currently required. */
void slamNode::prepareForTermination() { }

/*! \brief	Searches received frames for a keyframe pair suitable to
 *			initialize the 3D structure.
 *
 *	For each candidate later-frame index, a random subset (capped at
 *	maxTestsPerFrame) of sufficiently-separated earlier frames is scored via
 *	testKeyframePair(); scores and tested-flags are cached in
 *	keyframeTestScores / keyframeTestFlags so no pair is evaluated twice.
 *	In keyframeEvaluationMode every pair is additionally reconstructed,
 *	shown to the user for a y/n verdict, and logged to the evaluation file.
 *
 *	\return	true once a pair scoring at least minStartupScore has been found
 *			and assigned as the starting frames (always false in evaluation
 *			mode, which never initializes the live system).
*/
bool slamNode::findStartingFrames() {
	
	//ROS_INFO("Finding initial keyframes (latest frame = %d)", latestFrame);
	
	//printf("%s::%s << Finding initial keyframes...", __PROGRAM__, __FUNCTION__);
	
	bool foundStartingPair = false;
	
	// Need at least two frames before any pair can be tested
	if (latestFrame < 1) {
		return foundStartingPair;
	}

	double keyframe_scores[5];
	cv::Mat startingTrans;
	
	// Colour bar shown to the user in evaluation mode (green=ready, red=busy)
	cv::Mat blankMat = cv::Mat::zeros(80, 640, CV_8UC3);
	
	if ((configData.keyframeEvaluationMode) && (!evaluationStream.is_open())) {
		ROS_WARN("Opening evaluation summary stream...");
		evaluationStream.open(configData.evaluationFile.c_str(), ios::out | ios::app);
	}

	// jjj: candidate *later* frame of the pair
	for (int jjj = configData.minStartingSeparation; jjj < min(latestFrame+1, configData.maxInitializationFrames); jjj++) {
		
		//ROS_INFO("Checking up to frame (%d) : (%d /%d)", jjj, latestFrame+1, configData.maxInitializationFrames);
		
		// Candidate *earlier* frames, constrained to [jjj-max, jjj-min) separation
		vector<unsigned int> startersToTest;
		
		for (int iii = max(0, ((int)jjj)-configData.maxStartingSeparation); iii < jjj-configData.minStartingSeparation; iii++) {
			startersToTest.push_back(iii);
		}
		
		
		// Randomly prune candidates down to the per-frame testing budget
		while (((int)startersToTest.size()) > configData.maxTestsPerFrame) {
			
			unsigned int randIndex = rand() % startersToTest.size();
			startersToTest.erase(startersToTest.begin() + randIndex);
			
		}
		
		
		//#pragma omp parallel for
		for (unsigned int iii = 0; iii < startersToTest.size(); iii++) {
			
			// In live mode, stop testing as soon as a valid pair is found
			if ((foundStartingPair) && (!configData.keyframeEvaluationMode)) {
				break;
			}
			
			// Skip pairs already tested (flag matrix caches results)
			if (keyframeTestFlags.at<unsigned char>(startersToTest.at(iii),jjj) == 0) {

				vector<unsigned int> activeTracks;
				getActiveTracks(activeTracks, featureTrackVector, startersToTest.at(iii), jjj);

				//ROS_ERROR("About to test frames (%d) & (%d)", startersToTest.at(iii), jjj);
				startingTrans = cv::Mat();
				keyframeTestScores.at<double>(startersToTest.at(iii),jjj) = testKeyframePair(featureTrackVector, configData.cameraData, scorecardParams, startersToTest.at(iii), jjj, keyframe_scores, startingTrans, true /*configData.keyframeEvaluationMode*/, true);
				//ROS_INFO("Frames tested");
				
				if (keyframeTestScores.at<double>(startersToTest.at(iii),jjj) > 0.0) {
					//ROS_INFO("score (%d) -> (%d) = (%f)", startersToTest.at(iii), jjj, keyframeTestScores.at<double>(startersToTest.at(iii),jjj));
				}
				
				// Evaluation mode: fully reconstruct the pair, let the user
				// judge the result visually, and log a summary line.
				if (configData.keyframeEvaluationMode) {
					
					clearSystem();
					
					bool result = true;
					char outputString[256];
					
					ROS_INFO("Assigning as starting frames (%d, %d)...", startersToTest.at(iii), jjj);
					assignStartingFrames(startersToTest.at(iii), jjj, keyframe_scores, startingTrans);
					ROS_INFO("Starting frames assigned.");
					
					if (startingTrans.rows > 0) {
						
						//cout << "startingTrans = " << startingTrans << endl;
						formInitialStructure();
						
						ROS_INFO("Initial structure formed.");
						
						char response = ' ';
						
						// Green bar: awaiting the user's y/n verdict
						blankMat = cv::Mat(blankMat.size(), blankMat.type(), CV_RGB(0,255,0));
						
						//while (0) {
						while ((response != 'y') && (response != 'n')) {
							cv::imshow("readybar", blankMat);
							response = cv::waitKey();
						}
						
						// Red bar: processing the next candidate
						blankMat = cv::Mat(blankMat.size(), blankMat.type(), CV_RGB(255,0,0));
						cv::imshow("readybar", blankMat);
						cv::waitKey(1);
						
						if (response == 'y') {
							result = true;
						} else {
							result = false;
						}
					} else {
						ROS_INFO("Initial starting transformation invalid, returning failure.");
						result = false;
					}
					
					
					
					// Record: pair indices, 5 criterion scores, overall score, verdict
					ROS_INFO("Preparing string...");
					sprintf(outputString, "%06d %06d %+02.2f %+02.2f %+02.2f %+02.2f %+02.2f %1.2f %d", startersToTest.at(iii), jjj, keyframe_scores[0], keyframe_scores[1], keyframe_scores[2], keyframe_scores[3], keyframe_scores[4], keyframeTestScores.at<double>(startersToTest.at(iii),jjj), (result ? 1 : 0));
					ROS_INFO("String written.");
					/*
					evaluationStream << startersToTest.at(iii) << " " << jjj;
					evaluationStream << " " << keyframe_scores[0];
					evaluationStream << " " << keyframe_scores[1];
					evaluationStream << " " << keyframe_scores[2];
					evaluationStream << " " << keyframe_scores[3];
					evaluationStream << " " << keyframe_scores[4];
					evaluationStream << " " << 1 << endl;
					*/
					
					evaluationStream << outputString << endl;
				}
				
				
				// Mark the pair as tested regardless of outcome
				keyframeTestFlags.at<unsigned char>(startersToTest.at(iii),jjj) = 1;

				if (keyframeTestScores.at<double>(startersToTest.at(iii),jjj) >= configData.minStartupScore) {
					//ROS_INFO("Valid keyframe pair found at (%d, %d) [%f] > [%f]", startersToTest.at(iii), jjj, keyframeTestScores.at<double>(startersToTest.at(iii),jjj), configData.minStartupScore);
					
					foundStartingPair = true;

				} 
				
				ROS_INFO("Keyframe pair (%03d, %03d) initialization score = [%f] {%1.2f, %1.2f, %1.2f, %1.2f, %1.2f}", startersToTest.at(iii), jjj, keyframeTestScores.at<double>(startersToTest.at(iii),jjj), keyframe_scores[0], keyframe_scores[1], keyframe_scores[2], keyframe_scores[3], keyframe_scores[4]);
				
			}

		}
		
		if ((foundStartingPair) && (!configData.keyframeEvaluationMode)) {
			break;
		}
		
	}
	
	// Evaluation mode never initializes the live system: finalize the log,
	// show a blue "done" bar and report failure so main_loop stops retrying.
	if (configData.keyframeEvaluationMode) {
		evaluationStream.close();
		evaluationCompleted = true;
		ROS_INFO("Keyframe evaluation results finalize. (CTRL + C) to terminate.");
		
		blankMat = cv::Mat(blankMat.size(), blankMat.type(), CV_RGB(0,0,255));
		cv::imshow("readybar", blankMat);
		cv::waitKey();
		return false;
	}
	
	if (foundStartingPair) {
		
		// Choose the globally best-scoring pair from the score matrix
		// (row = earlier frame index, column = later frame index)
		double a, b;
		cv::Point min_coord, max_coord;
		unsigned int best_iii, best_jjj;
		minMaxLoc(keyframeTestScores, &a, &b, &min_coord, &max_coord); //, keyframeTestFlags);
		
		best_iii = max_coord.y;
		best_jjj = max_coord.x;
		
		// startingTrans is recomputed for the chosen pair inside
		// assignStartingFrames, so its stale value here is harmless.
		assignStartingFrames(best_iii, best_jjj, keyframe_scores, startingTrans);
		
		//nextFrame = best_jjj+1;
	}
	
	return foundStartingPair;
	
	
}

void slamNode::clearSystem() {
	
	keyframe_store.connections.clear();
	keyframe_store.keyframes.clear();
	keyframe_store.count = 0;
	
	for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
		featureTrackVector.at(iii).isTriangulated = false;
	}
	
	int finalIndex;
	
	if (configData.keyframeEvaluationMode) {
		finalIndex = configData.maxInitializationFrames + 1;
	} else {
		finalIndex = latestFrame + 1;
	}
	for (int iii = 0; iii < finalIndex; iii++) {
		ACM[iii] = cv::Mat();
	}
	
}

/*! \brief	Registers the chosen starting image pair as the first two keyframes
 *			and seeds the camera matrices for the initial reconstruction.
 *
 *	\param best_iii			Image index of the earlier starting frame.
 *	\param best_jjj			Image index of the later starting frame.
 *	\param keyframe_scores	Output array of 5 criterion scores, filled by
 *							testKeyframePair().
 *	\param startingTrans	Output: estimated relative transform of the later
 *							frame; copied into ACM[best_jjj].
 *	\return	The overall keyframe-pair score.
*/
double slamNode::assignStartingFrames(unsigned int best_iii, unsigned int best_jjj, double* keyframe_scores, cv::Mat& startingTrans) {
	
	// ROS_INFO rather than printf for consistency with the rest of the node
	// (the original printf also lacked a trailing newline, garbling output),
	// and %u to match the unsigned arguments.
	ROS_INFO("%s << Adding (%u) & (%u) to keyframe store... (already (%d) large)", __FUNCTION__, best_iii, best_jjj, ((int)keyframe_store.keyframes.size()));
	
	keyframe_store.addKeyframe(best_iii, blank);
	keyframe_store.addKeyframe(best_jjj, blank);
	
	// Re-score the pair to (re)compute the relative transform for this exact pair
	double kfScore = testKeyframePair(featureTrackVector, configData.cameraData, scorecardParams, best_iii, best_jjj, keyframe_scores, startingTrans, configData.keyframeEvaluationMode, true);
	
	// The earlier frame anchors the world frame; the later one takes the
	// freshly estimated relative transform.
	ACM[best_iii] = cv::Mat::eye(4, 4, CV_64FC1);
	startingTrans.copyTo(ACM[best_jjj]);
	
	ROS_INFO("kfScore = %f (%u, %u)", kfScore, best_iii, best_jjj);
	cout << ACM[best_iii] << endl;
	cout << ACM[best_jjj] << endl;
	
	// Link the two new keyframes with a geometric (fundamental-matrix) connection
	keyframe_store.addConnection(keyframe_store.count-2, keyframe_store.count-1, KF_CONNECTION_GEOMETRIC, F_arr[best_iii]);
	
	return kfScore;
	
}

/*! \brief	Triangulates the initial 3D structure from the starting keyframe pair.
 *
 *	Steps:
 *	  1. two-view reconstruction between the pair's images, seeding the point
 *		 cloud and both keyframe cameras;
 *	  2. pairwise re-scoring and keyframe bundle adjustment of the two cameras;
 *	  3. PnP (RANSAC) pose estimation for every intermediate frame against the
 *		 seeded cloud, all folded into an SBA subsystem;
 *	  4. subsystem optimization, rescaled before and after to pin the baseline;
 *	  5. retrieval of optimized cameras/points, triangulation of any newly
 *		 eligible tracks, and a final keyframe bundle adjustment.
 *	On success currentPoseIndex is advanced to the later keyframe's image index.
 *
 *	\return	true if the initial structure was formed; false if the two-view
 *			reconstruction failed.
 *
 *	NOTE: ~200 lines of unreachable code that followed the final return in the
 *	original implementation (including a blocking cin.get()) have been removed,
 *	along with dead if(1)/if(currentPoseIndex > 60) wrappers.
*/
bool slamNode::formInitialStructure() {
	
	double keyframe_scores[5];
	cv::Mat startingTrans;
	bool formedInitialStructure = false;

	// The base connection links the two keyframes chosen at initialization
	int keyframe_idx_1 = keyframe_store.connections.at(baseConnectionNum).idx1;
	int keyframe_idx_2 = keyframe_store.connections.at(baseConnectionNum).idx2;
	
	unsigned int image_idx_1 = keyframe_store.keyframes.at(keyframe_idx_1).idx;
	unsigned int image_idx_2 = keyframe_store.keyframes.at(keyframe_idx_2).idx;
	
	lastBasePose = image_idx_1;
	
	// 2D correspondences between the two keyframe images
	vector<cv::Point2f> pts1, pts2;
	getPointsFromTracks(featureTrackVector, pts1, pts2, image_idx_1, image_idx_2);
	
	vector<unsigned int> activeTrackIndices, fullSpanIndices, triangulatedIndices;
		
	getActiveTracks(activeTrackIndices, featureTrackVector, image_idx_1, image_idx_2);
	filterToCompleteTracks(fullSpanIndices, activeTrackIndices, featureTrackVector, image_idx_1, image_idx_2);
	
	startingTracksCount = fullSpanIndices.size();
	
	vector<cv::Point3d> ptsInCloud;
	cv::Mat P1, R1, t1;
	cv::Mat Rvec, C;
	vector<cv::Point2f> correspPoints;

	// Two-view reconstruction seeds the cloud and the two keyframe cameras
	bool worked = reconstructFreshSubsequencePair(featureTrackVector, ptsInCloud, triangulatedIndices, ACM[image_idx_1], ACM[image_idx_2], configData.cameraData, image_idx_1, image_idx_2);
	
	if (!worked) {
		ROS_ERROR("2-Frame reconstruction didn't work.");
	}
	
	// Re-score the pair to obtain the starting relative transform
	double keyframeError;
	keyframeError = testKeyframePair(featureTrackVector, configData.cameraData, scorecardParams, image_idx_1, image_idx_2, keyframe_scores, startingTrans, configData.keyframeEvaluationMode);
	
	startingTrans.copyTo(ACM[image_idx_2]);
	
	// Bundle-adjust just the two keyframe cameras
	vector<unsigned int> adjustableKeyframeIndices;
	
	adjustableKeyframeIndices.push_back(image_idx_1);
	adjustableKeyframeIndices.push_back(image_idx_2);
	
	keyframeError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, adjustableKeyframeIndices, configData.initialStructureIterations, false, false, 1);
	ROS_INFO("Adjusted error: %f", keyframeError);
	
	if (!worked) {
		ROS_ERROR("Reconstruction of fresh subsequence pair failed.");
		return false;
	}
	
	ROS_INFO("Getting active 3d points...");
	
	getActive3dPoints(featureTrackVector, triangulatedIndices, ptsInCloud);
	
	ROS_INFO("Points acquired. GT.");
	
	// Number of frames between the two keyframes (exclusive of the first)
	int relativeIndex = image_idx_2-image_idx_1;
	
	printf("%s << DEBUG (%04d)\n", __FUNCTION__, 0);
	
	// Assemble the SBA subsystem: cloud points + the two keyframe cameras,
	// with the first camera held fixed as the gauge.
	SysSBA subsys;
	addPointsToSBA(subsys, ptsInCloud);
	
	printf("%s << DEBUG (%04d)\n", __FUNCTION__, 1);
	
	addFixedCamera(subsys, configData.cameraData, ACM[image_idx_1]);
	
	printf("%s << DEBUG (%04d)\n", __FUNCTION__, 2);
	
	addNewCamera(subsys, configData.cameraData, ACM[image_idx_2]);
	
	printf("%s << DEBUG (%04d)\n", __FUNCTION__, 3);
	
	subsys.nFixed = 1;
	
	addProjectionsToSBA(subsys, pts1, 0);	// keyframe_idx_1
	
	printf("%s << DEBUG (%04d)\n", __FUNCTION__, 4);
	
	addProjectionsToSBA(subsys, pts2, 1);
	
	ROS_INFO("Going through relative indices...");
	
	// Estimate each intermediate camera via PnP against the seeded cloud and
	// add it (plus its projections) to the subsystem.
	for (int jjj = 1; jjj < relativeIndex; jjj++) {
		
		vector<cv::Point2f> latestPoints;
		
		printf("%s << About to get corresponding points (%d)...\n", __FUNCTION__, jjj);
		getCorrespondingPoints(featureTrackVector, pts1, latestPoints, image_idx_1, image_idx_1+jjj);
		printf("%s << Corresponding points acquired (%d)\n", __FUNCTION__, jjj);
		
		// Convert the double-precision cloud to the float points PnP expects
		vector<cv::Point3f> objectPoints;
		cv::Point3f tmpPt;
		
		for (unsigned int kkk = 0; kkk < ptsInCloud.size(); kkk++) {
			tmpPt = cv::Point3f((float) ptsInCloud.at(kkk).x, (float) ptsInCloud.at(kkk).y, (float) ptsInCloud.at(kkk).z);
			objectPoints.push_back(tmpPt);
		}
		
		cv::Mat t, R, Rvec;
		
		printf("%s << DEBUG (%03d)\n", __FUNCTION__, 0);
		
		printf("%s << Solving PnP... (%d) (oP.size() = %d; lP.size() = %d)\n", __FUNCTION__, jjj, ((int)objectPoints.size()), ((int)latestPoints.size()));
		
		// PnP requires a one-to-one 3D/2D correspondence
		if (objectPoints.size() != latestPoints.size()) {
			ROS_ERROR("Unable to find proper corresponding points!");
			continue;
		}
		
		solvePnPRansac(objectPoints, latestPoints, configData.cameraData.K, configData.cameraData.blankCoeffs, Rvec, t);
		
		printf("%s << DEBUG (%03d)\n", __FUNCTION__, 1);
		
		Rodrigues(Rvec, R);
		
		cv::Mat newCam;
		cv::Mat T;
		composeTransform(R, t, T);
		transformationToProjection(T, newCam);
		
		// Invert the camera "learned" by PnP, since it always seems to need
		// to be inverted when passed to and from SBA.
		cv::Mat newCamC;
		projectionToTransformation(newCam, newCamC);
		newCamC = newCamC.inv();
		transformationToProjection(newCamC, newCam);
		
		addNewCamera(subsys, configData.cameraData, newCam);
		
		addProjectionsToSBA(subsys, latestPoints, jjj+1);
		
		printf("%s << DEBUG (%03d)\n", __FUNCTION__, 2);
		
	}
	
	// Optimize the subsystem; rescale before and after so the baseline between
	// cameras 0 and 1 keeps unit length (fixes the monocular scale gauge).
	subsys.nFixed = 1;
	ROS_INFO("About to rescale (1)");
	rescaleSBA(subsys, 0, 1);
	ROS_INFO("About to optimize between rescalings...");
	double avgError = optimizeSystem(subsys, 1e-4, configData.subsequenceIterations);
	ROS_INFO("About to rescale (2)");
	rescaleSBA(subsys, 0, 1);
	ROS_INFO("Rescaling done");
	
	if (avgError < 0.0) {
		ROS_ERROR("Subsystem optimization failed to converge..");
	}
	
	// Retrieve the optimized cameras back into the absolute camera matrices
	retrieveCameraPose(subsys, 0, ACM[image_idx_1]);
	retrieveCameraPose(subsys, 1, ACM[image_idx_2]);
	
	#pragma omp parallel for
	for (unsigned int ttt = 1; ttt < image_idx_2-image_idx_1; ttt++) {
		retrieveCameraPose(subsys, ttt+1, ACM[image_idx_1+ttt]);
	}
	
	// Retrieve the optimized cloud and push it back into the feature tracks
	ptsInCloud.clear();
	retrieveAllPoints(ptsInCloud, subsys);
	
	updateTriangulatedPoints(featureTrackVector, triangulatedIndices, ptsInCloud);
	
	// Bundle-adjust across every frame of the initialization span
	vector<unsigned int> basisNodes;
	
	for (unsigned int iii = image_idx_1; iii <= image_idx_2; iii++) {
		basisNodes.push_back(iii);
	}
	
	keyframeError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, 1);
	
	// Triangulate any remaining tracks that became eligible over this span
	{
		// Local scope: this triangulatedIndices deliberately shadows the outer one
		vector<unsigned int> triangulatedIndices, untriangulatedIndices;
		findRelevantIndices(featureTrackVector, triangulatedIndices, untriangulatedIndices, image_idx_1, image_idx_2);
		
		if (untriangulatedIndices.size() > 0) {
			
			vector<unsigned int> triangulatableIndices;
			findTriangulatableTracks3(featureTrackVector, triangulatableIndices, image_idx_2, configData.framesForTriangulation);
			
			if (triangulatableIndices.size() > 0) {
				triangulateTracks(featureTrackVector, triangulatableIndices, configData.cameraData, ACM, image_idx_1, image_idx_2);
			}
			
		}
	}
	
	// Final adjustment including the newly triangulated points
	keyframeError = keyframeBundleAdjustment(configData.cameraData, featureTrackVector, ACM, basisNodes, configData.keyframeIterations, false, false, 1);
	
	currentPoseIndex = image_idx_2;
	formedInitialStructure = true;
	
	update_display();
	
	return formedInitialStructure;
	
}

void slamNode::assignPose(geometry_msgs::PoseStamped& pPose, cv::Mat& C) {
	// Converts the 4x4 camera transform <C> into a stamped ROS pose,
	// remapping axes so that the trajectory displays correctly in rviz.
	pPose.header.seq = currentPoseIndex;
	pPose.header.stamp = ros::Time::now();
	
	cv::Mat R, t;
	Quaterniond Q;
	decomposeTransform(C, R, t);
	matrixToQuaternion(R, Q);
	
	// Axis remap determined empirically (tried 1,0,2; 1,2,0; 0,2,1; 2,0,1; 2,1,0; 0,1,2):
	// x corresponds to graph -x; y to graph -z; z to graph -y
	pPose.pose.position.x = t.at<double>(2,0);
	pPose.pose.position.y = -t.at<double>(0,0);
	pPose.pose.position.z = -t.at<double>(1,0);
	
	// Zero out runaway position estimates so rviz remains usable.
	// FIX: the original used unqualified abs(), which for double arguments can
	// resolve to the integer overload (truncating e.g. 0.9 to 0 before the
	// comparison); fabs() performs the intended floating-point test.
	if (fabs(pPose.pose.position.x) > MAX_RVIZ_DISPLACEMENT) {
		pPose.pose.position.x = 0.0;
	}

	if (fabs(pPose.pose.position.y) > MAX_RVIZ_DISPLACEMENT) {
		pPose.pose.position.y = 0.0;
	}
	
	if (fabs(pPose.pose.position.z) > MAX_RVIZ_DISPLACEMENT) {
		pPose.pose.position.z = 0.0;
	}
	
	//printf("%s << QUAT = (%f, %f, %f, %f)", __FUNCTION__, Q.x(), Q.y(), Q.z(), Q.w());
	
	// Quaternion components remapped to match the position axis convention
	// above (tried x,y,z,w).
	pPose.pose.orientation.x = Q.z();
	pPose.pose.orientation.y = -Q.x();
	pPose.pose.orientation.z = -Q.y();
	pPose.pose.orientation.w = Q.w();
}

void slamNode::serverCallback(thermalvis::monoslamConfig &config, uint32_t level) {
    
    // Mirror every dynamic-reconfigure parameter into the node's stored
    // configuration so runtime changes take effect on subsequent cycles.
    // (Assignments are independent; grouped here purely for readability.)
    
    configData.flowback              = config.flowback;
    configData.verboseMode           = config.verboseMode;
    configData.timeDebug             = config.timeDebug;
    configData.timeSpacing           = config.timeSpacing;
    
    configData.motionThreshold       = config.motionThreshold;
    configData.minTrackedFeatures    = config.minTrackedFeatures;
    configData.requiredTrackFrac     = config.requiredTrackFrac;
    configData.maxGuides             = config.maxGuides;
    
    configData.keyframeIterations    = config.keyframeIterations;
    configData.keyframeSpacing       = config.keyframeSpacing;
    configData.maxKeyframeSeparation = config.maxKeyframeSeparation;
    configData.minKeyframeScore      = config.minKeyframeScore;
    
    configData.minStartupScore       = config.minStartupScore;
    configData.minGeometryScore      = config.minGeometryScore;
    configData.framesForTriangulation     = config.framesForTriangulation;
    configData.initialStructureIterations = config.initialStructureIterations;
    
    configData.poseEstimateIterations = config.poseEstimateIterations;
    configData.fullSystemIterations   = config.fullSystemIterations;
    configData.subsequenceIterations  = config.subsequenceIterations;
    configData.adjustmentFrames       = config.adjustmentFrames;
    configData.camerasPerSys          = config.camerasPerSys;
    configData.min3dPtsForPnP         = config.min3dPtsForPnP;
    
    //ROS_WARN("Switched keyframe evaluation to (%d)", configData.keyframeEvaluationMode ? 1 : 0);
	
}

slamNode::slamNode(ros::NodeHandle& nh, slamData startupData) {
	
	// Node constructor: stores the startup configuration, loads the
	// initialization scorecard, initializes bookkeeping state, and wires up
	// the ROS publishers, subscribers, main-loop timer, and the
	// dynamic-reconfigure server.
	
	configData = startupData;
	
	// Scorecard table: INITIALIZATION_SCORING_PARAMETERS rows of 3 values
	// each, filled by processScorecard() below.
	// NOTE(review): allocated with new[] and not visibly released in this
	// file — presumably freed in the destructor; TODO confirm (a
	// std::vector would make this a non-issue).
	scorecardParams = new double*[INITIALIZATION_SCORING_PARAMETERS];
	
	for (int iii = 0; iii < INITIALIZATION_SCORING_PARAMETERS; iii++) {
		scorecardParams[iii] = new double[3];
	}
	
	processScorecard();
	
	// Cache the node name for building per-node topic names (see
	// pose_pub_name below).
	sprintf(nodeName, "%s", ros::this_node::getName().c_str());
	
	srand(time(NULL));
	
	evaluationCompleted = false;
	
	//cam_sem = Semaphore("cameras");
	//tracks_sem = Semaphore("tracks");
	//keyframes_sem = Semaphore("keyframes");
	
	putativelyEstimatedFrames = 0;
	
	firstIteration = true;
	
	// NOTE(review): these mutexes are constructor-local, so they are
	// destroyed when the constructor returns and cannot guard anything —
	// they look like they were intended to be (or shadow) class members;
	// verify against the class declaration.
	boost::mutex cam_mutex;
	boost::mutex tracks_mutex;
	boost::mutex keyframes_mutex;
	
	// Timestamp string (sec.nsec, zero-padded) used to build unique log
	// filenames below.
	char timeString[256];
	
	sprintf(timeString, "%010d.%09d", ros::Time::now().sec, ros::Time::now().nsec);
	//stringstream convert;
	//convert << ros::Time::now().sec << "." << ros::Time::now().nsec;
	
	// Evaluation log path: <read_addr>/nodes/monoslam/log/<time>-<node>.txt
	// (substr(1, ...) strips the leading '/' from the ROS node name).
	//configData.evaluationFile = configData.read_addr + "nodes/monoslam/log/" + convert.str() + ".txt";
	configData.evaluationFile = configData.read_addr + "nodes/monoslam/log/" + timeString + "-" + ros::this_node::getName().substr(1,ros::this_node::getName().size()-1) + ".txt";
	
	if (configData.keyframeEvaluationMode) {
		ROS_INFO("evaluationFile = (%s)", configData.evaluationFile.c_str());
	}
	
	// Visualization defaults.
	decimation = DEFAULT_DRAWGRAPH_DECIMATION;
	bicolor = DEFAULT_DRAWGRAPH_BICOLOR;
	
	display_sys.tracks.clear();
	display_sys.nodes.clear();
	
	// Reusable 4x4 identity (homogeneous transform).
	eye4 = cv::Mat::eye(4, 4, CV_64FC1);
	
	
	repetitionNoted = false;
	
	currentPoseIndex = -1;
	
	isTracking = true;
	
	baseConnectionNum = 0;
	
	lastBasePose = -1;
	
	latestFrame = -1;
	
	// rviz marker publishers for the estimated path, camera frusta and
	// triangulated points.
	path_pub = nh.advertise<visualization_msgs::Marker>( "path", 1 );
	camera_pub = nh.advertise<visualization_msgs::Marker>( "cameras", 1 );
	points_pub = nh.advertise<visualization_msgs::Marker>( "points", 1);
	
	
	
	structureValid = false;
	structureFormed = false;
	
	sys.verbose = 0;
	display_sys.verbose = 0;
	
	
	
	ROS_INFO("Setting up node.");
	
	infoProcessed = false;
	
	//std::string topic_info = nh.resolveName(configData.stream + "camera_info");
	
	// Input topics are namespaced under the configured stream prefix.
	std::string topic_tracks = configData.stream + "tracks";
	
	ROS_INFO("Connecting to tracks topic. %s", topic_tracks.c_str());
	tracks_sub = nh.subscribe<thermalvis::feature_tracks>(topic_tracks, 1, &slamNode::handle_tracks, this);
	
	std::string topic_info = configData.stream + "camera_info";
	
	ROS_INFO("Connecting to camera_info. %s", topic_info.c_str());
	
	info_sub = nh.subscribe<sensor_msgs::CameraInfo>(topic_info, 1, &slamNode::handle_info, this);
	
	
	
	ROS_INFO("Node setup.");
	
	// Pairwise keyframe scoring matrices used during initialization.
	keyframeTestScores = cv::Mat::zeros(configData.maxInitializationFrames, configData.maxInitializationFrames, CV_64FC1);
	keyframeTestFlags = cv::Mat::zeros(configData.maxInitializationFrames, configData.maxInitializationFrames, CV_8UC1);
	
	sprintf(pose_pub_name, "thermalvis%s/pose", nodeName);
	ROS_INFO("Configuring pose topic. %s", pose_pub_name);
	
	currentPose.header.frame_id = "/pgraph"; //pose_pub_name;
	pose_pub = nh.advertise<geometry_msgs::PoseStamped>(pose_pub_name, 1);
	
	// Main processing loop, driven at ~100 Hz.
	timer = nh.createTimer(ros::Duration(0.01), &slamNode::main_loop, this);
	
	//elapsedTime = timeElapsedMS(cycle_timer);
	
	if (configData.logErrors) {
		configData.errorFile = configData.read_addr + "nodes/monoslam/log/stability/" + timeString + "-" + ros::this_node::getName().substr(1,ros::this_node::getName().size()-1) + ".txt";
		ROS_INFO("Current debug filename: %s", configData.errorFile.c_str());
		error_file.open(configData.errorFile.c_str());
	}
	
	ROS_INFO("Establishing server callback...");
	f = boost::bind (&slamNode::serverCallback, this, _1, _2);
    server.setCallback (f);
	
}

timeAnalyzer::timeAnalyzer() {
	
	// Begin with an empty sample set and zeroed statistics.
	average = 0.0;
	sigma = 0.0;
	cycles = 0;
	
	// Reset the shared stopwatch so the first measurement starts from now.
	timeElapsedMS(cycle_timer, true);
}

void timeAnalyzer::calcParameters() {
	// Recomputes the mean and (population) standard deviation of the
	// recorded cycle times in vals[0..cycles-1]. No-op when no samples
	// have been recorded yet.
	if (cycles == 0) {
		return;
	}
	
	average = 0.0;
	sigma = 0.0;
	
	for (int iii = 0; iii < cycles; iii++) {
		average += vals[iii] / ((double) cycles);
	}
	
	// Accumulate the variance with a plain multiply, then take one square
	// root at the end. (Replaces the original pow(x, 2.0)/pow(sigma, 0.5)
	// calls, which invoke a general transcendental routine for what is just
	// a squaring and a sqrt.)
	for (int iii = 0; iii < cycles; iii++) {
		double diff = vals[iii] - average;
		sigma += (diff * diff) / ((double) cycles);
	}
	
	sigma = sqrt(sigma);
	
}

void timeAnalyzer::startRecording() {
	// Resets the shared stopwatch; the next stopRecording() call measures
	// the interval elapsed from this point.
	timeElapsedMS(cycle_timer, true);
}

void timeAnalyzer::stopRecording() {
	
	// Store the interval elapsed since the last start/stop as a new sample,
	// unless the fixed-size sample buffer (1024 entries) is already full.
	if (cycles >= 1024) {
		ROS_ERROR("Too many cycles (%d) / (%d)", cycles, 1024);
		return;
	}
	
	vals[cycles++] = timeElapsedMS(cycle_timer, true);
	
}


