/*! \file	videoslam.cpp
 *  \brief	Definitions for the VIDEOSLAM node.
*/

#include "videoslam.hpp"

#include <cmath>

struct timeval cycle_timer;
//double elapsedTime;

int main(int argc, char** argv) {	
	
	ROS_INFO("Node launched.");
	
	ros::init(argc, argv, "videoslam");
	
	ros::NodeHandle private_node_handle("~");
	
	videoslamData startupData;
		
	bool inputIsValid = startupData.obtainStartingData(private_node_handle);
	
	startupData.read_addr = argv[0];
	startupData.read_addr = startupData.read_addr.substr(0, startupData.read_addr.size()-12);
	
	if (!inputIsValid) {
		ROS_INFO("Configuration invalid.");
	}
		
	ROS_INFO("Startup data processed.");
	
	//ros::Rate loop_rate(25);
	
	ros::NodeHandle nh;
	
	ROS_INFO("About to create shared pointer..");
	boost::shared_ptr < videoslamNode > videoslam_node (new videoslamNode (nh, startupData));
	ROS_INFO("Shared pointer created.");
	
	globalNodePtr = &videoslam_node;

	signal(SIGINT, mySigintHandler);
	
	ROS_INFO("Node configured.");
	
	ros::AsyncSpinner spinner(2);
	
	spinner.start();
	
	while (!wantsToShutdown) { };
	
	mySigintHandler(1);
	//ros::waitForShutdown();
	ros::shutdown();
	
	ROS_INFO("Exiting.");
	
	return 0;
	
}

bool videoslamData::obtainStartingData(ros::NodeHandle& nh) {
	
	// Reads all launch/parameter-server settings for the node.
	// Returns false when mandatory configuration (the extrinsic calibration
	// file) is missing; previously this function unconditionally returned
	// true, so the validity check in main() could never fire.
	
	bool configurationValid = true;
	
	nh.param<std::string>("extrinsicsFile", extrinsicsFile, "extrinsicsFile");
	
	if (extrinsicsFile == "extrinsicsFile") {
		// Default value still present => the parameter was never supplied
		ROS_ERROR("No extrinsics specified! Please provide extrinsic calibration data.");
		configurationValid = false;
	} else {
		ROS_INFO("Extrinsics at (%s) selected.", extrinsicsFile.c_str());
	}
	
	nh.param<bool>("writePoses", writePoses, false);
	nh.param<bool>("clearTriangulations", clearTriangulations, false);
	
	// Timing thresholds (seconds); -1.0 disables the termination/restart gating
	nh.param<double>("maxPoseDelay", maxPoseDelay, 1.0);
	nh.param<double>("terminationTime", terminationTime, -1.0);
	nh.param<double>("restartTime", restartTime, -1.0);
	
	nh.param<int>("evaluateParameters", evaluateParameters, 0);
	
	// Source topics for feature tracks and mapper poses
	nh.param<std::string>("flowSource", flowSource, "/thermalvis/flow/");
	nh.param<std::string>("mapperSource", mapperSource, "/thermalvis/mapper"); // /pose
	
	nh.param<bool>("publishPoints", publishPoints, false);
	nh.param<bool>("publishKeyframes", publishKeyframes, false);
	
	return configurationValid;
}

void mySigintHandler(int sig)
{
	// Signal the polling loop in main() (and any watching callbacks) to wind
	// down; the actual ros::shutdown() happens once the flag is observed.
	(void)sig; // any caught signal triggers the same shutdown path
	wantsToShutdown = true;
	ROS_WARN("Requested shutdown... terminating feeds...");
}

bool videoslamNode::checkConnectivity(unsigned int seq) {
	
	// Determines whether frame <seq> is sufficiently connected to the existing
	// map: counts how many feature tracks with at least two recorded locations
	// project into this frame, and requires a minimum of 8 such projections.
	
	unsigned int projectionCount = 0;     // projections of multi-location tracks into frame <seq>
	unsigned int multiLocationTracks = 0; // tracks observed in two or more frames
	
	for (unsigned int trackIdx = 0; trackIdx < featureTrackVector.size(); trackIdx++) {
		
		// Tracks seen only once carry no connectivity information
		if (featureTrackVector.at(trackIdx).locations.size() < 2) continue;
		
		multiLocationTracks++;
		
		for (unsigned int locIdx = 0; locIdx < featureTrackVector.at(trackIdx).locations.size(); locIdx++) {
			if (featureTrackVector.at(trackIdx).locations.at(locIdx).imageIndex == seq) {
				projectionCount++;
			}
		}
	}
	
	if (configData.verboseMode) { ROS_INFO("Projections for current tracks #(%d) = (%d, %d)", seq, projectionCount, multiLocationTracks); }
	
	return (projectionCount >= 8);
	
}

bool videoslamNode::updateKeyframePoses(const geometry_msgs::PoseStamped& pose_msg, bool fromICP) {
	
	// Maintains the bounded keyframe buffer (keyframePoses / keyframeTypes,
	// capacity configData.adjustmentFrames) used for bundle adjustment.
	// <fromICP> marks whether this pose came from an ICP-based estimate (true)
	// or a video-only estimate (false); the flag is stored per keyframe.
	// Returns true when <pose_msg> was appended as a keyframe, false otherwise.
	
	if (configData.verboseMode) { ROS_INFO("Entered <%s>", __FUNCTION__); }
	
	// First, some kind of check to make sure that the tracks/connections for this frame are decent..
	
	if ( (storedPosesCount > 0) && !checkConnectivity(pose_msg.header.seq) ) {
		if (configData.verboseMode) { ROS_WARN("Frame (%d) has insufficient connectivity...", pose_msg.header.seq); }
		return false;
	}
	
	if (configData.verboseMode) { ROS_WARN("Considering tracks frame (%d) as a keyframe (has sufficient connectivity)", pose_msg.header.seq); }
	
	if (fromICP) {
		
		// ICP-based estimates supersede "floating" video-only keyframes:
		// compact the buffer by removing every entry whose type flag is false.
		unsigned int floatersCleared = 0;
		
		for (unsigned int iii = 0; iii < storedPosesCount; iii++) {
			
			if (!keyframeTypes[iii]) {
			
				// Shift the tails of both parallel arrays down to overwrite slot iii
				for (unsigned int jjj = iii; jjj < storedPosesCount-1; jjj++) {
					keyframePoses[jjj] = keyframePoses[jjj+1];
					keyframeTypes[jjj] = keyframeTypes[jjj+1];
				}
				
				storedPosesCount--;
				// Re-test the entry just shifted into slot iii. When iii == 0 the
				// unsigned decrement wraps, but the loop increment undoes it
				// (well-defined modular arithmetic), so the net index is unchanged.
				iii--;
				floatersCleared++;
				
			}
			
			
		}
		
		if (configData.verboseMode && (floatersCleared > 0)) { ROS_INFO("Cleared (%d) poses (%d remain) because now have received ICP-based estimate.", floatersCleared, storedPosesCount); }
	}
	
	// Buffer not yet full: append directly.
	if (((int)storedPosesCount) < configData.adjustmentFrames) {
		keyframePoses[storedPosesCount] = pose_msg;
		keyframeTypes[storedPosesCount] = fromICP;
		if (configData.verboseMode) { ROS_INFO("Adding keyframe pose with index (%d)", pose_msg.header.seq); }
		storedPosesCount++;
		return true;
	} /* else if (fromICP) {
		
		for (unsigned int iii = 0; iii < storedPosesCount; iii++) {
			
			if (!keyframeTypes[iii]) {
			
				for (unsigned int jjj = iii; jjj < storedPosesCount-1; jjj++) {
					keyframePoses[jjj] = keyframePoses[jjj+1];
					keyframeTypes[jjj] = keyframeTypes[jjj+1];
				}
				
				storedPosesCount--;
				
				if (configData.verboseMode) { ROS_WARN("Appending most recent frame (%d) to keyframes to replace video-based frame", pose_msg.header.seq); }
				keyframePoses[storedPosesCount] = pose_msg;
				keyframeTypes[storedPosesCount] = fromICP;
				storedPosesCount++;
				return true;
				
			}
		}
	}*/
	
	// Buffer full: first evict keyframes that are too far from the incoming
	// pose. A configData.maxDistance of 0.0 disables this distance cap.
	while (((int)storedPosesCount) >= ((int)configData.adjustmentFrames)) {

		// Find the stored pose at the greatest (squared) distance from pose_msg
		double maxDistance = 0.0;
		unsigned int maxIndex = 0;
		
		for (unsigned int iii = 0; iii < storedPosesCount; iii++) {
			double dist = pow(pose_msg.pose.position.x - keyframePoses[iii].pose.position.x, 2.0) + pow(pose_msg.pose.position.y - keyframePoses[iii].pose.position.y, 2.0) + pow(pose_msg.pose.position.z - keyframePoses[iii].pose.position.z, 2.0);
			
			if (dist >= maxDistance) {
				maxDistance = dist;
				maxIndex = iii;
			}
			
		}
		
		// Convert squared distance to Euclidean distance before the threshold test
		maxDistance = pow(maxDistance, 0.5);
		
		if ( (maxDistance <= configData.maxDistance) && (configData.maxDistance != 0.0) ) {
			break;
		}
		
		if (configData.verboseMode) { ROS_INFO("Removing frame (%d) because of distance (%f) > (%f)", keyframePoses[maxIndex].header.seq, maxDistance, configData.maxDistance); }
		
		// Compact the arrays over the evicted slot
		for (unsigned int iii = maxIndex; iii < storedPosesCount-1; iii++) {
			keyframePoses[iii] = keyframePoses[iii+1];
			keyframeTypes[iii] = keyframeTypes[iii+1];
		}
		
		storedPosesCount--;
		
		// If a slot has opened up, append the new pose immediately
		if (((int)storedPosesCount) == ((int)configData.adjustmentFrames-1)) {
			if (configData.verboseMode) { ROS_WARN("Appending most recent frame (%d) to keyframes in max loop", pose_msg.header.seq); }
			keyframePoses[storedPosesCount] = pose_msg;
			keyframeTypes[storedPosesCount] = fromICP;
			storedPosesCount++;
			return true;
		}
		
	}
	
	// Otherwise, need to find the least informative... (want to include newest, too!)
	
	// Evict the pose whose mean squared distance to its buffer neighbours is
	// smallest, i.e. the one contributing least spatial diversity.
	while (((int)storedPosesCount) >= ((int)configData.adjustmentFrames)) {
		
		double minDistance = 9e99;
		unsigned int minIndex = 0;
		
		for (unsigned int iii = 0; iii < storedPosesCount; iii++) {
		
			double dist = 0.0;
			double div = 0.0;
			
			// Distance to the previous neighbour (when one exists)
			if (iii > 0) {
				dist += pow(keyframePoses[iii].pose.position.x - keyframePoses[iii-1].pose.position.x, 2.0) + pow(keyframePoses[iii].pose.position.y - keyframePoses[iii-1].pose.position.y, 2.0) + pow(keyframePoses[iii].pose.position.z - keyframePoses[iii-1].pose.position.z, 2.0);
				div += 1.0;
			} 
			
			// Distance to the next neighbour (when one exists)
			if(iii < (storedPosesCount-1)) {
				dist += pow(keyframePoses[iii].pose.position.x - keyframePoses[iii+1].pose.position.x, 2.0) + pow(keyframePoses[iii].pose.position.y - keyframePoses[iii+1].pose.position.y, 2.0) + pow(keyframePoses[iii].pose.position.z - keyframePoses[iii+1].pose.position.z, 2.0);
				div += 1.0;
			}
			
			// NOTE(review): when div == 0.0 (single stored pose) the division
			// below still executes and yields NaN; only an error is logged.
			// Confirm storedPosesCount >= 2 is guaranteed on this path.
			if (div == 0.0) {
				ROS_ERROR("Trying to update keyframe list but only one seems to exist!");
			}
			
			dist /= div;
			
			if (dist <= minDistance) {
				minDistance = dist;
				minIndex = iii;
			}
			
		}
		
		minDistance = pow(minDistance, 0.5);
		
		if (configData.verboseMode) { ROS_WARN("Removing %dth frame (%d) which is least distant (%f)", minIndex, keyframePoses[minIndex].header.seq, minDistance); }
		
		for (unsigned int iii = minIndex; iii < storedPosesCount-1; iii++) {
			keyframePoses[iii] = keyframePoses[iii+1];
			keyframeTypes[iii] = keyframeTypes[iii+1];
		}
		
		storedPosesCount--;
		
	}
	
	// Space guaranteed at this point: append the new keyframe.
	if (configData.verboseMode) { ROS_WARN("Adding keyframe (%d) at end of function", pose_msg.header.seq); }
	keyframePoses[storedPosesCount] = pose_msg;
	keyframeTypes[storedPosesCount] = fromICP;
	storedPosesCount++;
	
	return true;
	
	
}

void videoslamNode::trimFeatureTrackVector() {
	
	// Prunes the feature-track store: a projection is discarded when it belongs
	// to a frame older than the newest "safe" index AND that frame is not one
	// of the stored keyframe poses. Tracks left with no projections are removed
	// entirely.
	
	const int preservationBuffer = 0; // extra recent frames to protect (currently none)
	const int newestSafeIndex = max(lastTestedFrame - preservationBuffer, 0);
	
	for (int trackIdx = 0; trackIdx < ((int)featureTrackVector.size()); trackIdx++) {
		
		unsigned int locIdx = 0;
		
		while (locIdx < featureTrackVector.at(trackIdx).locations.size()) {
			
			bool retainLocation = false;
			
			if (((int)featureTrackVector.at(trackIdx).locations.at(locIdx).imageIndex) >= newestSafeIndex) {
				// Recent enough to keep unconditionally
				retainLocation = true;
			} else {
				// Older projections survive only if their frame is a keyframe
				for (unsigned int poseIdx = 0; poseIdx < storedPosesCount; poseIdx++) {
					if (keyframePoses[poseIdx].header.seq == featureTrackVector.at(trackIdx).locations.at(locIdx).imageIndex) {
						retainLocation = true;
						break;
					} 
				}
			}
			
			if (retainLocation) {
				locIdx++;
			} else {
				// Erase in place; locIdx now refers to the next element
				featureTrackVector.at(trackIdx).locations.erase(featureTrackVector.at(trackIdx).locations.begin() + locIdx);
			}
		}

		// Drop tracks that lost all of their projections
		if (featureTrackVector.at(trackIdx).locations.size() == 0) {
			featureTrackVector.erase(featureTrackVector.begin() + trackIdx);
			trackIdx--;
		}

	}

}

void shiftPose(const geometry_msgs::Pose& pose_src, geometry_msgs::Pose& pose_dst, cv::Mat transformation) {
	
	// Applies <transformation> to <pose_src> and writes the result to
	// <pose_dst>: the source pose is converted to a homogeneous transform,
	// right-multiplied by <transformation>, then decomposed back into a
	// quaternion orientation and a translation.
	
	// Rotation: ROS quaternion -> rotation matrix
	QuaternionDbl srcQuaternion(pose_src.orientation.w, pose_src.orientation.x, pose_src.orientation.y, pose_src.orientation.z);
	
	cv::Mat srcRotation;
	quaternionToMatrix(srcQuaternion, srcRotation);
	
	// Translation: position -> 3x1 column vector
	cv::Mat srcTranslation = cv::Mat::zeros(3, 1, CV_64FC1);
	srcTranslation.at<double>(0,0) = pose_src.position.x;
	srcTranslation.at<double>(1,0) = pose_src.position.y;
	srcTranslation.at<double>(2,0) = pose_src.position.z;
	
	cv::Mat srcTransform;
	composeTransform(srcRotation, srcTranslation, srcTransform);

	// Apply the shift by right-multiplication
	cv::Mat dstTransform = srcTransform * transformation;

	cv::Mat dstRotation, dstTranslation;
	decomposeTransform(dstTransform, dstRotation, dstTranslation);
	
	// Back to ROS pose representation
	QuaternionDbl dstQuaternion;
	matrixToQuaternion(dstRotation, dstQuaternion);
	
	pose_dst.orientation.w = dstQuaternion.w();
	pose_dst.orientation.x = dstQuaternion.x();
	pose_dst.orientation.y = dstQuaternion.y();
	pose_dst.orientation.z = dstQuaternion.z();
	
	pose_dst.position.x = dstTranslation.at<double>(0,0);
	pose_dst.position.y = dstTranslation.at<double>(1,0);
	pose_dst.position.z = dstTranslation.at<double>(2,0);
	
}

bool interpolatePose(const geometry_msgs::Pose& pose1, ros::Time time1, const geometry_msgs::Pose& pose2, ros::Time time2, geometry_msgs::Pose& finalPose, ros::Time time3) {
	
	// Linearly interpolates (or extrapolates) a pose at <time3> from the poses
	// at <time1> and <time2>. Position is lerped; orientation is slerped.
	// Returns false when the timestamps are degenerate or too far apart
	// (MAX_TIME_GAP_FOR_INTERP).
	
	double time_gap = time2.toSec() - time1.toSec();
	double prediction_gap = time3.toSec() - time1.toSec();
	
	// Degenerate input guard: identical timestamps previously produced a
	// division by zero (inf/NaN bias factor propagated into the output pose).
	if (time_gap == 0.0) {
		return false;
	}
	
	// std::fabs rather than abs(): the C integer abs() truncates doubles,
	// which would silently accept out-of-range sub-second gaps.
	if ( (std::fabs(time_gap) > MAX_TIME_GAP_FOR_INTERP) || (std::fabs(prediction_gap) > MAX_TIME_GAP_FOR_INTERP) ) {
		return false;
	}
	
	// biasFactor == 0 at time1, == 1 at time2; outside [0,1] => extrapolation
	double biasFactor = prediction_gap / time_gap;
	
	if (0) { ROS_INFO("times = (%f, %f, %f) : (%f, %f)", time1.toSec(), time2.toSec(), time3.toSec(), time_gap, prediction_gap); }
	if (0) { ROS_INFO("biasFactor = (%f)", biasFactor); }
	
	finalPose.position.x = (1.0 - biasFactor) * pose1.position.x + biasFactor * pose2.position.x;
	finalPose.position.y = (1.0 - biasFactor) * pose1.position.y + biasFactor * pose2.position.y;
	finalPose.position.z = (1.0 - biasFactor) * pose1.position.z + biasFactor * pose2.position.z;
	
	QuaternionDbl quat_1, quat_2, quat_i;
	
	quat_1 = QuaternionDbl(pose1.orientation.w, pose1.orientation.x, pose1.orientation.y, pose1.orientation.z);
	quat_2 = QuaternionDbl(pose2.orientation.w, pose2.orientation.x, pose2.orientation.y, pose2.orientation.z);
	
	// NOTE(review): with Eigen semantics q.slerp(t, other) returns q at t == 0,
	// so this yields quat_2 at biasFactor == 0 while the position lerp yields
	// pose1 — verify QuaternionDbl::slerp argument order; it may need to be
	// quat_1.slerp(biasFactor, quat_2). Left unchanged pending confirmation.
	quat_i = quat_2.slerp(biasFactor, quat_1);
	
	finalPose.orientation.x = quat_i.x();
	finalPose.orientation.y = quat_i.y();
	finalPose.orientation.z = quat_i.z();
	finalPose.orientation.w = quat_i.w();
	
	return true;
}

bool videoslamNode::findNearestPoses(int& index1, int& index2, const ros::Time& targetTime) {
	
	// Searches the circular pose-history buffer for the two poses that best
	// bracket <targetTime>. index1 receives the nearest pose at-or-before the
	// target (positive time difference) and index2 the nearest pose after it.
	// Returns true only when the target is bracketed from both sides; when all
	// candidates lie on one side, the two nearest same-side poses are returned
	// (for extrapolation) and the function returns false.
	
	// min*Ind/two*Ind: best and second-best candidate indices on each side;
	// min*Diff/two*Diff: their corresponding absolute time differences.
	int minPosInd = -1, minNegInd = -1, twoPosInd = -1, twoNegInd = -1;
	
	int posAssigned = 0, negAssigned = 0;
	
	
	double minPosDiff = 9e99, twoPosDiff = 9e99, minNegDiff = 9e99, twoNegDiff = 9e99;
	
	//ROS_WARN("Searching for time (%f) with a total of (%d) poses to check", targetTime.toSec(), poseHistoryCounter);
	
	for (int iii = 0; iii < min(int(poseHistoryCounter), MAX_HISTORY); iii++) {
		
		//ROS_WARN("Testing time (%f) with (%f)", targetTime.toSec(), poseHistoryBuffer[iii % MAX_HISTORY].header.stamp.toSec());
		
		// A zero stamp marks an unused / uninitialized buffer slot
		if (poseHistoryBuffer[iii % MAX_HISTORY].header.stamp.toSec() == 0.0) {
			continue;
		}
		
		// Positive diff: stored pose is OLDER than the target time
		double diff = targetTime.toSec() - poseHistoryBuffer[iii % MAX_HISTORY].header.stamp.toSec();
		
		// NOTE(review): abs() is applied to a double throughout this loop — if
		// this resolves to the C integer abs() the fractional seconds are
		// truncated; confirm std::abs(double) is the overload in scope.
		
		if (diff > 0.0) {
			if (abs(diff) < minPosDiff) {
				// New best "older" candidate; demote the previous best to second
				twoPosDiff = minPosDiff;
				minPosDiff = abs(diff);
				twoPosInd = minPosInd;
				minPosInd = iii;
				//ROS_INFO("updating pos index 1: (%d)", iii);
				posAssigned++;
			} else if (posAssigned < 2) {
				twoPosDiff = abs(diff);
				twoPosInd = iii;
				posAssigned++;
				//ROS_INFO("updating pos index 2: (%d)", iii);
			}
		} else if (diff < 0.0) {
			if (abs(diff) < minNegDiff) {
				// New best "newer" candidate.
				// NOTE(review): unlike the positive branch, negAssigned is NOT
				// incremented here — looks like an asymmetry; confirm intended.
				twoNegDiff = minNegDiff;
				minNegDiff = abs(diff);
				twoNegInd = minNegInd;
				minNegInd = iii;
				//ROS_INFO("updating neg indices");
			} else if (negAssigned < 2) {
				twoNegDiff = abs(diff);
				twoNegInd = iii;
				negAssigned++;
			}
		} else {
			// Exact timestamp match: this pose is the best candidate on BOTH sides
			twoNegDiff = minNegDiff;
			minNegDiff = 0.0;
			twoNegInd = minNegInd;
			minNegInd = iii;
			
			twoPosDiff = minPosDiff;
			minPosDiff = 0.0;
			twoPosInd = minPosInd;
			minPosInd = iii;
			//ROS_INFO("updating ALL indices");
		}
		
	}
	
	// Bracketed: one candidate on each side of the target time
	if ( (minPosInd >= 0) && (minNegInd >= 0) ) { // && (minPosDiff <= twoNegDiff) && (minNegDiff <= twoPosDiff)
		
		index1 = minPosInd;
		index2 = minNegInd;
		
		//ROS_WARN("minPosDiff = (%f), twoPosDiff = (%f), minNegDiff = (%f), twoNegDiff = (%f)", minPosDiff, twoPosDiff, minNegDiff, twoNegDiff);
		
		return true;
		 
	} else {
		
		//ROS_WARN("Entering non-surrounded segment...");
		
		// Fallback: return the two nearest poses from whichever side has them
		// (suitable for extrapolation); the function still reports false.
		if (minPosDiff >= twoNegDiff) {
			
			index1 = minNegInd;
			index2 = twoNegInd;
			//ROS_WARN("Two negatives... (%d, %d)", index1, index2);
		} else if (minNegDiff >= twoPosDiff) {
			
			index1 = twoPosInd;
			index2 = minPosInd;
			//ROS_WARN("Two positives... (%d, %d)", index1, index2);
		} else {
			ROS_ERROR("Shouldn't be in here!!");
		}
		
	}
	
	return false;
	
}

bool videoslamNode::updateLocalPoseEstimates() {
	
	// For every tracks-frame header not yet processed, finds the two buffered
	// mapper poses bracketing its timestamp, interpolates a pose, shifts it by
	// the extrinsic calibration, then registers it as a candidate keyframe and
	// re-triangulates. Returns false (leaving remaining frames for the next
	// call) as soon as one frame cannot be interpolated.
	
	double depthTime = poseHistoryBuffer[(poseHistoryCounter-1) % MAX_HISTORY].header.stamp.toSec();
	
	if (0) { ROS_INFO("%s << depthTime = (%f)", __FUNCTION__, depthTime); }
	
	if (0) { ROS_INFO("%s << depthTime = (%d) (%d) (%d)", __FUNCTION__, frameProcessedCounter, frameHeaderHistoryCounter, MAX_HISTORY); }
	
	// Skip frames that have already been overwritten in the circular buffer
	frameProcessedCounter = max(frameProcessedCounter, frameHeaderHistoryCounter - MAX_HISTORY);
	
	for (unsigned int jjj = frameProcessedCounter; jjj < frameHeaderHistoryCounter; jjj++) { 
	//for (unsigned int jjj = max(frameProcessedCounter, int(frameHeaderHistoryCounter)-MAX_HISTORY+1); jjj < frameHeaderHistoryCounter; jjj++) { 
		
		if (0) { ROS_WARN("Considering with (%d) and frameProcessedCounter (%d)", jjj, frameProcessedCounter); }
		
		// Locate the mapper poses surrounding this frame's timestamp
		int minPosInd = -1, minNegInd = -1;
		if (!findNearestPoses(minPosInd, minNegInd, frameHeaderHistoryBuffer[jjj % MAX_HISTORY].stamp)) {
			if (0) { ROS_WARN("Surrounding depth frames were not able to be found!"); }
			return false;
		}
		
		if (0) { ROS_WARN("Progressing.."); }
		
		geometry_msgs::PoseStamped tracksFrameInterpolatedPose, tracksFrameShiftedPose;
		tracksFrameInterpolatedPose.header = frameHeaderHistoryBuffer[jjj % MAX_HISTORY];
		
		if (0) { ROS_INFO("Considering frame (%d) with time = (%f)", jjj, frameHeaderHistoryBuffer[jjj % MAX_HISTORY].stamp.toSec()); }
		if (0) { ROS_INFO("Best matching indices were (%d) and (%d) : [%f] {%f} [%f]:", minPosInd, minNegInd, poseHistoryBuffer[minPosInd % MAX_HISTORY].header.stamp.toSec(), frameHeaderHistoryBuffer[jjj % MAX_HISTORY].stamp.toSec(), poseHistoryBuffer[minNegInd % MAX_HISTORY].header.stamp.toSec()); }
		
		// Interpolate the mapper pose at the tracks frame's timestamp
		if (!interpolatePose(poseHistoryBuffer[minPosInd % MAX_HISTORY].pose, poseHistoryBuffer[minPosInd % MAX_HISTORY].header.stamp, poseHistoryBuffer[minNegInd % MAX_HISTORY].pose, poseHistoryBuffer[minNegInd % MAX_HISTORY].header.stamp, tracksFrameInterpolatedPose.pose, frameHeaderHistoryBuffer[jjj % MAX_HISTORY].stamp)) {
			if (configData.verboseMode) { ROS_WARN("Pose unable to be interpolated."); }
			return false;
		} else {
			if (configData.verboseMode) { ROS_WARN("Pose was interpolated."); }
		}
		
		// Transform from the mapper frame into the camera frame via extrinsics
		shiftPose(tracksFrameInterpolatedPose.pose, tracksFrameShiftedPose.pose, extrinsicCalib_P);
		
		tracksFrameShiftedPose.header = tracksFrameInterpolatedPose.header;
		
		// Keyframe bookkeeping mutates shared state; guard with the main mutex
		main_mutex.lock();
		bool updated = updateKeyframePoses(tracksFrameShiftedPose, true);
		lastTestedFrame = tracksFrameShiftedPose.header.seq;
		if (configData.publishKeyframes) { drawKeyframes(camera_pub, keyframePoses, storedPosesCount); }
		main_mutex.unlock();
		
		
		if (updated) {
			
			// Re-triangulate (optionally from scratch) now the keyframe set changed
			main_mutex.lock();
			if (configData.clearTriangulations) {
				for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
					featureTrackVector.at(iii).isTriangulated = false;
				}
			}
			triangulatePoints();
			main_mutex.unlock();
			
		}

		if (0) { ROS_INFO("Updated.. (%d)", updated); }
		frameProcessedCounter++;
	
	}
	
	return true;
	
}

void videoslamNode::triangulatePoints() {
	
	// Attempts 3D triangulation of feature tracks against the stored keyframe
	// poses. Requires at least two keyframes; otherwise nothing can be
	// triangulated and the function returns immediately.
	
	if (storedPosesCount < 2) return;
	
	// Frame indices (header sequence numbers) of all stored keyframes
	vector<unsigned int> cameraIndices;
	for (unsigned int poseIdx = 0; poseIdx < storedPosesCount; poseIdx++) {
		cameraIndices.push_back(keyframePoses[poseIdx].header.seq);
	}
	
	// Minimum number of projections a track needs before triangulation is tried
	int minProjections_ = minProjections(configData.pairsForTriangulation);
	
	vector<unsigned int> triangulatableIndices;
	findTriangulatableTracks(featureTrackVector, triangulatableIndices, cameraIndices, minProjections_);
	
	// Diagnostic only: how many tracks meet the projection threshold overall
	int minimumlyProjectedTracks = 0;
	for (unsigned int trackIdx = 0; trackIdx < featureTrackVector.size(); trackIdx++) {
		if (featureTrackVector.at(trackIdx).locations.size() >= minProjections_) { minimumlyProjectedTracks++; }
	}
	
	unsigned int actuallyTriangulated = 0;
	if (triangulatableIndices.size() > 0) {
		actuallyTriangulated = initialTrackTriangulation(featureTrackVector, triangulatableIndices, configData.cameraData, keyframePoses, storedPosesCount, configData.minSeparation, configData.maxSeparation, configData.pairsForTriangulation, configData.maxStandardDev, configData.maxReprojectionDisparity);
	}
	
	if (configData.verboseMode) { ROS_INFO("Keyframe (%d): (%d) points triangulated out of (%d) valid, (%d) potential and (%d) total", keyframePoses[storedPosesCount-1].header.seq, actuallyTriangulated, ((int)triangulatableIndices.size()), minimumlyProjectedTracks, ((int)featureTrackVector.size())); }
	
}

bool videoslamNode::determinePose() {
	
	// Estimates the camera pose for the most recently received tracks frame:
	// (1) interpolate/extrapolate a prior from the buffered mapper poses,
	// (2) refine it by PnP against the triangulated feature tracks,
	// (3) optionally run predictive bundle adjustment over the keyframe set.
	// Returns true only when the bundle-adjustment error is positive and
	// within configData.maxAllowableError.
	
	// TO PUBLISH POSE FOR LATEST RECEIVED TRACKS
		// MAY NEED TO EXTRAPOLATE IF THESE TRACKS POSES HAVE NOT BEEN INTERPOLATED
		
	// if (configData.verboseMode) { ROS_INFO("<%s> entered..", __FUNCTION__); } // , frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].stamp.toSec(), poseHistoryBuffer[(poseHistoryCounter-1) % MAX_HISTORY].header.stamp.toSec());
	
	int idx1, idx2;
	
	// Find mapper poses bracketing the latest tracks frame's timestamp
	bool surrounded = findNearestPoses(idx1, idx2, frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].stamp);
	
	//ROS_INFO("Found indices (%d, %d) : (%f, %f)", idx1, idx2, poseHistoryBuffer[idx1 % MAX_HISTORY].header.stamp.toSec(), poseHistoryBuffer[idx2 % MAX_HISTORY].header.stamp.toSec());
	
	
	// Sanity warnings for looped bag playback (timestamps jumping backwards)
	if (frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].stamp.toSec() < poseHistoryBuffer[(poseHistoryCounter-1) % MAX_HISTORY].header.stamp.toSec()) {
		if (surrounded == false) ROS_WARN("No surrounding poses exist for tracks message, however, its timestamp is old, so assuming bag is being looped..");
	} else {
		if (surrounded == true) ROS_WARN("Surrounding poses exist for tracks message, but more recently received pose has very new timestamp, so assuming bag is being looped.."); 
	}
	
	currentPose.header = frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY];
	currentPose.header.frame_id = "/world";
	
	if (surrounded) {
		// Bracketed: interpolate the prior between the two surrounding poses
		interpolatePose(poseHistoryBuffer[idx1 % MAX_HISTORY].pose, poseHistoryBuffer[idx1 % MAX_HISTORY].header.stamp, poseHistoryBuffer[idx2 % MAX_HISTORY].pose, poseHistoryBuffer[idx2 % MAX_HISTORY].header.stamp, currentPose.pose, frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].stamp);	
	} else {
		if (0) { ROS_WARN("No appropriate surrounding frames were found.."); }
		
		if ( (idx1 < 0) || (idx2 < 0) ) {
			// NOTE(review): cin.get() blocks this callback waiting for console
			// input when verboseMode is set — confirm this is intended for
			// interactive debugging only.
			if ((configData.verboseMode) && (poseHistoryCounter > 1)) { 
				ROS_ERROR("No pair of indices were able to be found to estimate a position!"); 
				cin.get();
			}
			
			// 9e99 acts as an "uninitialized pose" sentinel: no previous
			// estimate exists to fall back on
			if (currentPose.pose.position.x == 9e99) {
				return false;
			} 
			//
		} else {
			// Same-side pair: extrapolate the prior from the two nearest poses
			if (configData.verboseMode) { ROS_WARN("Estimating a position, but not based on surrounding poses..."); }
			interpolatePose(poseHistoryBuffer[idx1 % MAX_HISTORY].pose, poseHistoryBuffer[idx1 % MAX_HISTORY].header.stamp, poseHistoryBuffer[idx2 % MAX_HISTORY].pose, poseHistoryBuffer[idx2 % MAX_HISTORY].header.stamp, currentPose.pose, frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].stamp);	
		}
		
		// Just use the previous "currentPose" as the best estimate for the current one..
			
	}
	
	// Keep the pre-refinement prior for the drift statistic computed below
	savedPose = currentPose;
	
	// Convert: currentPose.pose into usable format
	cv::Mat estimatedPose, t, R, c; 
	Eigen::Quaternion<double> Q;
	
	convertPoseFormat(currentPose.pose, t, Q);
	quaternionToMatrix(Q, R);
	composeTransform(R, t, c);
	
	
	// ROS_ERROR("About to attempt to estimate pose for index (%d) using PnP", frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].seq);
	
	// int minProjections_ = minProjections(configData.pairsForTriangulation);
	
	framesProcessed++;
	
	// -1.0 marks "not computed" for the PnP statistics
	pnpError = -1.0;
	pnpInlierProp = -1.0;
	
	// PnP refinement against the shared track store (guarded by main mutex)
	main_mutex.lock();
	//ROS_WARN("about to <estimatePoseFromKnownPoints> with seq = (%d), lastTestedFrame = (%d), latestHandledTracks = (%d)", frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].seq, lastTestedFrame, latestHandledTracks);
	bool res = estimatePoseFromKnownPoints(estimatedPose, configData.cameraData, featureTrackVector, frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].seq, c, 1, configData.pnpIterations, configData.maxReprojectionDisparity, configData.inliersPercentage, &pnpError, &pnpInlierProp, configData.debugTriangulation);
	main_mutex.unlock();
	
	// Sentinel meaning "PnP-only" until bundle adjustment overwrites it
	predictiveError = configData.maxAllowableError;
	
	if (res) {
		
		pnpSuccesses++;
		
	
		
		//cout << "guide = " << c << endl;
		//cout << "estimated = " << estimatedPose << endl << endl;
		
		// PnP succeeded: replace the prior with the refined pose
		cv::Mat R_, t_; 
		Eigen::Quaternion<double> Q_;
		
		decomposeTransform(estimatedPose, R_, t_);
		matrixToQuaternion(R_, Q_);
		convertPoseFormat(t_, Q_, currentPose.pose);
		
		pnpPose = currentPose;
		
		if (configData.verboseMode) { ROS_INFO("Pose for index (%d) able to be estimated accurately using PnP... (%f, %f, %f)", frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].seq, currentPose.pose.position.x, currentPose.pose.position.y, currentPose.pose.position.z); }
		
	} else {
		// 9e99 sentinel => "PNP-FAIL" in the logging below
		predictiveError = 9e99;
		if (configData.verboseMode) { ROS_WARN("Pose for index (%d) unable to be estimated accurately using PnP", frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY].seq); }
	}
	
	
	//ROS_ERROR("Base pose #1 (%f, %f, %f) [%f, %f, %f, %f]", poseHistoryBuffer[idx1 % MAX_HISTORY].pose.position.x, poseHistoryBuffer[idx1 % MAX_HISTORY].pose.position.y, poseHistoryBuffer[idx1 % MAX_HISTORY].pose.position.z, poseHistoryBuffer[idx1 % MAX_HISTORY].pose.orientation.w, poseHistoryBuffer[idx1 % MAX_HISTORY].pose.orientation.x, poseHistoryBuffer[idx1 % MAX_HISTORY].pose.orientation.y, poseHistoryBuffer[idx1 % MAX_HISTORY].pose.orientation.z);
	//ROS_ERROR("Base pose #2 (%f, %f, %f) [%f, %f, %f, %f]", poseHistoryBuffer[idx2 % MAX_HISTORY].pose.position.x, poseHistoryBuffer[idx2 % MAX_HISTORY].pose.position.y, poseHistoryBuffer[idx2 % MAX_HISTORY].pose.position.z, poseHistoryBuffer[idx2 % MAX_HISTORY].pose.orientation.w, poseHistoryBuffer[idx2 % MAX_HISTORY].pose.orientation.x, poseHistoryBuffer[idx2 % MAX_HISTORY].pose.orientation.y, poseHistoryBuffer[idx2 % MAX_HISTORY].pose.orientation.z);
	//ROS_ERROR("About to publish pose of (%f, %f, %f) [%f, %f, %f, %f]", currentPose.pose.position.x, currentPose.pose.position.y, currentPose.pose.position.z, currentPose.pose.orientation.w, currentPose.pose.orientation.x, currentPose.pose.orientation.y, currentPose.pose.orientation.z);
	
	
	// -1.0 marks "not computed" for the bundle-adjustment statistics
	bundleTransShift = -1.0;
	bundleRotShift = -1.0;
	
	usedTriangulations = -1;
	pointShift = -1.0;
	
	if ( (configData.adjustmentIterations > 0) && (storedPosesCount >= 2)) {
		//ROS_INFO("currentPose.seq = (%d); pos = (%f, %f, %f), q = (%f, %f, %f, %f)", currentPose.header.seq, currentPose.pose.position.x, currentPose.pose.position.y, currentPose.pose.position.z, currentPose.pose.orientation.w, currentPose.pose.orientation.x, currentPose.pose.orientation.y, currentPose.pose.orientation.z);

		// ROS_INFO("Have (%d) stored poses.", storedPosesCount);
		main_mutex.lock();
		if (configData.verboseMode) { ROS_WARN("About to perform <predictiveBundleAdjustment> with (%d) cameras...", storedPosesCount); }
		
		// Temporarily redirect cout to suppress SBA output when writing poses
		if (configData.writePoses) { std::cout.rdbuf( lStream.rdbuf() ); }
		predictiveError = predictiveBundleAdjustment(configData.cameraData, featureTrackVector, keyframePoses, keyframeTypes, storedPosesCount, currentPose, configData.adjustmentIterations, configData.debugSBA, configData.baMode, configData.baStep, &usedTriangulations, &pointShift);
		if (configData.writePoses) { std::cout.rdbuf( lBufferOld ); }
		main_mutex.unlock();
		
		// Euclidean shift of position / quaternion introduced by bundle adjustment
		bundleTransShift = pow(pow(currentPose.pose.position.x-pnpPose.pose.position.x, 2.0) + pow(currentPose.pose.position.y-pnpPose.pose.position.y, 2.0) + pow(currentPose.pose.position.z-pnpPose.pose.position.z, 2.0), 0.5);
		bundleRotShift = pow(pow(currentPose.pose.orientation.w-pnpPose.pose.orientation.w, 2.0) + pow(currentPose.pose.orientation.x-pnpPose.pose.orientation.x, 2.0) + pow(currentPose.pose.orientation.y-pnpPose.pose.orientation.y, 2.0) + pow(currentPose.pose.orientation.z-pnpPose.pose.orientation.z, 2.0), 0.5);
		
		main_mutex.lock();
		filterNearPoints(featureTrackVector, currentPose.pose.position.x, currentPose.pose.position.y, currentPose.pose.position.z);
		main_mutex.unlock();
		
		
		
		//odometryBundleAdjustment(configData.cameraData, featureTrackVector, keyframePoses, storedPosesCount, configData.adjustmentIterations, configData.debugSBA);

	}
	
	// Decode the sentinel values for logging:
	//   > maxAllowableError : PnP failed; == maxAllowableError : PnP only;
	//   == -1.0 : SBA failed; otherwise a genuine SBA error value.
	if (configData.verboseMode) { 
		if (predictiveError > configData.maxAllowableError) {
			ROS_INFO("predictiveError = ( PNP-FAIL )"); 
		} else if (predictiveError == -1.0) {
			ROS_INFO("predictiveError = ( SBA-FAIL )");

		} else if (predictiveError == configData.maxAllowableError) {
			ROS_INFO("predictiveError = ( PNP-ONLY )"); 
		} else {
			ROS_INFO("predictiveError = ( %8.5f )", predictiveError); 
		}	
			

	}
	
	// Update running averages of BA error and pose drift over successful runs
	if ((predictiveError < configData.maxAllowableError) && (predictiveError > -1.0)) {
		baAverage *= double(baSuccesses);
		dsAverage *= double(baSuccesses);
		baAverage += predictiveError;
		dsAverage += pow(pow(currentPose.pose.position.x-savedPose.pose.position.x, 2.0)+pow(currentPose.pose.position.y-savedPose.pose.position.y, 2.0)+pow(currentPose.pose.position.z-savedPose.pose.position.z, 2.0),0.5);
		baSuccesses++;
		baAverage /= double(baSuccesses);
		dsAverage /= double(baSuccesses);
	}

	if ( ( (predictiveError > 0.0) && (predictiveError <= configData.maxAllowableError) ) ) { //   || (configData.adjustmentIterations == 0)
		
		return true;
		
	} else {
		return false;
	}
	
	
	
	

}

void videoslamNode::publishPoints(ros::Time stamp, unsigned int seq) {
	
	// Publishes all currently-triangulated 3D track points as a PointCloud2
	// message in the "/world" frame, stamped with the supplied time and
	// sequence number.
	
	vector<cv::Point3d> trackPoints;
	getPoints3dFromTracks(featureTrackVector, trackPoints);
	
	pcl::PointCloud<pcl::PointXYZ> cloudToPublish;
	cv::Point3d centroid(0.0, 0.0, 0.0);
	
	// Single pass: build the PCL cloud and accumulate the (diagnostic-only) centroid
	for (unsigned int ptIdx = 0; ptIdx < trackPoints.size(); ptIdx++) {
		
		const cv::Point3d& pt = trackPoints.at(ptIdx);
		
		centroid.x += pt.x / double(trackPoints.size());
		centroid.y += pt.y / double(trackPoints.size());
		centroid.z += pt.z / double(trackPoints.size());
		
		cloudToPublish.push_back(pcl::PointXYZ(pt.x, pt.y, pt.z));
	}
	
	if (0) { ROS_INFO("Cloud midPoint = (%f, %f, %f)", centroid.x, centroid.y, centroid.z); }
	
	if (configData.verboseMode) { ROS_INFO("Publishing (%d) points", ((int)cloudToPublish.size())); }
	
	pcl::toROSMsg(cloudToPublish, pointCloud_message);
	
	pointCloud_message.header.frame_id = "/world";
	pointCloud_message.header.stamp = stamp;
	pointCloud_message.header.seq = seq;
	
	points_pub.publish(pointCloud_message);
}

void videoslamNode::handle_pose(const geometry_msgs::PoseStamped& pose_msg) {
	
	if (wantsToShutdown) return;
	
	// Feed termination / restart window: once poses arrive stamped after
	// <terminationTime>, the feed is suspended until (optionally) a pose
	// stamped at or beyond <restartTime> arrives. A value of -1.0 disables
	// the respective threshold.
	if (configData.terminationTime != -1.0) {
		
		double stampSecs = pose_msg.header.stamp.toSec();
		
		if (stampSecs > configData.terminationTime) {
			
			bool insideTerminationWindow = (configData.restartTime == -1.0) || (stampSecs < configData.restartTime);
			
			if (insideTerminationWindow) {
				
				// Report the transition once (suppressed when writing poses to stdout)
				if (!hasTerminatedFeed && !configData.writePoses) {
					ROS_ERROR("Terminating feed: incoming poses timestamped after (%f)", configData.terminationTime);
				}
				
				hasTerminatedFeed = true;
				return;
			}
			
			if (hasTerminatedFeed && !configData.writePoses) {
				ROS_ERROR("Restarting feed: incoming poses timestamped after (%f)", configData.restartTime);
			}
			
			hasTerminatedFeed = false;
		}
	}
	
	latestReceivedPoseProcessed = false;
	
	// Record the incoming mapper pose in the circular history buffer
	poseHistoryBuffer[poseHistoryCounter % MAX_HISTORY] = pose_msg;
	poseHistoryCounter++;
	
	if (updateLocalPoseEstimates()) {
		if (0) { ROS_INFO("Successfully updated pose estimates..?"); }
	}
	
	latestReceivedPoseProcessed = true;
	
	if (0) { ROS_WARN("frameProcessedCounter = (%d) vs frameHeaderHistoryCounter (%d)", frameProcessedCounter, frameHeaderHistoryCounter); }
}


void videoslamNode::integrateNewTrackMessage(const thermalvis::feature_tracksConstPtr& msg) {
	
	// Merges an incoming feature_tracks message into <featureTrackVector>:
	// unknown track indices get a fresh track inserted at the position that
	// keeps the vector ordered by trackIndex, and each projection is appended
	// only if no location for the same camera/image index exists yet.
	
	featureTrack emptyTrack;
	
	unsigned int newTrackCount = 0, newProjectionCount = 0;
	
	for (unsigned int idx = 0; idx < msg->projection_count; idx++) {
		
		int trackPos = findTrackPosition(featureTrackVector, msg->indices[idx]);
		
		if (trackPos == -1) {
			
			// Track not present yet: linear scan for the first element whose
			// trackIndex is not below the new one (insertion keeps ordering).
			unsigned int insertPos = 0;
			while ((insertPos < featureTrackVector.size()) && (featureTrackVector.at(insertPos).trackIndex < msg->indices[idx])) {
				insertPos++;
			}
			
			emptyTrack.trackIndex = msg->indices[idx];
			featureTrackVector.insert(featureTrackVector.begin()+insertPos, emptyTrack);
			newTrackCount++;
			
			trackPos = insertPos;
		}
		
		// Skip projections already recorded for this camera/image index
		bool projectionExists = false;
		for (unsigned int locIdx = 0; locIdx < featureTrackVector.at(trackPos).locations.size(); locIdx++) {
			if (featureTrackVector.at(trackPos).locations.at(locIdx).imageIndex == ((int) msg->cameras.at(idx))) {
				projectionExists = true;
				break;
			}
		}
		
		if (!projectionExists) {
			cv::Point2f projection(((float) msg->projections_x.at(idx)), ((float) msg->projections_y.at(idx)));
			indexedFeature newFeature(msg->cameras.at(idx), projection);
			
			featureTrackVector.at(trackPos).addFeature(newFeature);
			newProjectionCount++;
		}
	}
	
	if (0) { ROS_INFO("Integrating (%d), Added (%d) new tracks and (%d) new projections", msg->header.seq, newTrackCount, newProjectionCount); }
	
	// Optional on-screen visualization of the track matrix
	if (configData.debugMode) {
		cv::Mat trackMatrix;
		
		if (createTrackMatrix(featureTrackVector, trackMatrix)) {
			cv::imshow("trackMatrix", trackMatrix);
			cv::waitKey(1);
		}
	}
}

void videoslamNode::handle_tracks(const thermalvis::feature_tracksConstPtr& msg) {
	
	// Main processing callback: integrates new feature tracks, estimates the
	// current pose, publishes results, and promotes the estimate to a
	// keyframe when the external (mapper) pose source has gone quiet.
	
	if (wantsToShutdown) return;
	
	// Parameter-evaluation mode: once the configured frame count is reached,
	// print a performance summary and shut the node down.
	if ((configData.evaluateParameters > 0) && (msg->header.seq > configData.evaluateParameters)) {
		ROS_ERROR("Reached evaluation frame (%d/%d), shutting down...", msg->header.seq, configData.evaluateParameters);
		ROS_WARN("Summary(1): (%d, %d, %d, %d)", framesArrived, framesProcessed, pnpSuccesses, baSuccesses);
		
		// Guard against division by zero: if no frames were processed the
		// success ratios are reported as 0 rather than nan.
		double processedFrames = (framesProcessed > 0) ? double(framesProcessed) : 1.0;
		ROS_WARN("Summary(2): (%f, %f, %f, %f)", double(pnpSuccesses)/processedFrames, double(baSuccesses)/processedFrames, baAverage, dsAverage);
		
		wantsToShutdown = true;
		mySigintHandler(1);
		
		return;
	}
	
	// Camera intrinsics must be known before tracks can be used
	if (!infoProcessed) {
		return;
	}
	
	framesArrived++;
	
	if (msg->indices.empty()) {
		ROS_WARN("No tracks in message.");
		return;
	}
	
	latestTracksTime = msg->header.stamp.toSec();
	
	if (configData.verboseMode) { ROS_WARN("Handling new tracks seq (%d) at (%f)", msg->header.seq, latestTracksTime); }
	
	// Record the frame header in the circular history buffer
	frameHeaderHistoryBuffer[frameHeaderHistoryCounter % MAX_HISTORY] = msg->header;
	frameHeaderHistoryCounter++;
	
	main_mutex.lock();
	integrateNewTrackMessage(msg);
	main_mutex.unlock();
	
	main_mutex.lock();
	if (configData.trimFeatureTracks) trimFeatureTrackVector();
	main_mutex.unlock();
	
	if (determinePose()) {
		
		publishPose();
		
		if (configData.publishPoints) { publishPoints(currentPose.header.stamp, currentPose.header.seq); }
		
		// Time since the most recent externally-received (mapper) pose; if
		// the external source has been quiet for too long, treat this
		// video-based estimate as a keyframe candidate.
		double elapsed = latestTracksTime - poseHistoryBuffer[(poseHistoryCounter-1) % MAX_HISTORY].header.stamp.toSec();
		
		if (elapsed > configData.maxPoseDelay) {
			
			if (configData.verboseMode) { ROS_INFO("Considering video-based pose estimate as a keyframe..."); }
			
			main_mutex.lock();
			bool updated = updateKeyframePoses(currentPose, false);
			lastTestedFrame = currentPose.header.seq;
			if (configData.publishKeyframes) { drawKeyframes(camera_pub, keyframePoses, storedPosesCount); }
			main_mutex.unlock();
			
			if (updated) {
				
				main_mutex.lock();
				// Optionally invalidate all triangulations so the new
				// keyframe set re-triangulates every track from scratch
				if (configData.clearTriangulations) {
					for (unsigned int iii = 0; iii < featureTrackVector.size(); iii++) {
						featureTrackVector.at(iii).isTriangulated = false;
					}
				}
				triangulatePoints();
				main_mutex.unlock();
				
			}
		}
	}
}

void videoslamNode::publishPose() {
	
	// Publishes the current pose estimate, plus a confidence message holding
	// the quality metrics of the most recent solve.
	
	thermalvis::pose_confidence confidence_msg;
	confidence_msg.source = configData.flowSource;
	confidence_msg.header = frameHeaderHistoryBuffer[(frameHeaderHistoryCounter-1) % MAX_HISTORY];
	
	pose_pub.publish(currentPose);
	if (configData.verboseMode) { ROS_INFO("Publishing currentPose (%d) of (%f, %f, %f)", currentPose.header.seq, currentPose.pose.position.x, currentPose.pose.position.y, currentPose.pose.position.z); }
	
	confidence_msg.metric_count = 7;
	
	// Metric 1: SBA convergence error. A value pinned at the configured
	// maximum means SBA failed (PnP estimate only), reported as -1.0.
	confidence_msg.scores.push_back((predictiveError == configData.maxAllowableError) ? -1.0 : predictiveError);
	
	// Metrics 2-7: bundle-adjustment shifts, triangulation count,
	// point shift, and PnP quality measures
	confidence_msg.scores.push_back(bundleTransShift);
	confidence_msg.scores.push_back(bundleRotShift);
	confidence_msg.scores.push_back(float(usedTriangulations));
	confidence_msg.scores.push_back(pointShift);
	confidence_msg.scores.push_back(pnpError);
	confidence_msg.scores.push_back(pnpInlierProp);
	
	confidence_pub.publish(confidence_msg);
	
	// When enabled, dump the pose and all confidence metrics to stdout
	if (configData.writePoses) {
		
		printf("%f %d %8.5f %8.5f %8.5f %8.5f %8.5f %8.5f %8.5f", currentPose.header.stamp.toSec(), currentPose.header.seq, currentPose.pose.position.x, currentPose.pose.position.y, currentPose.pose.position.z, currentPose.pose.orientation.w, currentPose.pose.orientation.x, currentPose.pose.orientation.y, currentPose.pose.orientation.z);
		
		for (unsigned int metricIdx = 0; metricIdx < confidence_msg.metric_count; metricIdx++) {
			printf(" %8.5f", confidence_msg.scores.at(metricIdx));
		}
		
		printf("\n");
	}
}

void videoslamNode::main_loop(const ros::TimerEvent& event) {
	
	// Periodic timer callback (fired every 0.05s by the timer created in the
	// constructor). Currently a no-op: all work is driven by the tracks,
	// pose, and camera-info subscriber callbacks instead.
	(void) event;
}

void videoslamNode::handle_info(const sensor_msgs::CameraInfoConstPtr& info_msg) {
	
	// Reads intrinsic calibration (K, distortion model/coefficients) from the
	// first camera_info message received; later messages are ignored once
	// <infoProcessed> is set.
	
	if (wantsToShutdown) return;
	
	if (!infoProcessed) {
		
		ROS_INFO("Handling camera info...");
		
		try	{
			
			configData.cameraData.K = cv::Mat::eye(3, 3, CV_64FC1);
			
			// Copy the row-major 3x3 K array into the cv::Mat
			for (unsigned int mmm = 0; mmm < 3; mmm++) {
				for (unsigned int nnn = 0; nnn < 3; nnn++) {
					configData.cameraData.K.at<double>(mmm, nnn) = info_msg->K[3*mmm + nnn];
				}
			}
			
			cout << configData.cameraData.K << endl;
			
			configData.cameraData.K_inv = configData.cameraData.K.inv();
			
			configData.cameraData.cameraSize.width = info_msg->width;
			configData.cameraData.cameraSize.height = info_msg->height;
		
			// Coefficient count depends on the distortion model; anything
			// other than "rational_polynomial" is treated as "plumb_bob"
			unsigned int maxDistortionIndex;
			
			if (info_msg->distortion_model == "rational_polynomial") {
				maxDistortionIndex = 8;
			} else {
				maxDistortionIndex = 5;
			}
			
			configData.cameraData.distCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
			configData.cameraData.blankCoeffs = cv::Mat::zeros(1, maxDistortionIndex, CV_64FC1);
			
			// BUGFIX: D is a variable-length vector and may hold fewer
			// entries than the model implies (it can even be empty for an
			// uncalibrated camera). Clamp the copy to D.size() to avoid
			// reading out of bounds; missing coefficients remain zero.
			unsigned int availableCoeffs = (info_msg->D.size() < maxDistortionIndex) ? ((unsigned int) info_msg->D.size()) : maxDistortionIndex;
			
			for (unsigned int iii = 0; iii < availableCoeffs; iii++) {
				configData.cameraData.distCoeffs.at<double>(0, iii) = info_msg->D[iii];
			}
			
			cout << "Distortion: " << configData.cameraData.distCoeffs << endl;
			
			configData.cameraData.newCamMat = cv::Mat::zeros(3, 3, CV_64FC1);
			
			cv::Rect* validPixROI = 0;
			
			double alpha = 0.00;
			bool centerPrincipalPoint = true;
			
			configData.cameraData.newCamMat = getOptimalNewCameraMatrix(configData.cameraData.K, configData.cameraData.distCoeffs, configData.cameraData.cameraSize, alpha, configData.cameraData.cameraSize, validPixROI, centerPrincipalPoint);
			
			cout << configData.cameraData.newCamMat << endl;
			
			infoProcessed = true;
			
		} catch (...) {
			// Leave infoProcessed false so a later message can retry
			ROS_ERROR("Some failure in reading in the camera parameters...");
		}
		
		ROS_INFO("Camera information processed.");
		
	}
	
}

void videoslamNode::serverCallback(thermalvis::videoslamConfig &config, uint32_t level) {
	
	// Mirrors the dynamic_reconfigure settings into the node's config struct.
	
	// Timing / latency
	configData.cameraLatency = config.cameraLatency;
	configData.dataTimeout = config.dataTimeout;
	
	// Bundle adjustment
	configData.adjustmentFrames = config.adjustmentFrames;
	configData.adjustmentIterations = config.adjustmentIterations;
	configData.baMode = config.baMode;
	configData.baStep = config.baStep;
	configData.maxAllowableError = config.maxAllowableError;
	configData.debugSBA = config.debugSBA;
	
	// Triangulation
	configData.pairsForTriangulation = config.pairsForTriangulation;
	configData.maxDistance = config.maxDistance;
	configData.minSeparation = config.minSeparation;
	configData.maxSeparation = config.maxSeparation;
	configData.maxStandardDev = config.maxStandardDev;
	configData.maxReprojectionDisparity = config.maxReprojectionDisparity;
	configData.debugTriangulation = config.debugTriangulation;
	
	// PnP
	configData.pnpIterations = config.pnpIterations;
	configData.inliersPercentage = config.inliersPercentage;
	
	// General
	configData.debugMode = config.debugMode;
	configData.verboseMode = config.verboseMode;
	configData.trimFeatureTracks = config.trimFeatureTracks;
}

videoslamNode::videoslamNode(ros::NodeHandle& nh, videoslamData startupData) {
	
	// Constructor: copies startup configuration, loads optional extrinsic
	// calibration, initializes state/counters, and wires up all publishers,
	// subscribers, the periodic timer, and the dynamic_reconfigure server.
	
	ROS_INFO("X..");
	
	configData = startupData;
	
	if (configData.verboseMode) { ROS_INFO("Initializing node.."); }
	
	sprintf(nodeName, "%s", ros::this_node::getName().c_str());
	
	if (configData.writePoses) {
		// Pose output goes to stdout, so cout chatter from the SBA libraries
		// must be diverted; the original buffer is saved (lBufferOld) so it
		// can be restored around the SBA calls.
		// http://stackoverflow.com/questions/8478851/suppressing-cout-output-with-in-a-function
		ROS_WARN("writePoses == true, therefore suppressing cout messages from SBA libraries..");
		lStream.open( "garbage.txt" );
		lBufferOld = std::cout.rdbuf();
	}
	
	// Evaluation statistics
	framesArrived = 0;
	framesProcessed = 0;
	pnpSuccesses = 0;
	baSuccesses = 0;
	baAverage = 0.0;
	dsAverage = 0.0;
	
	hasTerminatedFeed = false;
	
	latestReceivedPoseProcessed = false;
	
	// Sentinel value: marks the pose as not yet initialized
	currentPose.pose.position.x = 9e99;
	
	// Default extrinsics: identity rotation, zero translation
	extrinsicCalib_R = cv::Mat::eye(3,3,CV_64FC1);
	extrinsicCalib_T = cv::Mat::zeros(3,1,CV_64FC1);
	
	if (configData.extrinsicsFile != "extrinsicsFile") {
		
		if (configData.verboseMode) { ROS_INFO("Reading extrinsics file (%s)...", configData.extrinsicsFile.c_str()); }
		
		try {
			cv::FileStorage fs(configData.extrinsicsFile, cv::FileStorage::READ);
			fs["R1"] >> extrinsicCalib_R;
			fs["T1"] >> extrinsicCalib_T;
			fs.release();

			ROS_INFO("Extrinsics data read.");

			if (extrinsicCalib_R.empty()){
				ROS_ERROR("Extrinsics file (%s) invalid! Please check path and filecontent...", configData.extrinsicsFile.c_str());
				wantsToShutdown = true;
			}

		} catch (...) {
			// BUGFIX: previously 'catch (int e)', which cannot catch the
			// cv::Exception that cv::FileStorage throws on failure — the
			// exception would escape the constructor instead of setting
			// wantsToShutdown.
			ROS_ERROR("Some failure in reading in the extrinsics (%s).", configData.extrinsicsFile.c_str());
			wantsToShutdown = true;
		}
	} else {
		if (configData.verboseMode) { ROS_INFO("No extrinsics provided."); }
	}
	
	//matrixToQuaternion(extrinsicCalib_R, extrinsicCalib_quat);
	composeTransform(extrinsicCalib_R, extrinsicCalib_T, extrinsicCalib_P);
	
	if (configData.verboseMode) { ROS_INFO("Transforms complete."); }
	
	// To Apply
	// actualThermal_quat = extrinsicCalib_quat * virtualDepth_quat;
	// actualThermal_P = virtualDepth_P * extrinsicCalib_P;
	
	// Circular-buffer counters
	frameProcessedCounter = 0;
	frameHeaderHistoryCounter = 0;
	poseHistoryCounter = 0;
	
	decimation = DEFAULT_DRAWGRAPH_DECIMATION;
	bicolor = DEFAULT_DRAWGRAPH_BICOLOR;
	
	distanceTravelled = 0.0;
	
	storedPosesCount = 0;
	
	sys.verbose = 0;
		
	eye4 = cv::Mat::eye(4, 4, CV_64FC1);
	
	lastTestedFrame = -1;
	
	if (configData.verboseMode) { ROS_INFO("Initializing topics.."); }
	
	// Visualization publishers: keyframe camera markers and point cloud
	char camera_pub_name[256];
	sprintf(camera_pub_name, "/thermalvis%s/cameras", nodeName);
	camera_pub = nh.advertise<visualization_msgs::Marker>( camera_pub_name, 1 );
	
	char points_pub_name[256];
	sprintf(points_pub_name, "/thermalvis%s/points", nodeName);
	points_pub = nh.advertise<sensor_msgs::PointCloud2>(points_pub_name, 1);
	
	char confidence_pub_name[256];
	sprintf(confidence_pub_name, "/thermalvis%s/confidence", nodeName);
	
	ROS_INFO("Publishing confidence data at (%s)", confidence_pub_name);
	ros::AdvertiseOptions op = ros::AdvertiseOptions::create<thermalvis::pose_confidence>(confidence_pub_name, 1, &connected, &disconnected, ros::VoidPtr(), NULL);
	op.has_header = false;
	confidence_pub = nh.advertise(op);
	
	// Publish an (empty) cloud immediately so the topic appears on startup
	publishPoints(ros::Time::now(), 0);
	
	ROS_INFO("Setting up node.");
	
	infoProcessed = false;
	
	// Subscriptions: feature tracks and camera info from the flow source,
	// pose estimates from the mapper source
	std::string topic_tracks = configData.flowSource + "tracks";
	ROS_INFO("Connecting to tracks topic. %s", topic_tracks.c_str());
	tracks_sub = nh.subscribe<thermalvis::feature_tracks>(topic_tracks, 1, &videoslamNode::handle_tracks, this);
	
	std::string topic_info = configData.flowSource + "camera_info";
	ROS_INFO("Connecting to camera_info. %s", topic_info.c_str());
	info_sub = nh.subscribe<sensor_msgs::CameraInfo>(topic_info, 1, &videoslamNode::handle_info, this);
	
	std::string topic_pose = configData.mapperSource + "pose";
	ROS_INFO("Connecting to pose topic. %s", topic_pose.c_str());
	pose_sub = nh.subscribe(topic_pose, 1, &videoslamNode::handle_pose, this);
	
	ROS_INFO("Node setup.");
	
	sprintf(pose_pub_name, "/thermalvis%s/pose", nodeName);
	ROS_INFO("Configuring pose topic. %s", pose_pub_name);
	
	currentPose.header.frame_id = "/world";
	
	ros::AdvertiseOptions op1 = ros::AdvertiseOptions::create<geometry_msgs::PoseStamped>(pose_pub_name, 1, &connected, &disconnected, ros::VoidPtr(), NULL);
	op1.has_header = false;
	pose_pub = nh.advertise(op1);
	
	timer = nh.createTimer(ros::Duration(0.05), &videoslamNode::main_loop, this);
	
	ROS_INFO("Establishing server callback...");
	f = boost::bind (&videoslamNode::serverCallback, this, _1, _2);
	server.setCallback (f);
	
}
