/*! \file	depth.cpp
 *  \brief	Definitions for the DEPTH node.
*/

#include "depth.hpp"

int main(int argc, char** argv) {
	
	printf("%s::%s << Node launched.\n", __PROGRAM__, __FUNCTION__);
	
	ros::init(argc, argv, "depth");
	
	// Private ("~") namespace handle: startup parameters are read from here.
	ros::NodeHandle private_node_handle("~");
	
	stereoData startupData;
	
	// argv[0] (executable path) seeds read_addr; the constructor later splices
	// a relative extrinsics path onto it.
	startupData.read_addr = argv[0];
	
	printf("%s::%s << startupData.read_addr = %s\n", __PROGRAM__, __FUNCTION__, startupData.read_addr.c_str());
		
	bool inputIsValid = startupData.obtainStartingData(private_node_handle);
	
	if (!inputIsValid) {
		printf("%s << Configuration invalid.\n", __FUNCTION__);
		// Fix: previously execution fell through and the node started with an
		// invalid configuration (e.g. no video stream), failing later instead.
		return 1;
	}
		
	printf("%s::%s << Startup data processed.\n", __PROGRAM__, __FUNCTION__);
	
	ros::NodeHandle nh;
	
	// Constructs subscribers, publishers, timer and reconfigure server.
	stereoDepthNode stereo_node(nh, startupData);
	
	printf("%s::%s << Node configured.\n", __PROGRAM__, __FUNCTION__);
	
	ros::spin();
	return 0;
	
}

stereoDepthNode::stereoDepthNode(ros::NodeHandle& nh, stereoData startupData) {
	
	// Constructor: wires up the left/right camera subscriptions, the processing
	// timer, the stereo-matcher parameters, the output publishers, and loads the
	// stereo extrinsics (rectification/projection matrices) from file.
	
	alphaChanged = true;	// force an updateMaps() call on the first timed_loop() pass
	
	configData = startupData;
	
	
	printf("%s << Initializing 'stereoDepthNode'...\n", __FUNCTION__);
	
	// look for "left" and "right" cameras in video_stream
	
	std::string topic_info_1 = nh.resolveName(configData.video_stream + "left/camera_info");
	std::string topic_info_2 = nh.resolveName(configData.video_stream + "right/camera_info");
	
	std::string topic_1 = nh.resolveName(configData.video_stream + "left/image_raw");
	std::string topic_2 = nh.resolveName(configData.video_stream + "right/image_raw");
	
	printf("%s << topic_1 = %s; topic_2 = %s\n", __FUNCTION__, topic_1.c_str(), topic_2.c_str());
	
	image_transport::ImageTransport it(nh);
	camera_sub_1 = it.subscribeCamera(topic_1, 1, &stereoDepthNode::handle_image_1, this);
	camera_sub_2 = it.subscribeCamera(topic_2, 1, &stereoDepthNode::handle_image_2, this);
	
	// Drives timed_loop() every 10 ms; each tick processes at most one matched pair.
	timer = nh.createTimer(ros::Duration(0.01), &stereoDepthNode::timed_loop, this);
	
	infoProcessed_1 = false;
	infoProcessed_2 = false;
	
	frameCount_1 = 0;
	frameCount_2 = 0;
	frameCount = 0;
	
	firstPair = true;
	
	firstImProcessed_1 = false;
	firstImProcessed_2 = false;
	firstPairProcessed = false;
	
	// Set up stereo
	
	enum { STEREO_BM=0, STEREO_SGBM=1, STEREO_HH=2 };
	int alg = STEREO_HH; //STEREO_SGBM;   // only influences sgbm.fullDP below
		
	SADWindowSize = 5;
	numberOfDisparities = 0;
	sgbm.preFilterCap = 31; //63;
    sgbm.SADWindowSize = SADWindowSize > 0 ? SADWindowSize : 3;
	
	
    int cn = 1; //img1.channels();   // grayscale input assumed for the P1/P2 penalty scaling
	
	unsigned int width = 640;
    numberOfDisparities = 128;
    numberOfDisparities = numberOfDisparities > 0 ? numberOfDisparities : width/8;   // fallback never taken (128 > 0)
    
    // Block-matcher (BM) configuration. NOTE(review): timed_loop() only invokes
    // sgbm; these bm settings appear retained for experimentation — confirm.
    bm.state->roi1 = roi1;
    bm.state->roi2 = roi2;
    bm.state->preFilterSize = 41;
    bm.state->preFilterCap = 31;
    bm.state->SADWindowSize = SADWindowSize > 0 ? SADWindowSize : 41;
    bm.state->minDisparity = -64;
    bm.state->numberOfDisparities = numberOfDisparities;
    bm.state->textureThreshold = 10;
    bm.state->uniquenessRatio = 15;
    bm.state->speckleWindowSize = 100;
    bm.state->speckleRange = 32;
    bm.state->disp12MaxDiff = 1;
    
    // Semi-global matcher (SGBM) configuration; P1/P2 are the standard
    // smoothness penalties, scaled by channel count and window area.
    sgbm.P1 = 32*cn*sgbm.SADWindowSize*sgbm.SADWindowSize;
    sgbm.P2 = 128*cn*sgbm.SADWindowSize*sgbm.SADWindowSize;
    sgbm.minDisparity = 0;
    sgbm.numberOfDisparities = numberOfDisparities;
    sgbm.uniquenessRatio = 10;
    sgbm.speckleWindowSize = bm.state->speckleWindowSize;
    sgbm.speckleRange = bm.state->speckleRange;
    sgbm.disp12MaxDiff = 1;
    sgbm.fullDP = alg == STEREO_HH;	// full two-pass dynamic programming (HH) mode
    
    
    disparityPublisher = nh.advertise<stereo_msgs::DisparityImage>( "thermalvis/disparity", 1);
    
    printf("%s << read_addr = %s\n", __FUNCTION__, configData.read_addr.c_str());
    printf("%s << extrinsics = %s\n", __FUNCTION__, configData.extrinsics.c_str());

	// If the extrinsics parameter looks like a relative path (starts with ".."),
	// splice it onto read_addr by overwriting its last 5 characters — presumably
	// the "depth" executable name set in main(); TODO confirm.
	// NOTE(review): .at(0)/.at(1) throw std::out_of_range if extrinsics has
	// fewer than 2 characters, and replace() assumes read_addr has >= 5.
	if ((configData.extrinsics.at(0) == '.') && (configData.extrinsics.at(1) == '.')) {
		configData.read_addr.replace(configData.read_addr.end()-5, configData.read_addr.end(), configData.extrinsics);
		//configData.read_addr = configData.read_addr + "/" + configData.extrinsics;
	} else {
		configData.read_addr = configData.extrinsics;
	}
	
	printf("%s << read_addr = %s\n", __FUNCTION__, configData.read_addr.c_str());
	
	// Load stereo calibration: rectification rotations (R_0/R_1), projection
	// matrices (P_0/P_1), fundamental matrix (F), reprojection matrices (Q1/Q2)
	// and the inter-camera translation (T1).
	FileStorage fs(configData.read_addr, FileStorage::READ);
	fs["R_0"] >> R0;
	fs["R_1"] >> R1;
	fs["P_0"] >> P0;
	fs["P_1"] >> P1;
	fs["F"] >> F;
	fs["Q1"] >> Q1;
	fs["Q2"] >> Q2;
	fs["T1"] >> t;
	fs.release();
	
	cout << "R0 = " << R0 << endl;
	cout << "R1 = " << R1 << endl;
	cout << "t = " << t << endl;
	// cout << "P0 = " << P0 << endl;
	// cout << "P1 = " << P1 << endl;
	
	// NOTE(review): assumes the cam_pub_name_* buffers (declared elsewhere) are
	// large enough for these fixed topic names — confirm their sizes.
	sprintf(cam_pub_name_1, "thermalvis/left/image_rect_color");
	sprintf(cam_pub_name_2, "thermalvis/right/image_rect_color");
	
	cam_pub_1 = it.advertiseCamera(cam_pub_name_1, 1);
	cam_pub_2 = it.advertiseCamera(cam_pub_name_2, 1);
	
	// Dynamic-reconfigure server for runtime alpha / autoAlpha adjustment.
	ROS_INFO("Establishing server callback...");
	f = boost::bind (&stereoDepthNode::serverCallback, this, _1, _2);
    server.setCallback (f);

}

bool stereoData::obtainStartingData(ros::NodeHandle& nh) {
	
	// Pulls the node's startup parameters from the given (private) namespace.
	// Returns false only when no video stream has been specified; all other
	// parameters fall back to defaults.
	
	// The sentinel default "video_stream" signals that the parameter was unset.
	nh.param<std::string>("video_stream", video_stream, "video_stream");
	
	if (video_stream == "video_stream") {
		printf("%s << ERROR! No video stream specified.\n", __FUNCTION__);
		return false;
	}
	
	printf("%s << Video stream (%s) selected.\n", __FUNCTION__, video_stream.c_str());
	
	nh.param<std::string>("extrinsics", extrinsics, "extrinsics");
	printf("%s << Extrinsics at %s selected.\n", __FUNCTION__, extrinsics.c_str());
	
	nh.param<bool>("debug_mode", debugMode, false);
	
	if (debugMode) {
		printf("%s << Running in DEBUG mode.\n", __FUNCTION__);
	} else {
		printf("%s << Running in OPTIMIZED mode.\n", __FUNCTION__);
	}
	
	// Rectification alpha: either chosen automatically or taken from "alpha".
	nh.param<bool>("autoAlpha", autoAlpha, false);
	nh.param<double>("alpha", alpha, 0.0);
	
	// Maximum left/right timestamp difference (seconds) accepted when pairing.
	maxTimeDiff = 0.02;
	
	return true;
}

void stereoDepthNode::handle_image_1(const sensor_msgs::ImageConstPtr& msg_ptr, const sensor_msgs::CameraInfoConstPtr& info_msg) {
	
	// Callback for camera 1 (left). On the first successful call it extracts the
	// intrinsics from the CameraInfo message; on every call it buffers new
	// (non-duplicate) frames and makes a non-blocking attempt at pairing.
	
	ROS_INFO("Entered (handle_image_1)...");
	
	// One-time intrinsics extraction. NOTE(review): if parsing throws on every
	// attempt this loop retries forever inside the callback — confirm intended.
	while (!infoProcessed_1) {
		
		printf("%s::%s << Trying to process info (1) ... \n", __PROGRAM__, __FUNCTION__);
		
		try	{
			// Read in camera matrix (row-major 3x3 from CameraInfo::K)
			configData.cameraData1.K = Mat::eye(3, 3, CV_64FC1);
			
			for (unsigned int mmm = 0; mmm < 3; mmm++) {
				for (unsigned int nnn = 0; nnn < 3; nnn++) {
					configData.cameraData1.K.at<double>(mmm, nnn) = info_msg->K[3*mmm + nnn];
				}
			}
			
			cout << configData.cameraData1.K << endl;
			
			configData.cameraData1.cameraSize.width = info_msg->width;
			configData.cameraData1.cameraSize.height = info_msg->height;
			
						
			printf("%s << (%d, %d)\n", __FUNCTION__, configData.cameraData1.cameraSize.width, configData.cameraData1.cameraSize.height);
			
			// Coefficient count depends on the reported distortion model:
			// 8 for rational_polynomial, otherwise 5 (plumb_bob assumed).
			unsigned int maxDistortionIndex;
			if (info_msg->distortion_model == "rational_polynomial") {
				maxDistortionIndex = 8;
			} else {
				maxDistortionIndex = 5;
				if (info_msg->distortion_model != "plumb_bob") {
					ROS_WARN("Unrecognized distortion model (%s)", info_msg->distortion_model.c_str());
				}
			}
			
			configData.cameraData1.distCoeffs = Mat::zeros(1, maxDistortionIndex, CV_64FC1);
			
			for (unsigned int iii = 0; iii < maxDistortionIndex; iii++) {
				configData.cameraData1.distCoeffs.at<double>(0, iii) = info_msg->D[iii];
			}
			
			cout << configData.cameraData1.distCoeffs << endl;
			
			// Placeholder; the real matrix is computed later by updateMaps().
			configData.cameraData1.newCamMat = Mat::zeros(3, 3, CV_64FC1);
					
			
			// bool centerPrincipalPoint = false;
			
			
			
			// Copy the 3x4 projection matrix (row-major from CameraInfo::P).
			configData.cameraData1.P = Mat::zeros(3, 4, CV_64FC1);
			
			for (unsigned int mmm = 0; mmm < 3; mmm++) {
				for (unsigned int nnn = 0; nnn < 4; nnn++) {
					configData.cameraData1.P.at<double>(mmm, nnn) = info_msg->P[4*mmm + nnn];
					//cout << "P[" << (3*mmm + nnn) << "] = " << info_msg->P[3*mmm + nnn] << endl;
				}
			}
			
			cout << "P1 = " << configData.cameraData1.P << endl;
			
			/*
			struct cameraParameters {
				Mat cameraMatrix;
				Mat distCoeffs;
				Mat blankCoeffs;
				Mat newCamMat;
				Mat imageSize;
				Size cameraSize;
				
				Mat K, K_inv;
				
				bool getCameraParameters(string intrinsics);
				
			};
			*/
		
			//ROS_INFO("Initializing map (1)...");
			
			cout << "R0 = " << R0 << endl;
			
			//configData.cameraData1.newCamMat = getOptimalNewCameraMatrix(configData.cameraData1.K, configData.cameraData1.distCoeffs, configData.cameraData1.cameraSize, configData.alpha, configData.cameraData1.cameraSize, &roi1, centerPrincipalPoint);
			
			//cout << configData.cameraData1.newCamMat << endl;
			//initUndistortRectifyMap(configData.cameraData1.K, configData.cameraData1.distCoeffs, R0, configData.cameraData1.newCamMat, configData.cameraData1.cameraSize, CV_32FC1, map11, map12);
			
			//ROS_INFO("Map (1) initialized.");
			
			//assignDebugCameraInfo();
			
			//printf("%s::%s << Debug data assigned.\n", __PROGRAM__, __FUNCTION__);
			
			/*
			msg_debug.width = configData.cameraData1.cameraSize.width; 
			msg_debug.height = configData.cameraData1.cameraSize.height;
			msg_debug.encoding = "bgr8";
			msg_debug.is_bigendian = false;
			msg_debug.step = configData.cameraData1.cameraSize.width*3;
			msg_debug.data.resize(configData.cameraData1.cameraSize.width*configData.cameraData1.cameraSize.height*3);
			*/
			
			//minVal = configData.minLimit;
			//maxVal = configData.maxLimit;
			
			infoProcessed_1 = true;
			
		} catch (...) { // (sensor_msgs::CvBridgeException& e) {
			ROS_ERROR("Some failure in reading in the camera parameters...");
		} 
		
	}
	
	// ROS_WARN("Camera (1) : %f", info_msg->header.stamp.toSec());
	
	cam_info_1 = *info_msg;
	
	// Update P matrix:
	// NOTE(review): dead code — the loop body is commented out (info_msg is const).
	for (unsigned int mmm = 0; mmm < 3; mmm++) {
		for (unsigned int nnn = 0; nnn < 4; nnn++) {
			//info_msg->P[4*mmm + nnn] = P0.at<double>(mmm, nnn); 
		}
	}
	
	if (infoProcessed_1) {

		cv_ptr_1 = cv_bridge::toCvCopy(msg_ptr, enc::BGR8);					// For some reason it reads as BGR, not gray

		//printf("%s << Processing camera 1 image...\n", __FUNCTION__);
		
		Mat newImage(cv_ptr_1->image);
		
		Mat image16, imageDown, greyImX, undistIm;
		cvtColor(newImage, greyImX, CV_RGB2GRAY);
		
		
		//unsigned int lastFrameCount = frameCount_1;
		
		
		// Only buffer genuinely new frames; identical consecutive images are dropped.
		if (!matricesAreEqual(greyImX, lastImage_1)) {
			
			//printf("%s::%s << New image (%d)\n", __PROGRAM__, __FUNCTION__, frameCount_1);
			
			elapsedTime = timeElapsedMS(cycle_timer);
			
			// Ring buffer holding the most recent MAXIMUM_FRAMES_TO_STORE frames.
			greyImX.copyTo(grayImageBuffer_1[frameCount_1 % MAXIMUM_FRAMES_TO_STORE]);

			if (configData.debugMode) {
				
				//imshow("image_1", undistIm);
				//waitKey(1);
				
			}
			
			lastImage_1.copyTo(olderImage_1);
			greyImX.copyTo(lastImage_1);
			
			// Timestamp stored in lock-step with the image ring buffer.
			time_buffer_1[frameCount_1 % MAXIMUM_FRAMES_TO_STORE] = info_msg->header.stamp;
			frameCount_1++;
			
			// Non-blocking pairing attempt; skipped when the other camera's
			// callback currently holds the lock.
			if (_access.try_lock()) {
				updatePairs();
				_access.unlock();
			}
			
			if (!firstImProcessed_1) {
				firstImProcessed_1 = true;
			}
			
			if (firstImProcessed_1 && firstImProcessed_2) {
				firstPairProcessed = true;
			}

		} else {
			//printf("%s::%s << Matrices are equal.\n", __PROGRAM__, __FUNCTION__);
			elapsedTime = timeElapsedMS(cycle_timer, false);
		}
	}

}

void stereoDepthNode::handle_image_2(const sensor_msgs::ImageConstPtr& msg_ptr, const sensor_msgs::CameraInfoConstPtr& info_msg) {
	
	// Callback for camera 2 (right); mirrors handle_image_1. On the first
	// successful call it extracts the intrinsics from CameraInfo; on every call
	// it buffers new (non-duplicate) frames and attempts pairing.
			
	// One-time intrinsics extraction. NOTE(review): if parsing throws on every
	// attempt this loop retries forever inside the callback — confirm intended.
	while (!infoProcessed_2) {
		
		// Fix: log message previously said "(1)" — copy-paste from handle_image_1.
		printf("%s::%s << Trying to process info (2) ... \n", __PROGRAM__, __FUNCTION__);
		
		try	{
			// Read in camera matrix (row-major 3x3 from CameraInfo::K)
			configData.cameraData2.K = Mat::eye(3, 3, CV_64FC1);
			
			for (unsigned int mmm = 0; mmm < 3; mmm++) {
				for (unsigned int nnn = 0; nnn < 3; nnn++) {
					configData.cameraData2.K.at<double>(mmm, nnn) = info_msg->K[3*mmm + nnn];
				}
			}
			
			cout << configData.cameraData2.K << endl;
			
			configData.cameraData2.cameraSize.width = info_msg->width;
			configData.cameraData2.cameraSize.height = info_msg->height;
			
						
			printf("%s << (%d, %d)\n", __FUNCTION__, configData.cameraData2.cameraSize.width, configData.cameraData2.cameraSize.height);
			
			// Coefficient count depends on the reported distortion model:
			// 8 for rational_polynomial, otherwise 5 (plumb_bob assumed).
			unsigned int maxDistortionIndex;
			if (info_msg->distortion_model == "rational_polynomial") {
				maxDistortionIndex = 8;
			} else {
				maxDistortionIndex = 5;
				if (info_msg->distortion_model != "plumb_bob") {
					ROS_WARN("Do not recognize distortion model (%s)", info_msg->distortion_model.c_str());
				}
			}
			
			configData.cameraData2.distCoeffs = Mat::zeros(1, maxDistortionIndex, CV_64FC1);
			
			for (unsigned int iii = 0; iii < maxDistortionIndex; iii++) {
				configData.cameraData2.distCoeffs.at<double>(0, iii) = info_msg->D[iii];
			}
			
			cout << configData.cameraData2.distCoeffs << endl;
			
			// Placeholder; the real matrix is computed later by updateMaps().
			configData.cameraData2.newCamMat = Mat::zeros(3, 3, CV_64FC1);
			
			//Rect* validPixROI = 0;
			
			//bool centerPrincipalPoint = true;
			
			
			
			// Copy the 3x4 projection matrix (row-major from CameraInfo::P).
			configData.cameraData2.P = Mat::zeros(3, 4, CV_64FC1);
			
			for (unsigned int mmm = 0; mmm < 3; mmm++) {
				for (unsigned int nnn = 0; nnn < 4; nnn++) {
					configData.cameraData2.P.at<double>(mmm, nnn) = info_msg->P[4*mmm + nnn];
				}
			}
			
			cout << "P2 = " << configData.cameraData2.P << endl;
			
			//ROS_INFO("Initializing map (2)...");
			
			//configData.cameraData2.newCamMat = getOptimalNewCameraMatrix(configData.cameraData2.K, configData.cameraData2.distCoeffs, configData.cameraData2.cameraSize, configData.alpha, configData.cameraData2.cameraSize, &roi2, centerPrincipalPoint);
			
			//cout << configData.cameraData2.newCamMat << endl;
			
			//initUndistortRectifyMap(configData.cameraData2.K, configData.cameraData2.distCoeffs, R1, configData.cameraData2.newCamMat, configData.cameraData2.cameraSize, CV_32FC1, map21, map22);
			
			//ROS_INFO("Map (2) initialized.");
			
			//assignDebugCameraInfo();
			
			infoProcessed_2 = true;
			
		} catch (...) { // (sensor_msgs::CvBridgeException& e) {
			ROS_ERROR("Some failure in reading in the camera parameters...");
		}
		
	}
	
	// ROS_WARN("Camera (2) : %f", info_msg->header.stamp.toSec());
	
	cam_info_2 = *info_msg;
	
	// NOTE(review): dead code — the loop body is commented out (info_msg is const).
	for (unsigned int mmm = 0; mmm < 3; mmm++) {
		for (unsigned int nnn = 0; nnn < 4; nnn++) {
			//info_msg->P[4*mmm + nnn] = P1.at<double>(mmm, nnn); 
		}
	}
	
	if (infoProcessed_2) {

		cv_ptr_2 = cv_bridge::toCvCopy(msg_ptr, enc::BGR8);					// For some reason it reads as BGR, not gray

		//printf("%s << Processing camera 2 image...\n", __FUNCTION__);

		Mat newImage(cv_ptr_2->image);
		
		Mat image16, imageDown, greyImX, undistIm;
		cvtColor(newImage, greyImX, CV_RGB2GRAY);
		
		//unsigned int lastFrameCount = frameCount_2;
		
		// Only buffer genuinely new frames; identical consecutive images are dropped.
		if (!matricesAreEqual(greyImX, lastImage_2)) {
			
			//printf("%s::%s << New image (%d)\n", __PROGRAM__, __FUNCTION__, frameCount_2);
			
			elapsedTime = timeElapsedMS(cycle_timer);
			
			// Ring buffer holding the most recent MAXIMUM_FRAMES_TO_STORE frames.
			greyImX.copyTo(grayImageBuffer_2[frameCount_2 % MAXIMUM_FRAMES_TO_STORE]);

			if (configData.debugMode) {
				
				//imshow("image_2", undistIm);
				//waitKey(1);
				
			}
			
			lastImage_2.copyTo(olderImage_2);
			greyImX.copyTo(lastImage_2);
			
			// Timestamp stored in lock-step with the image ring buffer.
			time_buffer_2[frameCount_2 % MAXIMUM_FRAMES_TO_STORE] = info_msg->header.stamp;
			frameCount_2++;
			
			// Non-blocking pairing attempt; skipped when the other camera's
			// callback currently holds the lock.
			if (_access.try_lock()) {
				updatePairs();
				_access.unlock();
			}
			
			if (!firstImProcessed_2) {
				firstImProcessed_2 = true;
			}
			
			if (firstImProcessed_1 && firstImProcessed_2) {
				firstPairProcessed = true;
			}

		} else {
			printf("%s::%s << Matrices are equal.\n", __PROGRAM__, __FUNCTION__);
			elapsedTime = timeElapsedMS(cycle_timer, false);
		}
		
	}

}

void stereoDepthNode::updatePairs() {
	
	// Matches frames from camera 1 with their temporally closest counterparts
	// from camera 2. For each unpaired camera-1 frame, camera-2 timestamps are
	// scanned while they fall within configData.maxTimeDiff; the closest one is
	// recorded. Matched index pairs are appended to validPairs[0]/validPairs[1].
	// Callers must hold the _access lock (see the try_lock in the handlers).
	
	//ROS_INFO("Entered (updatePairs)...");
	
	// Snapshot the frame counters; the callbacks keep incrementing them.
	unsigned int maxCam1 = frameCount_1;
	unsigned int maxCam2 = frameCount_2;
	
	// Nothing new on either side since the last pass.
	if ((checkIndex_1 >= maxCam1) && (checkIndex_2 >= maxCam2)) {
		return;
	}
	
	// Cannot pair until both cameras have produced at least one frame.
	if ((maxCam1 == 0) || (maxCam2 == 0)) {
		return;
	}
	
	//ROS_INFO("Continuing (updatePairs)...");
	
	ros::Time time_1 = time_buffer_1[(maxCam1-1) % MAXIMUM_FRAMES_TO_STORE];
	ros::Time time_2 = time_buffer_2[(maxCam2-1) % MAXIMUM_FRAMES_TO_STORE];
	
	// ROS_INFO("maxCam1 = (%d); maxCam2 = (%d) - [%d.%d] [%d.%d]", maxCam1, maxCam2, time_1.sec, time_1.nsec, time_2.sec, time_2.nsec);
	
	while (checkIndex_1 < maxCam1) {
		
		//ROS_INFO("Looping (1)...");
		
		/*
		if  (duplicateFlags_1[checkIndex_1 % MAXIMUM_FRAMES_TO_STORE] == 1) {
			checkIndex_1++;
			continue;
		}
		*/
		
		double leastDiff = 9e99;
		int bestMatch = -1;
		
		bool candidatesExhausted = false;
	
		// Advance through camera-2 frames while they remain within the pairing
		// window, tracking the one closest in time to the current camera-1 frame.
		// NOTE(review): the loop condition uses the raw timeDiff value while the
		// candidate selection uses abs() — confirm timeDiff's sign convention.
		while ((timeDiff(time_buffer_2[checkIndex_2 % MAXIMUM_FRAMES_TO_STORE], time_buffer_1[checkIndex_1 % MAXIMUM_FRAMES_TO_STORE]) < configData.maxTimeDiff)) {
			
			//ROS_INFO("Looping (2)...");
			
			// Don't run past the newest camera-2 frame.
			if (checkIndex_2 >= (maxCam2-1)) {
				break;
				
			}
			
			/*
			if  (duplicateFlags_2[checkIndex_2 % MAXIMUM_FRAMES_TO_STORE] == 1) {
				checkIndex_2++;
				continue;
			}
			*/
			
			double diff = abs(timeDiff(time_buffer_2[checkIndex_2 % MAXIMUM_FRAMES_TO_STORE], time_buffer_1[checkIndex_1 % MAXIMUM_FRAMES_TO_STORE]));
			
			if (diff < leastDiff) {
				leastDiff = diff;
				bestMatch = checkIndex_2;
			}
			
			checkIndex_2++;
		}
		
		// If the scan stopped because the window was exceeded (rather than
		// running out of frames), this camera-1 frame has no future candidates.
		if ((timeDiff(time_buffer_2[checkIndex_2 % MAXIMUM_FRAMES_TO_STORE], time_buffer_1[checkIndex_1 % MAXIMUM_FRAMES_TO_STORE]) >= configData.maxTimeDiff)) {
			candidatesExhausted = true;
		}
		
		//checkIndex_2 = bestMatch;
		
		if ((leastDiff < configData.maxTimeDiff) && (bestMatch >= 0)) {
			
			//ROS_WARN("Pushing back match (%f)  (%d) [%02d.%04d] and (%d) [%02d.%04d]...", leastDiff, checkIndex_1, times_1.at(checkIndex_1).sec, times_1.at(checkIndex_1).nsec, bestMatch, times_2.at(bestMatch).sec, times_2.at(bestMatch).nsec);
			
			ROS_WARN("Adding pair (%d) & (%d) (%f) [%d.%d] & [%d.%d]", checkIndex_1, bestMatch, leastDiff, time_buffer_1[checkIndex_1 % MAXIMUM_FRAMES_TO_STORE].sec, time_buffer_1[checkIndex_1 % MAXIMUM_FRAMES_TO_STORE].nsec, time_buffer_2[bestMatch % MAXIMUM_FRAMES_TO_STORE].sec, time_buffer_2[bestMatch % MAXIMUM_FRAMES_TO_STORE].nsec);
			
			// Record the matched pair (parallel vectors: [0] = cam1, [1] = cam2).
			validPairs[0].push_back(checkIndex_1);
			checkIndex_1++;
			validPairs[1].push_back(bestMatch);
			
		} else if (candidatesExhausted) { // (checkIndex_2 < (maxCam2-1)) {
			// No acceptable partner will ever arrive for this frame; skip it.
			checkIndex_1++;
		} else {
			// Out of camera-2 frames for now; retry when more arrive.
			break;
		}
		
	}
	
	//ROS_INFO("Leaving (updatePairs)...");
	
}

void stereoDepthNode::timed_loop(const ros::TimerEvent& event) {
	
	// Timer callback (10 ms period): processes at most one pending matched
	// frame pair — releveling/rectification, SGBM disparity computation, then
	// publication of the disparity image and the rectified stereo pair.
	
	//ROS_INFO("Entered (timed_loop)...");
	
	// All matched pairs already processed — nothing to do this tick.
	if (pairsProcessed == validPairs[1].size()) {
		return;
	}
	
	// Rebuild the rectification maps if alpha changed (and intrinsics are known).
	if (infoProcessed_1 && infoProcessed_2) {
		if (alphaChanged) {
			updateMaps();
		}
	}
	
	//printf("%s::%s << Entered...\n", __PROGRAM__, __FUNCTION__);
	
	stereo_msgs::DisparityImagePtr disp_msg = boost::make_shared<stereo_msgs::DisparityImage>();
	
	if (firstPairProcessed) {
		
		// printf("%s::%s << 
		
		//stereo_model.fromCameraInfo(cam_info_1, cam_info_2);
		
		
		
		// Prepare disparity message
		disp_msg->header = cam_info_1.header;
		disp_msg->image.header = cam_info_1.header;
		disp_msg->image.height = cam_info_1.height;
		disp_msg->image.width = cam_info_1.width;
		disp_msg->image.encoding = sensor_msgs::image_encodings::TYPE_32FC1;
		disp_msg->image.step = disp_msg->image.width * sizeof(float);
		disp_msg->image.data.resize(disp_msg->image.height * disp_msg->image.step);
		
		// Valid window: region where SGBM can produce disparities, derived from
		// the window size, disparity range and minimum disparity.
		int border = sgbm.SADWindowSize / 2;
		int left   = sgbm.numberOfDisparities + sgbm.minDisparity + border - 1;
		int wtf = (sgbm.minDisparity >= 0) ? border + sgbm.minDisparity : std::max(border, -sgbm.minDisparity);
		int right  = disp_msg->image.width - 1 - wtf;
		int top    = border;
		int bottom = disp_msg->image.height - 1 - border;
		
		disp_msg->valid_window.x_offset = left;
		disp_msg->valid_window.y_offset = top;
		disp_msg->valid_window.width    = right - left;
		disp_msg->valid_window.height   = bottom - top;
		
		disp_msg->min_disparity = sgbm.minDisparity;
		disp_msg->max_disparity = sgbm.minDisparity + sgbm.numberOfDisparities - 1;
		disp_msg->delta_d = 1.0 / 16;	// SGBM outputs fixed-point disparities in 1/16 pixel units
		
		//disp_msg->f = stereo_model.right().fx();
		
		// Focal length taken from the loaded projection matrix P0.
		disp_msg->f = P0.at<double>(0,0);
		// disp_msg->T = stereo_model.baseline();
		
		// NOTE(review): T is derived from the difference of the P-matrix
		// x-translation terms (fx * baseline); DisparityImage::T conventionally
		// holds the baseline in metres — confirm units/scaling.
		disp_msg->T = P0.at<double>(0,3) - P1.at<double>(0,3);
		
		
		
		//firstPair = false;
		
		// Wraps the message's data buffer so the matcher output can be converted
		// directly into it without an extra copy.
		cv::Mat_<float> disp_image(disp_msg->image.height, disp_msg->image.width, reinterpret_cast<float*>(&disp_msg->image.data[0]), disp_msg->image.step);
		//printf("%s << Processing image pair: (%d, %d)\n", __FUNCTION__, frameCount_1, frameCount_2);
	
		elapsedTime = timeElapsedMS(cycle_timer, false);
		
		// Mat imCpy1, imCpy2;
		
		
		
		// Mat image16_1, image16_2, imageDown_1, imageDown_2, undistIm;
		
		//normalize_16(image16_1, grayImageBuffer_1[(frameCount_1-1) % MAXIMUM_FRAMES_TO_STORE], minVal, maxVal);
		//down_level(imageDown_1, image16_1);
		
        
        //normalize_16(image16_2, grayImageBuffer_2[(frameCount_2-1) % MAXIMUM_FRAMES_TO_STORE], minVal, maxVal);
		//down_level(imageDown_2, image16_2);
		
        
        //double grad, shift;
        
        Mat _8bit1, _8bit2;
        
        //relevelImages(grayImageBuffer_1[(frameCount_1-1) % MAXIMUM_FRAMES_TO_STORE], grayImageBuffer_2[(frameCount_2-1) % MAXIMUM_FRAMES_TO_STORE], disp16, Q, _8bit1, _8bit2, map11, map12, map21, map22);
        
        // Rectify and relevel the current pair into 8-bit images for matching.
        ROS_INFO("About to relevel...");
        relevelImages(grayImageBuffer_1[(validPairs[0].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE], grayImageBuffer_2[(validPairs[1].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE], disp16, R0, R1, t, Q1, Q2, _8bit1, _8bit2, map11, map12, map21, map22);
        
        
        /*
        adaptiveDownsample(grayImageBuffer_1[(frameCount_1-1) % MAXIMUM_FRAMES_TO_STORE], _8bit1);
        adaptiveDownsample(grayImageBuffer_2[(frameCount_2-1) % MAXIMUM_FRAMES_TO_STORE], _8bit2);
        
        remap(_8bit1, undistIm, map11, map12, INTER_LINEAR);
		GaussianBlur(undistIm, imCpy1, Size(7,7), 0.5, 0.5);
		
		remap(_8bit2, undistIm, map21, map22, INTER_LINEAR);
        GaussianBlur(undistIm, imCpy2, Size(7,7), 0.5, 0.5);
        */
        //findRadiometricMapping(grayImageBuffer_1[(frameCount_1-1) % MAXIMUM_FRAMES_TO_STORE], grayImageBuffer_2[(frameCount_2-1) % MAXIMUM_FRAMES_TO_STORE], grad, shift, imCpy1, imCpy2);
        
        // ROS_INFO("Radiometric mapping: (%f) (%f)", grad, shift);
        
        
		
		
		
		/*
		imshow("disparity1", image16_1);
		waitKey(1);
		
		imshow("disparity2", image16_2);
		waitKey(1);
		*/
		
		// Run semi-global block matching on the releveled 8-bit pair.
		sgbm(_8bit1, _8bit2, disp16);
		
		// filterSpeckles(disp16, 1.0, 5, 16.0);
		
		// NOTE(review): disp16 holds fixed-point values (1/16 pixel); this
		// conversion does not divide by 16 even though delta_d = 1/16 — confirm
		// downstream consumers expect the raw scale.
		disp16.convertTo(disp_image, CV_32F);
		
		//ros::Time currTime = ros::Time::now();
		
		//ROS_INFO("Publishing disparity message...");
		
		// This time should actually be the average of the two times corresponding to the frame pair...
		//disp_msg->header.stamp = currTime;
		
		// Stamp the output with the midpoint of the two source frame times.
		ros::Time currTime = findAverageTime(time_buffer_1[(validPairs[0].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE], time_buffer_2[(validPairs[1].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE]);
		disp_msg->header.stamp = currTime;
		
		ROS_ERROR("AveTime = (%d.%d); time_1 = (%d.%d); time_2 = (%d.%d)", currTime.sec, currTime.nsec, time_buffer_1[(validPairs[0].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE].sec, time_buffer_1[(validPairs[0].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE].nsec, time_buffer_2[(validPairs[1].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE].sec, time_buffer_2[(validPairs[1].at(pairsProcessed)) % MAXIMUM_FRAMES_TO_STORE].nsec);
		
		disparityPublisher.publish(disp_msg);
		
		Mat cam1mat, cam2mat;
		
		// Color copies of the rectified images for annotated publication.
		cvtColor(_8bit1, cam1mat, CV_GRAY2RGB);
		cvtColor(_8bit2, cam2mat, CV_GRAY2RGB);
		
		vector<vector<vector<Point> > > c1, c2;
		
		// Experimental contour-based depth path (result currently unused).
		getContours(_8bit1, c1);
		getContours(_8bit2, c2);
		
		Mat contourDisp(480, 640, CV_16UC1);
		
		depthFromContours(c1, c2, contourDisp);
		
		//imshow("contourDisp", contourDisp);
		//waitKey(1);
		
		printf("%s << contours: (%d, %d)\n", __FUNCTION__, ((int)c1.size()), ((int)c2.size()));
		
		//drawContours(_8bit1, cam1mat);
		//drawContours(_8bit2, cam2mat);
		//drawEpipolarLines(cam1mat, cam2mat, F);
		
		
		
		// Lazy one-time initialization of the outgoing image messages.
		if (msg_1.width == 0) {
			msg_1.width = cam1mat.cols; 
			msg_1.height = cam1mat.rows;
			msg_1.encoding = "bgr8";
			msg_1.is_bigendian = false;
			msg_1.step = cam1mat.cols*3;
			msg_1.data.resize(cam1mat.cols*cam1mat.rows*cam1mat.channels());
		}
		
		if (msg_2.width == 0) {
			msg_2.width = cam2mat.cols; 
			msg_2.height = cam2mat.rows;
			msg_2.encoding = "bgr8";
			msg_2.is_bigendian = false;
			msg_2.step = cam2mat.cols*3;
			msg_2.data.resize(cam2mat.cols*cam2mat.rows*cam2mat.channels());
		}
		
		//ROS_INFO("Publishing rectified images...");
		// NOTE(review): publishes camera_info_1/2, which are distinct members
		// from the cam_info_1/2 saved in the handlers — confirm they are
		// populated elsewhere.
		msg_1.header.stamp = currTime;
		camera_info_1.header.stamp = currTime;
		std::copy(&(cam1mat.at<Vec3b>(0,0)[0]), &(cam1mat.at<Vec3b>(0,0)[0])+(cam1mat.cols*cam1mat.rows*cam1mat.channels()), msg_1.data.begin());
		cam_pub_1.publish(msg_1, camera_info_1);
		
		msg_2.header.stamp = currTime;
		camera_info_2.header.stamp = currTime;
		std::copy(&(cam2mat.at<Vec3b>(0,0)[0]), &(cam2mat.at<Vec3b>(0,0)[0])+(cam2mat.cols*cam2mat.rows*cam2mat.channels()), msg_2.data.begin());
		cam_pub_2.publish(msg_2, camera_info_2);
		
		//ROS_INFO("Rectified images published.");
		
		/*
		disp16.convertTo(disp8, CV_8U, 255/(numberOfDisparities*16.));
		resize(disp8, disparityImage, cvSize(640, 480));
		imshow("disparityImage", disparityImage);
		waitKey(1);
		*/
		
		//publishMaps();
		
		pairsProcessed++;
	}
	
	
	
	
}

void stereoDepthNode::publishMaps() {
	
	// Legacy depth-map publishing routine; superseded by the publishing done in
	// timed_loop() (its only call site there is commented out).
	//
	// Fix: removed an unreachable "if (0) { ... }" initialization block — it was
	// dead code and was the only place in this function that cleared
	// 'firstPair', so the branch below never changed its own precondition.
	// 'firstPair' is set true in the constructor and never cleared elsewhere in
	// this file, so this branch is effectively disabled until that logic is
	// restored.
	
	if (!firstPair) {
		
		printf("%s::%s << Entering publishing segment...\n", __PROGRAM__, __FUNCTION__);
		
		// Diagnostic only: the actual disparity publication remains disabled.
		printf("%s::%s << disp_object info: (%d, %d)\n", __PROGRAM__, __FUNCTION__, disp_object.image.height, disp_object.image.width);
		
		printf("%s::%s << Publishing depth map...\n", __PROGRAM__, __FUNCTION__);
		//imshow("disp", disp);
		//waitKey(1);
		//disparityPublisher.publish(disp_msg);
		//pub_color.publish(msg_color, camera_info);
	}
	
}

void stereoDepthNode::updateMaps() {
	
	// Recomputes the undistortion/rectification maps for both cameras,
	// optionally selecting the free-scaling parameter (alpha) automatically
	// first. Invoked whenever alpha or autoAlpha changes.
	
	ROS_INFO("Updating maps...");
	
	// When auto mode is on, derive alpha from both cameras' calibration.
	if (configData.autoAlpha) {
		configData.alpha = findBestAlpha(configData.cameraData1.K, configData.cameraData2.K, configData.cameraData1.distCoeffs, configData.cameraData2.distCoeffs, configData.cameraData1.cameraSize, R0, R1);
	}
	
	// Keep the principal point centred in the recomputed camera matrices.
	const bool centerPP = true;
	
	configData.cameraData1.newCamMat = getOptimalNewCameraMatrix(configData.cameraData1.K, configData.cameraData1.distCoeffs, configData.cameraData1.cameraSize, configData.alpha, configData.cameraData1.cameraSize, &roi1, centerPP);
	configData.cameraData2.newCamMat = getOptimalNewCameraMatrix(configData.cameraData2.K, configData.cameraData2.distCoeffs, configData.cameraData2.cameraSize, configData.alpha, configData.cameraData2.cameraSize, &roi2, centerPP);
	
	ROS_WARN("Adjusting undistortion mappings...");
	
	// Camera 1 uses rectification rotation R0; camera 2 uses R1.
	initUndistortRectifyMap(configData.cameraData1.K, configData.cameraData1.distCoeffs, R0, configData.cameraData1.newCamMat, configData.cameraData1.cameraSize, CV_32FC1, map11, map12);
	initUndistortRectifyMap(configData.cameraData2.K, configData.cameraData2.distCoeffs, R1, configData.cameraData2.newCamMat, configData.cameraData2.cameraSize, CV_32FC1, map21, map22);
	
	// The maps now reflect the current alpha setting.
	alphaChanged = false;
	
}


void stereoDepthNode::serverCallback(thermalvis::depthConfig &config, uint32_t level) {
	
	// Dynamic-reconfigure handler for the alpha / autoAlpha parameters.
	// Rectification maps are rebuilt when the auto-alpha mode toggles, or when
	// a manual alpha value changes while auto mode is off.
	
	ROS_INFO("Reconfigure Request: (%1.2f | [%d])", 
	config.alpha,
	config.autoAlpha);
	
	const bool modeToggled = (config.autoAlpha != configData.autoAlpha);
	const bool manualAlphaChanged = (!config.autoAlpha) && (config.alpha != configData.alpha);
	
	// The alpha value itself is always recorded.
	configData.alpha = config.alpha;
	
	if (modeToggled || manualAlphaChanged) {
		
		configData.autoAlpha = config.autoAlpha;
		
		alphaChanged = true;	// picked up by timed_loop() as well
		
		ROS_WARN("About to try and update maps...");
		
		// Maps can only be rebuilt once both cameras' intrinsics are known.
		if (infoProcessed_1 && infoProcessed_2) {
			ROS_WARN("Updating maps...");
			updateMaps();
		}
		
	}
	
}
