/*! \file	videoslam.hpp
 *  \brief	Declarations for the VIDEOSLAM node.
*/

#ifndef _THERMALVIS_VIDEOSLAM_H_
#define _THERMALVIS_VIDEOSLAM_H_
	
#include "general_resources.hpp"
#include "ros_resources.hpp"
#include "opencv_resources.hpp"
#include "pcl_resources.hpp"

//#include <cv_bridge/CvBridge.h>

#include "improc.hpp"
#include "video.hpp"
#include "features.hpp"
#include "reconstruction.hpp"
#include "sba.hpp"
#include "keyframes.hpp"
#include "geometry.hpp"

#include "feature_tracks.h"
#include "pose_confidence.h"

#include "videoslamConfig.h"

#include <std_msgs/Float32.h>

typedef Eigen::Quaternion<double>   QuaternionDbl;
typedef dynamic_reconfigure::Server < thermalvis::videoslamConfig > Server;

// Drawing
#define DEFAULT_DRAWGRAPH_DECIMATION			1
#define DEFAULT_DRAWGRAPH_BICOLOR				0

// Hard Limits
#define MAX_HISTORY								10
#define MAX_POSES_TO_STORE						100
#define MAX_FRAMES								1000
#define MAX_TRACKS 								10000
#define SBA_MEMORY								134217728
#define MAX_TIME_GAP_FOR_INTERP					0.5

typedef pcl::PointCloud<pcl::PointXYZ> PointCloud;

const char __PROGRAM__[] = "THERMALVIS_ODOMETRY";

bool wantsToShutdown = false;
void mySigintHandler(int sig);
bool interpolatePose(const geometry_msgs::Pose& pose1, ros::Time time1, const geometry_msgs::Pose& pose2, ros::Time time2, geometry_msgs::Pose& finalPose, ros::Time time3);
void shiftPose(const geometry_msgs::Pose& pose_src, geometry_msgs::Pose& pose_dst, cv::Mat transformation);

/// \brief		Stores configuration information for the ODOMETRY routine
struct videoslamData {
	
	string read_addr;				///< Base address used when reading ROS parameters (presumably the node namespace — confirm in obtainStartingData)
	string flowSource, mapperSource;		///< Names of the upstream flow / mapper sources (assumed topic or node names; verify against subscriptions)
	
	bool writePoses;				///< If true, estimated poses are written out (destination not visible here — confirm in implementation)
	
	int baMode;					///< Bundle-adjustment mode selector
	
	//bool evaluationMode;
	
	double baStep;					///< Bundle-adjustment step parameter
	
	double terminationTime, restartTime;		///< Times at which the feed should terminate / restart (units assumed seconds — confirm)
	
	double maxPoseDelay;				///< Maximum tolerated delay between a pose message and its frame (assumed seconds — confirm)
	
	bool clearTriangulations;			///< Whether previously triangulated points should be discarded
	
	cameraParameters cameraData;			///< Camera parameters (type declared in project headers)
	string extrinsicsFile;				///< Path to the extrinsic calibration file
	double cameraLatency;				///< Estimated latency of the camera feed (assumed seconds — confirm)
	
	int evaluateParameters;				///< Parameter-evaluation selector/flag
	
	double maxAllowableError;			///< Maximum allowable error for pose acceptance (units/metric not visible here — confirm)
	
	int pnpIterations;				///< Iteration count for the PnP pose-estimation solver
	double inliersPercentage;			///< Required inlier percentage for PnP
	
	bool trimFeatureTracks;				///< Whether stale feature tracks should be trimmed (see videoslamNode::trimFeatureTrackVector)
	int pairsForTriangulation, adjustmentFrames, adjustmentIterations;	///< Triangulation pairing and bundle-adjustment sizing parameters
	double maxReprojectionDisparity, maxDistance, minSeparation, maxSeparation, maxStandardDev;	///< Geometric thresholds for triangulation / track filtering
	
	bool debugSBA, debugTriangulation, debugMode, verboseMode, publishPoints, publishKeyframes;	///< Debug/verbosity switches and optional-output toggles
	
	double dataTimeout;				///< Timeout after which incoming data is considered stale (assumed seconds — confirm)
	
	/// \brief		Reads the above settings from the ROS parameter server; returns false on failure
	bool obtainStartingData(ros::NodeHandle& nh);
	
};

/// \brief		Manages the ODOMETRY procedure
class videoslamNode {
private:

	videoslamData configData;			///< Node configuration (see videoslamData)
	
	int decimation, bicolor;			///< Graph-drawing settings (defaults: DEFAULT_DRAWGRAPH_DECIMATION / DEFAULT_DRAWGRAPH_BICOLOR)
	
	std::ofstream lStream;				///< Log output stream
	std::streambuf* lBufferOld;			///< Saved stream buffer, presumably for redirecting/restoring std output — confirm in .cpp
	
	cv::Mat extrinsicCalib_T, extrinsicCalib_R, extrinsicCalib_P;	///< Extrinsic calibration: translation, rotation, projection matrices (assumed — confirm load code)
	//QuaternionDbl extrinsicCalib_quat;
	
	unsigned int frameProcessedCounter;		///< Count of frames processed so far
	
	bool hasTerminatedFeed;				///< Set once the input feed has terminated
	
	cv::Mat blank, eye4;				///< Scratch matrix and (presumably) a 4x4 identity — confirm initialization
	
	double distanceTravelled;			///< Accumulated distance travelled by the estimated camera pose
	
	int framesArrived, framesProcessed, pnpSuccesses, baSuccesses;	///< Frame / solver statistics
	double baAverage, dsAverage;			///< Running averages (bundle adjustment / downsampling? — confirm in .cpp)
	
	ros::Subscriber tracks_sub;			///< Subscription for feature-track messages (handle_tracks)
	ros::Subscriber info_sub;			///< Subscription for camera info (handle_info)
	ros::Subscriber pose_sub;			///< Subscription for pose messages (handle_pose)
	
	SysSBA sys;					///< Sparse bundle-adjustment system (see SBA_MEMORY limit)
	
	//std_msgs::Float32 confidence_msg;
	
	
	ros::Publisher path_pub, camera_pub, points_pub, confidence_pub;	///< Output publishers
	
	pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_ptr_;	///< Shared point cloud used when publishing points
	
	unsigned int frameHeaderHistoryCounter, poseHistoryCounter;	///< Write indices into the circular history buffers below
	std_msgs::Header frameHeaderHistoryBuffer[MAX_HISTORY];		///< Recent frame headers (ring buffer of size MAX_HISTORY)
	geometry_msgs::PoseStamped poseHistoryBuffer[MAX_HISTORY];	///< Recent poses (ring buffer of size MAX_HISTORY)
	
	geometry_msgs::PoseStamped keyframePoses[MAX_POSES_TO_STORE];	///< Stored keyframe poses
	bool keyframeTypes[MAX_POSES_TO_STORE];				///< Per-keyframe flag (e.g. source of pose — confirm semantics in updateKeyframePoses)
	unsigned int storedPosesCount;					///< Number of valid entries in keyframePoses/keyframeTypes
	
	ros::Timer timer;				///< Drives main_loop()
	
	double latestTracksTime, pointShift, pnpError, pnpInlierProp;	///< Latest track timestamp and most recent solver quality metrics
	
	int lastTestedFrame, usedTriangulations;	///< Bookkeeping for triangulation progress
	
	bool infoProcessed;				///< True once camera info has been received and processed
	
	vector<featureTrack> featureTrackVector;	///< All currently stored feature tracks (bounded by MAX_TRACKS — confirm enforcement)
	
	keyframeStore keyframe_store;			///< Keyframe storage (type declared in keyframes.hpp)
	cv::Mat F_arr[MAX_FRAMES], H_arr[MAX_FRAMES];	///< Per-frame fundamental / homography matrices (assumed — confirm usage)
	
	cv::Mat ACM[MAX_FRAMES];			///< Per-frame camera matrices (absolute camera motion? — confirm in .cpp)
	
	double predictiveError, bundleTransShift, bundleRotShift;	///< Pose-prediction error and translation/rotation shift from bundle adjustment
	
	char nodeName[256];				///< This node's name (for logging)
	
	ros::Publisher pose_pub;			///< Pose output publisher
	char pose_pub_name[256];			///< Topic name used for pose_pub
	
	/*
	ros::Publisher points_pub;
	char points_pub_name[256];
	*/
	sensor_msgs::PointCloud2 pointCloud_message;	///< Reusable message buffer for point-cloud publishing
	
	
	geometry_msgs::PoseStamped savedPose, currentPose, pnpPose;	///< Last saved pose, current estimate, and PnP-derived estimate
	
	int currentPoseIndex;				///< Index of the current pose (into stored pose data — confirm which buffer)
	
	bool latestReceivedPoseProcessed;		///< True once the most recently received pose has been consumed
	
	vector<unsigned int> framesReceived;		///< Sequence numbers of frames received (used by checkConnectivity — confirm)
	
	// Thread-protection
	boost::mutex main_mutex;			///< Guards shared state between callbacks and main_loop
	
	dynamic_reconfigure::Server<thermalvis::videoslamConfig> server;		///< dynamic_reconfigure server
	dynamic_reconfigure::Server<thermalvis::videoslamConfig>::CallbackType f;	///< Bound callback for serverCallback
	
public:

	/// \brief		Constructor: initializes the node from startup configuration
	videoslamNode(ros::NodeHandle& nh, videoslamData startupData);
	
	// Callbacks
	/// \brief		Handles an incoming feature-tracks message
	void handle_tracks(const thermalvis::feature_tracksConstPtr& msg);
	/// \brief		Handles incoming camera-info (intrinsics) messages
	void handle_info(const sensor_msgs::CameraInfoConstPtr& info_msg);
	/// \brief		Handles an incoming (externally estimated) pose message
	void handle_pose(const geometry_msgs::PoseStamped& pose_msg);
	/// \brief		Periodic processing loop, driven by the ros::Timer
	void main_loop(const ros::TimerEvent& event);
	
	/// \brief		Updates locally stored pose estimates; returns false on failure
	bool updateLocalPoseEstimates();
	
	//void publishPoints(const geometry_msgs::PoseStamped& pose_msg);
	/// \brief		Publishes the current 3D point cloud with the given stamp/sequence
	void publishPoints(ros::Time stamp, unsigned int seq);
	/// \brief		Attempts to determine the current camera pose; returns false on failure
	bool determinePose();
	
	/// \brief		Publishes the current pose estimate
	void publishPose();
	
	/// \brief		Finds the two stored poses nearest (in time) to targetTime; returns false if unavailable
	bool findNearestPoses(int& index1, int& index2, const ros::Time& targetTime);
	
	/// \brief		Performs cleanup prior to node shutdown
	void prepareForTermination();
	
	/// \brief		Triangulates 3D points from stored feature tracks
	void triangulatePoints();
	
	/// \brief		Merges a newly received tracks message into featureTrackVector
	void integrateNewTrackMessage(const thermalvis::feature_tracksConstPtr& msg);
	
	/// \brief		Checks connectivity/continuity for the given frame sequence number
	bool checkConnectivity(unsigned int seq);
	
	/// \brief		Updates stored keyframe poses with a new pose (fromICP flags the source — confirm semantics)
	bool updateKeyframePoses(const geometry_msgs::PoseStamped& pose_msg, bool fromICP = true);
	
	/// \brief		Trims stale entries from featureTrackVector (see configData.trimFeatureTracks)
	void trimFeatureTrackVector();
	
	/// \brief		dynamic_reconfigure callback: applies updated configuration at runtime
	void serverCallback(thermalvis::videoslamConfig &config, uint32_t level);
	
};

// Dummy subscriber connect/disconnect callbacks, passed to advertise() so ROS
// does not overwrite the publisher's sequence numbers:
// http://answers.ros.org/question/55126/why-does-ros-overwrite-my-sequence-number/
// Marked 'inline': these are function *definitions* in a header, and without
// inline (or static) linkage they violate the ODR and cause multiple-definition
// link errors as soon as this header is included by more than one translation unit.
inline void connected(const ros::SingleSubscriberPublisher&) {}
inline void disconnected(const ros::SingleSubscriberPublisher&) {}

boost::shared_ptr < videoslamNode > *globalNodePtr;

#endif
