#include "Seeker.h"

namespace seekDot {

Seeker::Seeker(ros::NodeHandle &n)
:n(n),
 it(n),
 image_count(0),
 currentPic(0)
{
	// Parameters: process only every N-th picture (default 5) and which
	// detection approach to run (default 0).
	n.param("takeEachNumPic", takeEachNumPic, 5);
	n.param("approach_select", approachSelect, 0);

	std::cout<<"#####################Selected Approach:"<<approachSelect<<std::endl;

	// Plain ROS publishers: detection counter and re-stamped pose.
	number_image = n.advertise<std_msgs::Int32>("number_image", 10);
	pose_out = n.advertise<nav_msgs::Odometry>("/pose_out", 10, 1);

	// image_transport camera publishers (image + CameraInfo pairs):
	// the marked result and a debug view of the processed frame.
	image_circle = it.advertiseCamera("circles/image_raw", 10, 1);
	image_debug = it.advertiseCamera("debug/image_raw", 10, 1);

	// Inputs: the compressed camera stream and the robot pose.
	camera_sub = it.subscribeCamera<Seeker>("/image", 10, &Seeker::processImage, this, image_transport::TransportHints("compressed"));
	pose_in = n.subscribe<nav_msgs::Odometry>("/pose_in", 1, &Seeker::processPose, this);
}

Seeker::~Seeker()
{
	// Nothing to release manually: subscribers/publishers clean up
	// through their own destructors (RAII on the ROS handle members).
}

void Seeker::processPose(const nav_msgs::Odometry::ConstPtr& msg)
{
	// Cache the most recent odometry message so processImage() can
	// stamp it onto outgoing frames and republish it on a detection.
	last_pose = *msg;
}

/**
 * Camera callback: run the dot tracker on the incoming frame.
 *
 * Always publishes a debug image; when the tracker reports a hit it
 * additionally publishes the running detection count, the last known
 * pose (re-stamped with the image's timestamp) and the marked image.
 *
 * @param msg          incoming camera image (converted to "rgb8")
 * @param camera_info  matching CameraInfo, forwarded with every image
 */
void Seeker::processImage(const sensor_msgs::ImageConstPtr& msg, const sensor_msgs::CameraInfoConstPtr& camera_info)
{
	bool found = false;
	sensor_msgs::CvBridge img_bridge;
	sensor_msgs::CameraInfo info_out = *camera_info;

	// Convert the ROS image into an OpenCV matrix (RGB channel order).
	img_bridge.fromImage(*msg, "rgb8");
	cv::Mat in(img_bridge.toIpl());

	// Output image, cleared to black.
	// BUG FIX: cv::Mat::zeros is a *static* factory returning a new Mat;
	// the original called it on the instance (out.zeros(...)) and discarded
	// the result, leaving `out` uninitialized. Assign the returned Mat.
	cv::Mat out = cv::Mat::zeros(in.size(), in.type());

	// Debug image: starts out wrapping the same buffer as `in` so that
	// something sensible is published even if no processed frame exists yet.
	cv::Mat debug(img_bridge.toIpl());

	/*
	 * IMAGE ANALYSIS:
	 * in    = incoming cv::Mat
	 * out   = outgoing cv::Mat with the marking red circle
	 * debug = outgoing cv::Mat (published on image_debug)
	 * found = true when the dot was detected
	 */
	if(CTracker::instance()->processFrame(in))
	{
		out = *CTracker::instance()->getTrackedMark();
		found = true;
	}

	// Overlay the current pose (cached from /pose_in) as text on the output.
	std::ostringstream poseStr;
	poseStr<<"X:"<<last_pose.pose.pose.position.x<<" Y:"<<last_pose.pose.pose.position.y;
	CTracker::writeStringToFrame( out, poseStr.str());

	debug = CTracker::instance()->latestProcessedFrame();
	////END OF ANALYSIS CODE///////

	// Publish the debug image unconditionally (Mat -> IplImage -> ROS msg).
	IplImage ipl_debug = debug;
	sensor_msgs::Image img_debug = *img_bridge.cvToImgMsg(&ipl_debug, "rgb8");
	image_debug.publish(img_debug, info_out, info_out.header.stamp);

	if(found) //a red dot was found in this frame
	{
		// Publish the running count of images containing the dot.
		image_count++;
		std_msgs::Int32 foundmsg;
		foundmsg.data = image_count;
		number_image.publish(foundmsg);

		// Publish the cached pose, re-stamped with the image's timestamp
		// so downstream consumers can associate pose and detection.
		nav_msgs::Odometry pose = this->last_pose;
		pose.header.stamp = info_out.header.stamp;
		pose_out.publish(pose);

		// Publish the marked image.
		IplImage ipl_out = out;
		sensor_msgs::Image img_out = *img_bridge.cvToImgMsg(&ipl_out, "rgb8");
		image_circle.publish(img_out, info_out, info_out.header.stamp);
	}

	// Frame counter wrapping at takeEachNumPic.
	// NOTE(review): this counter is maintained but never used to skip
	// frames, so every image is processed regardless of takeEachNumPic —
	// confirm whether an early return while currentPic != 0 was intended.
	currentPic++;
	if(currentPic >= takeEachNumPic)
	{
		currentPic = 0;
	}
}





}
