// Optic Flow robot piece
// Jeff Hiner is responsible for all this

#include "optic.h"
#include <cstdio>
#include <cstring>
//#include <sys/types.h>
#include <sys/time.h>

//#define DISTRACTION_THRESH 0.75
#define interpolation CV_INTER_NN


/* Lazily allocate an image buffer: a no-op when *img already exists;
 * aborts the process if OpenCV cannot allocate the memory. */
inline static void allocateOnDemand( IplImage **img, CvSize size, int depth, int channels)
{
	if ( *img == NULL )
	{
		*img = cvCreateImage( size, depth, channels );
		if ( *img == NULL )
		{
			fprintf(stderr, "Error: Couldn't allocate image. Out of memory?\n");
			exit(-1);
		}
	}
}

// Wall-clock time expressed in clock_t ticks (CLOCKS_PER_SEC ticks/second).
// Unlike clock(), this measures real elapsed time rather than CPU time,
// which is what the inter-frame delta in getflows() needs.
inline clock_t realclock()
{
	timeval tv;
	gettimeofday(&tv, 0);
	// Scale both fields into ticks. The original added raw microseconds to
	// tv_sec * CLOCKS_PER_SEC, which is only unit-consistent when
	// CLOCKS_PER_SEC happens to be 1,000,000.
	return (clock_t)((double)tv.tv_sec * CLOCKS_PER_SEC
	               + (double)tv.tv_usec * CLOCKS_PER_SEC / 1000000.0);
}

// Dot product of two 2-D vectors.
inline double dotP(CvPoint2D32f p1, CvPoint2D32f p2)
{
	const double xterm = p1.x * p2.x;
	const double yterm = p1.y * p2.y;
	return xterm + yterm;
}

/// Grab a new camera frame, run ball tracking and optic flow on it, and
/// report translation rates plus ball-detection results.
/// @param translationX  left-right translation per second (X_translation / dt)
/// @param translationZ  forward-backward translation per second (Z_translation / dt)
/// @param ballX,ballY,ballR  detected ball center and radius, filled by trackBall()
/// @param ballDetect    set by trackBall(): 1 when a ball was found, else 0
void Optic::getflows( double &translationX, double &translationZ,
                      double &ballX, double &ballY, double &ballR, int &ballDetect)
{
	// Shift the previous "new" frame (F2) into the "old" slot (F1),
	// along with its timestamp.
	// copy F2 to F1
	cvCopy(frame2_1C, frame1_1C);
	frame1time = frame2time;

	// grab new F2 -- retry until the capture actually delivers a frame
	do
	{
		frame = cvQueryFrame( input_video );
	}
	while (frame == NULL);

  // send new frame to object tracking function
  trackBall(frame, ballX, ballY, ballR, ballDetect);

	// Timestamp the new frame, then convert it to the single-channel working
	// image and to the color frame used for diagnostic drawing.
	frame2time = realclock();
	cvConvertImage(frame, frame2_1C);
	cvConvertImage(frame, scribble_frame);	// copy the frame to the scribble frame

	// NOTE(review): if both timestamps land in the same clock tick, dt is 0
	// and the divisions below yield inf/NaN -- confirm the frame rate bounds.
	const double dt = (double)(frame2time - frame1time) / CLOCKS_PER_SEC; // delta T between frames

	// optic flow function, also draws flow features in window
	// (sets the X_translation / Z_translation members)
	calculate_flows();

	// Normalize the per-frame member translations into per-second rates.
	translationX = X_translation / dt;
	translationZ = Z_translation / dt;

}

/// Detect a colored ball in `frame` via HSV thresholding plus a Hough circle
/// transform, report its position, and draw the detections onto `frame`.
/// @param frame       input BGR frame; detection markers are drawn onto it
/// @param ballX,ballY center of the last detected circle (pixels), or -1 when
///                    no ball is in sight
/// @param ballR       radius of the last detected circle (pixels), or -1
/// @param ballDetect  1 when at least one circle was found, 0 otherwise
void Optic::trackBall(IplImage* frame, double &ballX, double &ballY, double &ballR, int &ballDetect)
{
	// Convert color space to HSV as it is much easier to filter colors
	// in the HSV color-space.
	cvCvtColor(frame, hsv_frame, CV_BGR2HSV);

	// Filter out colors which are out of the configured [hsv_min, hsv_max] range.
	cvInRangeS(hsv_frame, hsv_min, hsv_max, thresholded);

	// Scratch memory for hough circles (released at the end of this call).
	CvMemStorage* storage = cvCreateMemStorage(0);

	// hough detector works better with some smoothing of the image
	cvSmooth( thresholded, thresholded, CV_GAUSSIAN, 9, 9 );
	CvSeq* circles = cvHoughCircles(thresholded, storage, CV_HOUGH_GRADIENT, 2,
	                                thresholded->height/4, 100, 50, 10, 400);

	if (circles->total > 0)
	{
		ballDetect = 1;

		// Report the last detected circle; draw every detection on the frame.
		for (int i = 0; i < circles->total; i++)
		{
			float* p = (float*)cvGetSeqElem( circles, i );
			ballX = p[0]; ballY = p[1]; ballR = p[2];     // x, y, r = radius

			// Green filled dot at the center, red outline on the rim.
			cvCircle( frame, cvPoint( cvRound(p[0]),cvRound(p[1])),
			                          3, CV_RGB(0,255,0), -1, 8, 0 );
			cvCircle( frame, cvPoint( cvRound(p[0]),cvRound(p[1])),
			                          cvRound(p[2]), CV_RGB(255,0,0), 3, 8, 0 );
		}
	}
	else
	{
		// No ball in sight: publish -1 sentinels instead of leaving the
		// caller's variables stale (this was a noted TODO in the original).
		ballDetect = 0;
		ballX = -1.0; ballY = -1.0; ballR = -1.0;
	}

	cvReleaseMemStorage(&storage);
}

/// Run pyramidal Lucas-Kanade optic flow between frame1_1C and frame2_1C at
/// the pre-selected member feature points (frame1_features), cull outlier
/// flow vectors, and accumulate the survivors into the X_translation /
/// Z_translation members via dot products with the translation basis fields.
/// Also draws diagnostic overlays onto scribble_frame.
void Optic::calculate_flows()
{
	// Anywhere you see "scribble_frame" he's trying to draw on the frame

	// Per-feature success flags filled in by cvCalcOpticalFlowPyrLK.
	char optical_flow_found_feature[FLOW_POINTS];

	//float optical_flow_feature_error[FLOW_POINTS];

	CvPoint p, q;
	//CvScalar line_color;
	CvSize optical_flow_window = cvSize(3,3);
	CvPoint2D32f frame2_features[FLOW_POINTS]; // Optic flow features' location in frame 2, set by Lucas-Kanade
	CvTermCriteria optical_flow_termination_criteria = cvTermCriteria( CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, .3 );


	int i;
	double avghyp, stddev;
	// Per-feature bookkeeping for the features that survive the ROI cull:
	// original feature index plus integer start (p) and end (q) coordinates.
	int considered_point[FLOW_POINTS],
	px[FLOW_POINTS],py[FLOW_POINTS],qx[FLOW_POINTS],qy[FLOW_POINTS];
	// following are arrays of
	double hyparr[FLOW_POINTS], // hypotenus array (distance between frame2 and frame1 feature)
	anglearr[FLOW_POINTS], // angle array (angle between frame2 and frame1 feature point)
	anglelocation[FLOW_POINTS]; // angle between center and frame1 feature point

	//////////////////////////////////////////// Calc Optic Flow! ////////////////////////////////////////////

	/* Pyramidal Lucas Kanade Optical Flow*/
	// optical_flow_termination_criteria= cvTermCriteria( CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, .3 );
	// The pyramid built for frame 2 on the previous call is reused as the
	// frame-1 pyramid this call (CV_LKFLOW_PYR_A_READY), ping-ponging between
	// pyramid1 and pyramid2. The final else branch handles the first call,
	// when lastpyramid matches neither buffer and both must be built.
	if (lastpyramid == pyramid1)
	{
		cvCalcOpticalFlowPyrLK(frame1_1C, frame2_1C, lastpyramid, pyramid2, frame1_features,
		                       frame2_features, number_of_features, optical_flow_window, 5,
		                       optical_flow_found_feature, 0,
		                       optical_flow_termination_criteria, CV_LKFLOW_PYR_A_READY );
		lastpyramid = pyramid2;
	}
	else if (lastpyramid == pyramid2)
	{
		cvCalcOpticalFlowPyrLK(frame1_1C, frame2_1C, lastpyramid, pyramid1, frame1_features,
		                       frame2_features, number_of_features, optical_flow_window, 5,
		                       optical_flow_found_feature, 0,
		                       optical_flow_termination_criteria, CV_LKFLOW_PYR_A_READY );
		lastpyramid = pyramid1;
	}
	else
	{
		cvCalcOpticalFlowPyrLK(frame1_1C, frame2_1C, pyramid1, pyramid2, frame1_features,
		                       frame2_features, number_of_features, optical_flow_window, 5,
		                       optical_flow_found_feature, 0,
		                       optical_flow_termination_criteria, 0 );
		lastpyramid = pyramid2;
	}

	//////////////////////////////////////////////////////////////////////////////////////////////////////////
	////////////////////////////////////////////* Time for Math! *////////////////////////////////////////////
	//////////////////////////////////////////////////////////////////////////////////////////////////////////

	// Store Data from optical flow into arrays, as well as compute the mean:
	avghyp = 0.0;
	int considered_features = 0;

	for (i = 0; i < number_of_features; i++)
	{
		/* If Pyramidal Lucas Kanade didn't really find the feature, skip it. */
		if ( optical_flow_found_feature[i] == 0 )
			continue;

		/* Set line color and thickness*/
		//line_thickness = 1;
		//line_color = CV_RGB(0,0,255);

		/* Draw arrow*/
		// Round feature coordinates down to integer pixel positions:
		// p = feature location in frame 1, q = where it moved to in frame 2.
		p.x = (int) (frame1_features[i].x-.5);
		p.y = (int) (frame1_features[i].y-.5);
		q.x = (int) (frame2_features[i].x-.5);
		q.y = (int) (frame2_features[i].y-.5);
		//const double tempmagnitude = sqrt( (double) (square(p.y - centery) + square(p.x - centerx)) );
		// Distance of the frame-2 location from the image center.
		const double tempmagnitude2 = sqrt( (double) (square(q.y - centery ) + square(q.x - centerx)) );

		// what can cause a pair of points to be "bad"?
		// if the magnitude of abs(p_to_q[i] minus p_to_q[i_opposite]) is greater than DISTRACTION_THRESH * p_to_q[i]
		// then invalidate i_opposite and continue
		//const int nPts = floor((i - 1) / NSECTORS) * NSECTORS;
		//const int j = ((i - nPts - 1 + NSECTORS / 2) % NSECTORS) + 1 + nPts; // j is the "opposite" point from i

		// absolute distance from p to q
		//const double p_to_q_i = sqrt((double) (square(p.y - q.y) + square(p.x - q.x)));
		// same thing as above? not sure
		//const double p_to_q_j = sqrt((double) (square(frame1_features[j].y - frame2_features[j].y - 1) + square(frame1_features[j].x - frame2_features[j].x - 1)));

		// cull if LK says feature moved outside ROIradius; it's probably a glitch
		if ( tempmagnitude2 > ROIradius )
		{
			optical_flow_found_feature[i] = 0;
		}
		else
		{
			// Feature survives: record its flow angle, magnitude, and angular
			// position relative to the image center.
			anglearr[considered_features] = atan2( (double) (p.y - q.y), (double) (p.x - q.x) );

			hyparr[considered_features] = sqrt( (double)(square(p.y - q.y) + square(p.x - q.x)) );
			anglelocation[considered_features] = atan2( (double)(p.y - centery), (double) (p.x - centerx) );
			//printf("angle location: %f\n",anglelocation[i]);
			draw_x( scribble_frame, p, 4, 4, cvScalar(255,255,255,0) );

			// NOTE(review): only magnitudes below hres/3 contribute to the
			// mean's numerator, yet the divisor below counts ALL considered
			// features -- confirm this bias is intended.
			if (hyparr[considered_features] < hres/3)
				avghyp += hyparr[considered_features];

			considered_point[considered_features] = i;
			px[considered_features] = p.x;					// P is inital point
			py[considered_features] = p.y;
			qx[considered_features] = q.x;					// Q is the point P moves to...
			qy[considered_features] = q.y;
			considered_features++;
		}

	}

	p.x = centerx;												// Display the mask size
	p.y = centery;
	draw_x2( scribble_frame, p, ROIradius-1, 1, NSECTORS * 2, cvScalar(100,100,100,0) );
	cvCircle(scribble_frame, p, ROIradius, cvScalar(0,0,0), 1);

	// Compute average of vector magnitudes, which we use later to throw some more elements out
	// NOTE(review): divides by zero when no features survived; avghyp becomes
	// NaN but is then unused because the loops below iterate zero times.
	avghyp /= (double)considered_features;

	// Compute Standard Deviation:
	stddev = 0.0;
	for (i = 0; i < considered_features; i++)
	{
		if (hyparr[i] < hres/3)
			stddev += (double)square(hyparr[i]-avghyp);
	}
	// NOTE(review): denominator is number_of_features, while the mean above
	// used considered_features -- confirm the mismatch is intentional.
	stddev = (double)sqrt(stddev)/(double)sqrt((double)number_of_features);
	//printf("Standard Deviation: %f, Mean: %f\n", stddev,avghyp);

	// Draw every surviving flow vector, color-coded by whether its magnitude
	// falls inside the mean +/- stddevradius*stddev acceptance band.
	for (i = 0; i < considered_features; i++)		// Compute Standard Deviation of magnitudes
	{
		p.x = px[i];
		p.y = py[i];
		q.x = qx[i];
		q.y = qy[i];
		// RED = bigger than the window, YELLOW = bigger than 2x the average hyp
		if (((hyparr[i] <= (avghyp + stddev * stddevradius)) && ((hyparr[i] >= (avghyp - stddev * stddevradius)))) || (stddev==0.0))
    // within mean +/- standard deviation
		{
			cvLine( scribble_frame, p, q, CV_RGB(0,255,0), 1, 8, 0 );			// draw in green
		}
		else // Throw out this feature!
		{
			if (hyparr[i] >= hres/3)
				cvLine( scribble_frame, p, q, CV_RGB(255,255,0), 1, 8, 0 );		// draw in yellow
			else
				cvLine( scribble_frame, p, q, CV_RGB(255,0,0), 1, 8, 0 );		// draw in red
		}

		/* End of the arrow*/
		const int body = 1;//10;
		q.x = (int) (px[i] - body * hyparr[i] * cos(anglearr[i]));
		q.y = (int) (py[i] - body * hyparr[i] * sin(anglearr[i]));
		draw_x( scribble_frame, q, 2, 0, cvScalar(0,0,0,0) );

	}

	Z_translation = 0;
	X_translation = 0;
	int translation_points = 0;

	// Calculate movements: average the dot products of each accepted flow
	// vector with the Z/X translation basis fields at that feature's index.
	for (i = 0; i < considered_features; i++)
	{
		if ( (hyparr[i] <= (avghyp + stddev * stddevradius)) && (hyparr[i] >= (avghyp - stddev * stddevradius)) )	// within mean +/- standard deviation
		{
			// do stuff for each point

			// sum of dot products with translate matrix over considered features gives us translation
			Z_translation += dotP( cvPoint2D32f(qx[i] - px[i], qy[i] - py[i]), Z_trans_field[considered_point[i]]);
			X_translation += dotP( cvPoint2D32f(qx[i] - px[i], qy[i] - py[i]), X_trans_field[considered_point[i]]);
			translation_points++;
		}
	}

	if (translation_points != 0)
	{
		Z_translation /= translation_points;
		X_translation /= translation_points;
	}

	if (view_enabled)   // block
	{
		// Display translations as a cyan vector anchored near the bottom-right.
		p.x = hres - 50;
		p.y = vres - 50;
		q.x = (int) (Z_translation) + p.x;
		q.y = (int) (X_translation) + p.y;

		cvLine( scribble_frame, p, q, CV_RGB(0,255,255), 1, 8, 0 );
		cvCircle(scribble_frame, p, 20, cvScalar(255,0,255), 1);

		// draw a white X at the center point
		q.x = centerx;
		q.y = centery;
		draw_x( scribble_frame, q, 4, 4, cvScalar(255,255,255,0) );

		/* Display the image which we drew on*/
		cvShowImage("Optical Flow", scribble_frame);
		cvWaitKey(3);
	}
}

// Draw an X centered on `pt`: four spokes from the center out to the corners
// of a (2*radius)-pixel square bounding box.
void Optic::draw_x( IplImage* img, CvPoint pt, int radius, int weight, CvScalar color )
{
	// Corner order matches the original: (+,+), (-,+), (+,-), (-,-).
	for (int sy = 1; sy >= -1; sy -= 2)
	{
		for (int sx = 1; sx >= -1; sx -= 2)
		{
			cvLine( img, pt, cvPoint( pt.x + sx * radius, pt.y + sy * radius), color, weight, 4, 0 );
		}
	}
}

// Draw a spoked wheel: `sectors` line segments radiating from `center`,
// evenly spaced around the full circle at radius `radius`.
void Optic::draw_x2( IplImage* img, CvPoint center, int radius, int weight,int sectors, CvScalar color )
{
	for (int j = 0; j < sectors; j++)
	{
		// Hoist the spoke angle; it was previously computed twice per segment.
		const double angle = 2.0 * M_PI * (double)j / (double)sectors;
		cvLine( img, center,
		        cvPoint( center.x + radius * cos(angle), center.y + radius * sin(angle)),
		        color, weight, 1, 0 );
	}
}
