/*
 * Initialize.cpp
 *
 *  Created on: Jan 25, 2012
 *      Author: Geerten
 */

#include <crate_vision/main.h>
#include <crate_vision/Detect.h>
#include <crate_vision/CrateVisionNode.h>
#include <crate_vision/Initialize.h>
#include <iostream>
#include <stdio.h>
#include <algorithm>
#include <FiducialDetector.h>
#include <pcrctransformation/pcrctransformer.hpp>
#include <pcrctransformation/point2f.hpp>

#include <Crate.h>

#include "tinyxml.h"

#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/core/core.hpp>
#include <unicap_cv_bridge.hpp>
#include <CameraCalibration/RectifyImage.h>

using namespace unicap_cv_bridge;
using namespace std;
using namespace cv;
using namespace pcrctransformation;


/* Constructs the Initialize class.
 *
 * @param unicap_cv_camera* $camera
 * The camera used for detection of empty places
 * 
 * @param RectifyImage* $ri
 * A rectifier that is used for lens correction
 *
 * @param FiducialDetector* $fd
 * A fiducial detector that is needed by the pc_rc_transformer
 */
Initialize::Initialize(unicap_cv_camera* camera, RectifyImage* ri, FiducialDetector* fd) :
		cam(camera),
		rectifier(ri),
		fidDetector(fd) {
}

/* This function is used to change the exposure of the camera in order to have a comparable looking frame in different lighting conditions.
 *
 * The entire frame is evaluated to see if the right exposure is reached. 
 * This function works the same way as a successive approximation ADC: the difference is cut in half every iteration.
 */
void Initialize::exposure() {
	bool exposure_calibrated = false;
	int count = 0;	// iterations attempted with the current starting exposure

	// Frame buffer matching the camera's native resolution and pixel format.
	Mat frame(cam->get_img_height(), cam->get_img_width(), cam->get_img_format());
	
	// Successive-approximation state: aim for an average brightness of
	// goal_value, starting from a low exposure with step exposure_diff.
	goal_value = 100;
	exposure_value = 0.001;
	exposure_diff = 0.1;
	cam->set_exposure(exposure_value);
	
	while(1) {
		
		// Phase 1: adjust the exposure until the frame's average value lands
		// within +/-1 of goal_value, or the operator presses 'x' to accept.
		while(!exposure_calibrated){
			count++;
			cam->get_frame(&frame);
	
			rectifier->rectify(frame, frameRectified);
	
			imshow("exposure init", frameRectified); waitKey(10);
			
			int avg_value = 0;
			int peak = 0;
			
			// Histogram on channel 2; avg_value receives the mean brightness.
			Detect::hsv_hist(frameRectified, &peak, 2, &avg_value);
	
			exposure_calibrated = false;
			if (avg_value > (goal_value + 1)) {
				cout << "too high" << endl;
				exposure_value-= exposure_diff;
				if(exposure_value < 0){
					// Exposure must stay positive; clamp to a tiny value.
					exposure_value = 0.0001;
				}
			} else if (avg_value < (goal_value - 1)) {
				cout << "too low"<< endl;
				exposure_value+= exposure_diff;
			} else {
				exposure_calibrated = true;
			}
			// Halve the step every iteration, like a successive-approximation ADC.
			exposure_diff/=2;
			cout << "avg val: " << avg_value << "exposure: " << exposure_value << " exposure_diff: " << exposure_diff << endl;
			cam->set_exposure(exposure_value);
			
			// 'x' lets the operator accept the current exposure manually.
			char key = waitKey(100);
			if(key == 'x'){
				exposure_calibrated = true;
			}
			
			if(count > 25){ 	//wanted exposure is likely not going to be found
				count = 0;		//reset the count
				
				//reset all vars needed for exposure so we can try again
				exposure_value = 0.0001;
				exposure_diff = 0.1;
				exposure_calibrated = false;
				cam->set_exposure(exposure_value);
				waitKey(1000);
			
			}
		
		}
		
		// Phase 2: show the calibrated result and let the operator tweak
		// goal_value ('+'/'-'), the median-blur kernel (','/'.'), restart
		// calibration ('\') or accept and leave ('q').
		if (exposure_calibrated) {
			destroyWindow("exposure init");
			cout << "final exposure: " << exposure_value << endl;
			while(1){
				cam->get_frame(&frame);
				rectifier->rectify(frame, frameRectified);
				
				// Kernel size crateData.median must stay odd; the key
				// handlers below only step it by 2 to preserve that.
				medianBlur(frameRectified, final_frame, getVisionData()->crateData.median);
		   		
				
				imshow("final exposure, press q to continue", final_frame);			
				char key = waitKey(10);
		    	if (key == 'q') {
					break;
		   	} else if (key == '+') {
		   		cout << "goal value: " << goal_value << endl;
		   		goal_value+= 2;
		   	} else if (key == '-') {
		   		cout << "goal value: " << goal_value << endl;
		   		goal_value-= 2;
		   	} else if (key == '\\') {
		   		exposure_value = 0.0001;
					exposure_diff = 0.1;
					exposure_calibrated = false;
					cam->set_exposure(exposure_value);
					waitKey(1000);
					break;
		   	} else if (key == ','){
		   		if(getVisionData()->crateData.median!=1){
		   			getVisionData()->crateData.median-=2;
					}
		   		cout << "median: " << getVisionData()->crateData.median << endl;
		   	} else if (key == '.'){
					getVisionData()->crateData.median+=2;		 
		   		cout << "median: " << getVisionData()->crateData.median << endl;  	
		   	}
	   	}
			destroyWindow("final exposure, press q to continue");
			// Still calibrated => operator accepted ('q'); otherwise '\' was
			// pressed and the outer loop restarts phase 1.
			if (exposure_calibrated) {
				break;
			}
		}
	}
}

/* This function is used to calibrate the values used to check whether a box is empty. An empty crate must be placed under the camera to make this calibration work.
 *
 * The low and high hue, as well as the saturation peak of an empty box, is determined by checking all the empty boxes. The hue range is then extended by 3 on both sides to ensure detection.
 *
 * The function is looped 50 times to improve the results.
 */
void Initialize::detections() {
/*	
	for(int l = 0; l < getVisionData()->contentData.diff_colors; l++){
		cout  << "nr: " << l << " low: " << getVisionData()->contentData.ball_hue_low[l] << " high: " << getVisionData()->contentData.ball_hue_high[l] << endl;
	}
*/
	
	Mat frame(cam->get_img_height(), cam->get_img_width(), cam->get_img_format()), final_frame;
	// Sample multiple frames to widen the hue ranges toward stable extremes.
	int steps = 50;
	for (int i = 0; i < steps; i++) {
	cout << i*100/steps << "%" << endl;
		cam->get_frame(&frame);

		rectifier->rectify(frame, frameRectified);
		medianBlur(frameRectified, frameRectified, getVisionData()->crateData.median);
		
		// Cuts the crate image into the per-box images stored in contentData.box.
		Detect::split_crate(frameRectified, Point2f(getVisionData()->robotCrateData.middle_x, getVisionData()->robotCrateData.middle_y), getVisionData()->robotCrateData.degree);
		
		int peak = 0, avg = 0;
		int l;
		// All rows except the last are assumed to contain empty boxes: track
		// the min/max hue peak seen across them.
		for (l = 0; l < getVisionData()->crateData.col_nr*(getVisionData()->crateData.row_nr-1); l++) {
			//cout << "l: " << l << endl;
			
			/*imshow("histogram", Detect::hsv_hist(getVisionData()->contentData.box[l], &peak, 0, &avg));
			imshow("source", getVisionData()->contentData.box[l]);
			imshow("final crate", final_crate);*/
			
			Detect::hsv_hist(getVisionData()->contentData.box[l], &peak, 0, &avg);
			//cout << "peak: " << peak << endl;
			
			if (getVisionData()->contentData.empty_hue_low > peak) getVisionData()->contentData.empty_hue_low = peak;
			if (getVisionData()->contentData.empty_hue_high < peak) getVisionData()->contentData.empty_hue_high = peak;
		}
		// The last row holds one reference ball per color; track each color's
		// hue range the same way.
		for (l = 0; l < getVisionData()->contentData.diff_colors; l++) {
			int nr = getVisionData()->crateData.col_nr*(getVisionData()->crateData.row_nr-1) + l;
			
			//cout << "l: " << nr << endl;
			
			Detect::hsv_hist(getVisionData()->contentData.box[nr], &peak, 0, &avg);
			
			//cout << "diffpeak: " << peak << endl;
			
			if (getVisionData()->contentData.ball_hue_low[l] > peak) getVisionData()->contentData.ball_hue_low[l] = peak;
			if (getVisionData()->contentData.ball_hue_high[l] < peak) getVisionData()->contentData.ball_hue_high[l] = peak;
		}
	}
	
	for(int l = 0; l < getVisionData()->contentData.diff_colors; l++){
		cout  << "nr: " << l << " low: " << getVisionData()->contentData.ball_hue_low[l] << " high: " << getVisionData()->contentData.ball_hue_high[l] << endl;
		// A range wider than 100 suggests the hue circle wrapped around 0
		// (e.g. red); swap low/high so the range is read as wrapping.
		if(getVisionData()->contentData.ball_hue_high[l] - getVisionData()->contentData.ball_hue_low[l] > 100){
			int temp_high = getVisionData()->contentData.ball_hue_high[l];
			getVisionData()->contentData.ball_hue_high[l] = getVisionData()->contentData.ball_hue_low[l];
			getVisionData()->contentData.ball_hue_low[l] = temp_high;
		} 
		
		// Widen each ball's range by 5 on both sides for robustness.
		getVisionData()->contentData.ball_hue_low[l]-=5;
		getVisionData()->contentData.ball_hue_high[l]+=5;
		
		// NOTE(review): only the low bound wraps around the hue circle here;
		// a high bound pushed past 255 is left as-is — confirm downstream
		// comparisons tolerate that.
		if(getVisionData()->contentData.ball_hue_low[l] < 0){
			getVisionData()->contentData.ball_hue_low[l] += 255;
		}
		
		cout  << "nr: " << l << " low: " << getVisionData()->contentData.ball_hue_low[l] << " high: " << getVisionData()->contentData.ball_hue_high[l] << endl;
	}

	// Widen the empty-box hue range by 3 on both sides.
	getVisionData()->contentData.empty_hue_high+= 3;
	getVisionData()->contentData.empty_hue_low-= 3;
	//border_saturation+= 2;*/
	cout << "high hue: " << getVisionData()->contentData.empty_hue_high << ", low hue: " << getVisionData()->contentData.empty_hue_low << ",satur: " << getVisionData()->contentData.border_saturation << endl;
}


// Orders two points by their x coordinate (comparator for nth_element/max_element).
bool xComp(cv::Point2f lhs, cv::Point2f rhs) {
	return lhs.x < rhs.x;
}
// Orders two points by their y coordinate (comparator for nth_element/max_element).
bool yComp(cv::Point2f lhs, cv::Point2f rhs) {
	return lhs.y < rhs.y;
}

/*
 * Returns the median of the x coordinates of the given points.
 * Only to be used by Initialize::fiducials().
 *
 * Takes the vector by value on purpose: nth_element reorders it.
 * Returns 0 for an empty vector.
 */
inline float medianX(std::vector<cv::Point2f> points){
	if(points.empty()) return 0.0f;
	std::vector<cv::Point2f>::iterator n = points.begin()+points.size()/2;
	nth_element(points.begin(), n, points.end(), xComp);
	if(points.size()%2 == 0) {
		// Even count: the median is the mean of the two middle order
		// statistics (indices size/2-1 and size/2). After nth_element only
		// *n is in its final sorted position; the element at n+1 is merely
		// partitioned, not the next order statistic (the old code wrongly
		// averaged *n and *(n+1)). The lower median is the maximum of the
		// left partition [begin, n).
		float lower = max_element(points.begin(), n, xComp)->x;
		return (lower + n->x)/2.0f;
	}
	return n->x;
}

/*
 * Returns the median of the y coordinates of the given points.
 * Only to be used by Initialize::fiducials().
 *
 * Takes the vector by value on purpose: nth_element reorders it.
 * Returns 0 for an empty vector.
 */
inline float medianY(std::vector<cv::Point2f> points){
	if(points.empty()) return 0.0f;
	std::vector<cv::Point2f>::iterator n = points.begin()+points.size()/2;
	nth_element(points.begin(), n, points.end(), yComp);
	if(points.size()%2 == 0) {
		// Even count: average the two middle order statistics (indices
		// size/2-1 and size/2). *(n+1) after nth_element is NOT the next
		// order statistic, so the lower median is taken as the maximum of
		// the left partition [begin, n) instead.
		float lower = max_element(points.begin(), n, yComp)->y;
		return (lower + n->y)/2.0f;
	}
	return n->y;
}

/* This function is used to find the fiducials and put their coordinates into the coordinate transformer, so it can be used.
 * All three fiducials must be visible in order to make this function work. If the results make no sense, make sure there are no other round objects visible under the camera.
 *
 * @param int $measurements
 * The number of times the function searches for fiducials. When they are all found the median is taken to improve precision.
 *
 * @param int $maxErrors
 * Specifies the number of failed detections allowed before returning false to indicate failure; a negative value allows unlimited failures.
*/
bool Initialize::fiducials(int measurements, int maxErrors){
	cam->set_auto_white_balance(true);

	//exposure_value = 0.015;
	//cam->set_exposure(exposure_value);

	// Detector tuning — radius bounds (pixels) and edge/circle thresholds.
	// NOTE(review): presumably chosen empirically for this camera setup;
	// re-verify after changing optics or mounting height.
	fidDetector->minRad = 16;
	fidDetector->maxRad = 24;

	fidDetector->lowThreshold = 35;
	fidDetector->highThreshold = 140;
	fidDetector->circleThreshold = 100;
	
	
	//markers.clear();

		
	//vector<Point2f> markers;
	
	// One sample buffer per fiducial; medians over these suppress jitter.
	vector<Point2f> fid1_buffer;
	vector<Point2f> fid2_buffer;
	vector<Point2f> fid3_buffer;

	int measurementCount = 0;
	int failCount = 0;
	Mat frame(cam->get_img_height(), cam->get_img_width(), cam->get_img_format());
	// Collect `measurements` successful detections; a frame counts as failed
	// unless exactly 3 fiducials are found. maxErrors < 0 disables the limit.
	while(measurementCount<measurements && (maxErrors<0 || failCount<maxErrors)){
		
		cam->get_frame(&frame);
		rectifier->rectify(frame, frameRectified);
		cv::Mat gray;
		cv::cvtColor(frameRectified, gray, CV_BGR2GRAY);

		std::vector<cv::Point2f> fiducialPoints;
		fidDetector->detect(gray, fiducialPoints);
		if(fiducialPoints.size() == 3) {
			measurementCount++;
			// Sort the three points into a consistent order so each buffer
			// always receives samples of the same physical fiducial.
			Crate::order(fiducialPoints);
			fid1_buffer.push_back(fiducialPoints[0]);
			fid2_buffer.push_back(fiducialPoints[1]);
			fid3_buffer.push_back(fiducialPoints[2]);
			cout << measurementCount << endl;
		} else {
			failCount++;
		}
	}
	
	// Only false (via the fallthrough below) when the error limit was hit.
	if(measurementCount == measurements) {
		// Per-axis median of each fiducial's samples.
		cv::Point2f fid1(medianX(fid1_buffer), medianY(fid1_buffer));
		cv::Point2f fid2(medianX(fid2_buffer), medianY(fid2_buffer));
		cv::Point2f fid3(medianX(fid3_buffer), medianY(fid3_buffer));		
		
		// NOTE(review): `markers` is appended to without being cleared first
		// (the clear() above is commented out) — calling fiducials() twice
		// would accumulate stale points; confirm this is only called once.
		markers.push_back(point2f(fid1.x, fid1.y));
		markers.push_back(point2f(fid2.x, fid2.y));
		markers.push_back(point2f(fid3.x, fid3.y));

		//markers.push_back(point2f(10, 10));
		//markers.push_back(point2f(0, 10));
		//markers.push_back(point2f(0, 0));

		std::cout << "x1: " << markers[0].x << ", y1: " << markers[0].y << std::endl;
		std::cout << "x2: " << markers[1].x << ", y2: " << markers[1].y << std::endl;
		std::cout << "x3: " << markers[2].x << ", y3: " << markers[2].y << std::endl;

		getVisionData()->envData.cordTransformer->set_fiducials_pixel_coordinates(markers);

		// Determine mean deviation
		double totalDistance = 0;
		for(vector<Point2f>::iterator it=fid1_buffer.begin(); it!=fid1_buffer.end(); ++it) totalDistance += Crate::distance(fid1, *it);
		for(vector<Point2f>::iterator it=fid2_buffer.begin(); it!=fid2_buffer.end(); ++it) totalDistance += Crate::distance(fid2, *it);
		for(vector<Point2f>::iterator it=fid3_buffer.begin(); it!=fid3_buffer.end(); ++it) totalDistance += Crate::distance(fid3, *it);
		
		float meanDeviation = totalDistance / double(fid1_buffer.size()+fid2_buffer.size()+fid3_buffer.size());
		ROS_INFO("Calibration markers updated.\nMeasured: %d Failed: %d Mean deviation: %f", measurements, failCount, meanDeviation);
		
		// Draw the fiducial triangle for visual confirmation by the operator.
		Point temp1, temp2, temp3;
		
		temp1.x = markers[0].x;
		temp1.y = markers[0].y;
		temp2.x = markers[1].x;
		temp2.y = markers[1].y;
		temp3.x = markers[2].x;
		temp3.y = markers[2].y;
		
		line(frameRectified, temp1, temp2, CV_RGB(255, 0, 255), 1, 8, 0);
		line(frameRectified, temp1, temp3, CV_RGB(0, 255, 255), 1, 8, 0);
		line(frameRectified, temp2, temp3, CV_RGB(255, 255, 0), 1, 8, 0);

		imshow("fiducial test, press key to continue", frameRectified);
		waitKey(0);
		destroyWindow("fiducial test, press key to continue");
		
		
		return true;
		
	}
	return false;
}
