#include "IdentifyBall.h"
#include <iostream>
#include <opencv2/opencv.hpp>
#include <vector>
#include <string>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/cvconfig.h>
#include "ros/ros.h"
// ZED includes
#include <sl/Camera.hpp>
#include "rc_cv/cv_data.h"
//#include <tf/transform_listener.h>

using namespace std;
using namespace cv;
using namespace sl;

#define pi 3.1415926

cv::Mat slMat2cvMat(sl::Mat& input);
#ifdef HAVE_CUDA
cv::cuda::GpuMat slMat2cvMatGPU(Mat& input);
#endif // HAVE_CUDA

IdentifyBall cat;

void transformPose(sl::Transform &pose, float tx);

/**
 * ROS node entry point: detects a ball in the ZED left image with HSV
 * thresholding, bootstraps a 200x200 tracking ROI from five full-frame
 * detections, then tracks inside the ROI and publishes yaw/pitch angles
 * (rc_cv::cv_data) on the "rc_cv" topic at up to 100 Hz.
 */
int main(int argc, char** argv)
{
	ros::init(argc, argv, "CV_publisher");
	ros::NodeHandle n;

	// Publishes the ball's yaw/pitch angles on the "rc_cv" topic.
	ros::Publisher cv_pub = n.advertise<rc_cv::cv_data>("rc_cv", 1000);

	rc_cv::cv_data ang_data;
	// Main loop frequency: 100 Hz.
	ros::Rate r(100);

	// Windows holding the HSV-threshold tuning trackbars.
	namedWindow("Control", 1);
	namedWindow("ROI_Control", 1);

	// Full-frame HSV threshold defaults (tuned for the target ball colour).
	int iLowH = 89;
	int iHighH = 135;

	int iLowS = 51;
	int iHighS = 203;

	int iLowV = 0;
	int iHighV = 119;

	// Threshold trackbars: Hue (0 - 179), Saturation (0 - 255), Value (0 - 255).
	// cv::createTrackbar replaces the legacy cvCreateTrackbar C API (removed in OpenCV 4).
	cv::createTrackbar("LowH", "Control", &iLowH, 180);
	cv::createTrackbar("HighH", "Control", &iHighH, 180);

	cv::createTrackbar("LowS", "Control", &iLowS, 255);
	cv::createTrackbar("HighS", "Control", &iHighS, 255);

	cv::createTrackbar("LowV", "Control", &iLowV, 255);
	cv::createTrackbar("HighV", "Control", &iHighV, 255);

	// HSV thresholds applied inside the 200x200 tracking ROI.
	int ROI_iLowH = 85;
	int ROI_iHighH = 144;

	int ROI_iLowS = 45;
	int ROI_iHighS = 177;

	int ROI_iLowV = 0;
	int ROI_iHighV = 118;

	cv::createTrackbar("ROI_LowH", "ROI_Control", &ROI_iLowH, 180);
	cv::createTrackbar("ROI_HighH", "ROI_Control", &ROI_iHighH, 180);

	cv::createTrackbar("ROI_LowS", "ROI_Control", &ROI_iLowS, 255);
	cv::createTrackbar("ROI_HighS", "ROI_Control", &ROI_iHighS, 255);

	cv::createTrackbar("ROI_LowV", "ROI_Control", &ROI_iLowV, 255);
	cv::createTrackbar("ROI_HighV", "ROI_Control", &ROI_iHighV, 255);

	// Create a ZED camera object
	Camera zed;

	sl::Pose zed_pose;

	// Set configuration parameters
	InitParameters init_params;
	init_params.camera_resolution = RESOLUTION::HD1080;
	init_params.camera_fps = 30;
	init_params.depth_mode = DEPTH_MODE::ULTRA;
	init_params.coordinate_units = UNIT::METER; // all depth/point-cloud values are in metres
	// Optional SVO playback file given on the command line.
	if (argc > 1) init_params.input.setFromSVOFile(argv[1]);

	// Open the camera
	ERROR_CODE err = zed.open(init_params);
	if (err != ERROR_CODE::SUCCESS) {
		printf("%s\n", toString(err).c_str());
		zed.close();
		return 1; // Quit if an error occurred
	}

	// Set runtime parameters after opening the camera
	RuntimeParameters runtime_parameters;
	runtime_parameters.sensing_mode = SENSING_MODE::STANDARD;

	// Set exposure to 50% of camera framerate, then immediately reset to
	// auto exposure (the fixed value only serves as a starting point).
	zed.setCameraSettings(VIDEO_SETTINGS::EXPOSURE, 50);
	zed.setCameraSettings(VIDEO_SETTINGS::EXPOSURE, VIDEO_SETTINGS_VALUE_AUTO);

	// Full-resolution image size (HD1080: 1920x1080).
	Resolution image_size = zed.getCameraInformation().camera_resolution;
	int new_width = image_size.width;
	int new_height = image_size.height;

	Resolution new_image_size(new_width, new_height);

	// To share data between sl::Mat and cv::Mat, use slMat2cvMat()
	// Only the headers and pointer to the sl::Mat are copied, not the data itself
	sl::Mat image_zed(new_width, new_height, MAT_TYPE::U8_C4);
	cv::Mat frame = slMat2cvMat(image_zed);

#ifndef HAVE_CUDA // If no cuda, use CPU memory
	sl::Mat depth_image_zed(new_width, new_height, MAT_TYPE::U8_C4);
	cv::Mat depth_image_ocv = slMat2cvMat(depth_image_zed);
#else
	// Qualified sl::Mat: bare "Mat" is ambiguous with cv::Mat given the
	// using-directives at the top of the file.
	sl::Mat depth_image_zed_gpu(new_width, new_height, MAT_TYPE::U8_C4, sl::MEM::GPU); // sl::Mat storing the GPU depth image
	cv::cuda::GpuMat depth_image_ocv_gpu = slMat2cvMatGPU(depth_image_zed_gpu); // opencv GPU reference of the sl::Mat
	cv::Mat depth_image_ocv; // cpu opencv mat for display purposes
#endif
	sl::Mat point_cloud;

	while (n.ok())
	{
		cat.countnum++;
		if (zed.grab(runtime_parameters) == ERROR_CODE::SUCCESS) {

			// Retrieve the left image and the depth image.
			zed.retrieveImage(image_zed, VIEW::LEFT, MEM::CPU, new_image_size);
#ifndef HAVE_CUDA
			// retrieve CPU -> the ocv reference is therefore updated
			zed.retrieveImage(depth_image_zed, VIEW::DEPTH, MEM::CPU, new_image_size);
#else
			// retrieve GPU -> the ocv reference is therefore updated
			zed.retrieveImage(depth_image_zed_gpu, VIEW::DEPTH, MEM::GPU, new_image_size);
#endif
			// Retrieve the RGBA point cloud, used to measure the 3D distance of the ball.
			zed.retrieveMeasure(point_cloud, MEASURE::XYZRGBA, MEM::CPU, new_image_size);

			// Distance between the camera centre and the left eye.
			float translation_left_to_center = zed.getCameraInformation().calibration_parameters.T.x * 0.5f;
			// Retrieve the pose and re-express it in a frame centred on the camera.
			// NOTE(review): enablePositionalTracking() is never called, so this
			// state is likely OFF; the pose is currently unused downstream.
			POSITIONAL_TRACKING_STATE tracking_state = zed.getPosition(zed_pose, REFERENCE_FRAME::WORLD);
			transformPose(zed_pose.pose_data, translation_left_to_center);

			// Frames 1-5: full-frame detection used to bootstrap the ROI tracker.
			if (cat.countnum >= 1 && cat.countnum <= 5)
			{
				cv::Mat final1 = frame.clone();
				// Search band: rows 500-950 of the full 1920-wide frame
				// (assumes HD1080 input -- TODO confirm for other resolutions).
				cv::Rect roi;
				roi.x = 0;
				roi.y = 500;
				roi.width = 1920;
				roi.height = 450;
				cv::Mat src = frame(roi).clone();
				// HSV threshold + blur -> binary mask of ball-coloured pixels.
				cv::Mat blur = cat.picture(src, iLowH, iLowS, iLowV, iHighH, iHighS, iHighV);
				cv::imshow("erzhida", blur);

				vector<vector<Point>> contours = cat.filter_size(src, blur);

				// Best candidate centre, presumably in full-frame pixel
				// coordinates -- TODO confirm against filter_again().
				int* ball_pt = cat.filter_again(contours, frame);

				cv::circle(final1, Point(ball_pt[0], ball_pt[1]), 5, Scalar(0, 255, 0), -1, 8);
				cv::circle(final1, Point(ball_pt[0], ball_pt[1]), 30, Scalar(0, 255, 0), 2, 8);

				imshow("qianwu", final1);

				// Accumulate the five bootstrap detections for averaging.
				cat.a[cat.countnum - 1] = ball_pt[0];
				cat.b[cat.countnum - 1] = ball_pt[1];

				cat.sum_x = cat.sum_x + ball_pt[0];
				cat.sum_y = cat.sum_y + ball_pt[1];
				if (cat.countnum == 5)
				{
					// Hand the accumulated sums to the ROI-derivation stage.
					cat.open1 = 1;
					cat.newsum_x = cat.sum_x;
					cat.sum_x = 0;
					cat.newsum_y = cat.sum_y;
					cat.sum_y = 0;
				}
#ifdef HAVE_CUDA
				// download the Ocv GPU data from Device to Host to be displayed
				depth_image_ocv_gpu.download(depth_image_ocv);
#endif
				int x = ball_pt[0];
				int y = ball_pt[1];
				sl::float4 point_cloud_value;
				point_cloud.getValue(x, y, &point_cloud_value);
				if (std::isfinite(point_cloud_value.z)) {
					float distance = sqrt(point_cloud_value.x * point_cloud_value.x + point_cloud_value.y * point_cloud_value.y + point_cloud_value.z * point_cloud_value.z);
					// coordinate_units is METER, so the unit label is "m"
					// (previously mislabelled "mm").
					cout << "Distance to Camera at {" << x << ";" << y << "}: " << distance << "m" << endl;
				}
				else
					cout << "The Distance can not be computed at {" << x << ";" << y << "}" << endl;
				waitKey(3);
			}

			// After the fifth frame: reject outliers and derive the ROI origin.
			if (cat.open1 == 1)
			{
				cat.aver_x = cat.newsum_x / 5.0;
				cat.aver_y = cat.newsum_y / 5.0;

				int i = 0;
				Point2f aver = Point2f(cat.aver_x, cat.aver_y);

				int num = 5;
				int flag = 0;
				float distance;
				// Keep only detections within 40 px of the mean position.
				for (i = 0; i < num; i++)
				{
					distance = pow((aver.x - cat.a[i]), 2) + pow((aver.y - cat.b[i]), 2);
					distance = sqrt(distance);
					if (distance < 40)
					{
						cat.c[flag] = cat.a[i];
						cat.d[flag] = cat.b[i];
						flag++;
					}

				}
				for (i = 0; i < flag; i++)
				{
					cat.new_sum_x = cat.new_sum_x + cat.c[i];
					cat.c[i] = 0;
					cat.new_sum_y = cat.new_sum_y + cat.d[i];
					cat.d[i] = 0;
				}
				// Guard BEFORE dividing: the original divided by flag first and
				// only then checked flag == 0, a division by zero when every
				// detection was rejected. Restart the bootstrap in that case.
				if (flag == 0)
				{
					cat.countnum = 0;
				}
				else
				{
					cat.new_aver_x = cat.new_sum_x / flag;
					cat.new_aver_y = cat.new_sum_y / flag;
				}
				num = 0;
				cat.new_sum_x = 0;
				cat.new_sum_y = 0;
				flag = 0;
				// ROI origin = filtered centre minus half the 200x200 ROI size,
				// clamped so the ROI stays inside the 1920x1080 frame.
				int row = cat.new_aver_x - 100;
				int col = cat.new_aver_y - 100;
				if (col <= 0 || row <= 0)
				{
					row = 300;
					col = 300;
					cat.countnum = 0; // restart the full-frame bootstrap
				}
				if (row > 1700 || col > 800)
				{
					row = 300;
					col = 300;
					cat.countnum = 0;
				}

				cat.sss1 = row;
				cat.sss2 = col;
				cat.new_aver_x = 0;
				cat.new_aver_y = 0;
				cat.open1 = 0;
			}

			// Frames 6-15: track the ball inside the 200x200 ROI only.
			if (cat.countnum >= 6 && cat.countnum <= 15)
			{
				cout << cat.countnum << endl;
				cout << frame.cols << frame.rows << endl;
				cv::Mat ROI = frame(cv::Rect(cat.sss1, cat.sss2, 200, 200));
				cout << "jianyan" << endl;
				cv::Mat final2 = frame.clone();

				cv::Mat ROI_blur = cat.picture_ROI(ROI, ROI_iLowH, ROI_iLowS, ROI_iLowV, ROI_iHighH, ROI_iHighS, ROI_iHighV);
				cv::imshow("ROI_erzhi", ROI_blur);

				vector<vector<Point>> ROI_contours = cat.filter_size_ROI(ROI, ROI_blur);

				// Ball centre in ROI-local pixel coordinates.
				int* min_ROI = cat.filter_again_ROI(ROI_contours, ROI);

				// Every 15 frames, restart the full-frame bootstrap.
				if (cat.countnum == 15) cat.countnum = 0;

#ifdef HAVE_CUDA
				// download the Ocv GPU data from Device to Host to be displayed
				depth_image_ocv_gpu.download(depth_image_ocv);
#endif

				// Convert ROI-local coordinates back to full-frame coordinates.
				int x = min_ROI[0] + cat.sss1;
				int y = min_ROI[1] + cat.sss2;
				sl::float4 point_cloud_value;
				point_cloud.getValue(x, y, &point_cloud_value);
				// Initialised to 0 so an invalid depth no longer leaves the
				// value indeterminate (it was read uninitialised below).
				double distance_ROI = 0.0;
				if (std::isfinite(point_cloud_value.z)) {
					distance_ROI = sqrt(point_cloud_value.x * point_cloud_value.x + point_cloud_value.y * point_cloud_value.y + point_cloud_value.z * point_cloud_value.z);
					cout << "Distance to Camera at {" << x << ";" << y << "}: " << distance_ROI << "m" << endl;
				}
				else
					cout << "The Distance can not be computed at {" << x << ";" << y << "}" << endl;

				cout << min_ROI[0] << "," << min_ROI[1] << endl;
				circle(ROI, Point(min_ROI[0], min_ROI[1]), 5, Scalar(0, 255, 0), -1, 8);
				circle(ROI, Point(min_ROI[0], min_ROI[1]), 30, Scalar(0, 255, 0), 2, 8);

				cv::circle(final2, Point(x, y), 5, Scalar(0, 255, 0), -1, 8);
				cv::circle(final2, Point(x, y), 30, Scalar(0, 255, 0), 2, 8);

				// Yaw/pitch of the ball relative to the camera.
				float* angle_rc = cat.rc_angle_re(x, y, distance_ROI);
				// min_ROI[0] == 200 is presumably the "not found" sentinel of
				// filter_again_ROI (ROI is 200 px wide) -- TODO confirm.
				if (min_ROI[0] == 200)
				{
					angle_rc[0] = 0;
					angle_rc[1] = 0;
				}

				cout << "yaw " << angle_rc[0] << "pitch " << angle_rc[1] << endl;

				imshow("houshi", final2);
				imshow("ROI", ROI);

				ang_data.ang_pitch = angle_rc[1];
				ang_data.ang_yaw = angle_rc[0];
				cv_pub.publish(ang_data);
				waitKey(3);
			}

		}
		r.sleep();
	}
#ifdef HAVE_CUDA
	// sl::Mat GPU memory needs to be free before the zed
	depth_image_zed_gpu.free();
#endif
	zed.close();
	return 0;
}

	/**
	* Re-expresses a pose in a frame shifted by tx along the X axis.
	* @param pose pose to transform in place (camera frame in, new frame out)
	* @param tx   translation along X between the two frames
	*/
	void transformPose(sl::Transform &pose, float tx){
		// M: rigid transform that shifts the tracking frame by tx along X.
		Transform shift;
		shift.setIdentity();
		shift.tx = tx;
		// Pose(new frame) = M.inverse() * Pose(camera frame) * M.
		pose = Transform::inverse(shift) * pose * shift;
	}

/**
* Maps an sl::MAT_TYPE to the equivalent OpenCV matrix type constant.
* Returns -1 when the type has no OpenCV counterpart here.
*/
int getOCVtype(sl::MAT_TYPE type) {
	switch (type) {
	case MAT_TYPE::F32_C1: return CV_32FC1;
	case MAT_TYPE::F32_C2: return CV_32FC2;
	case MAT_TYPE::F32_C3: return CV_32FC3;
	case MAT_TYPE::F32_C4: return CV_32FC4;
	case MAT_TYPE::U8_C1:  return CV_8UC1;
	case MAT_TYPE::U8_C2:  return CV_8UC2;
	case MAT_TYPE::U8_C3:  return CV_8UC3;
	case MAT_TYPE::U8_C4:  return CV_8UC4;
	default:               return -1; // unsupported sl type
	}
}

/**
* Conversion function between sl::Mat and cv::Mat
**/
cv::Mat slMat2cvMat(sl::Mat& input) {
	// Build a cv::Mat header over the sl::Mat's CPU buffer: only the header
	// is created here; the pixel data itself is shared, not copied, so the
	// returned cv::Mat is only valid while `input` is alive.
	const int rows = static_cast<int>(input.getHeight());
	const int cols = static_cast<int>(input.getWidth());
	const int cv_type = getOCVtype(input.getDataType());
	return cv::Mat(rows, cols, cv_type, input.getPtr<sl::uchar1>(MEM::CPU), input.getStepBytes(sl::MEM::CPU));
}

#ifdef HAVE_CUDA
/**
* Conversion function between sl::Mat (GPU memory) and cv::cuda::GpuMat
**/
cv::cuda::GpuMat slMat2cvMatGPU(Mat& input) {
	// Build a GpuMat header over the sl::Mat's device buffer: the header is
	// new but the underlying GPU memory is shared with `input`, not copied.
	const int rows = static_cast<int>(input.getHeight());
	const int cols = static_cast<int>(input.getWidth());
	const int cv_type = getOCVtype(input.getDataType());
	return cv::cuda::GpuMat(rows, cols, cv_type, input.getPtr<sl::uchar1>(MEM::GPU), input.getStepBytes(sl::MEM::GPU));
}
#endif

/**
* This function displays help in console
**/
/**
* Prints the interactive key bindings to the console.
**/
void printHelp() {
	static const char* const kHelpLines[] = {
		" Press 's' to save Side by side images",
		" Press 'p' to save Point Cloud",
		" Press 'd' to save Depth image",
		" Press 'm' to switch Point Cloud format",
		" Press 'n' to switch Depth format",
	};
	for (const char* const line : kHelpLines)
		std::cout << line << std::endl;
}
