#include <cmath>
#include <csignal>
#include <iostream>

#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/PointCloud2.h>
#include <geometry_msgs/PoseStamped.h>

#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>

#include <pcl/point_types.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/passthrough.h>
#include <pcl/common/transforms.h>


using namespace cv;
using namespace std;

// Depth camera intrinsics, filled once from /depth/camera_info.
Eigen::Matrix3d K;
Eigen::Matrix3d K_inverse; // cached K^-1, used to back-project pixels each frame
// Latest vehicle attitude (identity until the first pose message arrives).
Eigen::Quaterniond  quad(1,0,0,0);
// Latest vehicle position in the world frame, including the launch-file offset.
Eigen::Vector3d p0(0,0,0);
double motor_yaw=0; // TODO: gimbal yaw read from /iris/gimbal_yaw_angle (packed in pose.position.x)

// Readiness flags: main() only starts converting once all four inputs have arrived.
// bool flag = true;
bool camera_flag = false; // intrinsics received
bool image_flag = false;  // depth image received
bool pos_flag = false;    // vehicle pose received
bool yaw_flag = false;    // gimbal yaw received
ros::Publisher cloud_visual_pub; // publishes the final filtered cloud on /convert_pointcloud
// ros::Publisher trans_cloud_pub;
// ros::Publisher passz_cloud_pub;
// ros::Publisher voxel_cloud_pub;

// Timestamps of the most recent inputs; the published cloud is stamped with time_get_img.
ros::Time time_get_yaw;
ros::Time time_get_pos;
ros::Time time_get_img;

// Agent id and initial world-frame offset, set from the launch file in main().
int AGENT_ID= 0;
int COLOR_ID=0;
double init_x=0;
double init_y=0;
double init_z=0;

// Latest depth image (32FC1, meters) copied out of the cv_bridge pointer.
Mat img;

// Keep only the points of cloud_in whose z coordinate lies within [limL, limH]
// and write the survivors to cloud_out.
void PclPassz(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_out, double limL, double limH)
{
	pcl::PassThrough<pcl::PointXYZ> z_filter;
	z_filter.setInputCloud(cloud_in);
	z_filter.setFilterFieldName("z");      // filter along the z axis
	z_filter.setFilterLimits(limL, limH);
	// setFilterLimitsNegative defaults to false: points OUTSIDE the range are removed.
	// z_filter.setFilterLimitsNegative(false);
	z_filter.filter(*cloud_out);
}


// Downsample cloud_in into cloud_out with a voxel grid of the given leaf size
// (one representative point per resolution x resolution x resolution cell).
void PclVoxel(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_out,double resolution)
{
	pcl::VoxelGrid<pcl::PointXYZ> voxel_filter;
	voxel_filter.setInputCloud(cloud_in);
	voxel_filter.setLeafSize(resolution, resolution, resolution);
	voxel_filter.filter(*cloud_out);
}


void cameraCallback(sensor_msgs::CameraInfo msg)
{
	if(camera_flag){
		return;
	}

	Eigen::Matrix3f tmp;
	tmp<<msg.K[0],msg.K[1],msg.K[2],
		msg.K[3],msg.K[4],msg.K[5],
		msg.K[6],msg.K[7],msg.K[8];
	K=tmp.cast<double>();
	K_inverse = K.inverse();
	camera_flag = true;
}


// Copy the latest depth image out of the incoming message and record its
// timestamp (used to stamp the published cloud).
//
// Fixes vs. original: cv_bridge::toCvCopy throws cv_bridge::Exception on an
// encoding mismatch, which previously would have terminated the node; it is
// now caught and logged. image_flag is set only after a successful
// conversion instead of before it.
void imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
	time_get_img = msg->header.stamp;

	try {
		// Kinect publishes depth as 32FC1 (meters).
		// For a D435 (16UC1, millimeters) use sensor_msgs::image_encodings::TYPE_16UC1 instead.
		cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::TYPE_32FC1);
		img = cv_ptr->image;
		image_flag = true;
	} catch (const cv_bridge::Exception& e) {
		ROS_ERROR("cv_bridge exception: %s", e.what());
	}
}


// Cache the latest vehicle pose: position in the world frame (shifted by the
// launch-file initial offset) and attitude as a quaternion.
void positionCallback(const geometry_msgs::PoseStamped& msg)
{
	pos_flag = true;
	time_get_pos = msg.header.stamp; // TEST

	const geometry_msgs::Point& pos = msg.pose.position;
	const geometry_msgs::Quaternion& ori = msg.pose.orientation;

	p0 = Eigen::Vector3d(pos.x + init_x, pos.y + init_y, pos.z + init_z);
	quad = Eigen::Quaterniond(ori.w, ori.x, ori.y, ori.z);
}


// Cache the gimbal yaw angle. The publisher packs the yaw into
// pose.position.x; main() uses it as a quaternion half-angle, so it is
// presumably in radians — TODO confirm against the publisher.
void gimbal_yaw_angleCallback(const geometry_msgs::PoseStamped& msg)
{
	yaw_flag = true;
	time_get_yaw = msg.header.stamp; // TEST
	motor_yaw = msg.pose.position.x;
}


int main(int argc, char** argv) 
{
    ros::init(argc, argv, "convert_pointcloud");
    ros::NodeHandle nh("~");

	// 无人机编号与初始位置 在launch中设置
	nh.getParam("AGENT_ID",AGENT_ID);
	nh.getParam("x",init_x);
	nh.getParam("y",init_y);
	nh.getParam("z",init_z);

	ROS_INFO("init_x: %f init_y: %f init_z: %f",init_x,init_y,init_z);
    
	ros::Subscriber position_sub =  nh.subscribe("/mavros/local_position/pose", 1, positionCallback);  //读取无人机位置
	ros::Subscriber gimbal_angle_sub = nh.subscribe("/iris/gimbal_yaw_angle", 1, gimbal_yaw_angleCallback);  //读取相机转角
	ros::Subscriber camera_sub = nh.subscribe("/depth/camera_info", 1, cameraCallback);  //读取深度相机信息
	
	image_transport::ImageTransport it(nh);
	image_transport::Subscriber depth_sub = it.subscribe("/depth/image_raw", 1, imageCallback);
	
	cloud_visual_pub=nh.advertise<sensor_msgs::PointCloud2>("/convert_pointcloud", 1, true);
	// trans_cloud_pub= nh.advertise<sensor_msgs::PointCloud2>("/trans_cloud", 1, true);
	// passz_cloud_pub= nh.advertise<sensor_msgs::PointCloud2>("/passz_cloud", 1, true);
	// voxel_cloud_pub= nh.advertise<sensor_msgs::PointCloud2>("/voxel_cloud", 1, true);

	//TEST
	// int count = 0;
	// double total_time1 = 0;
	// double total_time2 = 0;
	// double total_time3 = 0;
	while (ros::ok())
	{
		ros::spinOnce();
		if((!camera_flag)||(!image_flag)||(!pos_flag)||(!yaw_flag)){
			// cout << "camera_flag" << camera_flag << endl;
			// cout << "image_flag" << image_flag << endl;
			// cout << "pos_flag" << pos_flag << endl;
			// cout << "yaw_flag" << yaw_flag << endl;
			continue;
		}

		// ros::Time time_start = ros::Time::now();

		// uncomment if Camera is Kinect (the encoding is 32FC1)
		double depth_scale=1;
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in (new pcl::PointCloud<pcl::PointXYZ>);
		cloud_in->width = img.cols;
		cloud_in->height = img.rows;
		for (int m = 0; m < img.rows; m++)
		{
			for (int n=0; n < img.cols; n++)
			{
				double d = img.at<float_t>(m,n);
				pcl::PointXYZ p;
				if(isnan(d))
				{
					// continue;
					d = 0;
				}

				double Zc=depth_scale*d;

				Eigen::Vector3d pixel(n,m,1);
				Eigen::Vector3d coord=Zc*K_inverse*pixel;
				p.x=coord(0);
				p.y=coord(1);
				p.z=coord(2);
				cloud_in->points.push_back(p);
			}
		}

		// uncomment if Camera is D435 (the encoding is 16UC1)
		/**
		double depth_scale=1;

		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud1 (new pcl::PointCloud<pcl::PointXYZRGB>);
		for (int m = 0; m < img.rows; m++)
		{
			for (int n=0; n < img.cols; n++)
			{
				double d = img.at<uint16_t>(m,n)*0.001;//TODO
				pcl::PointXYZRGB p;
				if(d==0.0)
				{
					p.r=255;
					p.g=0;
					p.b=0;
					d=7;
				}
				else{
					p.r = 0;
					p.g=255;
					p.b = 0;
				}
				double Zc=depth_scale*d;

				Eigen::Vector3d pixel(n,m,1);
				Eigen::Vector3d coord=Zc*K.inverse()*pixel;
				p.x=coord(0);
				p.y=coord(1);
				p.z=coord(2);
				cloud1->points.push_back(p);
			}
		}
		**/

		// ros::Time time_conver_end = ros::Time::now();
		// ROS_INFO(" [CONVERT_POINT]: convert image time: %f ======== ",(time_conver_end-time_conver_image).toSec());

		Eigen::Quaterniond quad_sychronized = quad;
		Eigen::Matrix4d vehicle2ground = Eigen::Matrix4d::Identity();
		vehicle2ground.block(0, 0, 3, 3) = Eigen::Matrix3d(quad_sychronized);
		vehicle2ground(0, 3) = p0(0);
		vehicle2ground(1, 3) = p0(1);
		vehicle2ground(2, 3) = p0(2);
		Eigen::Matrix4d d435base2vehicle = Eigen::Matrix4d::Identity();
		Eigen::Quaterniond d435base2vehicle_rotate(cos(motor_yaw/2.0),0,0,sin(motor_yaw/2.0));
		d435base2vehicle.block(0, 0, 3, 3) = Eigen::Matrix3d(d435base2vehicle_rotate);
		d435base2vehicle(0, 3) = 0.0;
		d435base2vehicle(1, 3) = 0.0;
		d435base2vehicle(2, 3) = 0.10; // TODO：相机中心相对无人机中心的高度（位置），在iris.sdf中设定

		Eigen::Matrix4d d4352d435base = Eigen::Matrix4d::Identity();
		Eigen::Quaterniond d4352d435base_rotate(0.5,-0.5,0.5,-0.5);
		d4352d435base.block(0, 0, 3, 3) = Eigen::Matrix3d(d4352d435base_rotate);
		d4352d435base(0, 3) = 0.0;
		d4352d435base(1, 3) = 0.0;
		d4352d435base(2, 3) = 0.0;

		Eigen::Matrix4d transform = vehicle2ground*d435base2vehicle*d4352d435base;  //d435 to ground

		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_trans(new pcl::PointCloud<pcl::PointXYZ>());
		pcl::transformPointCloud(*cloud_in, *cloud_trans, transform);

		// // Visualize
		// sensor_msgs::PointCloud2 trans_cloud_ros;
		// pcl::toROSMsg(*cloud_trans, trans_cloud_ros);
		// trans_cloud_ros.header.stamp = ros::Time::now();
		// trans_cloud_ros.header.frame_id = "map";
		// trans_cloud_pub.publish(trans_cloud_ros);

		// ros::Time time_begin_filter = ros::Time::now();
		// if(AGENT_ID == 1){
		// 	ROS_INFO(" [CONVERT_POINT]: time_begin_filter: %f ======== ",(time_begin_filter-time_begin_trans).toSec());
		// }

		// Filter
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filter(new pcl::PointCloud<pcl::PointXYZ>());
		PclPassz(cloud_trans, cloud_filter, 0.3, 4.0); // TODO

		// // Visualize
		// pcl::toROSMsg(*cloud_filter, trans_cloud_ros);
		// trans_cloud_ros.header.stamp = ros::Time::now();
		// trans_cloud_ros.header.frame_id = "map";
		// passz_cloud_pub.publish(trans_cloud_ros);
		
		// Voxel
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_voxel(new pcl::PointCloud<pcl::PointXYZ>());
		PclVoxel(cloud_filter, cloud_voxel, 0.1);

		// // Visualize
		// pcl::toROSMsg(*cloud_voxel, trans_cloud_ros);
		// trans_cloud_ros.header.stamp = ros::Time::now();
		// trans_cloud_ros.header.frame_id = "map";
		// voxel_cloud_pub.publish(trans_cloud_ros);


		// if(AGENT_ID == 0){
		// 	ROS_INFO(" [CONVERT_POINT]: cloud_trans : %d  ",cloud_trans->points.size());
		// 	ROS_INFO(" [CONVERT_POINT]: cloud_filter: %d  ",cloud_filter->points.size());
		// 	ROS_INFO(" [CONVERT_POINT]: cloud_voxel : %d  ",cloud_voxel->points.size());

		// }

		

		// ros::Time time_begin_filter = ros::Time::now();
		// if(AGENT_ID == 2){
		// 	if(count >10){
		// 		total_time1 += (time_start - time_get_img).toSec();
		// 		total_time2 += (time_get_pos - time_get_img).toSec();
		// 		total_time3 += (time_get_yaw - time_get_img).toSec();
		// 	}
		// 	count++;
		// 	if(count == 50){
		// 		ROS_INFO(" [CONVERT_POINT]: time_start-time_get_img          %f ======== ", total_time1/40.0);
		// 		ROS_INFO(" [CONVERT_POINT]: time_get_pos-time_get_img        %f ======== ", total_time2/40.0);
		// 		ROS_INFO(" [CONVERT_POINT]: time_get_yaw-time_get_img        %f ======== ", total_time3/40.0);
		// 	}
		// }

		// cout << "cloud_in    "<<AGENT_ID<<"   " << cloud_in->points.size() << endl;
		// cout << "cloud_trans "<<AGENT_ID<<"   " << cloud_trans->points.size() << endl;
		// cout << "cloud_filter"<<AGENT_ID<<"   " << cloud_filter->points.size() << endl;
		// cout << "cloud_voxel "<<AGENT_ID<<"   " << cloud_voxel->points.size() << endl;

		// convert to ROS message and publish
		sensor_msgs::PointCloud2 cloud_visual;
		pcl::toROSMsg(*cloud_voxel, cloud_visual);

		// message publish should have the same time stamp
		cloud_visual.header.stamp = time_get_img; // TEST
		cloud_visual.header.frame_id = "map";
		// ros::Time time_end = ros::Time::now();
		cloud_visual_pub.publish(cloud_visual);

		// TEST
		// ROS_INFO(" [CONVERT_POINT]: filter time: %f ======== ",(time_end-time_conver_end).toSec());
		// ROS_INFO(" [CONVERT_POINT]: filter finish ======== ");
    }

	cv::destroyAllWindows();
    waitKey(0);
    return 0;
}