#include "act_D455.h"

// Constructor: set up depth->color alignment, allocate the point cloud and
// the raw depth buffer. The camera itself is opened later in Init().
ActD455::ActD455():
align(RS2_STREAM_COLOR), 
pointCloud(new pcl::PointCloud<pcl::PointXYZ>), 
center_point(0,0,0)
{    
	// Buffer holding one raw 16-bit depth frame. Sized with the same
	// DEPTH_WIDTH/DEPTH_HEIGHT macros that Update() uses for its memcpy;
	// the previous hard-coded 640*480 would overflow if the configured
	// depth resolution were ever larger than VGA.
	data = new unsigned short[DEPTH_WIDTH * DEPTH_HEIGHT];
}

// Configures and starts the RealSense pipeline.
// Build-flag dependent behavior:
//   - IFCAMERA undefined: replay a recorded .bag file (path from the LOAD macro).
//   - IFCAMERA defined:   wait for a live device, optionally load an
//     advanced-mode JSON preset, and enable 60 fps depth (Z16) + color (BGR8).
//   - RECORD_VIDEO defined: additionally record the session to "a.bag".
// After a few warm-up frames it caches the intrinsics of both sensors and the
// extrinsics in both directions, then sets cameraFlag = true.
void ActD455::Init(void)
{
	#ifdef RECORD_VIDEO
	cfg.enable_record_to_file("a.bag");
	#endif
	
	#ifndef IFCAMERA
	// Playback build: stream frames from a recorded bag file instead of hardware.
	// cfg.enable_device_from_file("../src/cv/lib/d455/k2.bag");
	cfg.enable_device_from_file(LOAD);

	#else
	//pipe.stop();
	auto devices = ctx.query_devices();				// enumerate connected RealSense devices
	device_count = devices.size();					// number of connected sensors
	cout << "device_count  " << device_count << endl;
	while (!device_count)							// poll until a device is plugged in
	{
		devices = ctx.query_devices();
		device_count = devices.size();
		cout<<"device_count:" << device_count << endl;
		cout <<"No device detected. Is it plugged in?\n";
		// return;
	}
	// Get the first connected device
	auto dev = devices[0];

	// Open a camera by serial number (currently disabled).
	// const std::string D435_SERIAL_NUMBER_1 = "819612070740"; // d435
	// const std::string D435_SERIAL_NUMBER_1 = "309622301037"; // d455
	// cfg.enable_device(D435_SERIAL_NUMBER_1);

	std::ifstream file("../src/camera_dk/modejson/d455.json");  // camera preset file (relative path)
	if (file.good())
	{
		std::string str((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());

		auto prof = cfg.resolve(pipe);
		if (auto advanced_mode_dev = prof.get_device().as<rs2::serializable_device>())
		{
			advanced_mode_dev.load_json(str);
		}
		else
		{
			cout << "Current device doesn't support advanced-mode!\n";
			return;
		}
	}
	cout << "reading setting mode is OK" << endl; 

	//-- Add desired streams to configuration
	cfg.enable_stream(RS2_STREAM_DEPTH, DEPTH_WIDTH, DEPTH_HEIGHT, RS2_FORMAT_Z16, 60);         // depth stream, Z16 @ 60 fps
	cfg.enable_stream(RS2_STREAM_COLOR, IMAGE_WIDTH, IMAGE_HEIGHT, RS2_FORMAT_BGR8, 60);		// color stream, BGR8 @ 60 fps
	#endif

    cout << "enable stream Ok ..\n";
    rs2::pipeline_profile selection = pipe.start(cfg);
    cout << "start cfg Ok ..\n";
	
	#ifdef IFCAMERA
	auto colorSensors = selection.get_device().query_sensors()[1];
	// colorSensors.set_option(RS2_OPTION_ENABLE_AUTO_EXPOSURE, 0); //zero is turn off
	// colorSensors.set_option(RS2_OPTION_EXPOSURE, 650);
	// colorSensors.set_option(RS2_OPTION_GAIN, 300);    										// set gain
	// colorSensors.set_option(RS2_OPTION_WHITE_BALANCE, 4500);									// set white balance
	cout << "set exposure OK ..\n";
	#endif

    //-- Wait for frames from the camera to settle
	for (int i = 0; i < 5; i++)
	{
		//Drop several frames for auto-exposure
		frameSet = pipe.wait_for_frames();
	}

    rs2::video_frame colorFrame = frameSet.get_color_frame();									// grab a color frame
	rs2::depth_frame alignedDepthFrame = frameSet.get_depth_frame();							// grab a depth frame

    rs2::stream_profile dprofile = alignedDepthFrame.get_profile();								// depth stream profile
    rs2::stream_profile cprofile = colorFrame.get_profile();									// color stream profile
    
    rs2::video_stream_profile cvsprofile(cprofile);
    color_intrin = cvsprofile.get_intrinsics();													// color camera intrinsics

    rs2::video_stream_profile dvsprofile(dprofile);
    depth_intrin = dvsprofile.get_intrinsics();													// depth camera intrinsics

    depth2color_extrin = dprofile.get_extrinsics_to(cprofile);									// depth -> color extrinsics
    color2depth_extrin = cprofile.get_extrinsics_to(dprofile);									// color -> depth extrinsics
    cout << "Get intrinics ..\n";

    cout << "Camera D455 init done ..\n";
	cameraFlag = true;
}


void ActD455::Update()
{
	TickMeter tk;
	tk.start();
    frameSet = pipe.wait_for_frames();											//等待相机数据
	frameSet = align.process(frameSet);

    rs2::video_frame colorFrame = frameSet.get_color_frame();					//获取彩色帧
    rs2::depth_frame alignedDepthFrame = frameSet.get_depth_frame();			//获取深度帧
	
    srcImage = cv::Mat(cv::Size(IMAGE_WIDTH, IMAGE_HEIGHT), CV_8UC3, (void*)colorFrame.get_data(), cv::Mat::AUTO_STEP);				//将彩色图像数据存储在Mat矩阵中
    depthImage = cv::Mat(cv::Size(DEPTH_WIDTH, DEPTH_HEIGHT), CV_16UC1, (void*)alignedDepthFrame.get_data(), cv::Mat::AUTO_STEP);	//将深度图像数据存储在Mat矩阵中
	memcpy(data, alignedDepthFrame.get_data(), DEPTH_WIDTH * DEPTH_HEIGHT * 2);

	tk.stop();
	cout << "update time:" << tk.getTimeMilli() << endl;	
}

//======================================================
// getColorTexture
// - Function is utilized to extract the RGB data from
// a single point return R, G, and B values.
// Normals are stored as RGB components and
// correspond to the specific depth (XYZ) coordinate.
// By taking these normals and converting them to
// texture coordinates, the RGB components can be
// "mapped" to each individual point (XYZ).
//======================================================
//======================================================
// GetColorTexture
// - Maps a normalized texture coordinate (u, v in [0,1])
// onto a concrete pixel of the color frame and returns
// that pixel's three colour channels as a tuple, so the
// colour can be attached to the matching depth (XYZ)
// point when building a textured cloud.
//======================================================
std::tuple<uint8_t, uint8_t, uint8_t> ActD455::GetColorTexture(rs2::video_frame texture, rs2::texture_coordinate Texture_XY)
{
	const int frameWidth = texture.get_width();   // frame width in pixels
	const int frameHeight = texture.get_height(); // frame height in pixels

	// Round to the nearest pixel and clamp into the valid image rectangle.
	const int px = min(max(int(Texture_XY.u * frameWidth + .5f), 0), frameWidth - 1);
	const int py = min(max(int(Texture_XY.v * frameHeight + .5f), 0), frameHeight - 1);

	// Byte offset of that pixel inside the frame buffer:
	// column offset (pixels * bytes-per-pixel) + row offset (rows * stride).
	const int offset = px * texture.get_bytes_per_pixel() + py * texture.get_stride_in_bytes();

	const auto pixels = reinterpret_cast<const uint8_t*>(texture.get_data());

	// Three consecutive channel bytes at the computed offset.
	return std::tuple<uint8_t, uint8_t, uint8_t>(pixels[offset], pixels[offset + 1], pixels[offset + 2]);
}

// Mat ActD455::GetSrcImage()
// {
// 	return srcImage;
// }

// Releases camera resources held by this object.
// NOTE(review): currently a no-op — actual cleanup happens in the destructor.
// Presumably kept to satisfy a shared camera interface; confirm before relying on it.
void ActD455::release(void)
{
	
}

// Copies the calibration cached by Init() into the caller-supplied structs:
// color/depth camera intrinsics plus the extrinsic transforms in both directions.
// NOTE(review): the _data out-parameter is accepted but never written — a caller
// expecting the raw depth buffer through it will receive nothing; confirm intent.
void ActD455::GetCameraParam(rs2_intrinsics& _color_intrin, rs2_intrinsics& _depth_intrin, rs2_extrinsics& _depth2color_extrin, rs2_extrinsics& _color2depth_extrin, uint16_t* _data)
{
    _color_intrin = color_intrin;
    _depth_intrin = depth_intrin;
    _depth2color_extrin = depth2color_extrin;
    _color2depth_extrin = color2depth_extrin;
}

// Deprojects a color-image pixel to a 3-D point (meters, color-camera frame),
// using the depth stored at the same location of the aligned depth image.
// Returns (0, 0, 0) when the pixel lies outside the depth image or when the
// measured depth is invalid / closer than 10 cm.
Point3f ActD455::getPointFromPixel(Point2f Point2d)
{
	const int col = (int)Point2d.x;
	const int row = (int)Point2d.y;

	// Guard against out-of-range pixels: the previous code indexed depthImage
	// unconditionally, reading out of bounds for bad detector/tracker output.
	if (row < 0 || row >= depthImage.rows || col < 0 || col >= depthImage.cols)
	{
		return Point3f(0.f, 0.f, 0.f);
	}

	float colorPixel[2];
	colorPixel[0] = Point2d.x;
	colorPixel[1] = Point2d.y;
	float point[3];

	// Raw depth is in millimeters; convert to meters for deprojection.
	float depth = depthImage.ptr<ushort>(row)[col] / 1000.f;
	// float depth = averange_distance(colorPixel[1], colorPixel[0]);
	rs2_deproject_pixel_to_point(point, &color_intrin, colorPixel, depth);	// 2-D pixel -> 3-D point
	if(point[2] < 0.1)	// reject missing (0) or implausibly close depth returns
	{
		point[0] = 0.f;
		point[1] = 0.f;
		point[2] = 0.f;
	}

	return(Point3f(point[0], point[1], point[2]));
}

// Deprojects a color-image pixel to a 3-D point when the depth (in meters)
// is supplied by the caller rather than read from the depth image.
Point3f ActD455::getPointFromPixel(Point2f Point2d, float depth)
{
	float pixel[2];
	pixel[0] = Point2d.x;
	pixel[1] = Point2d.y;

	float xyz[3];
	rs2_deproject_pixel_to_point(xyz, &color_intrin, pixel, depth);	// 2-D -> 3-D

	return Point3f(xyz[0], xyz[1], xyz[2]);
}

// Builds a downsampled point cloud from the depth pixels inside roiRect and
// records the deprojected unit-depth ray through the ROI center in center_point.
// Returns true when at least one valid 3-D point was produced, false otherwise.
bool ActD455::PointCloudGenerate(cv::Rect roiRect) {
	
	if (roiRect.width * roiRect.height <= 10) {
        cout<< " Not have enough roiRect: in function pointCloudGenerate() " << endl;
        return false;
    } else {
		cout << "roiSquare = " << roiRect.width * roiRect.height << endl;
	}
	pointCloud->points.clear();
	Point2f source_point2d;
	Point3f target_point3d;
	PointType pclpoint;

	// Sampling stride keeping the grid at roughly CLOUD_GENERATE_THRESHOULD^2 points.
	int gap_val_width = int(roiRect.width / CLOUD_GENERATE_THRESHOULD + 1);
    int gap_val_height = int(roiRect.height / CLOUD_GENERATE_THRESHOULD + 1);
	
    // Simple downsampling: walk the ROI with the computed strides.
	for (int i = roiRect.y; i < (roiRect.y + roiRect.height); i = i + gap_val_height) {
		for (int j = roiRect.x; j < (roiRect.x + roiRect.width); j = j + gap_val_width) {
			source_point2d.x = j;
			source_point2d.y = i;
			target_point3d = getPointFromPixel(source_point2d);
			pclpoint.x = target_point3d.x;
			pclpoint.y = target_point3d.y;
			pclpoint.z = target_point3d.z;

			// A failed deprojection yields (0,0,0), so z != 0 identifies valid
			// points. (The previous check required x, y AND z to be non-zero,
			// which also discarded valid points lying exactly on an optical axis.)
			if (pclpoint.z != 0)
			{
				pointCloud->points.push_back(pclpoint);
			}
		}
	}
	// Ray through the ROI center at unit depth (a direction, not a measured position).
	source_point2d.x = (int)roiRect.x + roiRect.width/2;
	source_point2d.y = (int)roiRect.y + roiRect.height/2;
	target_point3d = getPointFromPixel(source_point2d, 1.0);
	center_point.x = target_point3d.x;
	center_point.y = target_point3d.y;
	center_point.z = target_point3d.z;

	if (pointCloud->points.size()>0) {
		cout << "The size of pointCloud is :\t" << pointCloud->points.size() << endl;
        return true;
    } else {   
        cout<< " Not get enough roiXYs: in function pointCloudGenerate() " << endl;
    }

	return false;

}

// Averages the depth over a 3x3 window centered at (i, j) = (row, col) to
// reduce single-pixel noise. Two passes: first a mean over readings >= 0.3 m,
// then a refined mean restricted to readings within mean/30 of that estimate.
// Returns the refined mean in meters, or 0 when the window is out of the image
// or fewer than 5 pixels agree.
double ActD455::averange_distance(int i,int j)
{
    // The 3x3 window must lie fully inside the image; the previous code only
    // rejected i==0 / j==0 and read out of bounds along the far edges.
    if (i <= 0 || j <= 0 || i >= depthImage.rows - 1 || j >= depthImage.cols - 1)
    {
        return 0;
    }

    // Pass 1: mean of plausible readings (>= 0.3 m, near the sensor's min range).
    int count = 0;
    double sum = 0;
    double a = 0;
    for (int row = i - 1; row <= i + 1; row++)
    {
        for (int col = j - 1; col <= j + 1; col++)
        {
            a = depthImage.ptr<ushort>(row)[col] / 1000.f;
            if (a >= 0.3)
            {
                sum = a + sum;
                count++;
            }
        }
    }
    if (count == 0)  // whole window invalid: avoid 0/0 -> NaN
    {
        return 0;
    }
    double averange_sum = sum / (double)count;
    double error_range = averange_sum / 30.0;  // ~3.3% tolerance around the mean

    // Pass 2: keep only readings close to the initial mean and re-average.
    double mid_sum = 0;
    double real_error = 0;
    sum = 0;
    count = 0;
    for (int row = i - 1; row <= i + 1; row++)
    {
        for (int col = j - 1; col <= j + 1; col++)
        {
            mid_sum = depthImage.ptr<ushort>(row)[col] / 1000.f;
            real_error = fabs(mid_sum - averange_sum);
            if (real_error <= error_range)
            {
                sum = sum + mid_sum;
                count++;
            }
        }
    }

    // Require a quorum of 5 agreeing pixels; otherwise report "no reliable depth".
    if (count >= 5)
    {
        return sum / (double)count;
    }
    else
    {
        return 0;
    }

}

// Quadratic depth-error model (coefficients fitted offline):
// error(d) = corrcetA_1 * d^2 + corrcetB_1 * d + corrcetC_1.
float ActD455::depth_correct_Delta(float distance)
{
	const float correction = corrcetA_1 * distance * distance + corrcetB_1 * distance + corrcetC_1;
	return correction;
}

// Second quadratic depth-error model (coefficients fitted offline):
// error(d) = corrcetA_2 * d^2 + corrcetB_2 * d + corrcetC_2.
float ActD455::depth_correct_2(float distance)
{
	const float correction = corrcetA_2 * distance * distance + corrcetB_2 * distance + corrcetC_2;
	return correction;
}

// Destructor: free the raw depth buffer and stop the pipeline.
ActD455::~ActD455()
{    
	delete[] data;
	// pipe.stop() throws rs2::wrong_api_call_sequence_error if the pipeline was
	// never started (e.g. Init() failed or was never called). A destructor must
	// not let an exception escape, so swallow any error here.
	try
	{
		pipe.stop();
	}
	catch (...)
	{
	}
}