
/**
 * RealSense D455 device demo: stream IMU (accel/gyro) and the stereo IR pair.
 */

#include <librealsense2/rs.hpp>
// C++
#include <iostream>
#include <vector>
#include <math.h>
// opencv
#include <opencv2/opencv.hpp>


int main(void){

    // The context represents the current platform with respect to connected devices
	rs2::context ctx;
	// Using the context we can get all connected devices in a device list
	rs2::device_list devices = ctx.query_devices();
	if (0 == devices.size())
	{
		std::cerr << "[Error] No device connected, please connect a RealSense device." << std::endl;
		return false;
	}
	else
	{
		for (auto&& dev : devices)// 打印所有已连接的设备序列号
		{
			std::string devSerial;
			devSerial = dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER);// Device serial number
			std::cout << "RealSense device serial number: " << devSerial << std::endl;
			
			std::cout << "Primary firmware version: " << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION) << std::endl;
			std::cout << "Recommended firmware version: " << dev.get_info(RS2_CAMERA_INFO_RECOMMENDED_FIRMWARE_VERSION) << std::endl;
		}
	}
	

    rs2::pipeline rs2Pipe_ = {};// Declare RealSense pipeline, encapsulating the actual device and sensors
	rs2::pipeline_profile rs2Selection_ = {};
    

	// 配置config
	rs2::config cfg;
	// 开启ir
	cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F, 200);// 使能加速度
  	cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F, 200); // 使能角速度
	cfg.enable_stream(RS2_STREAM_INFRARED, 1, 640, 480, RS2_FORMAT_Y8, 30);// left IR
	cfg.enable_stream(RS2_STREAM_INFRARED, 2, 640, 480, RS2_FORMAT_Y8, 30);// right IR
	// 禁止深度
	cfg.disable_stream(RS2_STREAM_DEPTH);

	// 开启相机
	rs2Selection_ = rs2Pipe_.start(cfg);

	// 配置双目传感器
	rs2::sensor depth_sen;
	auto sensors = rs2Pipe_.get_active_profile().get_device().query_sensors();
	for (auto&& sensor : sensors) {
		if(std::string(sensor.get_info(RS2_CAMERA_INFO_NAME)) == "Stereo Module") {
			depth_sen = sensor;
		}
	}
	if (std::string(depth_sen.get_info(RS2_CAMERA_INFO_NAME)) != "Stereo Module") {
		throw;
	}

	depth_sen.set_option(RS2_OPTION_EMITTER_ENABLED, 0);// 关闭激光
	depth_sen.set_option(RS2_OPTION_EXPOSURE, 200);		// 设置曝光时间

    while(1){

        auto rs2_Frameset = rs2Pipe_.wait_for_frames();// Set of time synchronized frames, one from each active stream
		
		// Find and retrieve IMU data
		if (rs2::motion_frame accel_frame = rs2_Frameset.first_or_default(RS2_STREAM_ACCEL))
		{
			rs2_vector accel_sample = accel_frame.get_motion_data();
			std::cout << "Accel:" << accel_sample.x << ", " << accel_sample.y << ", " << accel_sample.z << std::endl;
		}

		if (rs2::motion_frame gyro_frame = rs2_Frameset.first_or_default(RS2_STREAM_GYRO))
		{
			rs2_vector gyro_sample = gyro_frame.get_motion_data();
			std::cout << "Gyro:" << gyro_sample.x << ", " << gyro_sample.y << ", " << gyro_sample.z << std::endl;
		}
		
		if (auto fs = rs2_Frameset.as<rs2::frameset>()) {

			rs2::video_frame rs2_IrFrameLeft = rs2_Frameset.get_infrared_frame(1);
			rs2::video_frame rs2_IrFrameRight = rs2_Frameset.get_infrared_frame(2);
			double time_l = rs2_IrFrameLeft.get_timestamp();
			double time_r = rs2_IrFrameRight.get_timestamp();

			if(long(time_l * 1e9) == long(time_r * 1e9)){
				// 读取ir
				cv::Mat left_ir = cv::Mat(cv::Size(640, 480), CV_8UC1, (void*)rs2_IrFrameLeft.get_data());
				cv::Mat right_ir = cv::Mat(cv::Size(640, 480), CV_8UC1, (void*)rs2_IrFrameRight.get_data());
				
				cv::imshow("ll", left_ir);
				cv::imshow("rr", right_ir);
				cv::waitKey(1);
				std::cout << "get frame" << std::endl;
			}
			

		}
        


    }

    return 0;
}

