#include<iostream>
#include<string>
#include <librealsense2/rs.hpp>
#include <opencv2/opencv.hpp>
#include<opencv2/core/core.hpp>
#include<opencv2/highgui/highgui.hpp>

using namespace std;
using namespace cv;

int main(int argc,char** argv)
{
    rs2::config cfg;

    // Enable the left (index 1) and right (index 2) fisheye image streams, 8-bit grayscale.
    cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);

    // Enable the sensor's 6-DOF pose stream (translation/velocity/acceleration + angular data).
    cfg.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);

    rs2::pipeline pipe;
    pipe.start(cfg);

    rs2::frameset data;
    rs2::pipeline_profile profile = pipe.get_active_profile();

    // --- Left fisheye intrinsics -> OpenCV camera matrix Kl and distortion row vector Dl ---
    // Mat(rows, cols, type, ptr) wraps external memory WITHOUT copying, so clone()
    // to give each matrix its own storage instead of aliasing a stack array.
    rs2_intrinsics distparaml = profile.get_stream(RS2_STREAM_FISHEYE, 1).as<rs2::video_stream_profile>().get_intrinsics();
    double kl[3][3] = {distparaml.fx, 0, distparaml.ppx,
                       0, distparaml.fy, distparaml.ppy,
                       0, 0, 1};
    Mat Kl = Mat(3, 3, CV_64F, kl).clone();
    double dl[5] = {distparaml.coeffs[0], distparaml.coeffs[1], distparaml.coeffs[2],
                    distparaml.coeffs[3], distparaml.coeffs[4]};
    Mat Dl = Mat(1, 5, CV_64F, dl).clone();
    cout << "Kl" << Kl << endl;
    cout << "Dl" << Dl << endl;

    // --- Right fisheye intrinsics -> Kr / Dr ---
    rs2_intrinsics distparamr = profile.get_stream(RS2_STREAM_FISHEYE, 2).as<rs2::video_stream_profile>().get_intrinsics();
    double kr[3][3] = {distparamr.fx, 0, distparamr.ppx,
                       0, distparamr.fy, distparamr.ppy,
                       0, 0, 1};
    Mat Kr = Mat(3, 3, CV_64F, kr).clone();
    // FIX: previously this copied the LEFT camera's coefficients (distparaml) and
    // then wrapped the left array `dl`, making Dr identical to Dl. Use the right
    // camera's data throughout.
    double dr[5] = {distparamr.coeffs[0], distparamr.coeffs[1], distparamr.coeffs[2],
                    distparamr.coeffs[3], distparamr.coeffs[4]};
    Mat Dr = Mat(1, 5, CV_64F, dr).clone();
    cout << "KR" << Kr << endl;
    cout << "DR" << Dr << endl;

    // Placeholder images, used only for their .size() below.
    // FIX: size the right-camera placeholders from the right intrinsics
    // (previously both sides used distparaml).
    Mat srcl = Mat::eye(distparaml.height, distparaml.width, CV_64F);
    Mat dstl = Mat::eye(distparaml.height, distparaml.width, CV_64F);
    Mat srcr = Mat::eye(distparamr.height, distparamr.width, CV_64F);
    Mat dstr = Mat::eye(distparamr.height, distparamr.width, CV_64F);

    Mat mapl1, mapl2, mapr1, mapr2;
    Mat NewKl = getOptimalNewCameraMatrix(Kl, Dl, srcl.size(), 1, dstl.size(), 0);
    //cout<<Kl<<endl;
    //cout<<NewKl<<endl;
    //initUndistortRectifyMap(Kl,Dl,Mat(),NewKl,srcl.size(),CV_16SC2,mapl1,mapl2);
    //Mat NewKr=getOptimalNewCameraMatrix(Kr,Dr,srcr.size(),1,dstr.size(),0);
    //initUndistortRectifyMap(Kr,Dr,Mat(),NewKr,srcr.size(),CV_16SC2,mapr1,mapr2);

    while (true)
    {
        data = pipe.wait_for_frames();

        // Get a frame from the pose stream.
        auto f = data.first_or_default(RS2_STREAM_POSE);
        auto pose = f.as<rs2::pose_frame>().get_pose_data();
        (void)pose; // only consumed by the commented-out printout below

        /***
        cout<<"px: "<<pose.translation.x<<"   py: "<<pose.translation.y<<"   pz: "<<pose.translation.z<<
        "vx: "<<pose.velocity.x<<"   vy: "<<pose.velocity.y<<"   vz: "<<pose.velocity.z<<endl;

        cout<<"ax: "<<pose.acceleration.x<<"   ay: "<<pose.acceleration.y<<"   az: "<<pose.acceleration.z<<
        "gx: "<<pose.angular_velocity.x<<"   gy: "<<pose.angular_velocity.y<<"   gz: "<<pose.angular_velocity.z<<endl;
        ***/

        rs2::frame image_left = data.get_fisheye_frame(1);
        rs2::frame image_right = data.get_fisheye_frame(2);

        if (!image_left || !image_right)
            break;

        // Wrap the raw Y8 frame data (no copy).
        // FIX: use the resolution reported by the intrinsics instead of the
        // hard-coded 848x800, so other sensor modes don't corrupt the view.
        cv::Mat distor_left(cv::Size(distparaml.width, distparaml.height), CV_8U,
                            (void*)image_left.get_data(), cv::Mat::AUTO_STEP);
        cv::Mat distor_right(cv::Size(distparamr.width, distparamr.height), CV_8U,
                             (void*)image_right.get_data(), cv::Mat::AUTO_STEP);

        Mat undistorl, undistorr;

        // NOTE(review): T265 fisheye streams typically report the Kannala-Brandt
        // distortion model, while cv::undistort assumes the plumb-bob model.
        // Check distparaml.model — cv::fisheye::undistortImage (4 coeffs) may be
        // the correct call here.
        //remap(distor_left,undistorl,mapl1,mapl2,cv::INTER_LINEAR);
        //remap(distor_right,undistorr,mapr1,mapr2,cv::INTER_LINEAR);
        undistort(distor_left, undistorl, Kl, Dl, noArray());

        cv::imshow("../left.png", undistorl);
        cv::imshow("../right.png", distor_right);
        cv::waitKey(500);
    }

    return 0;
}

