#ifndef SR300_HPP
#define SR300_HPP

// include the librealsense C++ header file
#include <librealsense2/rs.hpp>

#include <librealsense2/rsutil.h>

// include OpenCV header file
#include <opencv2/opencv.hpp>

// C++ standard library (std::runtime_error, std::vector)
#include <stdexcept>
#include <vector>

class SR300
{
protected:
    int k;
    rs2_intrinsics frame_intrinsic;
public:
    SR300()
        : k(0)
    {

    }

    static cv::Mat GrayScale(const cv::Mat &m)
    {
        cv::Mat out;
        cv::normalize(m, out, 0, 255, cv::NORM_MINMAX, CV_8U);
        return out;
    }

    static cv::Mat ColorMap(const cv::Mat &m)
    {
        if(m.depth()!=CV_8U)
            return ColorMap(GrayScale(m));
        cv::Mat out;
        // Apply Histogram Equalization
        cv::equalizeHist( m, out );
        cv::applyColorMap(out, out, cv::COLORMAP_JET);
        return out;
    }

    static float get_depth_scale(rs2::device dev)
    {
        // Go over the device's sensors
        for (rs2::sensor& sensor : dev.query_sensors())
        {
            // Check if the sensor if a depth sensor
            if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>())
            {
                return dpt.get_depth_scale();
            }
        }
        throw std::runtime_error("Device does not have a depth sensor");
    }

    virtual void ProcessFrame(const cv::Mat &color, const cv::Mat &ir,
                              const cv::Mat &depth, const cv::Mat &xyz, const cv::Mat &texture,
                              const cv::Mat &aligned_depth, const cv::Mat &aligned_xyz)
    {

        cv::line(color, cv::Point(320-50, 240), cv::Point(320+50,240), cv::Scalar(0,0,255));

        cv::line(color, cv::Point(320, 240-50), cv::Point(320,240+50), cv::Scalar(0,0,255));

        // Display in a GUI
        cv::namedWindow("color Image", cv::WINDOW_AUTOSIZE );
        cv::imshow("color Image", color);

        // Display the image in GUI
        cv::namedWindow("ir Image ColorMap",cv::WINDOW_AUTOSIZE );
        cv::imshow("ir Image ColorMap", ColorMap(ir));

        // Display the image in GUI
        cv::namedWindow("depth Image GrayScale", cv::WINDOW_AUTOSIZE );
        cv::imshow("depth Image GrayScale", GrayScale(depth));

        // Display the image in GUI
        cv::namedWindow("depth Image ColorMap", cv::WINDOW_AUTOSIZE );
        cv::imshow("depth Image ColorMap", ColorMap(depth));

        cv::namedWindow("aligned depth Image ColorMap", cv::WINDOW_AUTOSIZE );
        cv::imshow("aligned depth Image ColorMap", ColorMap(aligned_depth));

        std::vector<cv::Mat> xy;
        cv::split(texture, xy);

        // Display the image in GUI
        cv::namedWindow("texture x Image ColorMap", cv::WINDOW_AUTOSIZE );
        cv::imshow("texture x Image ColorMap", ColorMap(xy[0]));

        cv::namedWindow("texture y Image ColorMap", cv::WINDOW_AUTOSIZE );
        cv::imshow("texture y Image ColorMap", ColorMap(xy[1]));


        cv::imwrite("xyz.hdr",xyz);

        std::vector<cv::Mat> axis;
        cv::split(xyz, axis);

        cv::Mat x8;
        axis[0].convertTo(x8,CV_8U,500);

        // Display the image in GUI
        cv::namedWindow("x Image scale", cv::WINDOW_AUTOSIZE );
        cv::imshow("x Image scale", x8);


        cv::Mat y8;
        axis[1].convertTo(y8,CV_8U,500);

        // Display the image in GUI
        cv::namedWindow("y Image scale", cv::WINDOW_AUTOSIZE );
        cv::imshow("y Image scale", y8);


        cv::Mat z8;
        axis[2].convertTo(z8,CV_8U,500);

        // Display the image in GUI
        cv::namedWindow("z Image scale", cv::WINDOW_AUTOSIZE );
        cv::imshow("z Image scale", z8);

        // Display the image in GUI
        cv::namedWindow("z Image ColorMap", cv::WINDOW_AUTOSIZE );
        cv::imshow("z Image ColorMap", ColorMap(axis[2]));


        k=cv::waitKey(1);
    }

    void Start(cv::Size frameSize=cv::Size(640,480))
    {
        //Contruct a pipeline which abstracts the device
        rs2::pipeline pipe;

        //Create a configuration for configuring the pipeline with a non default profile
        rs2::config cfg;

        //Add desired streams to configuration
        cfg.enable_stream(RS2_STREAM_COLOR, frameSize.width, frameSize.height, RS2_FORMAT_BGR8, 30);
        cfg.enable_stream(RS2_STREAM_INFRARED, frameSize.width, frameSize.height, RS2_FORMAT_Y8, 30);
        cfg.enable_stream(RS2_STREAM_DEPTH, frameSize.width, frameSize.height, RS2_FORMAT_Z16, 30);

        //Instruct pipeline to start streaming with the requested configuration
        rs2::pipeline_profile profile=pipe.start(cfg);

        auto depth_stream = profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();

        rs2_intrinsics depth_intrinsic = depth_stream.get_intrinsics();

        auto color_stream = profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();

        rs2_intrinsics color_intrinsic = color_stream.get_intrinsics();

        frame_intrinsic=color_stream.get_intrinsics();

        rs2::align align(RS2_STREAM_COLOR);

        // Camera warmup - dropping several first frames to let auto-exposure stabilize
        rs2::frameset frames;


        while(k!=27)
        {
//            for(int i = 0; i < 1; i++)
            {
                //Wait for all configured streams to produce a frame
                frames = pipe.wait_for_frames();
            }

            //Get each frame
            rs2::frame color_frame = frames.get_color_frame();
            rs2::frame ir_frame = frames.first(RS2_STREAM_INFRARED);
            rs2::frame depth_frame = frames.get_depth_frame();

            // Creating OpenCV Matrix from a color image
            cv::Mat color(frameSize, CV_8UC3, (void*)color_frame.get_data(), cv::Mat::AUTO_STEP);

            // Creating OpenCV matrix from IR image
            cv::Mat ir(frameSize, CV_8UC1, (void*)ir_frame.get_data(), cv::Mat::AUTO_STEP);

            // Creating OpenCV matrix from IR image
            cv::Mat depth(frameSize, CV_16UC1, (void*)depth_frame.get_data(), cv::Mat::AUTO_STEP);

            rs2::pointcloud pc;
            rs2::points points = pc.calculate(depth_frame);

            pc.map_to(color_frame);

            /* this segment actually prints the pointcloud */
            auto vertices = points.get_vertices();              // get vertices
            auto tex_coords = points.get_texture_coordinates(); // and texture coordinates

            cv::Mat texture(frameSize, CV_32FC2, (void*)tex_coords, cv::Mat::AUTO_STEP);

            cv::Mat xyz(frameSize,CV_32FC3,(void*)vertices, cv::Mat::AUTO_STEP);

            auto processed = align.process(frames);
            rs2::depth_frame aligned_depth_frame = processed.get_depth_frame();

            cv::Mat aligned_depth(frameSize, CV_16UC1, (void*)aligned_depth_frame.get_data(), cv::Mat::AUTO_STEP);

            cv::Mat aligned_xyz(frameSize, CV_32FC3, cv::Scalar::all(0));

            for(int y=0;y<frameSize.height;y++)
            {
                for(int x=0;x<frameSize.width;x++)
                {
                    float uv[2]={x,y};

                    float dist = aligned_depth_frame.get_distance(x, y);

                    // Deproject from pixel to point in 3D
                    rs2_deproject_pixel_to_point(aligned_xyz.at<cv::Vec3f>(y,x).val, &color_intrinsic, uv, dist);

                }
            }

            ProcessFrame(color, ir, depth, xyz, texture, aligned_depth, aligned_xyz);

//            std::cout<<"upoint="<<upoint[0]<<"\t"<<upoint[1]<<"\t"<<upoint[2]<<"\n"<<std::flush;

        }
        pipe.stop();
    }


    void GetOne(cv::Size frameSize=cv::Size(640,480), unsigned int count=8)
    {
        //Contruct a pipeline which abstracts the device
        rs2::pipeline pipe;

        //Create a configuration for configuring the pipeline with a non default profile
        rs2::config cfg;

        //Add desired streams to configuration
        cfg.enable_stream(RS2_STREAM_COLOR, frameSize.width, frameSize.height, RS2_FORMAT_BGR8, 30);
        cfg.enable_stream(RS2_STREAM_INFRARED, frameSize.width, frameSize.height, RS2_FORMAT_Y8, 30);
        cfg.enable_stream(RS2_STREAM_DEPTH, frameSize.width, frameSize.height, RS2_FORMAT_Z16, 30);

        //Instruct pipeline to start streaming with the requested configuration
        rs2::pipeline_profile profile=pipe.start(cfg);


        auto depth_stream = profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();

        rs2_intrinsics depth_intrinsic = depth_stream.get_intrinsics();

        auto color_stream = profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();

        rs2_intrinsics color_intrinsic = color_stream.get_intrinsics();

        frame_intrinsic=color_stream.get_intrinsics();

        rs2::align align(RS2_STREAM_COLOR);

        // Camera warmup - dropping several first frames to let auto-exposure stabilize
        rs2::frameset frames;

        //Wait for all configured streams to produce a frame
        for(unsigned int i=0;i<count;i++)
            frames = pipe.wait_for_frames();

        //Get each frame
        rs2::frame color_frame = frames.get_color_frame();
        rs2::frame ir_frame = frames.first(RS2_STREAM_INFRARED);
        rs2::frame depth_frame = frames.get_depth_frame();

        // Creating OpenCV Matrix from a color image
        cv::Mat color(frameSize, CV_8UC3, (void*)color_frame.get_data(), cv::Mat::AUTO_STEP);

        // Creating OpenCV matrix from IR image
        cv::Mat ir(frameSize, CV_8UC1, (void*)ir_frame.get_data(), cv::Mat::AUTO_STEP);

        // Creating OpenCV matrix from IR image
        cv::Mat depth(frameSize, CV_16UC1, (void*)depth_frame.get_data(), cv::Mat::AUTO_STEP);

        rs2::pointcloud pc;
        rs2::points points = pc.calculate(depth_frame);

        /* this segment actually prints the pointcloud */
        auto vertices = points.get_vertices();              // get vertices
        auto tex_coords = points.get_texture_coordinates(); // and texture coordinates

        cv::Mat texture(frameSize, CV_32FC2, (void*)tex_coords, cv::Mat::AUTO_STEP);

        cv::Mat xyz(frameSize,CV_32FC3,(void*)vertices, cv::Mat::AUTO_STEP);

        auto processed = align.process(frames);
        rs2::depth_frame aligned_depth_frame = processed.get_depth_frame();

        cv::Mat aligned_depth(frameSize, CV_16UC1, (void*)aligned_depth_frame.get_data(), cv::Mat::AUTO_STEP);

        cv::Mat aligned_xyz(frameSize, CV_32FC3, cv::Scalar::all(0));

        for(int y=0;y<frameSize.height;y++)
        {
            for(int x=0;x<frameSize.width;x++)
            {
                float uv[2]={x,y};

                float dist = aligned_depth_frame.get_distance(x, y);

                // Deproject from pixel to point in 3D
                rs2_deproject_pixel_to_point(aligned_xyz.at<cv::Vec3f>(y,x).val, &color_intrinsic, uv, dist);

            }
        }


        ProcessFrame(color, ir, depth, xyz, texture, aligned_depth, aligned_xyz);

        pipe.stop();
    }

    cv::Vec3f Deproject(cv::Vec2f pixel)
    {
        cv::Vec3f ray;
        // Deproject from pixel to point in 3D
        rs2_deproject_pixel_to_point(ray.val, &frame_intrinsic, pixel.val, 1);
        return ray;
    }

};



#endif
