// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2015 Intel Corporation. All Rights Reserved.

///  \file rs.hpp
///  \brief
///  Exposes librealsense functionality for C++ compilers

#ifndef LIBREALSENSE_RS_HPP
#define LIBREALSENSE_RS_HPP

#include "rsutil.h"
#include "rscore.hpp"
#include <cmath>
#include <cstdint>
#include <cstring>
#include <sstream>
#include <stdexcept>
#include <functional>
#include <vector>

namespace rs
{
    /// \brief Streams are different types of data provided by RealSense devices
    ///
    /// NOTE: enumerator order/values are significant -- this header casts raw
    /// integers returned by the C API directly to rs::stream (see
    /// frame::get_stream_type), so entries must not be reordered or inserted.
    enum class stream : int32_t
    {
        depth                           ,  /**< Native stream of depth data produced by RealSense device */
        color                           ,  /**< Native stream of color data captured by RealSense device */
        infrared                        ,  /**< Native stream of infrared data captured by RealSense device */
        infrared2                       ,  /**< Native stream of infrared data captured from a second viewpoint by RealSense device */
        fisheye                         ,  /**< Native stream of fish-eye (wide) data captured from the dedicate motion camera */
        points                          ,  /**< Synthetic stream containing point cloud data generated by deprojecting the depth image */
        rectified_color                 ,  /**< Synthetic stream containing undistorted color data with no extrinsic rotation from the depth stream */
        color_aligned_to_depth          ,  /**< Synthetic stream containing color data but sharing intrinsic of depth stream */
        infrared2_aligned_to_depth      ,  /**< Synthetic stream containing second viewpoint infrared data but sharing intrinsic of depth stream */
        depth_aligned_to_color          ,  /**< Synthetic stream containing depth data but sharing intrinsic of color stream */
        depth_aligned_to_rectified_color,  /**< Synthetic stream containing depth data but sharing intrinsic of rectified color stream */
        depth_aligned_to_infrared2         /**< Synthetic stream containing depth data but sharing intrinsic of second viewpoint infrared stream */
    };

    ///  \brief Formats: defines how each stream can be encoded.
    ///    \c rs_format specifies how a frame is represented in memory (similar to the V4L pixel format).
    enum class format : int32_t
    {
        any         ,  /**< When passed to enable stream, librealsense will try to provide best suited format */
        z16         ,  /**< 16 bit linear depth values. The depth in meters is equal to depth scale * pixel value. */
        disparity16 ,  /**< 16 bit linear disparity values. The depth in meters is equal to depth scale / pixel value. */
        xyz32f      ,  /**< 32 bit floating point 3D coordinates. */
        yuyv        ,  /**< Standard YUV pixel format as described in https://en.wikipedia.org/wiki/YUV */
        rgb8        ,  /**< 8-bit red, green, and blue channels */
        bgr8        ,  /**< 8-bit blue, green, and red channels -- suitable for OpenCV */
        rgba8       ,  /**< 8-bit red, green, and blue channels + constant alpha channel equal to FF */
        bgra8       ,  /**< 8-bit blue, green, and red channels + constant alpha channel equal to FF */
        y8          ,  /**< 8-bit per-pixel grayscale image */
        y16         ,  /**< 16-bit per-pixel grayscale image */
        raw10       ,  /**< Four 10-bit luminance values encoded into a 5-byte macropixel */
        raw16       ,  /**< 16-bit raw image */
        raw8           /**< 8-bit raw image */
    };

    /// \brief Output buffer format: sets how librealsense works with frame memory.
    enum class output_buffer_format : int32_t
    {
        continous      , /**< Makes sure that the output frame is exposed as a single continuous buffer. NOTE: the misspelling of "continuous" is part of the public interface and must be kept for source compatibility. */
        native           /**< Does not convert buffer to continuous. The user has to handle pitch manually. */
    };

    /// \brief Presets: general preferences that are translated by librealsense into concrete resolution and FPS.
    ///
    /// Use a preset when the application cares about a quality trade-off rather than an exact mode.
    enum class preset : int32_t
    {
        best_quality     ,/**< Prefer best overall quality */
        largest_image    ,/**< Prefer largest image size */
        highest_framerate /**< Prefer highest frame rate */
    };

    /// \brief Distortion model: defines how pixel coordinates should be mapped to sensor coordinates.
    enum class distortion : int32_t
    {
        none                  , /**< Rectilinear images. No distortion compensation required. */
        modified_brown_conrady, /**< Equivalent to Brown-Conrady distortion, except that tangential distortion is applied to radially distorted points */
        inverse_brown_conrady,  /**< Equivalent to Brown-Conrady distortion, except undistorts image instead of distorting it */
        distortion_ftheta       /**< Distortion model of the fish-eye camera */
    };

    /// \brief Defines general configuration controls.
    ///
    /// These can generally be mapped to camera UVC controls, and unless stated otherwise, can be set/queried at any time.
    enum class option : int32_t
    {
        color_backlight_compensation                    ,  /**< Enable/disable color backlight compensation*/
        color_brightness                                ,  /**< Color image brightness*/
        color_contrast                                  ,  /**< Color image contrast*/
        color_exposure                                  ,  /**< Controls exposure time of color camera. Setting any value will disable auto exposure.*/
        color_gain                                      ,  /**< Color image gain*/
        color_gamma                                     ,  /**< Color image gamma setting*/
        color_hue                                       ,  /**< Color image hue*/
        color_saturation                                ,  /**< Color image saturation setting*/
        color_sharpness                                 ,  /**< Color image sharpness setting*/
        color_white_balance                             ,  /**< Controls white balance of color image. Setting any value will disable auto white balance.*/
        color_enable_auto_exposure                      ,  /**< Enable/disable color image auto-exposure*/
        color_enable_auto_white_balance                 ,  /**< Enable/disable color image auto-white-balance*/
        f200_laser_power                                , /**< Power of the F200/SR300 projector, with 0 meaning projector off*/
        f200_accuracy                                   , /**< Set the number of patterns projected per frame. The higher the accuracy value, the more patterns projected. Increasing the number of patterns helps to achieve better accuracy. Note that this control affects the depth FPS.  */
        f200_motion_range                               , /**< Motion vs. range trade-off, with lower values allowing for better motion sensitivity and higher values allowing for better depth range*/
        f200_filter_option                              , /**< Set the filter to apply to each depth frame. Each one of the filters is optimized per the application requirements.*/
        f200_confidence_threshold                       , /**< Confidence level threshold used by the depth algorithm pipe to set whether a pixel will get a valid range or will be marked with invalid range*/
        f200_dynamic_fps                                , /**< (F200-only) Allows to reduce FPS without restarting streaming. Valid values are {2, 5, 15, 30, 60}.*/
        sr300_auto_range_enable_motion_versus_range     , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_enable_laser                   , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_min_motion_versus_range        , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_max_motion_versus_range        , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_start_motion_versus_range      , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_min_laser                      , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_max_laser                      , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_start_laser                    , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_upper_threshold                , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        sr300_auto_range_lower_threshold                , /**< Configures SR300 depth auto-range setting. Should not be used directly but through the \c rs_apply_ivcam_preset method in rsutil.h.*/
        r200_lr_auto_exposure_enabled                   , /**< Enable/disable R200 auto-exposure. This will affect both IR and depth images.*/
        r200_lr_gain                                    , /**< IR image gain*/
        r200_lr_exposure                                , /**< This control allows manual adjustment of the exposure time value for the L/R imagers.*/
        r200_emitter_enabled                            , /**< Enables/disables R200 emitter*/
        r200_depth_units                                , /**< Micrometers per increment in integer depth values. 1000 is default (mm scale). Set before streaming.*/
        r200_depth_clamp_min                            , /**< Minimum depth in current depth units that will be output. Any values less than "Min Depth" will be mapped to 0 during the conversion between disparity and depth. Set before streaming.*/
        r200_depth_clamp_max                            , /**< Maximum depth in current depth units that will be output. Any values greater than "Max Depth" will be mapped to 0 during the conversion between disparity and depth. Set before streaming.*/
        r200_disparity_multiplier                       , /**< Disparity scale factor used when in disparity output mode. Can only be set before streaming.*/
        r200_disparity_shift                            , /**< {0 - 512}. Can only be set before streaming starts.*/
        r200_auto_exposure_mean_intensity_set_point     , /**< Mean intensity set point. Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_bright_ratio_set_point       , /**< Bright ratio set point. Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_kp_gain                      , /**< Kp gain. Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_kp_exposure                  , /**< Kp exposure. Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_kp_dark_threshold            , /**< Kp dark threshold. Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_top_edge                     , /**< Auto-exposure region-of-interest top edge (in pixels). Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_bottom_edge                  , /**< Auto-exposure region-of-interest bottom edge (in pixels). Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_left_edge                    , /**< Auto-exposure region-of-interest left edge (in pixels). Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_auto_exposure_right_edge                   , /**< Auto-exposure region-of-interest right edge (in pixels). Requires the \c r200_lr_auto_exposure_enabled option to be set to 1.*/
        r200_depth_control_estimate_median_decrement    , /**< Value to subtract when estimating the median of the correlation surface*/
        r200_depth_control_estimate_median_increment    , /**< Value to add when estimating the median of the correlation surface*/
        r200_depth_control_median_threshold             , /**< Threshold: by how much the winning score exceeds the median. */
        r200_depth_control_score_minimum_threshold      , /**< Minimum correlation score that is considered acceptable*/
        r200_depth_control_score_maximum_threshold      , /**< Maximum correlation score that is considered acceptable*/
        r200_depth_control_texture_count_threshold      , /**< Parameter for determining whether the texture in the region is sufficient to justify a depth result*/
        r200_depth_control_texture_difference_threshold , /**< Parameter for determining whether the texture in the region is sufficient to justify a depth result*/
        r200_depth_control_second_peak_threshold        , /**< Threshold: how much the minimum correlation score must differ from the next best score.*/
        r200_depth_control_neighbor_threshold           , /**< Neighbor threshold value for depth calculation*/
        r200_depth_control_lr_threshold                 , /**< Left-right threshold value for depth calculation*/
        fisheye_exposure                                , /**< Fisheye image exposure time in msec*/
        fisheye_gain                                    , /**< Fisheye image gain*/
        fisheye_strobe                                  , /**< Enable/disable fisheye strobe. When enabled, aligns timestamps to common clock-domain with the motion events.*/
        fisheye_external_trigger                        , /**< Enable/disable fisheye external trigger mode. When enabled, fisheye image will be acquired in-sync with the depth image.*/
        fisheye_color_auto_exposure                     , /**< Enable / disable fisheye auto-exposure */
        fisheye_color_auto_exposure_mode                , /**< 0 - static auto-exposure, 1 - anti-flicker auto-exposure, 2 - hybrid */
        fisheye_color_auto_exposure_rate                , /**< Fisheye auto-exposure anti-flicker rate. Can be 50 or 60 Hz. */
        fisheye_color_auto_exposure_sample_rate         , /**< In fisheye auto-exposure sample frame, every given number of pixels */
        fisheye_color_auto_exposure_skip_frames         , /**< In fisheye auto-exposure sample, every given number of frames. */
        frames_queue_size                               , /**< Number of frames the user is allowed to keep per stream. Trying to hold on to more frames will cause frame-drops.*/
        hardware_logger_enabled                         , /**< Enable/disable fetching log data from the device */
        total_frame_drops                               , /**< Total number of detected frame drops from all streams*/
    };

    /// \brief Types of value provided from the device with each frame
    ///
    /// Underlying type is fixed to int32_t for consistency with the other
    /// enums in this header that are exchanged with the C API by value.
    enum class frame_metadata : int32_t
    {
        actual_exposure, /**< Actual exposure at which the frame was captured */
        actual_fps       /**< Actual FPS at the time of capture */
    };

    /// \brief Specifies various capabilities of a RealSense device.
    ///
    /// To check if a certain capability is supported by a particular device, at runtime call <tt>dev->supports(capability)</tt>.
    enum class capabilities : int32_t
    {
        depth,                      /**< Provides depth stream */
        color,                      /**< Provides color stream */
        infrared,                   /**< Provides infrared stream */
        infrared2,                  /**< Provides second infrared stream */
        fish_eye,                   /**< Provides wide field of view (fish-eye) stream */
        motion_events,              /**< Provides gyroscope and accelerometer events */
        motion_module_fw_update,    /**< Provides method for upgrading motion module firmware */
        adapter_board,              /**< Internally includes MIPI-to-USB adapter */
        enumeration,                /**< Provides enough basic functionality to be considered supported. This is to catch various outdated engineering samples at runtime. */
    };

    /// \brief Proprietary formats for direct communication with device firmware
    ///
    /// Underlying type is fixed to int32_t for consistency with the other
    /// enums in this header.
    enum class blob_type : int32_t
    {
        motion_module_firmware_update /**< By using this option, new firmware can be uploaded to the ZR300 motion-module  */
    };

    /// \brief Read-only strings that can be queried from the device.
    ///
    /// Not all information fields are available on all camera types.
    /// This information is mainly available for camera debug and troubleshooting and should not be used in applications.
    ///
    /// Underlying type is fixed to int32_t for consistency with the other
    /// enums in this header.
    enum class camera_info : int32_t
    {
        device_name                   , /**< Device friendly name */
        serial_number                 , /**< Device serial number */
        camera_firmware_version       , /**< Primary firmware version */
        adapter_board_firmware_version, /**< MIPI-to-USB adapter board firmware version if such board is present */
        motion_module_firmware_version, /**< Motion module firmware version if motion module is present */
        camera_type                   , /**< R200/LR200/ZR300 camera type */
        oem_id                        , /**< OEM ID */
        isp_fw_version                , /**< ISP firmware version when available */
        content_version               , /**< R200/LR200/ZR300 content version */
        module_version                , /**< R200/LR200/ZR300 module version */
        imager_model_number           , /**< Primary imager model number */
        build_date                    , /**< Device build date */
        calibration_date              , /**< Primary calibration date */
        program_date                  , /**< R200/LR200/ZR300 program date */
        focus_alignment_date          , /**< Focus calibration date */
        emitter_type                  , /**< R200/LR200/ZR300 emitter type */
        focus_value                   , /**< Result of the focus calibration */
        lens_type                     , /**< Primary lens type */
        third_lens_type               , /**< Color imager lens type */
        lens_coating_type             , /**< Lens coating type */
        third_lens_coating_type       , /**< Color coating type */
        lens_nominal_baseline         , /**< Nominal baseline */
        third_lens_nominal_baseline     /**< Color nominal baseline */
    };

    /// \brief Allows the user to choose between available hardware subdevices
    enum class source : uint8_t
    {
        video      , /**< Video streaming of depth, infrared, color, or fish-eye */
        motion_data, /**< Motion tracking from gyroscope and accelerometer */
        all_sources, /**< Enable everything together */
    };

    /// \brief Source device that triggered specific timestamp event from the motion module
    enum class event : uint8_t
    {
        event_imu_accel     , /**< Event from accelerometer */
        event_imu_gyro      , /**< Event from the gyroscope */
        event_imu_depth_cam , /**< Event from depth camera (depth/IR frame) */
        event_imu_motion_cam, /**< Event from the fish-eye camera */
        event_imu_g0_sync   , /**< Event from external GPIO 0 */
        event_imu_g1_sync   , /**< Event from external GPIO 1 */
        event_imu_g2_sync     /**< Event from external GPIO 2 */
    };

    /// \brief Specifies the clock in relation to which the frame timestamp was measured.
    ///
    /// When working with a motion microcontroller, motion data timestamps are always in the microcontroller timestamp domain.
    /// Some frames, however, might not successfully receive microcontroller timestamp and will be marked as camera domain.
    ///
    /// Underlying type is fixed to int32_t for consistency with the other
    /// enums in this header.
    enum class timestamp_domain : int32_t
    {
        camera,         /**< Frame timestamp was measured in relation to the camera clock */
        microcontroller /**< Frame timestamp was measured in relation to the microcontroller clock */
    };

    /// Plain 2D vector, used for pixel and texture coordinates
    struct float2 { float x,y; };
    /// Plain 3D vector, used for points in 3D space
    struct float3 { float x,y,z; };

    /// \brief Video stream intrinsics
    struct intrinsics : rs_intrinsics
    {
        /// Horizontal field of view of the image, in degrees (57.2957795f = 180/pi, radians-to-degrees)
        float hfov() const { return (atan2f(ppx + 0.5f, fx) + atan2f(width - (ppx + 0.5f), fx)) * 57.2957795f; }
        /// Vertical field of view of the image, in degrees
        float vfov() const { return (atan2f(ppy + 0.5f, fy) + atan2f(height - (ppy + 0.5f), fy)) * 57.2957795f; }
        /// Distortion model applied to this stream's images
        distortion model() const { return static_cast<distortion>(rs_intrinsics::model); }

        // Helpers for mapping between pixel coordinates and texture coordinates
        float2 pixel_to_texcoord(const float2 & pixel) const { return {(pixel.x + 0.5f) / width, (pixel.y + 0.5f) / height}; }
        float2 texcoord_to_pixel(const float2 & coord) const { return {coord.x * width - 0.5f, coord.y * height - 0.5f}; }

        // Helpers for mapping from image coordinates into 3D space
        float3 deproject(const float2 & pixel, float depth) const { float3 point = {}; rs_deproject_pixel_to_point(&point.x, this, &pixel.x, depth); return point; }
        float3 deproject_from_texcoord(const float2 & coord, float depth) const { return deproject(texcoord_to_pixel(coord), depth); }

        // Helpers for mapping from 3D space into image coordinates
        float2 project(const float3 & point) const { float2 pixel = {}; rs_project_point_to_pixel(&pixel.x, this, &point.x); return pixel; }
        float2 project_to_texcoord(const float3 & point) const { return pixel_to_texcoord(project(point)); }

        // NOTE(review): bitwise comparison assumes rs_intrinsics has no padding
        // bytes with indeterminate values -- confirm against the C struct layout.
        bool operator == (const intrinsics & r) const { return memcmp(this, &r, sizeof(r)) == 0; }
    };

    /// \brief Motion device intrinsics: scale, bias, and variances.
    struct motion_intrinsics : rs_motion_intrinsics
    {
        /// Default-constructs with the contents of the base struct left unspecified
        motion_intrinsics() {}
    };

    /// \brief Cross-stream extrinsics: encode the topology describing how the different devices are connected.
    struct extrinsics : rs_extrinsics
    {
        /// True when this transform leaves points unchanged.
        /// Only the two leading diagonal rotation elements are tested (sufficient
        /// assuming the matrix is a proper rotation); translation is tested fully.
        bool is_identity() const
        {
            return (rotation[0] == 1) && (rotation[4] == 1)
                && (translation[0] == 0) && (translation[1] == 0) && (translation[2] == 0);
        }

        /// Applies this rigid-body transform to a 3D point
        float3 transform(const float3 & point) const
        {
            float3 result = {};
            rs_transform_point_to_point(&result.x, this, &point.x);
            return result;
        }
    };

    /// \brief Timestamp data from the motion microcontroller
    struct timestamp_data : rs_timestamp_data
    {
        /// Default-constructs with unspecified contents
        timestamp_data() {}
        /// Wraps a value received from the C API (implicit by design)
        timestamp_data(rs_timestamp_data orig) : rs_timestamp_data(orig) {}
    };

    /// \brief Motion data from gyroscope and accelerometer from the microcontroller
    struct motion_data : rs_motion_data
    {
        /// Default-constructs with unspecified contents
        motion_data() {}
        /// Wraps a value received from the C API (implicit by design)
        motion_data(rs_motion_data orig) : rs_motion_data(orig) {}
    };

    class context;
    class device;
    
    class error : public std::runtime_error
    {
        std::string function, args;
    public:
        error(rs_error * err) : std::runtime_error(rs_get_error_message(err))
        { 
            function = (nullptr != rs_get_failed_function(err)) ? rs_get_failed_function(err) : std::string();
            args = (nullptr != rs_get_failed_args(err)) ? rs_get_failed_args(err) : std::string();
            rs_free_error(err);
        }
        const std::string & get_failed_function() const { return function; }
        const std::string & get_failed_args() const { return args; }
        static void handle(rs_error * e) { if(e) throw error(e); }
    };
    /// \brief Entry point to the API: owns an rs_context and enumerates devices.
    ///
    /// The underlying rs_context is created in the constructor and destroyed in
    /// the destructor. Non-copyable.
    class context
    {
        rs_context * handle;
        context(const context &) = delete;
        context & operator = (const context &) = delete;
    public:

        /// \brief Creates RealSense context that is required for the rest of the API
        context()
        {
            rs_error * e = nullptr;
            handle = rs_create_context(RS_API_VERSION, &e);
            error::handle(e);
        }

        /// \brief Wraps an existing rs_context handle and assumes ownership of it
        explicit context(rs_context * handle) : handle(handle) {}

        ~context()
        {
            // Destructors must not throw: pass a null error out-parameter so
            // any teardown failure is ignored rather than raised.
            rs_delete_context(handle, nullptr);
        }

        /// Determines number of connected devices
        /// \return  Device count
        int get_device_count() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_count(handle, &e);
            error::handle(e);
            return r;
        }

        /// Retrieves connected device by index
        /// \param[in] index  Zero-based index of device to retrieve
        /// \return           Requested device; lifetime is managed by the library, not the caller
        device * get_device(int index)
        {
            rs_error * e = nullptr;
            auto r = rs_get_device(handle, index, &e);
            error::handle(e);
            // rs::device is a facade over rs_device (it is never constructed
            // directly); use a named cast to make the reinterpretation explicit.
            return reinterpret_cast<device *>(r);
        }
    };

    /// \brief Adapter that forwards motion events from the C callback interface
    /// to a user-supplied std::function. Self-deleting via release().
    class motion_callback : public rs_motion_callback
    {
        std::function<void(motion_data)> on_event_function;
    public:
        // Move the function into the member to avoid copying captured state.
        explicit motion_callback(std::function<void(motion_data)> on_event) : on_event_function(std::move(on_event)) {}

        void on_event(rs_motion_data e) override
        {
            on_event_function(motion_data(e));
        }

        // Invoked by the library when the callback is no longer needed; the
        // object owns itself on the heap.
        void release() override { delete this; }
    };

    /// \brief Adapter that forwards timestamp events from the C callback
    /// interface to a user-supplied std::function. Self-deleting via release().
    class timestamp_callback : public rs_timestamp_callback
    {
        std::function<void(timestamp_data)> on_event_function;
    public:
        // Move the function into the member to avoid copying captured state.
        explicit timestamp_callback(std::function<void(timestamp_data)> on_event) : on_event_function(std::move(on_event)) {}

        void on_event(rs_timestamp_data data) override
        {
            on_event_function(std::move(data));
        }

        // Invoked by the library when the callback is no longer needed; the
        // object owns itself on the heap.
        void release() override { delete this; }
    };

    /// \brief Frame
    class frame
    {
        rs_device * device;
        rs_frame_ref * frame_ref;

        frame(const frame &) = delete;

    public:
        frame() : device(nullptr), frame_ref(nullptr) {}
        frame(rs_device * device, rs_frame_ref * frame_ref) : device(device), frame_ref(frame_ref) {}
        frame(frame&& other) : device(other.device), frame_ref(other.frame_ref) { other.frame_ref = nullptr; }
        frame& operator=(frame other)
        {
            swap(other);
            return *this;
        }
        void swap(frame& other)
        {
            std::swap(device, other.device);
            std::swap(frame_ref, other.frame_ref);
        }

        ~frame()
        {
            if (device && frame_ref)
            {
                rs_error * e = nullptr;
                rs_release_frame(device, frame_ref, &e);
                error::handle(e);
            }
        }

        /// Retrieves time at which frame was captured
        /// \return            Timestamp of the frame, in milliseconds since the device was started
        double get_timestamp() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_timestamp(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// Retrieves the timestamp domain 
        /// \return            Timestamp domain (clock name) for timestamp values
        timestamp_domain get_frame_timestamp_domain() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_timestamp_domain(frame_ref, &e);
            error::handle(e);
            return static_cast<timestamp_domain>(r);
        }

        /// Retrieves the current value of a single frame_metadata
        /// \param[in] frame_metadata  Frame metadata whose value should be retrieved
        /// \return                    Value of frame_metadata
        double get_frame_metadata(rs_frame_metadata frame_metadata) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_metadata(frame_ref, (rs_frame_metadata)frame_metadata, &e);
            error::handle(e);
            return r;
        }

        /// Determines if device allows specific metadata to be queried
        /// \param[in] frame_metadata  Frame_metadata to check for support
        /// \return                    true if the frame_metadata can be queried
        bool supports_frame_metadata(rs_frame_metadata frame_metadata) const
        {
            rs_error * e = nullptr;
            auto r = rs_supports_frame_metadata(frame_ref, frame_metadata, &e);
            error::handle(e);
            return r != 0;
        }

        /// Retrieves frame number
        /// \return  Frame number
        unsigned long long get_frame_number() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_number(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// Retrieves frame content
        /// \return   Frame content
        const void * get_data() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_data(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// \brief Returns image width in pixels
        int get_width() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_width(frame_ref, &e);
            error::handle(e);
            return r;
        } 

        /// \brief Returns image height in pixels
        int get_height() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_height(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// \brief Returns configured frame rate
        int get_framerate() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_framerate(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves frame stride, meaning the actual line width in memory in bytes (not the logical image width)
        int get_stride() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_stride(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves bits per pixel
        /// \return            Number of bits per one pixel
        int get_bpp() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_detached_frame_bpp(frame_ref, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves the pixel format of the frame
        /// \return  Frame format
        format get_format() const
        {
            rs_error * err = nullptr;
            const auto raw = rs_get_detached_frame_format(frame_ref, &err);
            error::handle(err);
            return static_cast<format>(raw);
        }

        /// \brief Retrieves the stream type this frame belongs to
        /// \return  Stream type of the frame
        stream get_stream_type() const
        {
            rs_error * err = nullptr;
            const auto type = rs_get_detached_frame_stream_type(frame_ref, &err);
            error::handle(err);
            return static_cast<stream>(type);
        }
    };

    /// \brief Adapts a std::function into the rs_frame_callback interface expected by the C API.
    ///
    /// release() deletes the instance, so objects of this class must be heap-allocated
    /// (see device::set_frame_callback, which passes `new frame_callback(...)`).
    class frame_callback : public rs_frame_callback
    {
        std::function<void(frame)> on_frame_function;
    public:
        // Sink parameter: move into the member instead of copying the std::function.
        explicit frame_callback(std::function<void(frame)> on_frame) : on_frame_function(std::move(on_frame)) {}

        void on_frame(rs_device * device, rs_frame_ref * fref) override
        {
            // frame(device, fref) is already a prvalue; the previous std::move wrapper
            // only blocked copy elision (clang-tidy: performance-move-const-arg).
            on_frame_function(frame(device, fref));
        }

        void release() override { delete this; }
    };
    /// \brief Provides convenience methods relating to devices
    class device
    {
        device() = delete;
        device(const device &) = delete;
        device & operator = (const device &) = delete;
        ~device() = delete;


    public:
        /// \brief Retrieves human-readable device model string
        /// \return  Model string, such as "Intel RealSense F200" or "Intel RealSense R200"
        const char * get_name() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_name((const rs_device *)this, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves unique serial number of device
        /// \return  Serial number, in a format specific to the device model
        const char * get_serial() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_serial((const rs_device *)this, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves USB port number of device
        /// \return  USB port number, in a format that is specific to device model
        const char * get_usb_port_id() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_usb_port_id((const rs_device *)this, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves version of firmware currently installed on device
        /// \return  Firmware version string, in a format that is specific to device model
        const char * get_firmware_version() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_firmware_version((const rs_device *)this, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves camera-specific information such as versions of various components
        /// \return  Camera info string, in a format that is specific to device model
        const char * get_info(camera_info info) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_info((const rs_device *)this, (rs_camera_info)info, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves extrinsic transformation between viewpoints of two different streams
        /// \param[in] from_stream  Stream whose coordinate space to transform from
        /// \param[in] to_stream    Stream whose coordinate space to transform to
        /// \return                 Transformation between two streams
        extrinsics get_extrinsics(stream from_stream, stream to_stream) const
        {
            rs_error * e = nullptr;
            extrinsics extrin;
            rs_get_device_extrinsics((const rs_device *)this, (rs_stream)from_stream, (rs_stream)to_stream, &extrin, &e);
            error::handle(e);
            return extrin;
        }

        /// \brief Retrieves extrinsic transformation between viewpoints of specific stream and motion module
        /// \param[in] from_stream  Stream whose coordinate space to transform from
        /// \return                 Transformation between specific stream and motion module
        extrinsics get_motion_extrinsics_from(stream from_stream) const
        {
            rs_error * e = nullptr;
            extrinsics extrin;
            rs_get_motion_extrinsics_from((const rs_device *)this, (rs_stream)from_stream, &extrin, &e);
            error::handle(e);
            return extrin;
        }

        /// \brief Retrieves mapping between units of depth image and meters
        /// \return  Depth in meters corresponding to a depth value of 1
        float get_depth_scale() const
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_depth_scale((const rs_device *)this, &e);
            error::handle(e);
            return r;
        }

        /// \brief Determines if device allows specific option to be queried and set
        /// \param[in] option  Option to check
        /// \return            true if the option can be queried and set
        bool supports_option(option option) const
        {
            rs_error * e = nullptr;
            auto r = rs_device_supports_option((const rs_device *)this, (rs_option)option, &e);
            error::handle(e);
            return r != 0;
        }

        /// \brief Determines number of streaming modes available for given stream
        /// \param[in] stream  Stream whose modes will be enumerated
        /// \return            Count of available modes
        int get_stream_mode_count(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_stream_mode_count((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Determines properties of specific streaming mode
        /// \param[in] stream      Stream whose mode will be queried
        /// \param[in] index       Zero-based index of streaming mode
        /// \param[out] width      Width of a frame image in pixels
        /// \param[out] height     Height of a frame image in pixels
        /// \param[out] format     Pixel format of a frame image
        /// \param[out] framerate  Number of frames that will be streamed per second
        void get_stream_mode(stream stream, int index, int & width, int & height, format & format, int & framerate) const
        {
            rs_error * e = nullptr;
            rs_get_stream_mode((const rs_device *)this, (rs_stream)stream, index, &width, &height, (rs_format *)&format, &framerate, &e);
            error::handle(e);
        }

        /// \brief Enables specific stream and requests specific properties
        /// \param[in] stream                   Stream
        /// \param[in] width                    Desired width of frame image in pixels, or 0 if any width is acceptable
        /// \param[in] height                   Desired height of frame image in pixels, or 0 if any height is acceptable
        /// \param[in] format                   Pixel format of frame image, or ANY if any format is acceptable
        /// \param[in] framerate                Number of frames that will be streamed per second, or 0 if any frame rate is acceptable
        /// \param[in] output_buffer_type       output buffer format (continous in memory / native with pitch)
        void enable_stream(stream stream, int width, int height, format format, int framerate, output_buffer_format output_buffer_type = output_buffer_format::continous)
        {
            rs_error * e = nullptr;
            rs_enable_stream_ex((rs_device *)this, (rs_stream)stream, width, height, (rs_format)format, framerate, (rs_output_buffer_format)output_buffer_type, &e);
            error::handle(e);
        }

        /// \brief Enables specific stream and requests properties using preset
        /// \param[in] stream  Stream to enable
        /// \param[in] preset  Preset to use to enable the stream
        void enable_stream(stream stream, preset preset)
        {
            rs_error * e = nullptr;
            rs_enable_stream_preset((rs_device *)this, (rs_stream)stream, (rs_preset)preset, &e);
            error::handle(e);
        }

        /// \brief Disables specific stream
        /// \param[in] stream  Stream
        void disable_stream(stream stream)
        {
            rs_error * e = nullptr;
            rs_disable_stream((rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
        }

        /// \brief Determines if specific stream is enabled
        /// \param[in] stream  Stream to check
        /// \return            true if the stream is currently enabled
        bool is_stream_enabled(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_is_stream_enabled((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r != 0;
        }

        ///  \brief Retrieves width, in pixels, of a specific stream, equivalent to the width field from the stream's intrinsic
        /// \param[in] stream  Stream 
        /// \return            Width, in pixels of images from this stream
        int get_stream_width(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_stream_width((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves height, in pixels, of a specific stream, equivalent to the height field from the stream's intrinsic
        /// \param[in] stream  Stream
        /// \return            Height, in pixels of images from this stream
        int get_stream_height(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_stream_height((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves pixel format for specific stream
        /// \param[in] stream  Stream
        /// \return            Pixel format of the stream
        format get_stream_format(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_stream_format((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return (format)r;
        }

        /// \brief Retrieves frame rate for specific stream
        /// \param[in] stream  Stream
        /// \return            Frame rate of the stream, in frames per second
        int get_stream_framerate(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_stream_framerate((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves intrinsic camera parameters for specific stream
        /// \param[in] stream  Stream
        /// \return            Intrinsic parameters of the stream
        intrinsics get_stream_intrinsics(stream stream) const
        {
            rs_error * e = nullptr;
            intrinsics intrin;
            rs_get_stream_intrinsics((const rs_device *)this, (rs_stream)stream, &intrin, &e);
            error::handle(e);
            return intrin;
        }

        /// \brief Retrieves intrinsic camera parameters for motion module
        /// \return            Intrinsic parameters
        motion_intrinsics get_motion_intrinsics() const
        {
            rs_error * e = nullptr;
            motion_intrinsics intrinsics;
            rs_get_motion_intrinsics((const rs_device *)this, &intrinsics, &e);
            error::handle(e);
            return intrinsics;
        }

        /// \brief Sets callback for frame arrival event. 
		/// 
		/// The provided callback will be called the instant new frame of given stream becomes available
        /// once callback is set on certain stream type, frames of this type will no longer be available throuhg wait/poll methods (those two approaches are mutually exclusive) 
        /// while wait/poll methods provide consistent set of syncronized frames at the expense of extra latency,
        /// set frame callbacks provides low latency solution with no syncronization
        /// \param[in] stream         Stream 
        /// \param[in] frame_handler  Frame callback to be invoked on every new frame
        /// \return                   Frame rate of the stream, in frames per second
        void set_frame_callback(rs::stream stream, std::function<void(frame)> frame_handler)
        {
            rs_error * e = nullptr;
            rs_set_frame_callback_cpp((rs_device *)this, (rs_stream)stream, new frame_callback(frame_handler), &e);
            error::handle(e);
        }

        ///  \brief Sets callback for motion module event. 
		/// 
		///  The provided callback will be called the instant new motion or timestamp event is available. 
        ///  <b>Note</b>: 
        /// 
        ///  The \c rs_enable_motion_tracking_cpp() method is responsible for activating the motion module on-board the device. 
        ///  One of the services it provides is to produce shared and high-resolution timestamps for all components that are connected to it. 
        ///  For Depth, IR, Color and Fisheye sensors, librealsense takes care of that and copies the timestamps on the relevant frames.
        ///
        ///  However, when you have an external device (such as a compass, magnetometer, light sensor, or other) and wish to synchronize it precisely with image and 
        ///  motion streams, you can connect that sensor to a GPIO that is available on some devices. Every time the sensor signals, you 
        ///  get a timestamp callback with a frame number, source ID, and a timestamp.
        ///  This timestamp callback allows advanced users to synchronize compass events (presumably coming though I2C or some other method) with RealSense data.        

        /// \param[in] motion_handler     Frame callback to be invoke on every new motion event
        /// \param[in] timestamp_handler  Frame callback to be invoke on every new timestamp event (can be left-out)
        /// \return                       Frame rate of the stream, in frames per second
        void enable_motion_tracking(std::function<void(motion_data)> motion_handler, std::function<void(timestamp_data)> timestamp_handler)
        {
            rs_error * e = nullptr;            
            rs_enable_motion_tracking_cpp((rs_device *)this, new motion_callback(motion_handler), new timestamp_callback(timestamp_handler), &e);
            error::handle(e);
        }

        /// \brief Sets the callback for motion module event. 
		///
		/// The provided callback will be called the instant new motion event is available. 
        /// \param[in] motion_handler     Frame callback to be invokes on every new motion event
        /// \return                       Frame rate of the stream, in frames per second
        void enable_motion_tracking(std::function<void(motion_data)> motion_handler)
        {
            rs_error * e = nullptr;            
            rs_enable_motion_tracking_cpp((rs_device *)this, new motion_callback(motion_handler), new timestamp_callback([](rs::timestamp_data data) {}), &e);
            error::handle(e);
        }

        /// \brief Disables events polling
        void disable_motion_tracking(void)
        {
            rs_error * e = nullptr;
            rs_disable_motion_tracking((rs_device *)this, &e);
            error::handle(e);
        }

        /// \brief Checks if data acquisition is active        
        int is_motion_tracking_active()
        {
            rs_error * e = nullptr;
            auto result = rs_is_motion_tracking_active((rs_device *)this,&e);
            error::handle(e);
            return result;
        }


        /// \brief Begins streaming on all enabled streams for this device
        void start(rs::source source = rs::source::video)
        {            
            rs_error * e = nullptr;
            rs_start_source((rs_device *)this, (rs_source)source, &e);
            error::handle(e);
        }

        /// \brief Ends streaming on all streams for this device
        void stop(rs::source source = rs::source::video)
        {
            rs_error * e = nullptr;
            rs_stop_source((rs_device *)this, (rs_source)source, &e);
            error::handle(e);
        }

        /// \brief Determines if device is currently streaming
        /// \return  true if device is currently streaming
        bool is_streaming() const
        {
            rs_error * e = nullptr;
            auto r = rs_is_device_streaming((const rs_device *)this, &e);
            error::handle(e);
            return r != 0;
        }

        /// \brief Retrieves available range of values of supported option
        /// \param[in] option  Option
        /// \param[out] min    Minimum value that is acceptable for this option
        /// \param[out] max    Maximum value that is acceptable for this option
        /// \param[out] step   Granularity of options that accept discrete values, or zero if the option accepts continuous values        
        void get_option_range(option option, double & min, double & max, double & step)
        {
            rs_error * e = nullptr;
            rs_get_device_option_range((rs_device *)this, (rs_option)option, &min, &max, &step, &e);
            error::handle(e);
        }

        /// \brief Retrieves available range of values of supported option
        /// \param[in] option  Option
        /// \param[out] min    Minimum value that is acceptable for this option
        /// \param[out] max    Maximum value that is acceptable for this option
        /// \param[out] step   Granularity of options that accept discrete values, or zero if the option accepts continuous values
        /// \param[out] def    Default value of the option
        void get_option_range(option option, double & min, double & max, double & step, double & def)
        {
            rs_error * e = nullptr;
            rs_get_device_option_range_ex((rs_device *)this, (rs_option)option, &min, &max, &step, &def, &e);
            error::handle(e);
        }

        /// \brief Efficiently retrieves value of arbitrary number of options, using minimal hardware IO
        /// \param[in] options  Array of options that should be queried
        /// \param[in] count    Length of options and values arrays
        /// \param[out] values  Array that receives the values of the queried options
        void get_options(const option * options, size_t count, double * values)
        {
            rs_error * e = nullptr;
            rs_get_device_options((rs_device *)this, (const rs_option *)options, (unsigned int)count, values, &e);
            error::handle(e);
        }

        /// \brief Efficiently sets value of arbitrary number of options, using minimal hardware IO
        /// \param[in] options  Array of options that should be set
        /// \param[in] count    Length of options and values arrays
        /// \param[in] values   Array of values to which the options should be set
        void set_options(const option * options, size_t count, const double * values)
        {
            rs_error * e = nullptr;
            rs_set_device_options((rs_device *)this, (const rs_option *)options, (unsigned int)count, values, &e);
            error::handle(e);
        }

        /// \brief Retrieves current value of single option
        /// \param[in] option  Option
        /// \return            Option value
        double get_option(option option)
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_option((rs_device *)this, (rs_option)option, &e);
            error::handle(e);
            return r;
        }
        
        /// \brief Retrieves device-specific option description
        /// \param[in] option  Option
        /// \return            Option value
        const char * get_option_description(option option)
        {
            rs_error * e = nullptr;
            auto r = rs_get_device_option_description((rs_device *)this, (rs_option)option, &e);
            error::handle(e);
            return r;
        }

        /// \brief Sets current value of single option
        /// \param[in] option  Option
        /// \param[in] value   Option value
        void set_option(option option, double value)
        {
            rs_error * e = nullptr;
            rs_set_device_option((rs_device *)this, (rs_option)option, value, &e);
            error::handle(e);
        }

        /// \brief Blocks until new frames are available
        ///
        void wait_for_frames()
        {
            rs_error * e = nullptr;
            rs_wait_for_frames((rs_device *)this, &e);
            error::handle(e);
        }

        /// \brief Checks if new frames are available, without blocking
        /// \return  true if new frames are available; false if no new frames have arrived.
        bool poll_for_frames()
        {
            rs_error * e = nullptr;
            auto r = rs_poll_for_frames((rs_device *)this, &e);
            error::handle(e);
            return r != 0;
        }

        /// \brief Determines device capabilities
        /// \param[in] capability  Capability to check
        /// \return                true if device has this capability
        bool supports(capabilities capability) const
        {
            rs_error * e = nullptr;
            auto r = rs_supports((rs_device *)this, (rs_capabilities)capability, &e);
            error::handle(e);
            return r? true: false;
        }


        /// \brief Determines device capabilities
        /// \param[in] info_param  Capability to check for support
        /// \return                true if device has this capability
        bool supports(camera_info info_param) const
        {
            rs_error * e = nullptr;
            auto r = rs_supports_camera_info((rs_device *)this, (rs_camera_info)info_param, &e);
            error::handle(e);
            return r ? true : false;
        }

        /// \brief Retrieves time at which the latest frame on a stream was captured
        /// \param[in] stream  Stream of interest
        /// \return            Timestamp of frame, in milliseconds since the device was started
        double get_frame_timestamp(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_frame_timestamp((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves frame number
        /// \param[in] stream  Stream of interest
        /// \return            Number of frame since device was started
        unsigned long long get_frame_number(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_frame_number((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Retrieves contents of latest frame on a stream
        /// \param[in] stream  Stream
        /// \return            Pointer to start of frame data
        const void * get_frame_data(stream stream) const
        {
            rs_error * e = nullptr;
            auto r = rs_get_frame_data((const rs_device *)this, (rs_stream)stream, &e);
            error::handle(e);
            return r;
        }

        /// \brief Sends device-specific data to device
        /// \param[in] type  Type of raw data to send to the device
        /// \param[in] data  Raw data pointer to send
        /// \param[in] size  Size, in bytes of the raw data to send
        void send_blob_to_device(rs::blob_type type, void * data, int size)
        {
            rs_error * e = nullptr;
            rs_send_blob_to_device((rs_device *)this, (rs_blob_type)type, data, size, &e);
            error::handle(e);
        }
    };

    // Stream-insertion overloads: print the human-readable name of each wrapper
    // enum by forwarding to the corresponding C helper (e.g. rs_stream_to_string).
    inline std::ostream & operator << (std::ostream & o, stream stream) { return o << rs_stream_to_string((rs_stream)stream); }
    inline std::ostream & operator << (std::ostream & o, format format) { return o << rs_format_to_string((rs_format)format); }
    inline std::ostream & operator << (std::ostream & o, preset preset) { return o << rs_preset_to_string((rs_preset)preset); }
    inline std::ostream & operator << (std::ostream & o, distortion distortion) { return o << rs_distortion_to_string((rs_distortion)distortion); }
    inline std::ostream & operator << (std::ostream & o, option option) { return o << rs_option_to_string((rs_option)option); }
    inline std::ostream & operator << (std::ostream & o, capabilities capability) { return o << rs_capabilities_to_string((rs_capabilities)capability); }
    inline std::ostream & operator << (std::ostream & o, source src) { return o << rs_source_to_string((rs_source)src); }
    inline std::ostream & operator << (std::ostream & o, event evt) { return o << rs_event_to_string((rs_event_source)evt); }

    /// \brief Severity of the librealsense logger
    ///
    /// NOTE: values are cast directly to/from the C enum rs_log_severity
    /// (see log_callback::on_event and log_to_console below), so the explicit
    /// numbering must stay in sync with it.
    enum class log_severity : int32_t
    {
        debug = 0, /**< Detailed information about ordinary operations */
        info  = 1, /**< Terse information about ordinary operations */
        warn  = 2, /**< Indication of possible failure */
        error = 3, /**< Indication of definite failure */
        fatal = 4, /**< Indication of unrecoverable failure */
        none  = 5, /**< No logging will occur */
    };

    class log_callback : public rs_log_callback
    {
        std::function<void(log_severity, const char *)> on_event_function;
    public:
        explicit log_callback(std::function<void(log_severity, const char *)> on_event) : on_event_function(on_event) {}

        void on_event(rs_log_severity severity, const char * message) override
        {
            on_event_function((log_severity)severity, message);
        }

        void release() override { delete this; }
    };

    /// \brief Directs librealsense log output to the console
    /// \param[in] min_severity  Minimum severity of messages that will be logged
    inline void log_to_console(log_severity min_severity)
    {
        rs_error * err = nullptr;
        rs_log_to_console(static_cast<rs_log_severity>(min_severity), &err);
        error::handle(err);
    }

    /// \brief Directs librealsense log output to a file
    /// \param[in] min_severity  Minimum severity of messages that will be logged
    /// \param[in] file_path     Path of the file to write log messages to
    inline void log_to_file(log_severity min_severity, const char * file_path)
    {
        rs_error * err = nullptr;
        rs_log_to_file(static_cast<rs_log_severity>(min_severity), file_path, &err);
        error::handle(err);
    }

    /// \brief Directs librealsense log output to a user-provided callback
    /// \param[in] min_severity  Minimum severity of messages that will be logged
    /// \param[in] callback      Invoked with the severity and message text of each log event
    inline void log_to_callback(log_severity min_severity, std::function<void(log_severity, const char *)> callback)
    {
        rs_error * err = nullptr;
        // The log_callback adapter is heap-allocated; the library frees it via release().
        rs_log_to_callback_cpp(static_cast<rs_log_severity>(min_severity), new log_callback(callback), &err);
        error::handle(err);
    }

    // Additional utilities
    // Preset helpers. NOTE(review): unlike the member-function wrappers above,
    // these forward to rs_apply_* helpers that take no rs_error out-parameter,
    // so no error translation to exceptions happens here.
    inline void apply_depth_control_preset(device * device, int preset) { rs_apply_depth_control_preset((rs_device *)device, preset); }
    inline void apply_ivcam_preset(device * device, rs_ivcam_preset preset) { rs_apply_ivcam_preset((rs_device *)device, preset); }
    inline void apply_ivcam_preset(device * device, int preset) { rs_apply_ivcam_preset((rs_device *)device, (rs_ivcam_preset)preset); } // duplicate for better backward compatibility with existing applications
}
#endif
