#ifndef CAMERA_NODE_HPP
#define CAMERA_NODE_HPP

#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstdint>
#include <list>
#include <string>
#include <thread>
#include <utility>
#include <vector>

#include "rclcpp/rclcpp.hpp"
#include "sensor_msgs/msg/image.hpp"
#include <std_msgs/msg/header.hpp>
// #include <opencv2/highgui/highgui.hpp>
// #include <opencv2/imgproc.hpp>
// #include <cv_bridge/cv_bridge.h>

#include "common.hpp"
#include "mycommon.hpp"

class CameraNode : public rclcpp::Node
{
public:
    CameraNode(
      const std::string& node_name = "camera_node", 
      unsigned int device = 0)
    : Node(node_name, rclcpp::NodeOptions().use_intra_process_comms(true)), 
      m_canceled(false)
    {
        // Initialize Camera
        initCamera(device);
        // Create a publisher on the output topic.
        m_rgb_pub = this->create_publisher<sensor_msgs::msg::Image>("rgb/image_raw", rclcpp::SensorDataQoS());
        m_depth_pub = this->create_publisher<sensor_msgs::msg::Image>("depth/image_raw", rclcpp::SensorDataQoS());
        m_lir_pub = this->create_publisher<sensor_msgs::msg::Image>("lir/image_raw", rclcpp::SensorDataQoS());
        m_rir_pub = this->create_publisher<sensor_msgs::msg::Image>("rir/image_raw", rclcpp::SensorDataQoS());
        LOGD("create publisher success!");

        // Create the camera reading loop.
        m_thread = std::thread(std::bind(&CameraNode::loop, this));
        LOGD("create thread success!");
    }

    virtual ~CameraNode()
    {
        ASSERT_OK(MV3D_RGBD_Stop(m_handle));
        ASSERT_OK(MV3D_RGBD_CloseDevice(&m_handle));
        ASSERT_OK(MV3D_RGBD_Release());
    }

    void loop()
    {
        // While running...
        LOGD("looping ..");
        while (rclcpp::ok() && !m_canceled.load())
        {
            //int nRet = MV3D_RGBD_FetchFrame(this->m_handle, this->m_pstFrameData, 5000);
            int nRet = MV3D_RGBD_FetchFrame(m_handle, m_pstFrameData, 5000);
            LOGD("FetchFrame! nRet = %d", nRet);
            if (MV3D_RGBD_OK == nRet)
            {
                std::list<std::thread> lstThread;
                lstThread.push_back(std::thread(std::bind(&CameraNode::pubRGB, this)));
                lstThread.push_back(std::thread(std::bind(&CameraNode::pubDepth, this)));
                lstThread.push_back(std::thread(std::bind(&CameraNode::pubLIR, this)));
                lstThread.push_back(std::thread(std::bind(&CameraNode::pubRIR, this)));
                LOGD("thread list success!");

                for (auto& th : lstThread)
                {
                    th.join();
                }
            }
        }
    }

    void initCamera(unsigned int nIndex)
    {
        LOGD("Initialize camere");
        ASSERT_OK( MV3D_RGBD_Initialize() );

        MV3D_RGBD_VERSION_INFO stVersion;
        ASSERT_OK( MV3D_RGBD_GetSDKVersion(&stVersion) );
        LOGD("dll version: %d.%d.%d", stVersion.nMajor, stVersion.nMinor, stVersion.nRevision);

        unsigned int nDevNum;
        ASSERT_OK(MV3D_RGBD_GetDeviceNumber(DeviceType_USB, &nDevNum));
        LOGD("MV3D_RGBD_GetDeviceNumber success! nDevNum:%d.", nDevNum);
        ASSERT(nDevNum);

        // find device
        LOG("---------------------------------------------------------------\r\n");
        std::vector<MV3D_RGBD_DEVICE_INFO> devs(nDevNum);
        ASSERT_OK(MV3D_RGBD_GetDeviceList(DeviceType_USB, &devs[0], nDevNum, &nDevNum));
        for (unsigned int i = 0; i < nDevNum; i++)
        {  
            LOG("Index[%d]. SerialNum[%s] IP[%s] name[%s].\r\n", i, devs[i].chSerialNumber, devs[i].SpecialInfo.stNetInfo.chCurrentIp, devs[i].chModelName);
        }
        LOG("---------------------------------------------------------------");

        // open camera
        
        ASSERT_OK(MV3D_RGBD_OpenDevice(&m_handle, &devs[nIndex]));
        LOGD("OpenDevice success.");

        // Start work
        ASSERT_OK(MV3D_RGBD_Start(m_handle));
        LOGD("Start work success.");
    }

    // B分量
    inline int YUV2B(unsigned char y, unsigned char u)
    {
        return (int)(y + 1.732446 * (u - 128));
    }

    // G分量
    inline int YUV2G(unsigned char y, unsigned char u, unsigned char v)
    {
        return (int)(y - 0.698001 * (u - 128) - 0.703125 * (v - 128));
    }

    // R分量
    inline int YUV2R(unsigned char y, unsigned char v)
    {
        return (int)(y + 1.370705  * (v - 128));
    }

    // YUV422转BGR8
    int YUYVToBGR24_Native(unsigned char* pYUV, int width, int height)
    {
        if (m_pBGR24 == NULL)
        {
            m_pBGR24 = (uint8_t *)malloc(sizeof(uint8_t) * width * height * 3);
        }
        if (width < 1 || height < 1 || pYUV == NULL || m_pBGR24 == NULL)
        {
            return 0;
        }
        //const long len = width * height;
        unsigned char* yData = pYUV;
        unsigned char* vData = pYUV;
        unsigned char* uData = pYUV;
        int y, x, k;

        int bgr[3];
        int yIdx, uIdx, vIdx, idx;
        for (y = 0; y < height; y++)
        {
            for (x = 0; x < width; x++)
            {
                yIdx = 2 * ((y*width) + x);
                uIdx = 4 * (((y*width) + x) >> 1) + 1;
                vIdx = 4 * (((y*width) + x) >> 1) + 3;

                bgr[0] = YUV2B(yData[yIdx], uData[uIdx]);
                bgr[1] = YUV2G(yData[yIdx], uData[uIdx], vData[vIdx]);                                  
                bgr[2] = YUV2R(yData[yIdx], vData[vIdx]);

                for (k = 0; k < 3; k++)
                {
                    idx = (y * width + x) * 3 + k;
                    if (bgr[k] >= 0 && bgr[k] <= 255)
                    {
                        m_pBGR24[idx] = bgr[k];
                    }
                    else
                    {
                        m_pBGR24[idx] = (bgr[k] < 0) ? 0 : 255;
                    }
                }
            }
        }
        return 1;
    }

    void pubRGB()
    {
        for (unsigned int i = 0; i < m_pstFrameData->nImageCount; i++)
        {
            if (ImageType_YUV422 == m_pstFrameData->stImageData[i].enImageType)
            {
                LOGD("MV3D_RGBD_FetchFrame success: framenum (%d) height(%d) width(%d)  len (%d)!", m_pstFrameData->stImageData[i].nFrameNum,
                    m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, m_pstFrameData->stImageData[i].nDataLen);
                YUYVToBGR24_Native(m_pstFrameData->stImageData[i].pData, m_pstFrameData->stImageData[i].nWidth, m_pstFrameData->stImageData[i].nHeight);
                LOGD("YUYVToBGR24_Native success.");
                cv::Mat image(m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, CV_8UC3, m_pBGR24);

                // Create a new unique_ptr to an Image message for storage.
                sensor_msgs::msg::Image::UniquePtr msg(new sensor_msgs::msg::Image());

                // Pack the OpenCV image into the ROS image.
                set_now(msg->header.stamp);
                msg->header.frame_id = "camera_frame";
                msg->height = image.rows;
                msg->width = image.cols;
                msg->encoding = mat_type2encoding(image.type());
                msg->is_bigendian = false;
                msg->step = static_cast<sensor_msgs::msg::Image::_step_type>(image.step);
                msg->data.assign(image.datastart, image.dataend);
                LOGD("msg height(%d) width(%d)", msg->height, msg->width);
                m_rgb_pub->publish(std::move(msg));  // Publish.
                //sensor_msgs::ImagePtr imageMsg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", image).toImageMsg();
                // sensor_msgs::msg::Image msg;
                // std_msgs::msg::Header _header;
                // _header.stamp = this->get_clock()->now();
                // _header.frame_id = "camera_frame";
                // cv_bridge::CvImage(_header, "bgr8", image).toImageMsg(msg);
                // //sensor_msgs::msg::Image::UniquePtr msg(&_msg);
                // m_rgb_pub->publish(msg);
            }
        }
    }

    void pubDepth()
    {
        for (unsigned int i = 0; i < m_pstFrameData->nImageCount; i++)
        {
            if (ImageType_Depth == m_pstFrameData->stImageData[i].enImageType)
            {
                LOGD("MV3D_RGBD_FetchFrame success: framenum (%d) height(%d) width(%d)  len (%d)!", m_pstFrameData->stImageData[i].nFrameNum,
                    m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, m_pstFrameData->stImageData[i].nDataLen);
                cv::Mat depth_image(m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, CV_16UC1, m_pstFrameData->stImageData[i].pData);
                cv::Mat temp;
                cv::convertScaleAbs(depth_image, temp, 0.05);
                cv::Mat image;
                cv::applyColorMap(temp, image, cv::COLORMAP_JET);

                // Create a new unique_ptr to an Image message for storage.
                sensor_msgs::msg::Image::UniquePtr msg(new sensor_msgs::msg::Image());

                // Pack the OpenCV image into the ROS image.
                set_now(msg->header.stamp);
                msg->header.frame_id = "camera_frame";
                msg->height = image.rows;
                msg->width = image.cols;
                msg->encoding = mat_type2encoding(image.type());
                msg->is_bigendian = false;
                msg->step = static_cast<sensor_msgs::msg::Image::_step_type>(image.step);
                msg->data.assign(image.datastart, image.dataend);
                m_depth_pub->publish(std::move(msg));  // Publish.

                // sensor_msgs::ImagePtr imageMsg = cv_bridge::CvImage(std_msgs::Header(), sensor_msgs::image_encodings::TYPE_8UC3, image).toImageMsg();
                // m_depth_pub->publish(*imageMsg);
            }
        }
    }

    void pubLIR()
    {
        for (unsigned int i = 0; i < m_pstFrameData->nImageCount; i++)
        {
            if (i == 2)
            {
                LOGD("MV3D_RGBD_FetchFrame success: framenum (%d) height(%d) width(%d)  len (%d)!", m_pstFrameData->stImageData[i].nFrameNum,
                    m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, m_pstFrameData->stImageData[i].nDataLen);
                cv::Mat image(m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, CV_8UC1, m_pstFrameData->stImageData[i].pData);

                // Create a new unique_ptr to an Image message for storage.
                sensor_msgs::msg::Image::UniquePtr msg(new sensor_msgs::msg::Image());

                // Pack the OpenCV image into the ROS image.
                set_now(msg->header.stamp);
                msg->header.frame_id = "camera_frame";
                msg->height = image.rows;
                msg->width = image.cols;
                msg->encoding = mat_type2encoding(image.type());
                msg->is_bigendian = false;
                msg->step = static_cast<sensor_msgs::msg::Image::_step_type>(image.step);
                msg->data.assign(image.datastart, image.dataend);
                m_lir_pub->publish(std::move(msg));  // Publish.
                // sensor_msgs::ImagePtr imageMsg = cv_bridge::CvImage(std_msgs::Header(), "mono8", image).toImageMsg();
                // m_lir_pub->publish(*imageMsg);
            }
        }
    }

    void pubRIR()
    {
        for (unsigned int i = 0; i < m_pstFrameData->nImageCount; i++)
        {
            if (i == 3)
            {
                LOGD("MV3D_RGBD_FetchFrame success: framenum (%d) height(%d) width(%d)  len (%d)!", m_pstFrameData->stImageData[i].nFrameNum,
                    m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, m_pstFrameData->stImageData[i].nDataLen);
                cv::Mat image(m_pstFrameData->stImageData[i].nHeight, m_pstFrameData->stImageData[i].nWidth, CV_8UC1, m_pstFrameData->stImageData[i].pData);

                // Create a new unique_ptr to an Image message for storage.
                sensor_msgs::msg::Image::UniquePtr msg(new sensor_msgs::msg::Image());

                // Pack the OpenCV image into the ROS image.
                set_now(msg->header.stamp);
                msg->header.frame_id = "camera_frame";
                msg->height = image.rows;
                msg->width = image.cols;
                msg->encoding = mat_type2encoding(image.type());
                msg->is_bigendian = false;
                msg->step = static_cast<sensor_msgs::msg::Image::_step_type>(image.step);
                msg->data.assign(image.datastart, image.dataend);
                m_rir_pub->publish(std::move(msg));  // Publish.

                // sensor_msgs::ImagePtr imageMsg = cv_bridge::CvImage(std_msgs::Header(), "mono8", image).toImageMsg();
                // m_rir_pub->publish(*imageMsg);
            }
        }
    }

private:
    rclcpp::Publisher<sensor_msgs::msg::Image>::SharedPtr m_rgb_pub;
    rclcpp::Publisher<sensor_msgs::msg::Image>::SharedPtr m_depth_pub;
    rclcpp::Publisher<sensor_msgs::msg::Image>::SharedPtr m_lir_pub;
    rclcpp::Publisher<sensor_msgs::msg::Image>::SharedPtr m_rir_pub;
    std::atomic<bool> m_canceled;
    std::thread m_thread;
    void* m_handle;
    MV3D_RGBD_FRAME_DATA m_stFrameData = {0};
    MV3D_RGBD_FRAME_DATA* m_pstFrameData = &m_stFrameData;
    unsigned char* m_pBGR24;
};

#endif