#include "astra_camera.h"
#include <sys/time.h>
#include <thread>

using namespace std;
using namespace openni;
using namespace cv;

AstraCamera::AstraCamera(int32_t rows, int32_t cols, int32_t fps, int32_t div) : m_cols_(cols), m_rows_(rows), m_fps_(fps), m_div_freq_(div)
{
    m_camera_ready_ = initializeCamera();
}

// Tear down the camera in dependency order: the depth stream is stopped and
// destroyed before the device that owns it is closed, and OpenNI is shut down
// last. Buffered frames are released afterwards.
AstraCamera::~AstraCamera()
{
    m_depth_stream_.stop();
    m_depth_stream_.destroy();

    m_device_.close();
    OpenNI::shutdown();

    // Drop any frames still queued for consumers.
    m_depth_frame_.clear();
    m_color_queue_.clear();
}

bool AstraCamera::setVideoMode(const SensorInfo *sinfo, VideoStream &stream, PixelFormat pf)
{
    const Array<VideoMode> &modes = sinfo->getSupportedVideoModes();

    int32_t target = -1;
    for (int32_t i = 0; i < modes.getSize(); i++)
    {
        //PIXEL_FORMAT_DEPTH_1_MM = 100, PIXEL_FORMAT_DEPTH_100_UM
        printf("%i: %ix%i, %i fps, %i format\n", i,
               modes[i].getResolutionX(),
               modes[i].getResolutionY(),
               modes[i].getFps(),
               modes[i].getPixelFormat());

        if (modes[i].getResolutionX() == m_cols_ && modes[i].getResolutionY() == m_rows_ && modes[i].getPixelFormat() == pf && modes[i].getFps() == m_fps_)
        {
            target = i;
        }
    }

    if (target == -1)
    {
        printf("there is no suitable video mode for %dx%d fps=%d\n", m_cols_, m_rows_, m_fps_);
        return false;
    }
    else
    {
        printf("SetVideoMode target:%d \n", target);
        Status rc = stream.setVideoMode(modes[target]);
        return rc == STATUS_OK;
    }
}

// Bring up OpenNI, open the first attached device, create and configure the
// depth stream, and enable depth-to-color registration when supported.
//
// On success m_pStream_[DEPTH] points at the configured depth stream and the
// function returns true; every failure path logs and returns false.
//
// NOTE(review): the stream is never start()ed here — presumably that happens
// elsewhere before getData() waits on it; confirm.
bool AstraCamera::initializeCameraDepth()
{
    Status rc;

    // Initialize OpenNI
    rc = OpenNI::initialize();
    if (rc != STATUS_OK)
    {
        printf("Initialize failed\n%s\n", OpenNI::getExtendedError());
        return false;
    }

    // Open the first device OpenNI can see.
    rc = m_device_.open(ANY_DEVICE);
    if (rc != STATUS_OK)
    {
        printf("Couldn't open device\n%s\n", OpenNI::getExtendedError());
        return false;
    }

    // Create depth stream
    rc = m_depth_stream_.create(m_device_, SENSOR_DEPTH);
    if (rc != STATUS_OK)
    {
        printf("Couldn't create depth stream\n%s\n", OpenNI::getExtendedError());
        return false;
    }
    if (m_depth_stream_.isValid())
    {
        // `mode_ok` was previously a `bool rc` shadowing the Status above.
        bool mode_ok = setVideoMode(m_device_.getSensorInfo(SENSOR_DEPTH), m_depth_stream_, PIXEL_FORMAT_DEPTH_1_MM);
        if (!mode_ok)
        {
            cout << "failed to set depth video mode\n.";
            return false;
        }
        // Log the mode the stream actually ended up with.
        VideoMode rightvm = m_depth_stream_.getVideoMode();
        printf("DepthMode: %ix%i, %i fps, %i format \n", rightvm.getResolutionX(), rightvm.getResolutionY(),
               rightvm.getFps(), rightvm.getPixelFormat());
    }

    // Align depth pixels with the color image when the hardware supports it.
    if (m_device_.isImageRegistrationModeSupported(IMAGE_REGISTRATION_DEPTH_TO_COLOR))
    {
        m_device_.setImageRegistrationMode(IMAGE_REGISTRATION_DEPTH_TO_COLOR);
        cout << "Depth channel is registered to color channel. " << endl;
    }
    else
    {
        cout << "Depth to color registration is not supported by this m_device_." << endl;
    }

    // Publish the stream so getData() can wait on it.
    m_pStream_[DEPTH] = &m_depth_stream_;
    return true;
}

bool AstraCamera::initializeCameraColor()
{
    V4L2DeviceParameters param("/dev/video0", V4L2_PIX_FMT_YUYV, m_cols_, m_rows_, 15);
    // videoCapture = make_shared<V4l2Capture>();
    // videoCapture.reset(V4l2Capture::create(param, V4l2Access::IOTYPE_MMAP));

    return true;
}

// Initialize the depth (OpenNI) and color (V4L2) pipelines.
//
// The original discarded both results and unconditionally returned true, so
// m_camera_ready_ could claim readiness with a half-initialized camera.
// Returns true only when both pipelines came up.
bool AstraCamera::initializeCamera()
{
    // Run both unconditionally so the logs show the outcome of each path
    // even when the first one fails.
    const bool depth_ok = initializeCameraDepth();
    const bool color_ok = initializeCameraColor();

    return depth_ok && color_ok;
}
#if 0
// Disabled helper (compiled out via `#if 0`), kept for reference:
// wall-clock timestamp in whole milliseconds since the Unix epoch.
long long AstraCamera::get_current_timestamp()
{
    struct timeval te;
    gettimeofday(&te, NULL);                                         // get current time
    long long milliseconds = te.tv_sec * 1000LL + te.tv_usec / 1000; // calculate milliseconds
    return milliseconds;
}
#endif

// Convert one luma sample plus its shared chroma pair into a packed RGB
// value: R lands in byte 0, G in byte 1 and B in byte 2 of the returned
// integer's native representation, matching the layout that
// convert_yuv_to_rgb_buffer() writes out.
int AstraCamera::convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    // Saturate a component into the displayable 0..255 range.
    auto clamp8 = [](int c) { return c < 0 ? 0 : (c > 255 ? 255 : c); };

    // Integer YUV -> RGB conversion (truncating double math, as before).
    const int red = clamp8(static_cast<int>(y + (1.370705 * (v - 128))));
    const int green = clamp8(static_cast<int>(y - (0.698001 * (v - 128)) - (0.337633 * (u - 128))));
    const int blue = clamp8(static_cast<int>(y + (1.732446 * (u - 128))));

    // Pack bytes through a pointer so the in-memory order is R,G,B
    // regardless of how the integer is later reinterpreted.
    unsigned int packed = 0;
    unsigned char *bytes = (unsigned char *)&packed;
    bytes[0] = red;
    bytes[1] = green;
    bytes[2] = blue;
    return packed;
}

// Expand a packed YUYV (YUV 4:2:2) buffer into 24-bit RGB.
//
// Every four input bytes (Y0 U Y1 V) describe two horizontally adjacent
// pixels that share one chroma pair; each pixel becomes three output bytes.
// `rgb` must hold at least width * height * 3 bytes. Always returns 0.
int AstraCamera::convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    const unsigned int total = width * height * 2; // YUYV: 2 bytes per pixel
    unsigned int dst = 0;

    for (unsigned int src = 0; src < total; src += 4)
    {
        // Macro-pixel layout in memory: Y0 U Y1 V.
        const int y0 = yuv[src + 0];
        const int u = yuv[src + 1];
        const int y1 = yuv[src + 2];
        const int v = yuv[src + 3];

        // First pixel of the pair.
        unsigned int px = convert_yuv_to_rgb_pixel(y0, u, v);
        rgb[dst++] = px & 0xff;
        rgb[dst++] = (px >> 8) & 0xff;
        rgb[dst++] = (px >> 16) & 0xff;

        // Second pixel reuses the same chroma samples.
        px = convert_yuv_to_rgb_pixel(y1, u, v);
        rgb[dst++] = px & 0xff;
        rgb[dst++] = (px >> 8) & 0xff;
        rgb[dst++] = (px >> 16) & 0xff;
    }
    return 0;
}

// Return a copy of `img` with channels 0 and 2 swapped (BGR -> RGB).
// The input matrix is not modified.
Mat AstraCamera::BGRToRGB(Mat img)
{
    Mat out(img.rows, img.cols, CV_8UC3);
    for (int row = 0; row < img.rows; ++row)
    {
        // Walk both rows pixel by pixel, swapping the first and third channel.
        const Vec3b *src = img.ptr<Vec3b>(row);
        Vec3b *dst = out.ptr<Vec3b>(row);
        for (int col = 0; col < img.cols; ++col)
        {
            dst[col] = Vec3b(src[col][2], src[col][1], src[col][0]);
        }
    }
    return out;
}
// Frame byte counts. The originals multiplied m_rows_ by itself, which
// undersizes the buffers for any non-square resolution (e.g. 640x480) while
// convert_yuv_to_rgb_buffer still writes cols * rows worth of pixels.
#define YUV422_SIZE (m_cols_ * m_rows_ * 2)
#define RGB_SIZE (m_cols_ * m_rows_ * 3)
// Fetch one depth event (via OpenNI) and one color frame (via V4L2).
//
// Returns false when the camera is not ready, the stream wait/read fails, or
// the color device is unavailable. On success `color_data` holds an RGB
// image and the depth frame (when one arrived) is queued on m_depth_frame_.
// NOTE(review): `depth_data` is never written here (the assignment was
// already commented out) — confirm callers consume depth via m_depth_frame_.
bool AstraCamera::getData(cv::Mat &color_data, cv::Mat &depth_data)
{
    //depth
    shared_ptr<VideoFrameRef> frame;

    // If camera not ready
    if (m_camera_ready_ == false)
        return false;

    // Block (bounded by READ_WAIT_TIMEOUT) until some stream has data.
    int32_t changedIndex;
    Status rc = OpenNI::waitForAnyStream(m_pStream_, 1, &changedIndex, READ_WAIT_TIMEOUT);
    if (rc != STATUS_OK)
    {
        printf("Wait failed!\n%s\n", OpenNI::getExtendedError());
        return false;
    }

    switch (changedIndex)
    {
    case DEPTH:
        frame = make_shared<VideoFrameRef>();
        rc = m_depth_stream_.readFrame(frame.get());
        if (rc != STATUS_OK)
        {
            cout << "Read depth frame failed: " << OpenNI::getExtendedError() << endl;
            return false;
        }
        m_depth_frame_.push_back(frame);
        //depth_data(m_cols_, m_rows_, CV_16UC1, (uint16_t *)frame->getData());

        break;
    case COLOR:
        cout << "Read color frame " << endl;
        break;
    case IR:
        cout << "Read IR frame " << endl;
        break;
    default:
        cout << "wait frame failed!" << endl;
        return false;
    }

    // color: guard against a color pipeline that never came up (videoCapture
    // was dereferenced unconditionally before).
    if (!videoCapture)
    {
        printf("Read Color failed !\n");
        return false;
    }

    timeval timeout;
    timeout.tv_sec = 0; // was left uninitialized: the poll saw garbage seconds
    timeout.tv_usec = 10000;
    bool isReadable = (videoCapture->isReadable(&timeout) == 1);
    if (!isReadable)
    {
        printf("Read Color failed !\n");
        return false;
    }

    // Heap buffers replace the original variable-length stack arrays (VLAs
    // are a compiler extension, and a frame is ~1 MB of stack).
    std::vector<unsigned char> yuv(YUV422_SIZE);
    videoCapture->read(reinterpret_cast<char *>(yuv.data()), yuv.size());

    std::vector<unsigned char> rgb(RGB_SIZE);
    convert_yuv_to_rgb_buffer(yuv.data(), rgb.data(), m_cols_, m_rows_);

    // cv::Mat takes (rows, cols); the original had the arguments swapped.
    cv::Mat rgb_mat(m_rows_, m_cols_, CV_8UC3, rgb.data());
    // BGRToRGB allocates a fresh Mat, so color_data does not alias the
    // local `rgb` buffer after we return.
    color_data = BGRToRGB(rgb_mat);

    return true;
}

// Query the depth stream for its horizontal and vertical field of view and
// store them in the output parameters.
//
// Returns false if either property query fails; the outputs are only
// meaningful when the function returns true.
bool AstraCamera::getCameraFov(float &horizontalFov, float &verticalFov)
{
    int size = sizeof(float);

    if (m_depth_stream_.getProperty(ONI_STREAM_PROPERTY_HORIZONTAL_FOV, &horizontalFov, &size) != STATUS_OK)
    {
        return false;
    }

    return m_depth_stream_.getProperty(ONI_STREAM_PROPERTY_VERTICAL_FOV, &verticalFov, &size) == STATUS_OK;
}

// Apply a textual configuration command.
//
// "LASER_ON" / "LASER_OFF" toggle both the LDP extension and the laser
// emitter via device properties; any other message is ignored.
void AstraCamera::setConfig(const std::string message)
{
    const bool laser_on = (message == "LASER_ON");
    const bool laser_off = (message == "LASER_OFF");
    if (!laser_on && !laser_off)
        return;

    // Both properties take a 4-byte int flag; 1 enables, 0 disables.
    const int dataSize = 4;
    int ldp_en = laser_on ? 1 : 0;
    int laser_en = laser_on ? 1 : 0;

    m_device_.setProperty(openni::OBEXTENSION_ID_LDP_EN, (uint8_t *)&ldp_en, dataSize);
    m_device_.setProperty(openni::OBEXTENSION_ID_LASER_EN, (uint8_t *)&laser_en, dataSize);
}
