#include <cstdint>
#include <cstdlib>
#include <iostream>
#include <memory>
#include <string>

#include <opencv2/opencv.hpp>

#include <libobsensor/ObSensor.hpp>
#include "libobsensor/hpp/Pipeline.hpp"
#include "libobsensor/hpp/Error.hpp"

// Human-readable names for OBFrameMetadataType values.
// IMPORTANT: entry order must match the OBFrameMetadataType enum — the table
// is indexed directly by the enum's integer value. If the SDK adds new
// metadata types, append the matching names here.
// `static` gives the table internal linkage (file-local); `*const` prevents
// accidental reseating of the entries.
static const char *const metaDataTypes[] = {"TIMESTAMP",
                                            "SENSOR_TIMESTAMP",
                                            "FRAME_NUMBER",
                                            "AUTO_EXPOSURE",
                                            "EXPOSURE",
                                            "GAIN",
                                            "AUTO_WHITE_BALANCE",
                                            "WHITE_BALANCE",
                                            "BRIGHTNESS",
                                            "CONTRAST",
                                            "SATURATION",
                                            "SHARPNESS",
                                            "BACKLIGHT_COMPENSATION",
                                            "HUE",
                                            "GAMMA",
                                            "POWER_LINE_FREQUENCY",
                                            "LOW_LIGHT_COMPENSATION",
                                            "MANUAL_WHITE_BALANCE",
                                            "ACTUAL_FRAME_RATE",
                                            "FRAME_RATE",
                                            "AE_ROI_LEFT",
                                            "AE_ROI_TOP",
                                            "AE_ROI_RIGHT",
                                            "AE_ROI_BOTTOM",
                                            "EXPOSURE_PRIORITY",
                                            "HDR_SEQUENCE_NAME",
                                            "HDR_SEQUENCE_SIZE",
                                            "HDR_SEQUENCE_INDEX",
                                            "LASER_POWER",
                                            "LASER_POWER_LEVEL",
                                            "LASER_STATUS",
                                            "GPIO_INPUT_DATA"};

int main(int argc, char **argv)
try
{
    // 初始化 SDK
    ob::Context context; // 创建上下文
    ob::Context::setLoggerSeverity(OB_LOG_SEVERITY_ERROR);
    std::string deviceNumber = argv[1];
    std::string outputVideoPath = "./" + deviceNumber + ".mp4";
    cv::VideoWriter outputVideo;
    outputVideo.open(outputVideoPath, cv::VideoWriter::fourcc('D', 'I', 'V', 'X'), 30.0, cv::Size(1280, 720));

    if (!outputVideo.isOpened())
    {
        std::cout << "can not open video and write" << std::endl;
        return -1;
    }
    

    // 获取设备列表
    auto deviceList = context.queryDeviceList();
    int devCount = deviceList->deviceCount();
    if (devCount == 0)
    {
        std::cerr << "No device found!" << std::endl;
        return -1;
    }

    // 遍历设备列表，查找目标设备
    std::shared_ptr<ob::Device> targetDevice = nullptr;
    for (size_t i = 0; i < devCount; i++)
    {
        auto device = deviceList->getDevice(i);
        auto devInfo = device->getDeviceInfo();
        std::string sn = devInfo->serialNumber();

        if (sn == deviceNumber)
        {
            targetDevice = device;
            std::cout << "Found target device: " << devInfo->name() << " (SN: " << sn << ")" << std::endl;
            break;
        }
    }

    if (!targetDevice)
    {
        std::cerr << "Target device not found!" << std::endl;
        return -1;
    }
    // Create a pipeline with default device
    auto pipe = std::make_shared<ob::Pipeline>(targetDevice);
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    auto colorProfiles = pipe->getStreamProfileList(OB_SENSOR_COLOR);
    std::shared_ptr<ob::VideoStreamProfile> colorProfile = nullptr;
    try
    {
        // Find the corresponding profile according to the specified format, first look for the y16 format
        colorProfile = colorProfiles->getVideoStreamProfile(1280, 720, OB_FORMAT_BGR, 30);
        // OB_HEIGHT_ANY
    }
    catch (ob::Error &e)
    {
        // If the specified format is not found, search for the default profile to open the stream
        colorProfile = std::const_pointer_cast<ob::StreamProfile>(colorProfiles->getProfile(OB_PROFILE_DEFAULT))->as<ob::VideoStreamProfile>();
    }
    config->enableStream(colorProfile); // 彩色流

    // Start the pipeline with config
    pipe->start(config);

    // Create a window for rendering, and set the resolution of the window
    // auto currentProfile = pipe.getEnabledStreamProfileList()->getProfile(0)->as<ob::VideoStreamProfile>();
    // Window app("ColorViewer", currentProfile->width(), currentProfile->height());
    while (true)
    {
        // Wait for up to 100ms for a frameset in blocking mode.
        auto frameSet = pipe->waitForFrames(100);
        if (frameSet == nullptr)
        {
            continue;
        }

        // get color frame from frameset
        auto colorFrame = frameSet->colorFrame();
        if (colorFrame == nullptr)
        {
            continue;
        }

        // print metadata every 30 frames
        auto index = colorFrame->index();
        if (index % 30 == 0)
        {
            std::cout << "*************************** Color Frame #" << index << " Metadata List ********************************" << std::endl;
            for (int metaDataType = 0; metaDataType < OB_FRAME_METADATA_TYPE_COUNT; metaDataType++)
            {
                // Check if it is supported metaDataType for current frame
                if (colorFrame->hasMetadata((OBFrameMetadataType)metaDataType))
                {
                    // Get the value of the metadata
                    std::cout << metaDataTypes[metaDataType] << ": " << colorFrame->getMetadataValue((OBFrameMetadataType)metaDataType) << std::endl;
                }
                else
                {
                    std::cout << metaDataTypes[metaDataType] << ": "
                              << "unsupported" << std::endl;
                }
            }
            std::cout << "********************************************************************************" << std::endl
                      << std::endl;
        }

        // Render frameset in the window, only color frames are rendered here.
        // app.addToRender(colorFrame);

        auto videoFrame_color = colorFrame->as<ob::VideoFrame>();
        cv::Mat rawMat(videoFrame_color->height(), videoFrame_color->width(), CV_8UC3, videoFrame_color->data());
        outputVideo.write(rawMat);

        cv::imshow("live", rawMat);
        if (cv::waitKey(30) == 27)
        {
            break;
        }
        

    }

    // Stop the Pipeline, no frame data will be generated
    pipe->stop();
    outputVideo.release();
    cv::destroyAllWindows();
    return 0;
}
catch (ob::Error &e)
{
    std::cerr << "function:" << e.getName() << "\nargs:" << e.getArgs() << "\nmessage:" << e.getMessage() << "\ntype:" << e.getExceptionType() << std::endl;
    exit(EXIT_FAILURE);
}
