#include "UVCCamera.hpp"

namespace wmj {

/**
 * @brief Thin wrapper around ioctl() that retries when the call is
 *        interrupted by a signal (EINTR).
 *
 * @param fd      device file descriptor
 * @param request driver request code (V4L2 VIDIOC_* command)
 * @param arg     request-specific argument block
 * @return the ioctl result: 0 on success, -1 on failure
 */
static int xioctl(int fd, int request, void* arg) {
    int rc;
    int attempts = 0;
    for (;;) {
        rc = ioctl(fd, request, arg);  // 0 on success, -1 on failure
        // Warn loudly if we have been spinning on EINTR for a long time.
        if (++attempts > 100) {
            std::cerr << "xioctl has circulate for " << attempts
                      << " times, error num: " << errno << "\n";
        }
        // Retry only while the call was interrupted by a signal.
        if (!(rc == -1 && errno == EINTR)) {
            break;
        }
    }
    return rc;
}
/**
 * @brief Open every camera listed in the YAML file at cameraPath, set up the
 *        V4L2 format/buffers/stream, apply the per-camera parameters and
 *        start a detached capture thread running mainRun().
 *
 * @param cameraPath path of the YAML configuration listing the cameras
 */
UVCCamera::UVCCamera(std::string cameraPath) {
    loadSerialOpenDevice(cameraPath);
    // ResetImageSize();
    init3rdParty();
    initDevice();
    initBuffers();
    startStream();
    setParam();

    // Mark every camera's image as "not updated yet"
    for (auto &pair : m_handles) {
        m_ifupdate[pair.first] = false;
    }

    // NOTE(review): the thread is detached, so the destructor cannot join it;
    // shutdown relies on the is_running flag — verify the teardown ordering.
    _captureThread = std::thread(&wmj::UVCCamera::mainRun, this);
    _captureThread.detach();
    if (m_debug) {
        std::cout << "usbCapture start!" << std::endl;
    }
}

/**
 * @brief Try to open a camera device node and return its file descriptor.
 *
 * @param source      device path, e.g. /dev/video0
 * @param orientation human-readable camera name used in log messages
 * @return the open descriptor, or -1 when the device cannot be opened
 */
int UVCCamera::tryOpenDevice(std::string source, std::string orientation) {
    // open() lives in <fcntl.h>; it returns the device descriptor.
    const int handle = open(source.c_str(), O_RDWR);
    if (handle < 0) {
        std::cerr << "Can not open device: " << orientation << std::endl;
        return -1;
    }
    std::cout << "Open device " + orientation + " successfully" << std::endl;
    return handle;
}
void UVCCamera::readCameraParam(std::string orientation,
                                CameraParam& cameraParamTemp) {
    cv::FileStorage fs(UVC_CONFIG, cv::FileStorage::READ);
    fs[orientation]["setParam"] >> cameraParamTemp.SetParam;  // 是否设置参数
    fs[orientation]["rate"] >> cameraParamTemp.VideoRate;
    fs[orientation]["height"] >> cameraParamTemp.Height;
    std::cout << "cameraParamTemp.Height" << cameraParamTemp.Height
              << std::endl;
    fs[orientation]["width"] >> cameraParamTemp.Width;
    std::cout << "cameraParamTemp.Width" << cameraParamTemp.Width << std::endl;
    fs[orientation]["exposuretime"] >> cameraParamTemp.ExposureTime;
    fs[orientation]["brightness"] >> cameraParamTemp.Brightness;
    fs[orientation]["contrast"] >> cameraParamTemp.Contrast;
    fs[orientation]["gamma"] >> cameraParamTemp.Gamma;
    fs[orientation]["satuation"] >> cameraParamTemp.Satuation;
    // fs[orientation]["whiteBalance"]["G"] >> cameraParamTemp.WhiteBalance.G;
    fs[orientation]["whiteBalance"]["B"] >> cameraParamTemp.WhiteBalance.B;
    fs[orientation]["whiteBalance"]["R"] >> cameraParamTemp.WhiteBalance.R;
    fs[orientation]["gain"] >> cameraParamTemp.Gain;
    fs[orientation]["cameraMatrix"] >> cameraParamTemp.CameraMatrix;
    fs[orientation]["distCoeffs"] >> cameraParamTemp.DistCoeffs;
    fs.release();
}
// Enumerate all cameras from the YAML file and open them one by one; on
// success the camera's settings are stored into the member maps.
/**
 * @brief Enumerate cameras from the YAML file, open each device and load its
 *        parameters; successfully opened cameras are recorded in m_handles /
 *        m_source / m_buffers / m_param.
 *
 * @param cameraPath path of the camera list YAML file
 * @return 0 when at least one camera was opened, 1 otherwise
 */
int UVCCamera::loadSerialOpenDevice(std::string cameraPath) {
    std::cout << "cameraPath: " << cameraPath << std::endl;
    cv::FileStorage fs(cameraPath, cv::FileStorage::READ);
    fs["debug"] >> m_debug;
    int cameraCount = 0;
    for (auto node : fs["cameras"]) {
        std::string orientation = node["orientation"];
        std::cout << "orientation  " << orientation << std::endl;
        std::cout << orientation << std::endl;
        std::string source = node["source"];
        // Only record the camera's settings after it was opened successfully.
        m_handles[orientation] = tryOpenDevice(source, orientation);
        if (m_handles[orientation] != -1) {
            // Read this camera's parameters and apply them before storing.
            CameraParam cameraParamTemp;
            readCameraParam(orientation, cameraParamTemp);
            if (setCameraParam(orientation, cameraParamTemp) == 0) {
                m_source[orientation] = source;
                m_buffers[orientation] = initBuffer;

                m_param[orientation] = cameraParamTemp;
            } else {
                // Bug fix: message used to read "param faild"
                std::cout << "Set device " + orientation + " param failed"
                          << std::endl;
            }
        } else {
            // Bug fix: message used to read "Open device <name>faild"
            std::cout << "Open device " + orientation + " failed" << std::endl;
            continue;
        }
        // NOTE(review): a camera is counted even when applying its parameters
        // failed — confirm this is intended.
        cameraCount++;
    }
    fs.release();
    m_device_num = cameraCount;
    std::cout << "open camera count:" << m_device_num << std::endl;
    return int(m_device_num < 1);
}
/**
 * @brief Initialize third-party helpers: creates the turbojpeg decompressor
 *        handle used by getFrameRaw().
 * @return always 0 (the handle is not checked for NULL here)
 */
int UVCCamera::init3rdParty() {
    _JPEG_Decoder = tjInitDecompress();  // turbo_jpeg
    return 0;
}
/**
 * @brief Release the turbojpeg decompressor handle.
 * @return always 0
 */
int UVCCamera::free3rdParty() {
    tjDestroy(_JPEG_Decoder);  // turbo_jpeg
    return 0;
}
/**
 * @brief Configure the capture format on every opened camera via
 *        VIDIOC_S_FMT: MJPEG, per-camera width/height, progressive scan.
 *
 * Exits the process when a device rejects the format.
 * @return 0 on success
 */
int UVCCamera::initDevice() {
    for (auto pairs : m_handles) {
        v4l2_format cam_format{};  // zero-init: never pass stack garbage to the driver
        cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        cam_format.fmt.pix.width = m_param[pairs.first].Width;
        cam_format.fmt.pix.height = m_param[pairs.first].Height;
        cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
        cam_format.fmt.pix.field = V4L2_FIELD_NONE;  // progressive scan
        // cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;    // interlaced
        // cam_format.fmt.pix.field = V4L2_FIELD_ANY;           // driver default
        int result = xioctl(pairs.second, VIDIOC_S_FMT, &cam_format);
        if (result < 0) {
            std::cerr << pairs.first << ": Can not set frame format"
                      << std::endl;
            exit(-1);
            return -1;
        }
        std::cout << pairs.first << ": Init device successfully" << std::endl;
    }
    // Bug fix: the function used to return inside the loop, so only the FIRST
    // camera was ever initialized; it also fell off the end with no return
    // value when m_handles was empty (undefined behaviour).
    is_running = true;
    return 0;
}

/**
 * @brief Request and mmap V4L2 capture buffers for every opened camera.
 *
 * For each camera: asks the driver for 3 MMAP buffers (VIDIOC_REQBUFS),
 * queries each buffer (VIDIOC_QUERYBUF) and maps it into process memory.
 * The mappings are stored in m_buffers[orientation].
 *
 * @return 0 on success, -2 when mmap fails (may also exit() on ioctl errors)
 */
int UVCCamera::initBuffers() {
    for (auto pairs : m_handles) {
        v4l2_requestbuffers req{0};
        req.count = 3;  // buffer queue depth
        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_MMAP;
        int result = xioctl(pairs.second, VIDIOC_REQBUFS, &req);
        if (result < 0) {
            std::cerr << pairs.first << ": Request frame buffers failed";
            exit(-1);
            return -1;
        }
        if (req.count < 2) {
            std::cerr
                << pairs.first
                << ": Request frame buffers while insufficient buffer memory";
        }
        printf("BUFFER_LENGTH: %d\n", req.count);
        // NOTE(review): this overwrites whatever loadSerialOpenDevice stored
        // in m_buffers without releasing it — verify initBuffer needs no free.
        m_buffers[pairs.first] =
            (videoBuffer*)calloc(req.count, sizeof(videoBuffer));
        // NOTE(review): n_buffers is a member shared by all cameras; it ends
        // up holding the LAST camera's count — confirm all counts are equal.
        for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
            v4l2_buffer buf{0};
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = n_buffers;
            result = xioctl(pairs.second, VIDIOC_QUERYBUF, &buf);
            if (result < 0) {
                std::cerr << pairs.first << " VIDIOC_QUERYBUF failed\n";
                exit(-2);
                return -1;
            }
            m_buffers[pairs.first][n_buffers].len = buf.length;
            m_buffers[pairs.first][n_buffers].pBuffer =
                (uint8_t*)mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                               MAP_SHARED, pairs.second, buf.m.offset);
            if (MAP_FAILED == m_buffers[pairs.first][n_buffers].pBuffer) {
                std::cerr << "mmap buffer failed\n";
                return -2;
            }
        }
        std::cout << pairs.first << ": init buffers successfully" << std::endl;
    }
    // Bug fix: the function used to return req.count inside the loop, so only
    // the FIRST camera ever got buffers (and an empty map fell off the end
    // with no return value, which is undefined behaviour).
    return 0;
}

/**
 * @brief Stop capture and release all resources (stream, mmap buffers,
 *        turbojpeg handle, device descriptors).
 */
UVCCamera::~UVCCamera() {
    // Bug fix: signal the detached capture thread to stop BEFORE tearing the
    // decoder and buffers down, otherwise mainRun() may still be decoding
    // into memory we are about to unmap. The short sleep gives the detached
    // thread (which we cannot join) time to leave getFrameRaw().
    is_running = false;
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    stopStream();
    free3rdParty();
    freeBuffers();
    closeDevice();
    std::cout << "UsbCapture successfully exited\n";
}
/**
 * @brief Close every camera file descriptor.
 * @return 0 when all devices closed, -1 on the first close() failure
 */
int UVCCamera::closeDevice() {
    for (auto pairs : m_handles) {
        int result = close(pairs.second);
        if (result < 0) {
            std::cerr << "Can not close video device";
            return -1;
        }
    }
    // Bug fix: returning inside the loop meant only the first device was ever
    // closed, and an empty map fell off the end with no return value (UB).
    return 0;
}
/**
 * @brief Unmap every mmap'ed frame buffer and release the buffer arrays.
 * @return 0 on success, -1 when munmap fails
 */
int UVCCamera::freeBuffers() {
    for (auto pairs : m_buffers) {
        for (uint32_t i = 0; i < n_buffers; i++) {
            if (-1 == munmap(pairs.second[i].pBuffer, pairs.second[i].len)) {
                std::cerr << "munmap buffer failed\n";
                return -1;
            }
        }
        // Bug fix: the array is allocated with calloc() in initBuffers(), so
        // it must be released with free(); `delete` on calloc'd memory is
        // undefined behaviour.
        free(pairs.second);
    }
    return 0;
}

/**
 * @brief Queue every mapped buffer (VIDIOC_QBUF) and switch each camera into
 *        streaming mode (VIDIOC_STREAMON).
 * @return 0 on success (exits the process on driver errors)
 */
int UVCCamera::startStream() {
    for (auto pairs : m_handles) {
        // Hand all buffers to the driver before enabling the stream.
        for (uint32_t idx = 0; idx < n_buffers; ++idx) {
            v4l2_buffer qbuf{0};
            qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            qbuf.memory = V4L2_MEMORY_MMAP;
            qbuf.index = idx;
            if (xioctl(pairs.second, VIDIOC_QBUF, &qbuf) == -1) {
                std::cerr << "VIDIOC_QBUF failed" << std::endl;
                exit(-1);
                return -1;
            }
        }
        enum v4l2_buf_type streamType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (xioctl(pairs.second, VIDIOC_STREAMON, &streamType) == -1) {
            std::cerr << "VIDIOC_STREAMON error" << std::endl;
            exit(-2);
            return -2;
        }
        std::cout << pairs.first << ": start stream successfully" << std::endl;
    }
    return 0;
}

/**
 * @brief Switch every camera out of streaming mode (VIDIOC_STREAMOFF).
 * @return 0 on success, -1 when a device rejects the request
 */
int UVCCamera::stopStream() {
    enum v4l2_buf_type streamType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    for (auto pairs : m_handles) {
        if (xioctl(pairs.second, VIDIOC_STREAMOFF, &streamType) == -1) {
            std::cerr << "VIDIOC_STREAMOFF error\n";
            return -1;
        }
    }
    return 0;
}

/***********************video**********************************/

// Consecutive-failure counters for VIDIOC_DQBUF / VIDIOC_QBUF; reset to 0 on
// every successful call.
int INT_VODIOC_DQBUF = 0;
int INT_VODIOC_QBUF = 0;
/**
 * @brief Dequeue one frame from every camera, JPEG-decode it into the
 *        per-camera image buffer, then hand the V4L2 buffer back to the
 *        driver and raise the camera's update flag.
 *
 * @return 0 on success (or clean shutdown), -1 DQBUF failed, -2 QBUF failed,
 *         -3 invalid buffer index, -4 JPEG decode failed
 */
int UVCCamera::getFrameRaw() {
    // Capture-thread shutdown path: bail out once is_running is cleared.
    if(!is_running)
    {
    	std::cout <<"V4L2 Thread exit."<<std::endl;
	return 0;
    }
    for (auto pairs : m_handles) {
        v4l2_buffer queue_buf{0};
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        // Take a filled buffer from the driver's outgoing queue.
        if (-1 == xioctl(pairs.second, VIDIOC_DQBUF, &queue_buf)) {
            std::cout << "VIDIOC_DQBUF error\n";
            if (++INT_VODIOC_DQBUF > 20) {
                std::cerr << "VIDIOC_DQBUF FATAL, circulate more than "
                          << INT_VODIOC_DQBUF << " times";
                // exit(-1);
            }
            return -1;
        }
        INT_VODIOC_DQBUF = 0;
        // NOTE(review): queue_buf.index is unsigned, so this only matches if
        // the driver hands back 0xFFFFFFFF — confirm the check is meaningful.
        if (queue_buf.index == -1) {
            return -3;
        }
        cv::Mat temp = cv::Mat::zeros(
            cv::Size(m_param[pairs.first].Width, m_param[pairs.first].Height),
            CV_8UC3);
        // Decode the MJPEG payload straight into the BGR cv::Mat.
        int ret = tjDecompress2(
            _JPEG_Decoder, m_buffers[pairs.first][queue_buf.index].pBuffer,
            m_buffers[pairs.first][queue_buf.index].len, temp.data,
            m_param[pairs.first].Width,
            m_param[pairs.first].Width * tjPixelSize[TJPF_BGR],
            m_param[pairs.first].Height, TJPF_BGR,
            TJFLAG_FASTUPSAMPLE | TJFLAG_FASTDCT);
        if (ret < 0) {
            std::cout << "解码错误,错误码:" << ret << std::endl;
            return -4;
        }
        // Publish the decoded frame under the per-camera lock.
        m_mutex[pairs.first].lock();
        temp.copyTo(m_imgbuffers[pairs.first]);
        m_mutex[pairs.first].unlock();
        // Re-queue the buffer so the driver can refill it.
        v4l2_buffer dq_buf{0};
        dq_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        dq_buf.memory = V4L2_MEMORY_MMAP;
        dq_buf.index = queue_buf.index;
        if (-1 == xioctl(pairs.second, VIDIOC_QBUF, &dq_buf)) {
            std::cout << "VIDIOC_QBUF error\n";
            if (++INT_VODIOC_QBUF > 20) {
                std::cerr << "VODIOC_QBUF FATAL, circulate more than "
                          << INT_VODIOC_QBUF << " times";
                // exit(-1);
            }
            return -2;
        }
        m_ifupdate[pairs.first] = true;
        INT_VODIOC_QBUF = 0;
    }
    return 0;
}

/**
 * @brief Capture-thread entry point: keeps pulling frames while is_running
 *        is set, then reports its exit.
 */
void UVCCamera::mainRun() {
    // Rate rate(30);
    while (is_running) {
        const int status = getFrameRaw();
        if (status == -4) {
            // Decode error: skip this frame and try the next one.
            continue;
        }
    }
    std::cout << "Main Thread has successfully exited~" << std::endl;
}

/*********************capture state************************/

/// @brief Intrinsic camera matrix loaded from the YAML file for this camera.
/// @note operator[] default-constructs an entry for unknown orientations.
cv::Mat UVCCamera::getCameraMatrix(std::string orientation) {
    return m_param[orientation].CameraMatrix;
}

/// @brief Distortion coefficients loaded from the YAML file for this camera.
/// @note operator[] default-constructs an entry for unknown orientations.
cv::Mat UVCCamera::getDistCoeffs(std::string orientation) {
    return m_param[orientation].DistCoeffs;
}

int UVCCamera::setCameraParam(std::string orientation,
                              CameraParam cameraParamTemp) {
    if (m_param[orientation].SetParam) {
        if (m_param[orientation].Brightness != -1) {
            setBrightness(orientation, m_param[orientation].Brightness);
        }
        if (m_param[orientation].Contrast != -1) {
            setContrast(orientation, m_param[orientation].Contrast);
        }
        if (m_param[orientation].ExposureTime != -1) {
            setExposureTime(orientation, m_param[orientation].ExposureTime);
        }
        if (m_param[orientation].Gamma != -1) {
            setGamma(orientation, m_param[orientation].Gamma);
        }
        if (m_param[orientation].Satuation != -1) {
            setSatuation(orientation, m_param[orientation].Satuation);
        }
        //if (m_param[orientation].WhiteBalance.B != -1) {
        //    setWhiteBalance(orientation, m_param[orientation].WhiteBalance.B);
        //}
	if(m_param[orientation].WhiteBalance.R != -1 && m_param[orientation].WhiteBalance.B != -1)
	{
	    setWhiteBalance(orientation,cv::Point2i(m_param[orientation].WhiteBalance.B,m_param[orientation].WhiteBalance.R));
	}
        if (m_param[orientation].Gain != -1) {
            setGain(orientation, m_param[orientation].Gain);
        }
    }
    return 0;
}
/**
 * @brief Apply the stored YAML parameters to every camera whose SetParam
 *        flag is enabled; fields equal to -1 are skipped.
 * @return always 0
 */
int UVCCamera::setParam() {
    // Iterate by const reference: CameraParam holds cv::Mat members, so the
    // previous by-value loop copied every entry on each iteration.
    for (const auto& pairs : m_param) {
        if (!pairs.second.SetParam) {
            continue;
        }
        if (pairs.second.Brightness != -1) {
            setBrightness(pairs.first, pairs.second.Brightness);
        }
        if (pairs.second.Contrast != -1) {
            setContrast(pairs.first, pairs.second.Contrast);
        }
        if (pairs.second.ExposureTime != -1) {
            setExposureTime(pairs.first, pairs.second.ExposureTime);
        }
        if (pairs.second.Gamma != -1) {
            setGamma(pairs.first, pairs.second.Gamma);
        }
        if (pairs.second.Satuation != -1) {
            setSatuation(pairs.first, pairs.second.Satuation);
        }
        if (pairs.second.WhiteBalance.B != -1 &&
            pairs.second.WhiteBalance.R != -1) {
            setWhiteBalance(pairs.first,
                            cv::Point2i(pairs.second.WhiteBalance.B,
                                        pairs.second.WhiteBalance.R));
        }
        if (pairs.second.Gain != -1) {
            setGain(pairs.first, pairs.second.Gain);
        }
    }
    return 0;
}

/**
 * @brief Switch the camera to manual exposure and program the absolute
 *        exposure time.
 * @return 0 on success, -1 if auto-exposure cannot be disabled,
 *         -2 if the exposure value is rejected
 */
int UVCCamera::setExposureTime(std::string orientation, int value) {
    const int fd = m_handles[orientation];
    v4l2_control ctrl;
    // First disable automatic exposure...
    ctrl.id = V4L2_CID_EXPOSURE_AUTO;
    ctrl.value = V4L2_EXPOSURE_MANUAL;  // V4L2_EXPOSURE_SHUTTER_PRIORITY
    if (xioctl(fd, VIDIOC_S_CTRL, &ctrl) < 0) {
        std::cerr << "Close Auto Exposure error\n";
        return -1;
    }
    // ...then set the absolute exposure time.
    ctrl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
    ctrl.value = value;
    if (xioctl(fd, VIDIOC_S_CTRL, &ctrl) < 0) {
        std::cerr << "Set MANUAL Exposure error\n";
        return -2;
    }
    return 0;
}

/**
 * @brief Set the camera brightness control (V4L2_CID_BRIGHTNESS).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setBrightness(std::string orientation, int value) {
    v4l2_control ctrl;
    ctrl.id = V4L2_CID_BRIGHTNESS;
    ctrl.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &ctrl) < 0) {
        printf("Set Brightness error\n");
        return -1;
    }
    return 0;
}

/**
 * @brief Set the camera gamma control (V4L2_CID_GAMMA).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setGamma(std::string orientation, int value) {
    v4l2_control ctrl;
    ctrl.id = V4L2_CID_GAMMA;
    ctrl.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &ctrl) < 0) {
        printf("Set Gamma error\n");
        return -1;
    }
    return 0;
}

/**
 * @brief Set the camera contrast control (V4L2_CID_CONTRAST).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setContrast(std::string orientation, int value) {
    v4l2_control ctrl;
    ctrl.id = V4L2_CID_CONTRAST;
    ctrl.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &ctrl) < 0) {
        printf("Set Contrast error\n");
        return -1;
    }
    return 0;
}

/**
 * @brief Set the camera saturation control (V4L2_CID_SATURATION).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setSatuation(std::string orientation, int value) {
    v4l2_control ctrl;
    ctrl.id = V4L2_CID_SATURATION;
    ctrl.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &ctrl) < 0) {
        printf("Set Satuation error\n");
        return -1;
    }
    return 0;
}

/**
 * @brief Set the white balance of one camera.
 *
 * value encodes (x = blue gain, y = red gain):
 *   (-1,-1)   -> do nothing
 *   ( 0, 0)   -> switch the auto-white-balance control to manual
 *   otherwise -> program V4L2_CID_BLUE_BALANCE / V4L2_CID_RED_BALANCE for
 *                each non-negative component
 *
 * @return 0 on success, -1 when manual mode cannot be set,
 *         -2 when a channel gain is rejected
 */
int UVCCamera::setWhiteBalance(std::string orientation, cv::Point2i value) {
    int ret;
    v4l2_control control_s;
    if (value == cv::Point2i(-1, -1)) {
        return 0;
    } else {
        if (value != cv::Point2i(0, 0)) {
            if (value.x >= 0) {
                control_s.id = V4L2_CID_BLUE_BALANCE;
                control_s.value = value.x;
                if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &control_s) <
                    0) {
                    // Bug fix: message used to read "Set Bule Balance error"
                    printf("Set Blue Balance error\n");
                    value.x = -2;  // remember the failure, still try red
                }
            }
            if (value.y >= 0) {
                control_s.id = V4L2_CID_RED_BALANCE;
                control_s.value = value.y;
                if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &control_s) <
                    0) {
                    printf("Set Red Balance error\n");
                    value.y = -2;
                }
            }
            if (value.x == -2 || value.y == -2) {
                return -2;
            }
        } else {
            // NOTE(review): V4L2_CID_AUTO_WHITE_BALANCE is a boolean control,
            // while V4L2_WHITE_BALANCE_MANUAL belongs to the preset control —
            // confirm the driver accepts this value here.
            control_s.id = V4L2_CID_AUTO_WHITE_BALANCE;
            control_s.value = V4L2_WHITE_BALANCE_MANUAL;
            ret = xioctl(m_handles[orientation], VIDIOC_S_CTRL, &control_s);
            if (ret < 0) {
                printf("Set White Balance error\n");
                return -1;
            }
        }
    }

    return 0;
}

/**
 * @brief Set the white balance as a colour temperature in Kelvin
 *        (V4L2_CID_WHITE_BALANCE_TEMPERATURE, typical range 2800-6500).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setWhiteBalance(std::string orientation, int value) {
    v4l2_control ctrl;
    ctrl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
    ctrl.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &ctrl) < 0) {
        printf("Set White Balance temperature error\n");
        return -1;
    }
    return 0;
}

/**
 * @brief Set the camera sharpness control (V4L2_CID_SHARPNESS).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setSharpness(std::string orientation, int value) {
    v4l2_control control_s;
    control_s.id = V4L2_CID_SHARPNESS;
    control_s.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &control_s) < 0) {
        // Bug fix: the message used to say "Set Gain error" (copy-paste).
        printf("Set Sharpness error\n");
        return -1;
    }

    return 0;
}

/**
 * @brief Set the camera gain control (V4L2_CID_GAIN).
 * @return 0 on success, -1 on ioctl failure
 */
int UVCCamera::setGain(std::string orientation, int value) {
    v4l2_control ctrl;
    ctrl.id = V4L2_CID_GAIN;
    ctrl.value = value;
    if (xioctl(m_handles[orientation], VIDIOC_S_CTRL, &ctrl) < 0) {
        printf("Set Gain error\n");
        return -1;
    }
    return 0;
}

/**
 * @brief Block until every camera has published a fresh frame, copying each
 *        into img (the last camera's frame wins).
 *
 * @param img destination image (overwritten)
 * @return 0 on success
 */
int UVCCamera::operator>>(cv::Mat& img) {
    for (auto pairs : m_handles) {
        // Wait until the capture thread publishes a new frame.
        while (!m_ifupdate[pairs.first]) {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
        }
        // Copy the frame under the per-camera lock.
        m_mutex[pairs.first].lock();
        m_imgbuffers[pairs.first].copyTo(img);

        // Consume the update flag after copying.
        m_ifupdate[pairs.first] = false;
        m_mutex[pairs.first].unlock();
    }
    // Bug fix: the function is declared to return int but had no return
    // statement at all (undefined behaviour) — getImage() forwards this value.
    return 0;
}

/**
 * @brief Blocking fetch of the newest frame; forwards operator>>'s result.
 */
int UVCCamera::getImage(cv::Mat& img) {
    return *this >> img;
}
/**
 * @brief Copy the latest frame of one specific camera into img (non-blocking;
 *        does not wait for a fresh frame).
 * @return always 0
 */
int UVCCamera::getImage(std::string orientation, cv::Mat& img) {
    std::lock_guard<std::mutex> guard(m_mutex[orientation]);
    m_imgbuffers[orientation].copyTo(img);
    return 0;
}

// 0 read; 1 pause/suspend; 2 leave the loop and stop the thread
/**
 * @brief Probe whether the per-camera image mutexes are currently free.
 *
 * NOTE(review): the captureState argument is never used, and the function
 * returns after probing only the first camera's mutex — confirm the intended
 * semantics against the comment above.
 *
 * @param captureState requested capture state (currently ignored)
 * @return 0 when the first probed mutex could be locked, -1 otherwise
 */
int UVCCamera::setCaptureState(uint8_t captureState) {
    for (auto pairs : m_handles) {
        if (m_mutex[pairs.first].try_lock()) {
            m_mutex[pairs.first].unlock();
            return 0;
        }
        printf("Set Capture State failed\n");
    }
    return -1;
}
/**
 * The stat structure content matches what `ls -l` shows, e.g.
 * crw-rw----+  1 root   video    81,     0 Nov 12 13:11 video0
 * The first character tells devices from files (a UVC camera is 'c', a video
 * file is '-'):
 * d: directory
 * -: regular file
 * p: pipe (FIFO)
 * l: symbolic link
 * b: block device
 * c: character device
 * s: socket
 */
bool UVCCamera::checkVideo(std::string path) {
    // The original implementation is commented out below; until it is
    // restored, explicitly report "not a video file".
    // Bug fix: the function is declared bool but previously had NO return
    // statement at all, which is undefined behaviour for every caller.
    // struct stat st;
    // // 确定文件类型
    // if (access(path.c_str(), F_OK) < 0) {
    //     printf("video or device path %s : %s", path.c_str(),
    //     strerror(errno)); exit(0);
    // }
    // stat(path.c_str(), &st);
    // if (S_ISCHR(st.st_mode)) {  // 是字符设备
    //     m_useVideo = false;
    // } else if (S_ISREG(st.st_mode)) {  // 是一般文件
    //     m_useVideo = true;
    // } else
    //     return false;
    // // bool isVideo = (path.find(".avi", 0) != std::string::npos ||
    // //                 path.find(".mp4", 0) != std::string::npos ||
    // //                 path.find(".mkv", 0) != std::string::npos);
    // // _useVideo = isVideo;
    // return m_useVideo;
    return false;
}

/**
 * @brief Query each camera's currently-active format (VIDIOC_G_FMT) and
 *        overwrite the configured Width/Height with the driver's values.
 * @return 0 on success, 1 when a format query fails
 */
int UVCCamera::ResetImageSize() {
    for (auto pairs : m_handles) {
        struct v4l2_fmtdesc fmtdesc = {0};
        fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        // Walk the driver's format list; its only effect here is exhausting
        // the enumeration (the entries themselves are discarded).
        while (0 == xioctl(pairs.second, VIDIOC_ENUM_FMT, &fmtdesc)) {
            fmtdesc.index++;
        }
        struct v4l2_format fmt = {0};
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (-1 == xioctl(pairs.second, VIDIOC_G_FMT, &fmt)) {
            perror("Querying Pixel Format\n");
            return 1;
        }
        m_param[pairs.first].Width = fmt.fmt.pix.width;
        m_param[pairs.first].Height = fmt.fmt.pix.height;
    }
    // Bug fix: declared int but previously fell off the end without returning
    // a value (undefined behaviour); also dropped the unused fourcc buffer.
    return 0;
}

/**
 * @brief Dump each camera's driver capabilities, cropping limits, supported
 *        pixel formats, selected mode, frame rate and the current value of
 *        every common V4L2 control to stdout.
 * @return 0 on success, 1 when a capability/crop/format query fails
 */
int UVCCamera::infoPrint() {
    for (auto pairs : m_handles) {
        struct v4l2_capability caps = {};
        if (-1 == xioctl(pairs.second, VIDIOC_QUERYCAP, &caps)) {
            perror("Querying Capabilities");
            return 1;
        }
        printf(
            "Driver Caps:\n"
            "  Driver: \"%s\"\n"
            "  Card: \"%s\"\n"
            "  Bus: \"%s\"\n"
            "  Version: %d.%d\n"
            "  Capabilities: %08x\n",
            caps.driver, caps.card, caps.bus_info, (caps.version >> 16) & 0xff,
            (caps.version >> 24) & 0xff, caps.capabilities);
        struct v4l2_cropcap cropcap = {0};
        cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (-1 == xioctl(pairs.second, VIDIOC_CROPCAP, &cropcap)) {
            perror("Querying Cropping Capabilities");
            return 1;
        }
        printf(
            "Camera Cropping:\n"
            "  Bounds: %dx%d+%d+%d\n"
            "  Default: %dx%d+%d+%d\n"
            "  Aspect: %d/%d\n",
            cropcap.bounds.width, cropcap.bounds.height, cropcap.bounds.left,
            cropcap.bounds.top, cropcap.defrect.width, cropcap.defrect.height,
            cropcap.defrect.left, cropcap.defrect.top,
            cropcap.pixelaspect.numerator, cropcap.pixelaspect.denominator);

        int support_grbg10 = 0;

        // Enumerate every pixel format the driver advertises.
        struct v4l2_fmtdesc fmtdesc = {0};
        fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        char fourcc[5] = {0};
        char c, e;
        printf("  FMT : CE Desc\n--------------------\n");
        while (0 == xioctl(pairs.second, VIDIOC_ENUM_FMT, &fmtdesc)) {
            strncpy(fourcc, (char*)&fmtdesc.pixelformat, 4);
            if (fmtdesc.pixelformat == V4L2_PIX_FMT_SGRBG10)
                support_grbg10 = 1;
            c = fmtdesc.flags & 1 ? 'C' : ' ';  // compressed flag
            e = fmtdesc.flags & 2 ? 'E' : ' ';  // emulated flag
            printf("  %s: %c%c %s\n", fourcc, c, e, fmtdesc.description);
            fmtdesc.index++;
        }

        // Report the format currently selected on the device.
        struct v4l2_format fmt = {0};
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (-1 == xioctl(pairs.second, VIDIOC_G_FMT, &fmt)) {
            perror("Querying Pixel Format\n");
            return 1;
        }
        strncpy(fourcc, (char*)&fmt.fmt.pix.pixelformat, 4);
        printf(
            "Selected Camera Mode:\n"
            "  Width: %d\n"
            "  Height: %d\n"
            "  PixFmt: %s\n"
            "  Field: %d\n",
            fmt.fmt.pix.width, fmt.fmt.pix.height, fourcc, fmt.fmt.pix.field);

        struct v4l2_streamparm streamparm = {0};
        streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (-1 == xioctl(pairs.second, VIDIOC_G_PARM, &streamparm)) {
            perror("Querying Frame Rate\n");
            return 1;
        }
        printf("Frame Rate:  %f\n====================\n",
               (float)streamparm.parm.capture.timeperframe.denominator /
                   (float)streamparm.parm.capture.timeperframe.numerator);

        struct v4l2_control control_g = {0};

        // Exposure time
        control_g.id = V4L2_CID_EXPOSURE_ABSOLUTE;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get Exposure error\n");
        } else {
            printf("EXPOSURE MODE:%d\n", control_g.value);
        }
        // Color saturation
        control_g.id = V4L2_CID_SATURATION;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get Saturation error\n");
        } else {
            printf("Saturation:%d\n", control_g.value);
        }
        // Contrast
        control_g.id = V4L2_CID_CONTRAST;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get Contrast error\n");
        } else {
            printf("Contrast:%d\n", control_g.value);
        }
        // Gain
        control_g.id = V4L2_CID_GAIN;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get GAIN error\n");
        } else {
            printf("GAIN:%d\n", control_g.value);
        }
        // Gamma
        control_g.id = V4L2_CID_GAMMA;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get Gamma error\n");
        } else {
            printf("Gamma:%d\n", control_g.value);
        }
        // Brightness
        control_g.id = V4L2_CID_BRIGHTNESS;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get Brightness error\n");
        } else {
            printf("Brightness:%d\n", control_g.value);
        }
        // White balance
        control_g.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get White Balance error\n");
        } else {
            printf("White Balance:%d\n", control_g.value);
        }
        // Hue
        control_g.id = V4L2_CID_HUE;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get HUE error\n");
        } else {
            printf("HUE:%d\n", control_g.value);
        }
        // Backlight compensation
        control_g.id = V4L2_CID_BACKLIGHT_COMPENSATION;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get BACKLIGHT_COMPENSATION error\n");
        } else {
            printf("BACKLIGHT_COMPENSATION:%d\n", control_g.value);
        }
        // Sharpness
        control_g.id = V4L2_CID_SHARPNESS;
        if (xioctl(pairs.second, VIDIOC_G_CTRL, &control_g) < 0) {
            printf("Get SHARPNESS error\n");
        } else {
            printf("SHARPNESS:%d\n", control_g.value);
        }
    }
    return 0;
}

}  // namespace wmj
