#include "smart_camera.hpp"

/*
 * Issue an ioctl, retrying while the call is interrupted (EINTR) or the
 * device is temporarily not ready (EAGAIN — expected on O_NONBLOCK fds).
 * Sleeps 10 ms between attempts. Returns 0 on success, otherwise the
 * final ioctl return value (-1 with errno set on a hard failure).
 */
static int camera_source_ioctl(int fd, int req, void* arg)
{
    for (;;) {
        const int rc = ioctl(fd, req, arg);
        if (0 == rc)
            return rc;                                   /* success */
        if (-1 == rc && errno != EINTR && errno != EAGAIN)
            return rc;                                   /* hard failure */

        /* transient condition: back off for 10 ms, then retry */
        struct timespec delay;
        delay.tv_sec  = 0;
        delay.tv_nsec = 10000000;
        nanosleep(&delay, NULL);
    }
}

/**
 * @brief Open and configure a V4L2 device for mmap streaming capture.
 *
 * Opens @p device, verifies V4L2 capture + streaming capability, applies
 * the requested format, maps @p bufcnt driver buffers into user space,
 * queues them all and starts the stream.
 *
 * @param device path of the video device node (e.g. "/dev/video0")
 * @param bufcnt number of mmap buffers to request from the driver
 * @param width  requested frame width in pixels
 * @param height requested frame height in pixels
 * @param fmt    V4L2 pixel-format fourcc; 0 selects V4L2_PIX_FMT_NV12
 * @return 0 on success, -1 on failure. On failure, everything acquired so
 *         far is released via CameraSourceDeinit(); the process is NOT
 *         terminated (previous code called exit(), which a library
 *         routine must not do).
 */
int SmartCamera::CameraSourceInit(const char *device, uint32_t bufcnt, uint32_t width, uint32_t height, uint32_t fmt)
{
    struct v4l2_capability     cap;
    struct v4l2_format         vfmt;
    struct v4l2_requestbuffers req;
    struct v4l2_buffer         buf;
    enum   v4l2_buf_type       type;
    uint32_t i;
    CamSource_t *ctx = &m_camSource;

    memset(&cap, 0, sizeof(cap));

    /* Open the video device. O_NONBLOCK makes VIDIOC_DQBUF return EAGAIN
     * instead of blocking; camera_source_ioctl() converts that into a
     * 10 ms polling loop. */
    ctx->bufcnt = bufcnt;
    ctx->buf = NULL;    /* so a FAIL before calloc() doesn't let Deinit munmap garbage */
    ctx->fd = open(device, O_RDWR | O_CLOEXEC | O_NONBLOCK, 0);
    if (ctx->fd < 0) {
        perror("open cam device");
        goto FAIL;
    }

    /* Enumerate video inputs (informational only). */
    {
        struct v4l2_input input;

        memset(&input, 0, sizeof(input));   /* driver expects reserved fields zeroed */
        input.index = 0;
        while (!camera_source_ioctl(ctx->fd, VIDIOC_ENUMINPUT, &input)) {
            log_debug("input devices:%s\n", input.name);
            ++input.index;
        }
    }

    /* The fd must belong to a V4L2 device. */
    if (0 != camera_source_ioctl(ctx->fd, VIDIOC_QUERYCAP, &cap)) {
        log_debug("Not v4l2 compatible\n");
        goto FAIL;
    }

    /* The device must support video capture (single- or multi-planar). */
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) && !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)) {
        log_debug("Capture not supported\n");
        goto FAIL;
    }

    /* The device must support streaming (mmap/userptr) I/O. */
    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        log_debug("Streaming IO Not Supported\n");
        goto FAIL;
    }

    /* Build the format request; use the multi-planar buffer type when the
     * device is multi-planar. (pix and pix_mp share the leading
     * width/height/pixelformat layout, so pix.* assignments apply to both.) */
    memset(&vfmt, 0, sizeof(vfmt));
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
        vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    vfmt.fmt.pix.width = width;
    vfmt.fmt.pix.height = height;

    /* Enumerate the pixel formats the device supports (informational only). */
    {
        struct v4l2_fmtdesc fmtdesc;

        memset(&fmtdesc, 0, sizeof(fmtdesc));
        fmtdesc.index = 0;
        fmtdesc.type = vfmt.type;
        while (!camera_source_ioctl(ctx->fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
            log_debug("fmt name: [%s]\n", fmtdesc.description);
            log_debug("fmt pixelformat: '%c%c%c%c', description = '%s'\n", fmtdesc.pixelformat & 0xFF,
                    (fmtdesc.pixelformat >> 8) & 0xFF, (fmtdesc.pixelformat >> 16) & 0xFF,
                    (fmtdesc.pixelformat >> 24) & 0xFF, fmtdesc.description);
            fmtdesc.index++;
        }
    }

    /* Select the requested pixel format, defaulting to NV12. */
    vfmt.fmt.pix.pixelformat = fmt;

    if (!vfmt.fmt.pix.pixelformat)
        vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;

    type = (enum v4l2_buf_type)vfmt.type;
    ctx->type = (enum v4l2_buf_type)vfmt.type;

    if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_S_FMT, &vfmt)) {
        log_debug("VIDIOC_S_FMT\n");
        goto FAIL;
    }

    /* Read back the format actually applied (drivers may adjust it). */
    if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_G_FMT, &vfmt)) {
        log_debug("VIDIOC_G_FMT\n");
        goto FAIL;
    }

    log_debug("get width %u height %u", vfmt.fmt.pix.width, vfmt.fmt.pix.height);

    /* Request memory-mapped buffers from the driver. */
    memset(&req, 0, sizeof(req));
    req.count  = ctx->bufcnt;
    req.type   = type;
    req.memory = V4L2_MEMORY_MMAP;
    if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_REQBUFS, &req)) {
        log_debug("Device does not support mmap\n");
        goto FAIL;
    }

    /* Drivers may legally grant fewer buffers; this code requires exactly
     * what was asked for, since ctx->bufcnt drives every later loop. */
    if (req.count != ctx->bufcnt) {
        log_debug("Device buffer count mismatch\n");
        goto FAIL;
    }

    /* Map each driver buffer into user space. */
    ctx->buf = (struct CamFrame_t *)calloc(ctx->bufcnt, sizeof(struct CamFrame_t));
    if (NULL == ctx->buf) {
        log_debug("out of memory\n");
        goto FAIL;
    }

    for (i = 0 ; i < ctx->bufcnt; i++) {
        struct v4l2_plane planes[FMT_NUM_PLANES];

        memset(&buf, 0, sizeof(buf));
        memset(&planes, 0, sizeof(planes));

        buf.type = ctx->type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        /* Multi-planar QUERYBUF needs a caller-supplied plane array. */
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_QUERYBUF, &buf)) {
            log_debug("ERROR: VIDIOC_QUERYBUF\n");
            goto FAIL;
        }

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == buf.type) {
            /* NOTE: only plane 0 is mapped; multi-plane formats with
             * separate plane buffers would need FMT_NUM_PLANES mappings. */
            ctx->buf[i].length = buf.m.planes[0].length;
            ctx->buf[i].start =
                mmap(NULL /* start anywhere */,
                     buf.m.planes[0].length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     ctx->fd, buf.m.planes[0].m.mem_offset);
        }
        else {
            ctx->buf[i].length = buf.length;
            ctx->buf[i].start =
                mmap(NULL /* start anywhere */,
                     buf.length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */,
                     ctx->fd, buf.m.offset);
        }

        if (MAP_FAILED == ctx->buf[i].start) {
            log_debug("ERROR: Failed to map device frame buffers\n");
            goto FAIL;
        }
    }

    /* Queue every buffer on the device's incoming queue. */
    for (i = 0; i < ctx->bufcnt; i++ ) {
        struct v4l2_plane planes[FMT_NUM_PLANES];

        memset(&buf, 0, sizeof(buf));
        memset(&planes, 0, sizeof(planes));
        buf.type    = type;
        buf.memory  = V4L2_MEMORY_MMAP;
        buf.index   = i;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_QBUF, &buf)) {
            log_debug("ERROR: VIDIOC_QBUF %d\n", i);
            goto FAIL;
        }
    }

    /* Start streaming. */
    if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_STREAMON, &type)) {
        log_debug("ERROR: VIDIOC_STREAMON\n");
        goto FAIL;
    }

    /* Optionally skip a few frames at start-up. */
    // for (i = 0; i < ctx->bufcnt; i++ ) {
    //     int idx = CameraSourceGetFrame();
    //     if (idx >= 0)
    //         CameraSourcePutFrame(idx);
    // }
    log_debug("init success.");
    return 0;

FAIL:
    CameraSourceDeinit();
    return -1;
}

/**
 * @brief Stop streaming and release all camera resources.
 *
 * Safe to call multiple times and on a partially-initialized context
 * (CameraSourceInit() calls this on its failure path).
 *
 * @return 0 always (a negative fd means there is nothing to release).
 */
int SmartCamera::CameraSourceDeinit(void)
{
    enum v4l2_buf_type type;
    uint32_t i;
    CamSource_t *ctx = &m_camSource;

    if (ctx->fd < 0)
        return 0;

    /* Stop the video stream. */
    type = ctx->type;
    camera_source_ioctl(ctx->fd, VIDIOC_STREAMOFF, &type);

    /* Unmap the frame buffers. start/length were cached at init time, so
     * no VIDIOC_QUERYBUF round-trip is needed (the old query also lacked
     * the plane array required for multi-planar buffer types). Buffers
     * that were never mapped (start NULL from calloc, or MAP_FAILED) are
     * skipped. */
    if (ctx->buf != nullptr) {
        for (i = 0 ; i < ctx->bufcnt; i++) {
            if (ctx->buf[i].start != NULL && ctx->buf[i].start != MAP_FAILED)
                munmap(ctx->buf[i].start, ctx->buf[i].length);
        }
        free(ctx->buf);
        ctx->buf = nullptr;   /* guard against dangling pointer / double free */
    }

    /* Close the V4L2 device. */
    close(ctx->fd);
    ctx->fd = -1;
    log_debug("deinit success.");
    return 0;
}

/**
 * @brief Dequeue one filled frame buffer from the device.
 *
 * Blocks (via camera_source_ioctl's EAGAIN polling) until a frame is
 * available. The returned index must be handed back with
 * CameraSourcePutFrame() once the frame data has been consumed.
 *
 * @return buffer index in [0, bufcnt) on success, -1 on failure.
 */
int SmartCamera::CameraSourceGetFrame(void)
{
    struct v4l2_buffer buf;
    enum v4l2_buf_type type;
    CamSource_t *ctx = &m_camSource;

    type = ctx->type;
    memset(&buf, 0, sizeof(buf));
    buf.type   = type;
    buf.memory = V4L2_MEMORY_MMAP;

    /* Multi-planar DQBUF needs a caller-supplied plane array. */
    struct v4l2_plane planes[FMT_NUM_PLANES];
    memset(&planes, 0, sizeof(planes));
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    /* Take one filled buffer off the device's outgoing queue. */
    if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_DQBUF, &buf)) {
        log_debug("VIDIOC_DQBUF\n");
        return -1;
    }

    /* BUG FIX: valid indices are [0, bufcnt); the old check used '>' and
     * let index == bufcnt through, which would overrun ctx->buf[]. */
    if (buf.index >= ctx->bufcnt) {
        log_debug("buffer index out of bounds\n");
        return -1;
    }

    /* NOTE: the old code copied planes[0].bytesused into the local buf,
     * but that value never left this function, so the dead store was
     * removed. Callers use ctx->buf[idx].length for the frame size. */
    return buf.index;
}

/**
 * @brief Return a frame buffer to the device's incoming queue.
 *
 * Tells the kernel it may overwrite the frame again. A negative @p idx is
 * ignored (so the result of a failed CameraSourceGetFrame() can be passed
 * straight through).
 *
 * @param idx buffer index previously returned by CameraSourceGetFrame()
 * @return 0 on success or when idx is negative, -1 on failure.
 */
int SmartCamera::CameraSourcePutFrame(int32_t idx)
{
    struct v4l2_buffer buf;
    enum v4l2_buf_type type;
    CamSource_t *ctx = &m_camSource;

    if (idx < 0)
        return 0;

    /* BUG FIX: reject indices past the buffer array instead of handing
     * them to the driver. */
    if ((uint32_t)idx >= ctx->bufcnt) {
        log_debug("VIDIOC_QBUF index out of bounds\n");
        return -1;
    }

    type = ctx->type;
    memset(&buf, 0, sizeof(buf));
    buf.type   = type;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = idx;

    /* Multi-planar QBUF needs a caller-supplied plane array. */
    struct v4l2_plane planes[FMT_NUM_PLANES];
    memset(&planes, 0, sizeof(planes));
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    /* BUG FIX: report queue failures instead of silently returning 0 —
     * a lost buffer eventually starves the capture pipeline. */
    if (-1 == camera_source_ioctl(ctx->fd, VIDIOC_QBUF, &buf)) {
        log_debug("VIDIOC_QBUF\n");
        return -1;
    }

    return 0;
}

/**
 * @brief Copy a captured frame out of the mmap'd buffer into @p buf.
 *
 * @param idx buffer index previously returned by CameraSourceGetFrame()
 * @param buf destination buffer (must not be NULL)
 * @param len capacity of @p buf in bytes
 * @return number of bytes copied on success, -1 on failure.
 */
int SmartCamera::CameraFrameToBuf(int32_t idx, uint8_t *buf, uint32_t len)
{
    CamSource_t *ctx = &m_camSource;

    /* BUG FIX: validate everything before dereferencing — the old code
     * would index ctx->buf with an unchecked idx and memcpy into a
     * possibly NULL destination. */
    if (buf == NULL || ctx->buf == NULL || idx < 0 || (uint32_t)idx >= ctx->bufcnt) {
        log_debug("invalid frame index or buffer.");
        return -1;
    }

    if (len < ctx->buf[idx].length) {
        log_debug("The container is not long enough.");
        return -1;
    }
    memcpy(buf, ctx->buf[idx].start, ctx->buf[idx].length);

    return ctx->buf[idx].length;
}

int SmartCamera::CameraFrameToBufCallBack(std::function<void(uint8_t *, uint32_t)> callback)
{
    int idx;
    CamSource_t *ctx = &m_camSource;

    int epoll_fd;
    struct epoll_event event;
    struct epoll_event events;
    int num_events;

    /* 创建epoll实例 */
    epoll_fd = epoll_create(1);
    if (-1 == epoll_fd) {
        perror("camera epoll_create");
        exit(EXIT_FAILURE);
    }

    /* 设置监听事件 */
    event.events = EPOLLIN;
    event.data.fd = ctx->fd;
    if (-1 == epoll_ctl(epoll_fd, EPOLL_CTL_ADD, ctx->fd, &event)) {
        perror("camera epoll_ctl");
        exit(EXIT_FAILURE);
    }

    m_loop_run = true;
    while (true == m_loop_run) {
        num_events = epoll_wait(epoll_fd, &events, 1, 3000);
        if (0 == num_events) {
            log_debug("cammera fd epoll timeout.\n");
            continue;
        }
        else if ((1 != num_events) || (event.data.fd != ctx->fd)) {
            log_debug("cammera fd epoll error.\n");
            continue;
        }

        idx = CameraSourceGetFrame();
        if (idx >= 0 && idx < ctx->bufcnt) {
            callback((uint8_t *)ctx->buf[idx].start, ctx->buf[idx].length);
        }
        CameraSourcePutFrame(idx);
    }

    close(epoll_fd);
    return 0;
}