/**
 * @file DecoderProcess.cpp
 * @author xag (xag@xa.com)
 * @brief
 * @version 1.0.10
 * @date 2023-08-09
 *
 * @copyright Copyright (c) 2007-2023, Guangzhou Xaircraft Technology Co., Ltd.
 *
 */
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <stdint.h>
#include <inttypes.h>
#include <assert.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <malloc.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <signal.h>
#include <poll.h>
#include <dirent.h>
#include <asm/types.h>

#include "DecoderProcess.h"
#include "XagLog.h"
#include "XagTypes.h"

#ifdef DRMMODE
#include "include/xf86drmMode.h"
#else
#ifdef DRM
#include <drm.h>
#endif /* DRM */
#endif /* DRMMODE */

// #define DEBUG
#ifdef DEBUG
#define debug_printf(fmt, arg...) XAG_LOG_I("\ndec : " fmt, ##arg);
// #define debug_printf(fmt, arg...) printf("\ndec : " fmt, ##arg);
#else
#define debug_printf(fmt, arg...)
#endif

/* Enable the below option for converting YUV422 decoded output to NV12 format*/
#define CC_YUV422PLANAR_TO_NV12

namespace kapok_hardware_tidecoder {

// Default constructor. No members are initialized here; device setup is
// performed explicitly via open_device()/init_device().
DecoderProcess::DecoderProcess() {
}

// Destructor. Intentionally empty: buffer unmapping and device teardown are
// done explicitly via uninit_device()/close_device(), not here.
DecoderProcess::~DecoderProcess() {
}

/**
 * @brief Report a fatal system-call failure and terminate the process.
 *
 * @param s name of the operation that failed (used as the message prefix)
 *
 * The message is written unconditionally to stderr in addition to the
 * debug log: debug_printf() compiles to nothing when DEBUG is undefined,
 * and a fatal exit must never be silent.
 */
void DecoderProcess::errno_exit(const char *s) {
    fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
    debug_printf("%s error %d, %s\n", s, errno, strerror(errno));
    exit(EXIT_FAILURE);
}

/**
 * @brief Copy one compressed bitstream chunk into the mmap'ed OUTPUT
 *        buffer at @p index and queue it to the decoder (VIDIOC_QBUF).
 *
 * @param fd         decoder device fd
 * @param index      OUTPUT buffer index to fill and queue
 * @param rdfd       unused here; kept for interface compatibility
 * @param buff       the mapped OUTPUT buffer descriptor for @p index
 * @param str        unused here; kept for interface compatibility
 * @param nframes    unused here; kept for interface compatibility
 * @param sleep_time unused here; kept for interface compatibility
 * @param h264_data  compressed bitstream bytes to submit
 * @param h264_size  number of valid bytes in @p h264_data
 * @return 0 on success, negative on ioctl failure or oversized input
 */
int DecoderProcess::handle_outbuf(int fd, int index, int rdfd, struct buffer buff,
    struct stream_context *str, int nframes, int sleep_time, char *h264_data, int h264_size) {
    struct v4l2_buffer buf;
    struct v4l2_plane buf_planes[1];
    int ret = 0;
    static int fs_ind = 1;  /* debug-trace counter only */

    debug_printf("handle_outbuf Entry, index=%d, nframes=%d, fs_ind=%d\n", index, nframes, fs_ind);

    /* Guard the memcpy: the original code copied h264_size bytes blindly,
     * which overruns the mapped buffer if the chunk is larger than it. */
    if (h264_size < 0 || (size_t)h264_size > (size_t)buff.length) {
        debug_printf("[fd%d] handle_outbuf chunk size %d exceeds buffer length\n",
                fd, h264_size);
        return -1;
    }

    memcpy(reinterpret_cast<char*>(buff.mapped),
            h264_data,
            h264_size);

    memzero(buf);
    memzero(buf_planes[0]);

    buf_planes[0].bytesused = h264_size;
    buf_planes[0].m.mem_offset = buff.offset;
    buf_planes[0].data_offset = 0;

    buf.index = index;
    buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.m.planes = buf_planes;
    buf.length = 1;  /* single-plane compressed bitstream */

    ret = ioctl(fd, VIDIOC_QBUF, &buf);
    if (ret < 0) {
        debug_printf("[fd%d] handle_outbuf QBUF failed ret=%d err=%s\n",
                fd, ret, strerror(errno));
    } else {
        debug_printf("[fd%d] handle_outbuf buf.index:%d QBUF success\n", fd, buf.index);
    }

    return ret;
}

/**
 * @brief Consume one decoded CAPTURE buffer and re-queue it to the decoder.
 *
 * When @p save is set (and wrfd is valid) the decoded frame is first copied
 * out of the hardware-aligned (strided) layout and written to @p wrfd; the
 * per-format branches below differ only in plane count and chroma layout.
 * The buffer is then queued back with VIDIOC_QBUF so the decoder can reuse it.
 *
 * @param fd           decoder device fd (VIDIOC_QBUF target)
 * @param wrfd         destination fd for the dumped frame
 * @param index        capture buffer index; for MJPEG the three planes live
 *                     at buff[index*3 .. index*3+2]
 * @param buff         array of mapped capture buffer descriptors
 * @param save         non-zero to dump the decoded frame before re-queueing
 * @param str          stream geometry / codec description
 * @param fmt          negotiated capture pixel format (fourcc selects branch)
 * @param usedrmbuff   non-zero when capture buffers are DRM dma-bufs
 * @param yuv_buf      scratch buffer used to pack the NV12 image
 * @param yuv_buf_size size of @p yuv_buf in bytes
 * @return result of ioctl(VIDIOC_QBUF): 0 on success, <0 on failure
 */
int DecoderProcess::handle_capbuf(int fd, int wrfd, int index, struct buffer buff[],
        int save, struct stream_context *str, struct output_format fmt, int usedrmbuff,
        char *yuv_buf, int yuv_buf_size) {
    struct v4l2_buffer buf;
    struct v4l2_plane buf_planes[3];
    int ret = 0;
    int i;
    /* h = hardware-aligned luma plane height (rows to skip to reach chroma),
     * s = hardware-aligned row stride in bytes. */
    int h = ALIGN(str->height, HW_ALIGN);
    int s = ALIGN(str->width, HW_ALIGN) * str->num_bytes_per_pix;
    char *pyuv = yuv_buf;

    /* NOTE(review): this guard admits wrfd == 0, but the NV12 branch below
     * only writes when wrfd > 0 — confirm whether fd 0 should be allowed. */
    if (save && (wrfd >= 0)) {
        switch (fmt.fourcc) {
            case (V4L2_PIX_FMT_NV12):
            case (V4L2_PIX_FMT_TI1210):
                debug_printf("index=%d save:%d per_pix:%d h:%d s:%d usedrmbuff:%d fmt.fourcc:%d\n",
                        index, save, str->num_bytes_per_pix, h, s, usedrmbuff, fmt.fourcc);
                #if 1
                /* De-stride into yuv_buf: copy width bytes from each
                 * stride-s row, luma rows first, then the interleaved
                 * chroma plane that starts h rows into the buffer. */
                memset(pyuv, 0, yuv_buf_size);
                for (i = 0; i < str->height; i++) {
                    memcpy(yuv_buf,
                           reinterpret_cast<char*>(buff[index].mapped) + (i * s),
                           (str->width * str->num_bytes_per_pix));
                    yuv_buf += (str->width * str->num_bytes_per_pix);
                }

                /* NV12 chroma is half height, full (interleaved) width. */
                for (i = 0; i < (str->height / 2); i++) {
                    memcpy(yuv_buf,
                           reinterpret_cast<char*>(buff[index].mapped) + (h * s) + (i * s),
                           (str->width * str->num_bytes_per_pix));
                    yuv_buf += (str->width * str->num_bytes_per_pix);
                }

                /* NOTE(review): wrfd is closed here, so this path dumps at
                 * most one frame per descriptor — confirm callers expect it. */
                if (wrfd > 0) {
                    write(wrfd, pyuv, yuv_buf_size);
                    close(wrfd);
                }
                #endif
                break;
            case (V4L2_PIX_FMT_NV16):
            case (V4L2_PIX_FMT_TI1610):
                debug_printf("handle_capbuf V4L2_PIX_FMT_NV16\n");
#ifdef CC_YUV422PLANAR_TO_NV12
                /* Downsample 4:2:2 to 4:2:0 on the fly: write every luma
                 * row, but only every other chroma row. */
                debug_printf("handle_capbuf V4L2_PIX_FMT_NV16 1\n");
                for (i = 0; i < str->height; i++)
                    write(wrfd, reinterpret_cast<char*>(buff[index].mapped) + (i * s),
                        str->width * str->num_bytes_per_pix);
                for (i = 0; i < str->height; i+=2)
                    write(wrfd, reinterpret_cast<char*>(buff[index].mapped) + (h * s) +
                            (i * s), str->width * str->num_bytes_per_pix);

#else
                /* Keep 4:2:2: dump luma+chroma as a double-width row dump. */
                debug_printf("handle_capbuf V4L2_PIX_FMT_NV16 2\n");
                for (i = 0; i < str->height; i++)
                    write(wrfd, reinterpret_cast<char*>(buff[index].mapped) + (i * s),
                            2* str->width * str->num_bytes_per_pix);
#endif
                break;
            case V4L2_PIX_FMT_YUV420M:
                /* Three separate planes: Y full size, U and V quarter size
                 * (half width, half height). */
                debug_printf("handle_capbuf V4L2_PIX_FMT_YUV420M\n");
                for (i = 0; i < str->height; i++)
                    write(wrfd, reinterpret_cast<char*>(buff[(index*3)].mapped) + (i * s),
                        (str->width *
                         str->num_bytes_per_pix));


                for (i = 0; i < (str->height / 2); i++)
                    write(wrfd, reinterpret_cast<char*>(buff[(index*3)+1].mapped) + (i * s),
                        (str->width *
                        str->num_bytes_per_pix) / 2);

                for (i = 0; i < (str->height / 2); i++)
                    write(wrfd, reinterpret_cast<char*>(buff[(index*3)+2].mapped) + (i * s),
                        (str->width *
                        str->num_bytes_per_pix) / 2);
                break;
            case V4L2_PIX_FMT_YUV422M:
                /* Three planes, 4:2:2: U and V are half width, full height. */
                for (i = 0; i < str->height; i++)
                    write(wrfd, reinterpret_cast<char*>(buff[(index*3)].mapped) + (i * s),
                        (str->width *
                         str->num_bytes_per_pix));

                for (i = 0; i < (str->height); i++)
                    write(wrfd, reinterpret_cast<char*>(buff[(index*3)+1].mapped) + (i * s),
                        (str->width *
                        str->num_bytes_per_pix) / 2);

                for (i = 0; i < (str->height); i++)
                    write(wrfd, reinterpret_cast<char*>(buff[(index*3)+2].mapped) + (i * s),
                        (str->width *
                        str->num_bytes_per_pix) / 2);
                break;
            default:
                break;
        }
    }

    /* Re-queue the buffer. MJPEG output is 3-planar; everything else here
     * is single-plane. */
    memzero(buf);
    memzero(buf_planes[0]);

    if (str->codec == AV_CODEC_ID_MJPEG) {
        memzero(buf_planes[1]);
        memzero(buf_planes[2]);
    }

    if (str->codec == AV_CODEC_ID_MJPEG) {
        buf_planes[0].m.fd = buff[(index*3)].dbuf_fd;
        buf_planes[0].length = buff[(index*3)].length;

        buf_planes[1].m.fd = buff[((index*3) + 1)].dbuf_fd;
        buf_planes[1].length = buff[((index*3) + 1)].length;

        buf_planes[2].m.fd = buff[((index*3) + 2)].dbuf_fd;
        buf_planes[2].length = buff[((index*3) + 2)].length;
    } else {
        buf_planes[0].m.fd = buff[index].dbuf_fd;
        buf_planes[0].length = buff[index].length;
    }

    buf.index = index;
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    if (usedrmbuff == 0) {
        /* Using v4l2 buffers for capture */
        buf.memory = V4L2_MEMORY_MMAP;
    } else {
        /* Using drm buffers for capture */
        buf.memory = V4L2_MEMORY_DMABUF;
    }
    buf.m.planes = buf_planes;
    if (str->codec == AV_CODEC_ID_MJPEG)
        buf.length = 3;
    else
        buf.length = 1;

    ret = ioctl(fd, VIDIOC_QBUF, &buf);
    if (ret < 0) {
        debug_printf("[fd%d] handle_capbuf QBUF failed ret=%d err=%s\n",
                fd, ret, strerror(errno));
    } else {
        debug_printf("[fd%d] handle_capbuf buf.index:%d QBUF success\n", fd, buf.index);
    }

    return ret;
}

/**
 * @brief Decoder event loop: prime the OUTPUT/CAPTURE queues, then poll the
 *        device and service dequeued buffers until V4L2_BUF_FLAG_LAST is
 *        seen or a decoded frame is produced.
 *
 * Flow:
 *  1. Queue all OUTPUT (bitstream) buffers via handle_outbuf().
 *  2. On first invocation only (guarded by static ac_cap), queue all
 *     CAPTURE buffers via handle_capbuf() with save=0.
 *  3. poll() with a 100 ms timeout, then drain OUTPUT dequeues, CAPTURE
 *     dequeues, and pending V4L2 events.
 *
 * Returns 0 as soon as one CAPTURE buffer with non-zero bytesused has been
 * dumped/re-queued (see the early `return 0` below); otherwise returns the
 * last poll()/ioctl() result when FLAG_LAST ends the loop.
 *
 * NOTE(review): the static locals ac_cap and prev_time make this function
 * non-reentrant and shared across all DecoderProcess instances — confirm a
 * single decoder instance per process is the intended usage.
 *
 * @param ptr_decode_info opaque pointer to a DECODE_INFO_T describing the
 *                        device fd, buffer arrays, stream and format state
 */
int DecoderProcess::mainloop(void * ptr_decode_info) {
    int type, i, ret = 0;  /* NOTE(review): `type` is declared but unused */
    uint32_t flags = 0;
    struct v4l2_buffer buf;
    struct v4l2_plane buf_planes[3];
    struct pollfd pfd;
    struct timeval times;
    long curr_time = 0;         /* NOLINT */
    static long prev_time = 0;  /* NOLINT */
    struct v4l2_event event;
    static int ac_cap = 0;  /* one-shot guard: capture queue primed once */

    DECODE_INFO_T *p_decode_info = reinterpret_cast<DECODE_INFO_T *>(ptr_decode_info);
    debug_printf("[fd%d] Enter mainloop n_outbufs=%d n_capbufs=%d\n",
                    p_decode_info->fd, p_decode_info->n_outbufs, p_decode_info->n_capbufs);

    /* POLLOUT: OUTPUT buffer free; POLLIN: CAPTURE buffer ready;
     * POLLPRI: pending V4L2 event (e.g. EOS). */
    pfd.fd = p_decode_info->fd;
    pfd.events = POLLIN | POLLOUT | POLLPRI;
    pfd.revents = 0;

    /* Prime the OUTPUT queue with the compressed bitstream. */
    for (i = 0; i < p_decode_info->n_outbufs; i++)
        handle_outbuf(p_decode_info->fd, i, p_decode_info->rdfd, p_decode_info->outbufs[i],
                                &p_decode_info->str, p_decode_info->nframes, i,
                                p_decode_info->h264_data, p_decode_info->h264_data_size);

    /* Prime the CAPTURE queue once. MJPEG capbufs are stored 3 planes per
     * logical buffer, hence the /3 index count. */
    if (ac_cap == 0) {
        if (p_decode_info->str.codec == AV_CODEC_ID_MJPEG) {
            for (i = 0; i < (p_decode_info->n_capbufs / 3); i++)
                handle_capbuf(p_decode_info->fd, p_decode_info->wrfd, i, p_decode_info->capbufs, 0,
                                    &p_decode_info->str, p_decode_info->fmt, p_decode_info->usedrmbuff,
                                    p_decode_info->yuv_buf, p_decode_info->yuv_buf_len);
        } else {
            for (i = 0; i < p_decode_info->n_capbufs; i++)
                handle_capbuf(p_decode_info->fd, p_decode_info->wrfd, i, p_decode_info->capbufs, 0,
                                    &p_decode_info->str, p_decode_info->fmt, p_decode_info->usedrmbuff,
                                    p_decode_info->yuv_buf, p_decode_info->yuv_buf_len);
        }
        ac_cap = 1;
    }

    debug_printf("[fd%d] enter while\n", p_decode_info->fd);
    i = 0;
    while (!(flags & V4L2_BUF_FLAG_LAST)) {
        pfd.revents = 0;
        debug_printf("flags=0x%x & V4L2_BUF_FLAG_LAST=0x%x\n", flags, V4L2_BUF_FLAG_LAST);
        /* Poll for any event for 100ms */
        ret = poll(&pfd, 1, 100);
        if (ret < 0) {
            debug_printf("poll had an error %d: %s\n",
                errno, strerror(errno));
        } else if (ret > 0) {
            if (pfd.revents & POLLOUT) {
                /* Drain all consumed OUTPUT buffers and refill each with
                 * the next bitstream chunk. Loop exits on EAGAIN.
                 * NOTE(review): a persistent non-EAGAIN DQBUF error does
                 * not break out of this inner loop — confirm intended. */
                while (1) {
                    /* Check for OUTPUT buffer */
                    debug_printf("POLLOUT while\n");
                    memzero(buf);
                    buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
                    buf.memory = V4L2_MEMORY_MMAP;
                    buf.m.planes = buf_planes;
                    buf.length = 1;
                    ret = ioctl(p_decode_info->fd, VIDIOC_DQBUF, &buf);
                    if (ret < 0) {
                        debug_printf("POLLOUT ret < 0\n");
                        if (errno != EAGAIN) {
                            debug_printf("[fd%d] OUTPUT VIDIOC_DQBUF failed: ret=%d errno=%d: %s\n",
                                p_decode_info->fd, ret, errno,
                                strerror(errno));
                        } else {
                            debug_printf("[fd%d] OUTPUT EAGAIN\n", p_decode_info->fd);
                            break;
                        }
                    } else {
                        debug_printf("POLLOUT handle_outbuf\n");
                        handle_outbuf(p_decode_info->fd, buf.index, p_decode_info->rdfd,
                                p_decode_info->outbufs[buf.index],
                                &p_decode_info->str, p_decode_info->nframes,
                                p_decode_info->sleep_time,
                                p_decode_info->h264_data, p_decode_info->h264_data_size);
                    }
                }
            }
            if (pfd.revents & POLLIN) {
                /* Drain decoded CAPTURE buffers. */
                while (1) {
                    /* Check for CAPTURE buffer */
                    debug_printf("POLLIN while\n");
                    memzero(buf);
                    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                    if (p_decode_info->usedrmbuff == 0) {
                        /* Using v4l2 buffers for capture */
                        buf.memory = V4L2_MEMORY_MMAP;
                    } else {
                        /* Using drm buffers for capture */
                        buf.memory = V4L2_MEMORY_DMABUF;
                    }
                    buf.m.planes = buf_planes;
                    /* NOTE(review): non-MJPEG uses length 2 here but the
                     * queue side (handle_capbuf) uses 1 plane — confirm
                     * which the driver expects. */
                    if (p_decode_info->str.codec == AV_CODEC_ID_MJPEG)
                        buf.length = 3;
                    else
                        buf.length = 2;
                    ret = ioctl(p_decode_info->fd, VIDIOC_DQBUF, &buf);
                    if (ret < 0) {
                        debug_printf("POLLIN ret < 0\n");
                        if (errno != EAGAIN) {
                            debug_printf("[fd%d] CAPTURE VIDIOC_DQBUF failed: ret=%d errno=%d: %s\n",
                                p_decode_info->fd, ret, errno,
                                strerror(errno));
                        } else {
                            debug_printf("[fd%d] CAPTURE EAGAIN\n", p_decode_info->fd);
                            break;
                        }
                    } else {
                        if (p_decode_info->enable_prof) {
                            /* Inter-frame dequeue latency in microseconds. */
                            gettimeofday(&times, NULL);
                            curr_time =
                            (times.tv_sec * 1000000 + times.tv_usec);

                            debug_printf("Picture buffer dequeue time is %ld us\n",
                                    (curr_time - prev_time));

                            prev_time = curr_time;
                        }
                        debug_printf("[fd%d] CAPTURE VIDIOC_DQBUF bytesused=%d\n",
                                p_decode_info->fd, buf.m.planes[0].bytesused);
                        if (buf.m.planes[0].bytesused) {
                            /* First real decoded frame: dump it (save=1),
                             * re-queue the buffer, and leave mainloop. */
                            debug_printf("POLLIN handle_capbuf\n");
                            handle_capbuf(p_decode_info->fd, p_decode_info->wrfd,
                                      buf.index,
                                      p_decode_info->capbufs,
                                      1, &p_decode_info->str,
                                      p_decode_info->fmt, p_decode_info->usedrmbuff,
                                      p_decode_info->yuv_buf, p_decode_info->yuv_buf_len);
                            return 0;
                        }
                        flags = buf.flags;
                        debug_printf("[fd%d] CAPTURE VIDIOC_DQBUF buffer %d flags=%08x FLAG_LAST=%08x\n",
                                p_decode_info->fd, buf.index,
                                flags,
                                V4L2_BUF_FLAG_LAST);
                        if (buf.flags & V4L2_BUF_FLAG_LAST)
                            break;
                    }
                }
            }
            if (pfd.revents & POLLPRI) {
                /* Check for events */
                debug_printf("POLLPRI\n");
                memzero(event);
                ret = ioctl(p_decode_info->fd, VIDIOC_DQEVENT, &event);
                if (ret < 0) {
                    debug_printf("[fd%d] VIDIOC_DQEVENT failed:: ret=%d errno=%d: %s\n",
                            p_decode_info->fd, ret, errno,
                            strerror(errno));
                } else if (event.type == V4L2_EVENT_EOS) {
                    debug_printf("[fd%d] GOT EVENT\n", p_decode_info->fd);
                } else {
                    debug_printf("[fd%d] VIDIOC_DQEVENT got unexpected event %d\n",
                            p_decode_info->fd, event.type);
                }
            }
        }
    }

    return ret;
}

/**
 * @brief Tear down decoder buffers and event subscriptions.
 *
 * Unmaps all OUTPUT buffers, unmaps the CAPTURE buffers (destroying the
 * backing DRM dumb buffers when they were DRM-allocated), then unsubscribes
 * from all V4L2 events on the device. The device fd itself is not closed
 * here (see close_device()).
 *
 * @param fd         decoder device fd
 * @param outbufs    mapped OUTPUT buffers
 * @param capbufs    mapped CAPTURE buffers
 * @param n_outbufs  in: number of OUTPUT buffers to unmap
 * @param n_capbufs  in: number of CAPTURE buffers to unmap
 * @param usedrmbuff non-zero when capture buffers were DRM dma-bufs
 */
void DecoderProcess::uninit_device(int fd, struct buffer outbufs[],
        struct buffer capbufs[], int *n_outbufs, int *n_capbufs, int usedrmbuff) {
#ifdef DRM
    struct drm_mode_destroy_dumb gem_destroy;
#endif
    int i, ret = 0;
    struct v4l2_event_subscription sub;

    debug_printf("[fd%d] uninit_device\n", fd);

    for (i = 0; i < *n_outbufs; i++) {
        debug_printf("[fd%d] munmap outbuf %d mapped=0x%p length=%d\n",
                fd, i, outbufs[i].mapped, outbufs[i].length);
        ret = munmap(outbufs[i].mapped, outbufs[i].length);
        if (ret) {
            debug_printf("[fd%d] munmap failed for outbuf %d: %d %s\n",
                    fd, i, errno, strerror(errno));
        }
    }

    /* For uninit capbufs, check which was used, drm or v4l2 */
    if (usedrmbuff == 0) {
        /* Using v4l2 buffers for capture */
        for (i = 0; i < *n_capbufs; i++) {
            debug_printf("[fd%d] munmap capbuf %d mapped=0x%p length=%d\n",
                                fd, i, capbufs[i].mapped, capbufs[i].length);
            ret = munmap(capbufs[i].mapped, capbufs[i].length);
            if (ret) {
                debug_printf("[fd%d] munmap failed for capbuf %d: %d %s\n", fd, i, errno, strerror(errno));
            }
        }
    } else {
#ifdef DRM
        /* Using drm buffers for capture: unmap, then destroy the GEM
         * dumb-buffer object that backs each dma-buf. */
        for (i = 0; i < *n_capbufs; i++) {
            debug_printf("[fd%d] munmap capbuf %d mapped=0x%p length=%d\n",
                            fd, i, capbufs[i].mapped, capbufs[i].length);
            ret = munmap(capbufs[i].mapped, capbufs[i].length);
            if (ret) {
                debug_printf("[fd%d] munmap failed for  %d: %d %s\n", fd, i, errno, strerror(errno));
            }
            debug_printf("[fd%d] destroy gem capbuf %d handle=%d\n",
                    fd, i, capbufs[i].bo_handle);
            memset(&gem_destroy, 0, sizeof(gem_destroy));
            gem_destroy.handle = capbufs[i].bo_handle;
            ret = ioctl(drmfd1, DRM_IOCTL_MODE_DESTROY_DUMB, &gem_destroy);
            if (ret)
                debug_printf("    DRM_IOCTL_MODE_DESTROY_DUMB failed\n");
        }
#endif
    }

    memset(&sub, 0, sizeof(sub));
    sub.type = V4L2_EVENT_ALL;

    /* Fixed log text: this path UNsubscribes (old message said SUBSCRIBE). */
    debug_printf("[fd%d] Calling V4L2 IOCTL VIDIOC_UNSUBSCRIBE_EVENT\n", fd);
    ret = ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
    if (ret != 0) {
        debug_printf("[fd%d] Failed to unsubscribe to events: err: %d %s\n",
                fd, errno, strerror(errno));
    }
}

#ifdef DRM
/**
 * @brief Allocate one DRM dumb buffer, export it as a dma-buf fd, and map
 *        it into this process.
 *
 * On success fills b->bo_handle, b->dbuf_fd, b->mapped, b->offset and
 * b->length and returns 0. On failure the GEM object is destroyed and the
 * ORIGINAL error code is returned.
 *
 * Bug fixed: the old fail path returned the result of the cleanup
 * DRM_IOCTL_MODE_DESTROY_DUMB ioctl, so a successful cleanup (0) masked
 * the real failure — in particular an mmap() failure could be reported as
 * success with b->mapped == MAP_FAILED.
 *
 * @param b      buffer descriptor to populate
 * @param width  buffer width in bytes-per-row units (bpp is 8)
 * @param height buffer height in rows
 * @return 0 on success, non-zero error code on failure
 */
int create_drm_buffer(struct buffer *b,
    unsigned int width, unsigned int height) {
    struct drm_mode_create_dumb gem;
    struct drm_mode_map_dumb gem_map;
    struct drm_mode_destroy_dumb gem_destroy;
    int ret;

    memset(&gem, 0, sizeof gem);
    debug_printf("drm width:%d drm sheight %d\n", width, height);
    gem.width = width;
    gem.height = height;
    gem.bpp = 8;  /* 8 bpp: width is already expressed in bytes */

    ret = ioctl(drmfd1, DRM_IOCTL_MODE_CREATE_DUMB, &gem);
    if (ret) {
        debug_printf("    DRM_IOCTL_MODE_CREATE_DUMB failed\n");
        return ret;
    }

    b->bo_handle = gem.handle;

    /* Export the GEM handle as a dma-buf fd for V4L2_MEMORY_DMABUF use. */
    struct drm_prime_handle prime;
    memset(&prime, 0, sizeof prime);
    prime.handle = b->bo_handle;

    ret = ioctl(drmfd1, DRM_IOCTL_PRIME_HANDLE_TO_FD, &prime);
    if (ret) {
        debug_printf("    DRM_IOCTL_PRIME_HANDLE_TO_FD failed\n");
        goto fail_gem;
    }
    b->dbuf_fd = prime.fd;

    memset(&gem_map, 0, sizeof(gem_map));
    gem_map.handle = gem.handle;

    ret = ioctl(drmfd1, DRM_IOCTL_MODE_MAP_DUMB, &gem_map);
    if (ret) {
        debug_printf("    DRM_IOCTL_MODE_MAP_DUMB failed\n");
        goto fail_gem;
    }

    b->mapped = mmap(NULL, (size_t)gem.size, PROT_READ,     /* NOLINT */
            MAP_SHARED, drmfd1, gem_map.offset);
    if (MAP_FAILED == b->mapped) {
        debug_printf("    mmap failed %d: %s\n", errno, strerror(errno));
        ret = -1;  /* mark the failure: ret is still 0 at this point */
        goto fail_gem;
    }
    b->offset = gem_map.offset;
    b->length = gem.size;

    return 0;
fail_gem:
    /* Best-effort cleanup; do NOT let its result overwrite the real error. */
    memset(&gem_destroy, 0, sizeof gem_destroy);
    gem_destroy.handle = b->bo_handle;
    ioctl(drmfd1, DRM_IOCTL_MODE_DESTROY_DUMB, &gem_destroy);

    return ret;
}
#endif

/**
 * @brief Configure decoder formats and allocate/mmap OUTPUT and CAPTURE
 *        buffers.
 *
 * Sets the OUTPUT (compressed) and CAPTURE (decoded) formats, requests the
 * OUTPUT buffers (always V4L2 MMAP) and the CAPTURE buffers (V4L2 MMAP or
 * DRM dma-bufs depending on @p usedrmbuff), and maps everything into the
 * process. *n_outbufs / *n_capbufs are updated with the counts actually
 * granted by the driver.
 *
 * Bugs fixed:
 *  - In the MJPEG capture-mmap failure path the cleanup loop ran
 *    `while (j >= 0)` and decremented j afterwards, so it unmapped
 *    capbufs[(-1*3)+i] — an out-of-bounds access. The cleanup now walks
 *    every plane actually mapped so far, in reverse, with a valid index.
 *  - Cleanup munmap() calls now use the length each region was mapped
 *    with (stored in .length) instead of the current fmt sizeimage.
 *
 * @return 0 on success, non-zero/-1 on failure
 */
int DecoderProcess::init_device(int fd, int rdfd,
        struct stream_context *str, struct buffer outbufs[],
        struct buffer capbufs[], int *n_outbufs, int *n_capbufs,
        struct output_format format, int usedrmbuff) {
    struct v4l2_format fmt;
    struct v4l2_requestbuffers reqbuf;
    struct v4l2_buffer buffer;
    struct v4l2_plane buf_planes[1];
    int ret = 0;
    int j;
    unsigned i;
    /*for v4l2 based capture buffers*/
    struct v4l2_buffer buffer_cap;

    debug_printf("[fd%d] init_device\n", fd);

    /* --- OUTPUT (compressed bitstream) format --- */
    memzero(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    fmt.fmt.pix_mp.width = str->width;
    fmt.fmt.pix_mp.height = str->height;
    fmt.fmt.pix_mp.plane_fmt[0].sizeimage = (str->width * str->height);
    fmt.fmt.pix_mp.num_planes = 1;

    if (str->codec == AV_CODEC_ID_H264) {
        fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_H264;
        debug_printf("str->codec : %d\n", str->codec);
    }
    if (str->codec == AV_CODEC_ID_HEVC)
        fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_HEVC;
    if (str->codec == AV_CODEC_ID_MJPEG)
        fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_MJPEG;

    ret = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (ret != 0) {
        debug_printf("[fd%d] VIDIOC_S_FMT errorno %d, %s\n",
            fd, errno, strerror(errno));
        return ret;
    }

    debug_printf("[fd%d] After S_FMT on OUTPUT\n", fd);

    /* --- CAPTURE (decoded picture) format --- */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    fmt.fmt.pix_mp.pixelformat = format.fourcc;

    if (str->codec == AV_CODEC_ID_MJPEG) {
        /* 3-planar YUV: Y full size, U/V half size. Note the sizeimage
         * values use the pre-alignment width/height (set above). */
        fmt.fmt.pix_mp.plane_fmt[0].sizeimage =
                ((fmt.fmt.pix_mp.width * fmt.fmt.pix_mp.height) * (str->num_bytes_per_pix));
        fmt.fmt.pix_mp.plane_fmt[1].sizeimage =
                ((fmt.fmt.pix_mp.width * fmt.fmt.pix_mp.height) * (str->num_bytes_per_pix)) / 2;
        fmt.fmt.pix_mp.plane_fmt[2].sizeimage =
                ((fmt.fmt.pix_mp.width * fmt.fmt.pix_mp.height) * (str->num_bytes_per_pix)) / 2;

        fmt.fmt.pix_mp.width = ALIGN(str->width, HW_ALIGN);
        fmt.fmt.pix_mp.height = ALIGN(str->height, HW_ALIGN);
        fmt.fmt.pix_mp.num_planes = 3;
        fmt.fmt.pix_mp.plane_fmt[0].bytesperline = ALIGN(str->width, HW_ALIGN) * str->num_bytes_per_pix;
        fmt.fmt.pix_mp.plane_fmt[1].bytesperline = ALIGN(str->width, HW_ALIGN) * str->num_bytes_per_pix;
        fmt.fmt.pix_mp.plane_fmt[2].bytesperline = ALIGN(str->width, HW_ALIGN) * str->num_bytes_per_pix;
    } else {
        /* Single-plane formats: total size scaled by the format's
         * size_num/size_den ratio (e.g. 3/2 for NV12). */
        fmt.fmt.pix_mp.plane_fmt[0].sizeimage =
                ((fmt.fmt.pix_mp.width * fmt.fmt.pix_mp.height) *
                        format.size_num) * (str->num_bytes_per_pix) / format.size_den;
        fmt.fmt.pix_mp.width = ALIGN(str->width, HW_ALIGN);
        fmt.fmt.pix_mp.height = ALIGN(str->height, HW_ALIGN);
        fmt.fmt.pix_mp.num_planes = 1;
        fmt.fmt.pix_mp.plane_fmt[0].bytesperline = ALIGN(str->width, HW_ALIGN) * str->num_bytes_per_pix;
    }

    ret = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (ret) {
        debug_printf("[fd%d] VIDIOC_S_FMT errorno %d, %s\n",
            fd, errno, strerror(errno));
        return ret;
    }

    debug_printf("[fd%d] After S_FMT on CAPTURE\n", fd);

    /* Read back the formats the driver actually granted (debug only). */
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;

    ret = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (ret) {
        debug_printf("[fd%d] VIDIOC_G_FMT errorno %d, %s\n",
            fd, errno, strerror(errno));
        return ret;
    }

    debug_printf("[fd%d] After G_FMT on OUTPUT\n", fd);
    debug_printf("[fd%d] After G_FMT fmt.fmt.pix_mp.pixelformat = %c%c%c%c numplanes %d\n",
            fd, fmt.fmt.pix_mp.pixelformat & 0xff,
            (fmt.fmt.pix_mp.pixelformat >> 8) & 0xff,
            (fmt.fmt.pix_mp.pixelformat >>16) & 0xff,
            (fmt.fmt.pix_mp.pixelformat >> 24) & 0xff,
            fmt.fmt.pix_mp.num_planes);

    debug_printf("[fd%d] fmt.fmt.pix_mp.width %d fmt.fmt.pix_mp.height %d"
            " sizeimage %d bytesperline %d\n",
            fd, fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height,
            fmt.fmt.pix_mp.plane_fmt[0].sizeimage,
            fmt.fmt.pix_mp.plane_fmt[0].bytesperline);

    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    ret = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (ret) {
        debug_printf("[fd%d] VIDIOC_G_FMT errorno %d, %s\n",
            fd, errno, strerror(errno));
        return ret;
    }

    debug_printf("[fd%d] After G_FMT on CAPTURE\n", fd);
    debug_printf("[fd%d] After G_FMT fmt.fmt.pix_mp.pixelformat = %c%c%c%c numplanes %d\n",
            fd, fmt.fmt.pix_mp.pixelformat & 0xff,
            (fmt.fmt.pix_mp.pixelformat >> 8) & 0xff,
            (fmt.fmt.pix_mp.pixelformat >>16) & 0xff,
            (fmt.fmt.pix_mp.pixelformat >> 24) & 0xff,
            fmt.fmt.pix_mp.num_planes);

    debug_printf("[fd%d] fmt.fmt.pix_mp.width %d fmt.fmt.pix_mp.height %d"
            " sizeimage %d bytesperline %d\n",
            fd, fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height,
            fmt.fmt.pix_mp.plane_fmt[1].sizeimage,
            fmt.fmt.pix_mp.plane_fmt[1].bytesperline);

    /* Setup Decoder OUTPUT (SRC buffer) through VIDIOC_REQBUFS */
    debug_printf("[fd%d] Setup decoding OUTPUT with VIDIOC_REQBUFS buffer size %u\n",
        fd, fmt.fmt.pix_mp.plane_fmt[0].sizeimage);

    memzero(reqbuf);
    reqbuf.count = *n_outbufs;
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbuf.memory = V4L2_MEMORY_MMAP;

    ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (ret) {
        debug_printf("[fd%d] Err REQBUFS failed on OUTPUT queue ret %d errno %d\n",
            fd, ret, errno);
        return ret;
    }
    debug_printf("[fd%d] After VIDIOC_REQBUFS getting buf_cnt %d\n",
        fd, reqbuf.count);
    *n_outbufs = reqbuf.count;  /* driver may grant fewer than requested */

    /* QUERYBUF on OUTPUT - memory of V4L2_MEMORY_MMAP */
    for (j = 0; j < *n_outbufs; j++) {
        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        buffer.index = j;
        buffer.m.planes = buf_planes;
        buffer.length = 1;

        ret = ioctl(fd, VIDIOC_QUERYBUF, &buffer);
        if (ret < 0) {
            debug_printf("[fd%d] CANNOT QUERY BUFFERS ret = %d\n",
                fd, ret);
            return -1;
        }

        outbufs[j].mapped = mmap(NULL, buffer.m.planes[0].length,
                PROT_READ | PROT_WRITE, MAP_SHARED,
                fd, buffer.m.planes[0].m.mem_offset);
        outbufs[j].offset = buffer.m.planes[0].m.mem_offset;
        outbufs[j].length = buffer.m.planes[0].length;

        debug_printf("[fd%d] After mmap -> outbufs[%d].mapped = 0x%p\n",
            fd, j, outbufs[j].mapped);

        if (MAP_FAILED == outbufs[j].mapped) {
            while (j > 0) {
                /* Unmap all previous buffers with the length they were
                 * actually mapped with. */
                j--;
                munmap(outbufs[j].mapped, outbufs[j].length);
                outbufs[j].mapped = NULL;
            }
            debug_printf("[fd%d] Cant mmap buffers Y", fd);
            return -1;
        }
    }

    /* Setup Decoder CAPTURE (DST buffer) through VIDIOC_REQBUFS */

    if (usedrmbuff == 0) {
        /* Setup Decoder CAPTURE (DST buffer) through VIDIOC_REQBUFS */
        debug_printf("[fd%d] Setup decoding CAPTURE with VIDIOC_REQBUFS\n", fd);
        debug_printf("[fd%d] buffer(y) size %u buffer(uv) size %u\n",
                    fd, fmt.fmt.pix_mp.plane_fmt[0].sizeimage,
                    fmt.fmt.pix_mp.plane_fmt[1].sizeimage);

        memzero(reqbuf);
        reqbuf.count = *n_capbufs;
        reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        reqbuf.memory = V4L2_MEMORY_MMAP;

        ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
        if (ret) {
            debug_printf("[fd%d] Err REQBUFS failed on CAPTURE queue ret %d errno %d\n",
                fd, ret, errno);
            return ret;
        }
        debug_printf("[fd%d] After VIDIOC_REQBUFS getting buf_cnt %d\n",
                        fd, reqbuf.count);
        *n_capbufs = reqbuf.count;
        /* Creating Capbuffers with v4l2 BUFFERS */
        /* QUERYBUF on Capture - memory of V4L2_MEMORY_MMAP */
        if (str->codec == AV_CODEC_ID_MJPEG) {
            /* MJPEG capture is 3-planar: plane p of logical buffer j is
             * stored flat at capbufs[(j*3)+p]. */
            struct v4l2_plane buf_planes_cap[FMT_NUM_MJPEG_PLANES];

            for (j = 0; j < (*n_capbufs / 3); j++) {
                memset(&buffer_cap, 0, sizeof(buffer_cap));
                buffer_cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                buffer_cap.index = j;
                buffer_cap.m.planes = buf_planes_cap;
                buffer_cap.length = FMT_NUM_MJPEG_PLANES;
                buffer_cap.memory = V4L2_MEMORY_MMAP;


                ret = ioctl(fd, VIDIOC_QUERYBUF, &buffer_cap);
                if (ret < 0) {
                    debug_printf("[fd%d] CANNOT QUERY BUFFERS for Capture ret = %d\n", fd, ret);
                    return -1;
                }
                for (i = 0; i < buffer_cap.length; i++) {
                    capbufs[(j*3)+i].mapped = mmap(NULL, buffer_cap.m.planes[i].length,
                            PROT_READ, MAP_SHARED,
                            fd, buffer_cap.m.planes[i].m.mem_offset);
                    capbufs[(j*3)+i].offset = buffer_cap.m.planes[i].m.mem_offset;
                    capbufs[(j*3)+i].length = buffer_cap.m.planes[i].length;

                    debug_printf("[fd%d] After mmap -> capbufs[%d].mapped = 0x%p\n",
                        fd, (j*3)+i, capbufs[(j*3)+i].mapped);

                    if (MAP_FAILED == capbufs[(j*3)+i].mapped) {
                        /* Unmap every plane mapped so far, in reverse.
                         * (The previous cleanup ran `while (j >= 0)` and
                         * decremented j afterwards, indexing
                         * capbufs[(-1*3)+i] out of bounds.) */
                        int k;
                        for (k = (j*3) + (int)i - 1; k >= 0; k--) {
                            munmap(capbufs[k].mapped, capbufs[k].length);
                            capbufs[k].mapped = NULL;
                        }
                        debug_printf("[fd%d] Cant mmap capture buffers Y\n", fd);
                        return -1;
                    }
                }
            }
        } else {
            struct v4l2_plane buf_planes_cap[FMT_NUM_PLANES];
            for (j = 0; j < *n_capbufs; j++) {
                memset(&buffer_cap, 0, sizeof(buffer_cap));
                buffer_cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
                buffer_cap.index = j;
                buffer_cap.m.planes = buf_planes_cap;
                buffer_cap.length = FMT_NUM_PLANES;
                buffer_cap.memory = V4L2_MEMORY_MMAP;


                ret = ioctl(fd, VIDIOC_QUERYBUF, &buffer_cap);
                if (ret < 0) {
                    debug_printf("[fd%d] CANNOT QUERY BUFFERS for Capture ret = %d\n", fd, ret);
                    return -1;
                }

                capbufs[j].mapped = mmap(NULL, buffer_cap.m.planes[0].length,
                        PROT_READ, MAP_SHARED,
                        fd, buffer_cap.m.planes[0].m.mem_offset);
                capbufs[j].offset = buffer_cap.m.planes[0].m.mem_offset;
                capbufs[j].length = buffer_cap.m.planes[0].length;

                debug_printf("[fd%d] After mmap -> capbufs[%d].mapped = 0x%p\n", fd, j, capbufs[j].mapped);

                if (MAP_FAILED == capbufs[j].mapped) {
                    while (j > 0) {
                        /* Unmap all previous buffers with their actual
                         * mapped length. */
                        j--;
                        munmap(capbufs[j].mapped, capbufs[j].length);
                        capbufs[j].mapped = NULL;
                    }
                    debug_printf("[fd%d] Cant mmap capture buffers Y\n", fd);
                    return -1;
                }
            }
        }
    } else {
#ifdef DRM
        debug_printf("[fd%d] Setup decoding CAPTURE with VIDIOC_REQBUFS\n", fd);
        debug_printf("[fd%d] buffer(y) size %u buffer(uv) size %u\n",
                fd, fmt.fmt.pix_mp.plane_fmt[0].sizeimage,
                fmt.fmt.pix_mp.plane_fmt[1].sizeimage);

        memzero(reqbuf);
        reqbuf.count = *n_capbufs;
        reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        reqbuf.memory = V4L2_MEMORY_DMABUF;

        ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
        if (ret) {
            debug_printf("[fd%d] Err REQBUFS failed on CAPTURE queue ret %d errno %d\n",
                    fd, ret, errno);
            return ret;
        }
        debug_printf("[fd%d] After VIDIOC_REQBUFS getting buf_cnt %d\n",
                fd, reqbuf.count);
        *n_capbufs = reqbuf.count;

        /* Create DRM buffers */
        for (i = 0; i < *n_capbufs; i++) {
            ret = create_drm_buffer(&capbufs[i],
                    ALIGN(fmt.fmt.pix_mp.width, HW_ALIGN),
                    ((str->num_bytes_per_pix *
                        fmt.fmt.pix_mp.height * format.size_num) /
                     format.size_den));
            if (ret) {
                debug_printf("[fd%d] failed to create drm buffers\n", fd);
                return -1;
            }
            debug_printf("[fd%d] Create_DRM_BUFFERS drm_y_buffer[%d].dbuf_fd 0x%x, length %d offset %d\n",
                    fd, i, capbufs[i].dbuf_fd, capbufs[i].length, capbufs[i].offset);
        }
#else
        debug_printf("Cannot allocate DRM buffers\n");
        return -1;
#endif
    }
    return ret;
}

/**
 * @brief Close a previously opened V4L2 device file descriptor.
 *
 * Terminates the process via errno_exit() if close() fails.
 *
 * @param fd descriptor returned by open_device()
 */
void DecoderProcess::close_device(int fd) {
    debug_printf("[fd%d] close_device\n", fd);
    if (-1 == close(fd))
        errno_exit("close");
    /* NOTE: 'fd' is passed by value; the old "fd = -1;" here was a dead
     * store that never reached the caller's copy, so it was removed.
     * Callers remain responsible for invalidating their own descriptor. */
}

/**
 * @brief Open and probe a V4L2 decoder device node.
 *
 * Opens @p dev_name in non-blocking mode, verifies it answers
 * VIDIOC_QUERYCAP, enumerates the supported CAPTURE and OUTPUT formats
 * (debug logging only) and subscribes to V4L2_EVENT_EOS. A failed event
 * subscription is logged but deliberately not fatal.
 *
 * @param dev_name path of the device node (e.g. "/dev/videoN")
 * @return open file descriptor on success, -1 on failure
 */
int DecoderProcess::open_device(char *dev_name) {
    struct v4l2_capability cap;
    struct v4l2_fmtdesc argp;
    struct v4l2_event_subscription sub;
    int ret = 0, fd = -1;

    fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (-1 == fd) {
        debug_printf("Cannot open '%s': %d, %s\n",
                dev_name, errno, strerror(errno));
        return -1;
    }

    memset(&cap, 0, sizeof(cap));
    ret = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (ret != 0) {
        debug_printf("Failed to verify capabilities\n");
        close(fd);  /* fix: fd was leaked on this error path */
        return -1;
    }

    debug_printf("[fd%d] Info (%s): driver\"%s\" bus_info=\"%s\" card=\"%s\" fd=0x%x\n",
            fd, dev_name, cap.driver, cap.bus_info, cap.card, fd);

    debug_printf("[fd%d] Info (%s): capabilities\"0x%x\" device_caps=\"0x%x\" \n",
            fd, dev_name, cap.capabilities, cap.device_caps);

    /* Enumerate decoded (CAPTURE) pixel formats — informational only. */
    memset(&argp, 0, sizeof(argp));
    argp.index = 0;
    argp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

    debug_printf("[fd%d] Calling V4L2 IOCTL VIDIOC_ENUM_FMT on CAPTURE\n",
            fd);
    while ((ret = ioctl(fd, VIDIOC_ENUM_FMT, &argp)) == 0) {
        debug_printf("[fd%d] argp.index = %d, {pixelformat = %c%c%c%c}, description = '%s'\n",
                fd, argp.index, argp.pixelformat & 0xff,
                (argp.pixelformat >> 8) & 0xff,
                (argp.pixelformat >>16) & 0xff,
                (argp.pixelformat >> 24) & 0xff,
                argp.description);
        argp.index++;
    }

    /* Enumerate coded (OUTPUT) formats — informational only. */
    argp.index = 0;
    argp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;

    debug_printf("[fd%d] Calling V4L2 IOCTL VIDIOC_ENUM_FMT on OUTPUT\n",
            fd);
    while ((ret = ioctl(fd, VIDIOC_ENUM_FMT, &argp)) == 0) {
        debug_printf("[fd%d] argp.index = %d, {pixelformat = %c%c%c%c}, description = '%s'\n",
                fd, argp.index, argp.pixelformat & 0xff,
                (argp.pixelformat >> 8) & 0xff,
                (argp.pixelformat >>16) & 0xff,
                (argp.pixelformat >> 24) & 0xff,
                argp.description);
        argp.index++;
    }

    /* Subscribe to end-of-stream events; best-effort, failure is non-fatal. */
    memset(&sub, 0, sizeof(sub));
    sub.type = V4L2_EVENT_EOS;

    debug_printf("[fd%d] Calling V4L2 IOCTL VIDIOC_SUBSCRIBE_EVENT\n", fd);
    ret = ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
    if (ret != 0) {
        debug_printf("[fd%d] Failed to subscribe to events: err: %d %s\n",
                fd, errno, strerror(errno));
    }

    return fd;
}

/**
 * @brief Scan /dev for the "vxd-dec" V4L2 decoder device node.
 *
 * Opens each /dev/video* node, queries its driver name via
 * VIDIOC_QUERYCAP and writes the path of the first match into
 * @p dev_name.
 *
 * @param dev_name [out] destination buffer for the device path.
 *        NOTE(review): the interface does not convey the buffer size;
 *        the only caller (DecoderIniter) passes a 256-byte buffer —
 *        TODO pass the size explicitly.
 * @return 0 if a matching device was found, -1 otherwise
 */
int DecoderProcess::find_device(char *dev_name) {
    const char *default_dev_path = "/dev/";
    const char *dev_name_mask = "videox";
    const char *driver_name = "vxd-dec";
    struct v4l2_capability cap = {0};
    char name[512] = "";
    DIR *d;
    struct dirent *dir;
    int fd = -1;

    d = opendir(default_dev_path);
    if (!d) {
        debug_printf("Failed to open device path %s %d %s\n",
               default_dev_path, errno, strerror(errno));
        return -1;
    }

    while ((dir = readdir(d)) != NULL) {
        /* Compare only the first 5 chars ("video") of the mask. */
        if (strncmp(dir->d_name, dev_name_mask, 5) == 0) {
            /* fix: the old strncpy+strncat pair used sizeof(name)-1 as the
             * strncat bound (total size, not remaining space), which could
             * overflow 'name'. snprintf is bounded and NUL-terminates. */
            snprintf(name, sizeof(name), "%s%s",
                    default_dev_path, dir->d_name);

            fd = open(name, O_RDWR | O_NONBLOCK, 0);
            if (fd < 0) {
                debug_printf("Failed to open device %s %d %s\n",
                       name, errno, strerror(errno));
                continue;
            }

            memset(&cap, 0, sizeof(cap));

            if (ioctl(fd, VIDIOC_QUERYCAP, &cap)) {
                debug_printf("VIDIOC_QUERYCAP failed on device %s %d %s\n",
                       name, errno, strerror(errno));
                close(fd);
                continue;
            }

            if (strcmp((const char *)cap.driver, driver_name) == 0) {
                close(fd);
                fd = -1;
                debug_printf("No device specified, using %s\n",
                         name);
                /* fix: bound was sizeof(name)==512 while the caller's
                 * buffer is 256 bytes; use the caller's known size. */
                snprintf(dev_name, 256, "%s", name);
                closedir(d);  /* fix: directory handle was leaked */
                return 0;
            }

            close(fd);
            fd = -1;
        }
    }

    closedir(d);  /* fix: directory handle was leaked */
    debug_printf("Failed to find device in %s\n", default_dev_path);
    return -1;
}

/**
 * @brief Convenience initializer for 8-bit H.264 decoding.
 *
 * Records the stream geometry on device 0 and delegates to
 * DecoderIniter() with YUV420P / H264 defaults.
 *
 * @param width  coded picture width in pixels
 * @param height coded picture height in pixels
 * @return DecoderIniter() result: 0 on success, non-zero on failure
 */
int DecoderProcess::DecoderInit(int width, int height) {
    decode_info[0].str.width = width;
    decode_info[0].str.height = height;

    /* fix: the return value was discarded and 0 was returned
     * unconditionally, hiding initialization failures from callers. */
    return DecoderIniter(decode_info[0].str.width, decode_info[0].str.height,
            AV_PIX_FMT_YUV420P, AV_CODEC_ID_H264);
}

/**
 * @brief Full decoder bring-up.
 *
 * Locates the vxd-dec V4L2 device, opens one fd per configured device,
 * optionally opens/locates a DRM device for capture buffers, maps the
 * requested pixel format to the driver fourcc, sizes the buffer queues,
 * initializes each device and starts streaming on the OUTPUT and CAPTURE
 * queues of device 0.
 *
 * @param width   coded picture width in pixels
 * @param height  coded picture height in pixels
 * @param pix_fmt decoded output format (AVPixelFormat value)
 * @param codec   input bitstream codec (AVCodecID value)
 * @return 0 on success; EXIT_FAILURE, -1 or an ioctl error code on failure
 */
int DecoderProcess::DecoderIniter(int width, int height, int pix_fmt, int codec) {
    char dev_name[256] = "";
    int i = 0;
    int ret = 0, type;

#ifdef DRMMODE
    const char *dir_path = "/dev/dri/";
    DIR *d;
    struct dirent *dir;
    drmModeResPtr res;
#endif

    /* dev_name starts empty, so find_device() always runs and fills it. */
    if (strlen(dev_name) == 0) {
        ret = find_device(dev_name);
        if (ret)
            return EXIT_FAILURE;
    }

    for (i = 0; i < num_devs; i++) {
        debug_printf("*** Calling open_device for device %d \n", i);
        decode_info[i].fd = fds[i] = open_device(dev_name);
        decode_info[i].sleep_time = 10;
        decode_info[i].enable_prof = 1;
        if (fds[i] < 0) {
            debug_printf("Failed to open device %s\n", dev_name);
            return EXIT_FAILURE;
        }
        debug_printf("*** device %d is fd %d\n", i, fds[i]);

        /* Bytes per pixel component: 1 for 8-bit, 2 for 10-bit formats.
         * NOTE(review): default to 1 so the YUVJ* formats accepted by the
         * switch below (previously left unset here) do not use a stale
         * value — confirm 8-bit is correct for YUVJ on this driver.
         * Also adds the braces the original un-braced ifs were missing. */
        decode_info[i].str.num_bytes_per_pix = 1;
        if (pix_fmt == AV_PIX_FMT_YUV420P10LE || pix_fmt == AV_PIX_FMT_YUV422P10LE) {
            decode_info[i].str.num_bytes_per_pix = 2;
        }
    }

    if (use_drm_capbuff == 0) {
        debug_printf("\nUsing v4l2 for capture buffers\n");
    } else {
        if (strlen(drm_file_name) > 0) {
            /* An explicit DRM node was configured: just open it. */
#ifdef DRM
            drmfd1 = open(drm_file_name, O_CLOEXEC);
            if (drmfd1 < 0) {
                debug_printf("Failed to open drm device\n");
                return EXIT_FAILURE;
            }
#else
            debug_printf("DRM not supported with current build arguments\n");
            return EXIT_FAILURE;
#endif
        } else {
            /* No DRM node configured: probe /dev/dri/ for a card that
             * reports CRTCs, connectors and encoders. */
#ifdef DRMMODE
            d = opendir(dir_path);
            if (!d) {
                debug_printf("Failed to open drm device directory\n");
                return EXIT_FAILURE;
            }
            while ((dir = readdir(d)) != NULL) {
                if (strncmp(dir->d_name, DEVICE_NAME, 4) == 0) {
                    strcpy(drm_file_name, dir_path);        /* NOLINT */
                    strncat(drm_file_name, dir->d_name, sizeof(DEVICE_NAME));
                    drmfd1 = open(drm_file_name, O_CLOEXEC);
                    if (drmfd1 < 0) {
                        debug_printf("Failed to open drm device %s\n",
                                drm_file_name);
                        continue;  /* fix: don't query resources on a bad fd */
                    }
                    debug_printf("No drm device specified, testing %s\n",
                            drm_file_name);
                    res = drmModeGetResources(drmfd1);
                    if (res && res->count_crtcs > 0 &&
                            res->count_connectors > 0 &&
                            res->count_encoders > 0) {
                        drmModeFreeResources(res);  /* fix: res was leaked */
                        debug_printf("No drm device specified, using %s\n",
                                drm_file_name);
                        break;
                    }
                    if (res)
                        drmModeFreeResources(res);  /* fix: res was leaked */
                    close(drmfd1);
                    drmfd1 = -1;
                }
            }
            closedir(d);  /* fix: directory handle was leaked */
#else
            debug_printf("Can't search DRM directory\n");
            return EXIT_FAILURE;
#endif
        }
        #ifdef DRM
        if (drmfd1 < 0) {
            debug_printf("Failed to open drm device\n");
            return EXIT_FAILURE;
        }
        #endif
    }

    for (i = 0; i < num_devs; i++) {
        /* Map AVPixelFormat to the driver fourcc and the capture-buffer
         * size ratio (size = w * h * num / den * bytes_per_pix). */
        switch (pix_fmt) {
            case AV_PIX_FMT_YUV420P:
                decode_info[i].fmt.fourcc = V4L2_PIX_FMT_NV12;
                decode_info[i].fmt.size_num = 3;
                decode_info[i].fmt.size_den = 2;
                break;
            case AV_PIX_FMT_YUV422P:
                decode_info[i].fmt.fourcc = V4L2_PIX_FMT_NV16;
                decode_info[i].fmt.size_num = 2;
                decode_info[i].fmt.size_den = 1;
                break;
            case AV_PIX_FMT_YUV420P10LE:
                decode_info[i].fmt.fourcc = V4L2_PIX_FMT_TI1210;
                decode_info[i].fmt.size_num = 3;
                decode_info[i].fmt.size_den = 2;
                break;
            case AV_PIX_FMT_YUV422P10LE:
                decode_info[i].fmt.fourcc = V4L2_PIX_FMT_TI1610;
                decode_info[i].fmt.size_num = 2;
                decode_info[i].fmt.size_den = 1;
                break;
            case AV_PIX_FMT_YUVJ420P:
                decode_info[i].fmt.fourcc = V4L2_PIX_FMT_YUV420M;
                decode_info[i].fmt.size_num = 1;
                decode_info[i].fmt.size_den = 1;
                break;
            case AV_PIX_FMT_YUVJ422P:
                decode_info[i].fmt.fourcc = V4L2_PIX_FMT_YUV422M;
                decode_info[i].fmt.size_num = 1;
                decode_info[i].fmt.size_den = 1;
                break;
            default:
                debug_printf("Invalid pixel format detected\n");
                return -1;
        }

        decode_info[i].str.width  =  width;
        decode_info[i].str.height =  height;
        // decode_info[i].str.bitdepth =  8; // or 10
        decode_info[i].str.pix_fmt = (AVPixelFormat)pix_fmt;
        decode_info[i].str.codec  =  (AVCodecID)codec;
        decode_info[i].nframes    = 1;
        decode_info[i].usedrmbuff = 0;
        decode_info[i].n_outbufs  = 1;
        /* Request number of output buffers based on h264 spec
         * + display delay */
        decode_info[i].n_capbufs = D_MIN(MAX_CAPBUFS_H264, (32768 /
                    ((decode_info[i].str.width / 16) *
                    (decode_info[i].str.height / 16)))) +
                    DISPLAY_LAG;
        debug_printf("n_outbufs %d n_capbufs %d\n", decode_info[i].n_outbufs, decode_info[i].n_capbufs);
        decode_info[i].rdfd = 0;

        ret = init_device(fds[i], decode_info[i].rdfd, &decode_info[i].str, decode_info[i].outbufs,
                            decode_info[i].capbufs, &decode_info[i].n_outbufs, &decode_info[i].n_capbufs,
                            decode_info[i].fmt, decode_info[i].usedrmbuff);
        if (ret) {
            debug_printf("init_device failed with ret %d for device %d [fd%d]\n",
                    ret, i, fds[i]);
            return EXIT_FAILURE;
        }
    }

    /* Start streaming on device 0: OUTPUT (coded input) first, then
     * CAPTURE (decoded output). A CAPTURE failure is only logged, matching
     * the original behavior. */
    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ret = ioctl(fds[0], VIDIOC_STREAMON, &type);
    if (ret) {
        debug_printf("[fd%d] OUTPUT VIDIOC_STREAMON failed with ret %d\n",
            fds[0], ret);
        return ret;
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ret = ioctl(fds[0], VIDIOC_STREAMON, &type);
    if (ret)
        debug_printf("[fd%d] CAPTURE VIDIOC_STREAMON failed with ret %d\n",
            fds[0], ret);

    return 0;
}

/**
 * @brief Stop streaming and tear down all decoder devices and buffers.
 *
 * Issues STREAMOFF on the OUTPUT and CAPTURE queues of device 0, then
 * unmaps/releases every device's buffers and closes its descriptors.
 *
 * @return 0 on success, or the OUTPUT STREAMOFF ioctl error code
 */
int DecoderProcess::DecoderUnInit() {
    int i = 0, type = 0, ret = 0;

    debug_printf("Stream ended, calling STREAMOFF\n");
    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ret = ioctl(fds[0], VIDIOC_STREAMOFF, &type);
    if (ret) {
        debug_printf("[fd%d] VIDIOC_STREAMOFF on OUTPUT failed with ret %d\n",
                fds[0], ret);
        return ret;
    }

    /* CAPTURE STREAMOFF failure is only logged (original behavior). */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ret = ioctl(fds[0], VIDIOC_STREAMOFF, &type);
    if (ret)
        debug_printf("[fd%d] VIDIOC_STREAMOFF on CAPTURE failed with ret %d\n",
                fds[0], ret);

    for (i = 0; i < num_devs; i++) {
        uninit_device(fds[i], decode_info[i].outbufs, decode_info[i].capbufs,
            &decode_info[i].n_outbufs, &decode_info[i].n_capbufs, use_drm_capbuff);

        if (fds[i] >= 0) {
            close(fds[i]);
            fds[i] = -1;
        }
        /* fix: rdfd is initialized to 0 by DecoderIniter, so the previous
         * unconditional close() closed stdin (fd 0). Guard it like wrfd. */
        if (decode_info[i].rdfd > 0) {
            close(decode_info[i].rdfd);
        }
        if (decode_info[i].wrfd > 0) {
            close(decode_info[i].wrfd);
        }
    }

    return 0;
}

int DecoderProcess::DecoderH264(char *h264_data, int size, char *yuv_buf, int len, uint64_t m_u64Pts) {
    int ret = -1;

    decode_info[0].h264_data = h264_data;
    decode_info[0].h264_data_size = size;
    decode_info[0].yuv_buf = yuv_buf;
    decode_info[0].yuv_buf_len = len;
    decode_info[0].wrfd = -1;

    #if 0
    snprintf(output_file, sizeof(output_file),
            "/data/ac/%ld.yuv", m_u64Pts);
    decode_info[0].wrfd = open(output_file,
            O_CREAT | O_RDWR,
            0777);
    debug_printf("wrfd = %d\n", decode_info[0].wrfd);

    if (decode_info[0].wrfd <= 0) {
        debug_printf("Failed to open output file %s\n", output_file);
        return EXIT_FAILURE;
    } else {
        debug_printf("Opened output file %s\n", output_file);
    }
    #endif

    ret = mainloop(decode_info);
    if (ret < 0) {
        debug_printf("decoder fail...\n");
    } else {
        debug_printf("decoder successfuly...\n");
    }

    return ret;
}

}  // namespace kapok_hardware_tidecoder
