// Blog: https://blog.csdn.net/fengbingchun/article/details/103101344
#include "CameraGrab.h"
#include "libyuv.h"
#include <iostream>
#include <algorithm>
#include <set>
#include "v4l2_common.hpp"
#include "img_conv.h"

#ifdef LINUX_AARCH64


#endif

// Maps the test_* "codec_type" index (0: h264; 1: h265; 2: mjpeg; 3: rawvideo)
// to its V4L2 FOURCC. Index 1 is 0 because no h265/HEVC V4L2 format is wired
// up here. The commented line preserves the raw FOURCC values for reference.
//static const __u32 v4l2_pixel_format_map[4] = {875967048, 0, 1196444237, 1448695129};
static const __u32 v4l2_pixel_format_map[] = {V4L2_PIX_FMT_H264, 0, V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_YUYV};

// Returns 1 when `name` looks like a V4L device node (by well-known prefix),
// 0 otherwise. Only the prefix is checked, so e.g. "video0", "video12" match.
int v4l2_is_v4l_dev(const char *name)
{
    static const struct { const char* prefix; unsigned int len; } kV4lPrefixes[] = {
        { "video", 5 },
        { "radio", 5 },
        { "vbi", 3 },
        { "v4l-subdev", 10 },
    };

    for (const auto& p : kV4lPrefixes) {
        if (strncmp(name, p.prefix, p.len) == 0)
            return 1;
    }
    return 0;
}

// Open a V4L2 device node and verify it is a streaming video-capture device.
// Returns the open file descriptor on success; -1 on failure (any fd opened
// along the way is closed).
int device_open(const char* device_path)
{
    int fd = open(device_path, O_RDWR, 0);
    if (fd < 0) {
        fprintf(stderr, "Error: cannot open video device %s\n", device_path);
        // fix: return directly — the old `goto fail` called close() on an
        // invalid (negative) descriptor.
        return -1;
    }

    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        fprintf(stderr, "Error: cam_info: can't open device: %s\n", device_path);
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "Error: Not a video capture device\n");
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        fprintf(stderr, "Error: The device does not support the streaming I/O method.\n");
        goto fail;
    }

    return fd;

fail:
    close(fd);
    return -1;
}

// Conversion table between FFmpeg pixel formats / codec IDs and V4L2 pixel
// formats. Raw formats map to AV_CODEC_ID_RAWVIDEO plus a concrete AVPixelFormat;
// compressed formats (MJPEG/H264/MPEG4/CPIA) map to a codec ID with
// AV_PIX_FMT_NONE. The table is terminated by the all-NONE sentinel row and is
// scanned linearly by ff_fmt_v4l2codec()/ff_fmt_v4l2ff().
const struct fmt_map ff_fmt_conversion_table[] = {
    //ff_fmt              codec_id              v4l2_fmt
    { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV420  },
    { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU420  },
    { AV_PIX_FMT_YUV422P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV422P },
    { AV_PIX_FMT_YUYV422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUYV    },
    { AV_PIX_FMT_UYVY422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_UYVY    },
    { AV_PIX_FMT_YUV411P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV411P },
    { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV410  },
    { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU410  },
    { AV_PIX_FMT_RGB555LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555  },
    { AV_PIX_FMT_RGB555BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555X },
    { AV_PIX_FMT_RGB565LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565  },
    { AV_PIX_FMT_RGB565BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565X },
    { AV_PIX_FMT_BGR24,   AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR24   },
    { AV_PIX_FMT_RGB24,   AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB24   },
// The X/A-variant 32-bit formats only exist in newer kernel headers.
#ifdef V4L2_PIX_FMT_XBGR32
    { AV_PIX_FMT_BGR0,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_XBGR32  },
    { AV_PIX_FMT_0RGB,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_XRGB32  },
    { AV_PIX_FMT_BGRA,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_ABGR32  },
    { AV_PIX_FMT_ARGB,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_ARGB32  },
#endif
    { AV_PIX_FMT_BGR0,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR32   },
    { AV_PIX_FMT_0RGB,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB32   },
    { AV_PIX_FMT_GRAY8,   AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_GREY    },
#ifdef V4L2_PIX_FMT_Y16
    { AV_PIX_FMT_GRAY16LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_Y16     },
#endif
    { AV_PIX_FMT_NV12,    AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_NV12    },
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_MJPEG,    V4L2_PIX_FMT_MJPEG   },
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_MJPEG,    V4L2_PIX_FMT_JPEG    },
#ifdef V4L2_PIX_FMT_H264
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_H264,     V4L2_PIX_FMT_H264    },
#endif
#ifdef V4L2_PIX_FMT_MPEG4
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_MPEG4,    V4L2_PIX_FMT_MPEG4   },
#endif
#ifdef V4L2_PIX_FMT_CPIA1
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_CPIA,     V4L2_PIX_FMT_CPIA1   },
#endif
#ifdef V4L2_PIX_FMT_SRGGB8
    { AV_PIX_FMT_BAYER_BGGR8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SBGGR8 },
    { AV_PIX_FMT_BAYER_GBRG8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SGBRG8 },
    { AV_PIX_FMT_BAYER_GRBG8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SGRBG8 },
    { AV_PIX_FMT_BAYER_RGGB8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_SRGGB8 },
#endif
    // Sentinel: terminates the linear scans in ff_fmt_v4l2codec()/ff_fmt_v4l2ff().
    { AV_PIX_FMT_NONE,    AV_CODEC_ID_NONE,     0                    },
};

enum AVCodecID ff_fmt_v4l2codec(uint32_t v4l2_fmt)
{
    for (int i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
        if (ff_fmt_conversion_table[i].v4l2_fmt == v4l2_fmt) {
            return ff_fmt_conversion_table[i].codec_id;
        }
    }

    return AV_CODEC_ID_NONE;
}

enum AVPixelFormat ff_fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id)
{
    for (int i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
        if (ff_fmt_conversion_table[i].v4l2_fmt == v4l2_fmt &&
            ff_fmt_conversion_table[i].codec_id == codec_id) {
            return ff_fmt_conversion_table[i].ff_fmt;
        }
    }

    return AV_PIX_FMT_NONE;
}



// Enumerate V4L2 capture devices under /dev and fill `device_list` with
// device path -> card name (e.g. "/dev/video0" -> "USB Camera").
// Returns 0 on success, -1 when /dev cannot be opened.
int test_v4l2_get_device_list(std::map<std::string, std::string>& device_list)
{
    device_list.clear();

    const char* dir_name = "/dev";
    DIR* dir = opendir(dir_name);
    if (!dir) {
        fprintf(stderr, "Error: couldn't open the directory: %s\n", dir_name);
        return -1;
    }

    struct dirent* entry = nullptr;
    while ((entry = readdir(dir))) {
        char device_name[512];
        if (!v4l2_is_v4l_dev(entry->d_name))
            continue;

        snprintf(device_name, sizeof(device_name), "/dev/%s", entry->d_name);
        int fd = device_open(device_name);
        if (fd < 0)
            continue;

        struct v4l2_capability cap;
        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
            // fix: a single faulty device no longer aborts the whole
            // enumeration — close it and keep scanning the other nodes
            // (the old goto/break path stopped at the first failure).
            fprintf(stderr, "Error: cam_info: can't open device: %s\n", device_name);
            close(fd);
            continue;
        }

        device_list[device_name] = reinterpret_cast<char*>(cap.card);
        close(fd);
    }

    closedir(dir);
    return 0;
}

// Enumerate the pixel formats of `device_name` and report which of the
// supported codec-type indices (0: h264; 1: h265; 2: mjpeg; 3: rawvideo —
// matching v4l2_pixel_format_map) the device offers, sorted ascending.
// Returns 0 on success, -1 on failure.
int test_v4l2_get_codec_type_list(const std::string& device_name, std::vector<int>& codec_list)
{
    codec_list.clear();

    int fd = device_open(device_name.c_str());
    if (fd < 0) {
        fprintf(stderr, "Error: fail to open device: %s\n", device_name.c_str());
        return -1;
    }

    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        fprintf(stderr, "Error: cam_info: can't open device: %s\n", device_name.c_str());
        close(fd);  // fix: the descriptor used to leak on this error path
        return -1;
    }

    struct v4l2_fmtdesc vfd = {};  // fix: zero reserved fields before the ioctl
    vfd.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    vfd.index = 0;

    while (!ioctl(fd, VIDIOC_ENUM_FMT, &vfd)) {
        // fix: the old loop computed codec_id/pix_fmt but never filled
        // codec_list, so callers always saw an empty result. Map each
        // enumerated format to its codec-type index (same ordering as
        // v4l2_pixel_format_map; index 1/h265 has no V4L2 format there).
        int codec_type = -1;
        switch (vfd.pixelformat) {
            case V4L2_PIX_FMT_H264:  codec_type = 0; break;
            case V4L2_PIX_FMT_MJPEG: codec_type = 2; break;
            case V4L2_PIX_FMT_YUYV:  codec_type = 3; break;
            default: break;  // format not exposed through the test_* API
        }
        if (codec_type >= 0 &&
            std::find(codec_list.begin(), codec_list.end(), codec_type) == codec_list.end()) {
            codec_list.push_back(codec_type);
        }

        vfd.index++;
    }

    std::sort(codec_list.begin(), codec_list.end());
    close(fd);
    return 0;
}

// Enumerate the discrete frame sizes the device supports for the given
// codec-type index (0: h264; 1: h265; 2: mjpeg; 3: rawvideo) and return them
// as "WxH" strings, de-duplicated and sorted by the std::set ordering.
// Returns 0 on success, -1 on bad codec_type or device failure.
int test_v4l2_get_video_size_list(const std::string& device_name, int codec_type, std::vector<std::string>& size_list)
{
    size_list.clear();
    if (codec_type < 0 || codec_type > 3) return -1;

    int fd = device_open(device_name.c_str());
    if (fd < 0) {
        fprintf(stderr, "Error: fail to open device: %s\n", device_name.c_str());
        return -1;
    }

    struct v4l2_capability cap;
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        fprintf(stderr, "Error: cam_info: can't open device: %s\n", device_name.c_str());
        close(fd);  // fix: the descriptor used to leak on this error path
        return -1;
    }

    struct v4l2_frmsizeenum vfse = {};  // fix: zero reserved fields before the ioctl
    vfse.pixel_format = v4l2_pixel_format_map[codec_type];
    vfse.index = 0;

    // Collect unique {width, height} pairs; only discrete sizes are reported.
    std::set<std::vector<unsigned int>> list;
    while (!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        if (vfse.type == V4L2_FRMSIZE_TYPE_DISCRETE)
            list.insert({vfse.discrete.width, vfse.discrete.height});
        vfse.index++;
    }

    for (const auto& wh : list)
        size_list.emplace_back(std::to_string(wh[0]) + "x" + std::to_string(wh[1]));

    close(fd);
    return 0;
}

// Print every V4L2 capture device, the codec types it supports, and the
// frame sizes available per codec type. Always returns 0.
int test_v4l2_get_video_device_info()
{
    std::map<std::string, std::string> device_list;
    test_v4l2_get_device_list(device_list);
    fprintf(stdout, "device count: %d\n", (int)device_list.size());

    for (const auto& dev : device_list) {
        fprintf(stdout, "device name: %s, description: %s\n", dev.first.c_str(), dev.second.c_str());

        std::vector<int> codec_list;
        test_v4l2_get_codec_type_list(dev.first, codec_list);
        for (int codec : codec_list) {
            fprintf(stdout, " support codec type(0: h264; 1: h265; 2: mjpeg; 3: rawvideo):%d\n", codec);

            std::vector<std::string> size_list;
            test_v4l2_get_video_size_list(dev.first, codec, size_list);
            fprintf(stdout, "  support video size(width*height):\n");
            for (const auto& size : size_list)
                fprintf(stdout, "   %s\n", size.c_str());
        }
    }

    return 0;
}


uint8_t   colorTab[256] = {0};


// Clamp an integer to the valid 8-bit pixel range [0, 255]:
// values already in range pass through, larger values saturate to 255,
// negatives become 0.
__inline int  CheckValue(int value)
{
    if ((value & ~0xff) == 0)
        return value;               // already within [0, 255]
    return (value > 255) ? 255 : 0; // saturate high, clip negatives to 0
}

// Fill the global colorTab[] lookup table used for brightness/contrast
// adjustment of 8-bit pixel values.
// bright/contrast: adjustment controls; threshold: the contrast pivot value.
// NOTE(review): the 255-based scaling below suggests bright/contrast are in
// [-255, 255] — confirm against the callers (LineBrightAndContrast passes
// 25/35/255).
void InitColorTab(int bright,int contrast,int threshold)
{
    // Convert bright to a gain factor; positive values (0, 255) are remapped
    // through 1/(1-x) - 1 so the gain grows steeply as bright approaches 255.
    float bv = bright <= -255? -1.0f : bright / 255.0f;
    if (bright > 0 && bright < 255)
        bv = 1.0f / (1.0f - bv) - 1.0f;

    // Same remapping for contrast.
    float cv = contrast <= -255? -1.0f : contrast / 255.0f;
    if (contrast > 0 && contrast < 255)
        cv = 1.0f / (1.0f - cv) - 1.0f;

    // Build the table: brightness is applied before contrast when
    // contrast > 0, and after it otherwise. contrast >= 255 degenerates to a
    // hard threshold (binarize at `threshold`). CheckValue() clamps each
    // intermediate result to [0, 255].
    for (int  i = 0; i < 256; i ++)
    {
        int v = contrast > 0? CheckValue(i + (int)(i * bv + 0.5f)) : i;
        if (contrast >= 255)
            v = v >= threshold? 255 : 0;
        else
            v = CheckValue(v + (int)((v - threshold) * cv + 0.5f));
        colorTab[i] = contrast <= 0? CheckValue(v + (int)(v * bv + 0.5f)) : v;
    }


}



bool hasInitTab = false;
///////////////////////////////////////////////////////////
// Blog: https://blog.csdn.net/fengbingchun/article/details/95984569
// Prepare the brightness/contrast LUT for a frame line.
// NOTE(review): the table is initialized (once, with fixed 25/35/255 settings
// rather than the bright/contrast arguments) but is never applied to `data`
// here — presumably the application step lives elsewhere or was removed;
// confirm before relying on this function to alter the pixels.
void LineBrightAndContrast(uint8_t *data, int bright, int contrast)
{
    // Nothing to do when neither adjustment is requested.
    if (bright == 0 && contrast == 0)
        return;

    // Lazily build the LUT exactly once.
    if (!hasInitTab) {
        hasInitTab = true;
        InitColorTab(25, 35, 255);
    }
}

// Running red/blue channel extrema.
// NOTE(review): initialized for a min/max scan (max starts at 0, min at 255)
// but no code in this file updates them — confirm they are used elsewhere.
int       maxRed = 0;
int       minRed = 255;
int       maxBlue = 0;
int       minBlue = 255;




// Nearest-neighbour resize of an interleaved 8-bit image from
// width_in x height_in (at `input`, cropped at xOffSet/yOffSet) into
// width_out x height_out at `output`.
// NOTE(review): the crop offset is computed with width_out
// (yOffSet*width_out*channels) while the source row stride uses width_in —
// these disagree unless width_in == width_out; confirm which buffer the
// offsets are meant to index.
// NOTE(review): bpl_source/bpl_dst hard-code 3 bytes per pixel and exactly 3
// channel bytes are copied per pixel, so `channels` is effectively assumed
// to be 3.
void resizeByNN2(uchar *input, uchar *output, int height_in, int width_in, int channels, int height_out, int width_out,int xOffSet,int yOffSet)
{

    uchar *data_source = input + yOffSet*width_out*channels + xOffSet*channels;
    uchar *data_out = output;

    int bpl_source = width_in*3;   // bytes per source row (assumes 3 channels)
    int bpl_dst = width_out*3;     // bytes per destination row

    int pos = 0;
    int sep = 0;
    uchar *sr = nullptr;
    uchar *hr = nullptr;
    float step = 0.0;              // NOTE(review): holds an int() result — truncated x index
    float step_x = float(width_in)/float(width_out);
    float step_y = float(height_in)/float(height_out);

    for (int i = 0; i < height_out; i++)
    {
        for (int j = 0; j < width_out; j++) {
            // Map the output pixel (j, i) back to the nearest source pixel.
            sep = int(step_y*i);
            step = int(j*step_x);
            sr = data_source + sep*bpl_source;
            hr = data_out + i*bpl_dst +j*channels;
            pos = step*channels;

            // Copy the three channel bytes of the chosen source pixel.
            *hr     = *(sr+pos);
            *(hr+1) = *(sr+pos+1);
            *(hr+2) = *(sr+pos+2);
            //memcpy(hr, sr+pos, channels);


        }
    }
    return;
}


// Electronic zoom via the Rockchip RGA: crop the centre region
// (nDstWidth x nDstHeight = frame size / nScalVal) of vidFrame into the
// pScaleYUV buffer, then scale it up 2x into resizeFrame.
// On builds without LINUX_AARCH64 this function is a no-op.
// NOTE(review): `ret` is unused; `dst_handle2` is never checked for validity;
// buffers are released even when imcrop/imresize_t fail; and the fixed 2.0
// upscale only restores the original size when nScalVal == 2 — confirm all
// of these are intended.
void  ScaleFrameRGA(cv::Mat&  vidFrame,cv::Mat&  resizeFrame,uint8_t* pScaleYUV,int nScalVal)
{
    int ret;  // NOTE(review): unused
    int   nDstHeight  = vidFrame.rows/nScalVal;
    int   nDstWidth  = vidFrame.cols/nScalVal;
#ifdef   LINUX_AARCH64
    //
    rga_buffer_t src = {0,0,0,0,0,0,0,0};
    rga_buffer_t dst = {0,0,0,0,0,0,0,0};
    rga_buffer_t dst2 = {0,0,0,0,0,0,0,0};

    im_rect src_rect = {0,0,0,0};
    im_rect dst_rect = {0,0,0,0};


    rga_buffer_handle_t src_handle, dst_handle,dst_handle2;

    // Wrap the full-size source frame for the RGA.
    im_handle_param_t param;
    param.width = vidFrame.cols;
    param.height = vidFrame.rows;
    param.format = RK_FORMAT_RGB_888;


    src_handle = importbuffer_virtualaddr(vidFrame.data, &param);

    // Wrap the intermediate crop buffer (nDstWidth x nDstHeight).
    param.width = nDstWidth;
    param.height = nDstHeight;
    param.format = RK_FORMAT_RGB_888;
    dst_handle = importbuffer_virtualaddr(pScaleYUV, &param);
    if (src_handle == 0 || dst_handle == 0) {
           printf("importbuffer failed!\n");
           return ;
    }

    // Centre crop: the rectangle starts at half the destination size.
    src_rect.x = nDstWidth/2;
    src_rect.y = nDstHeight/2;
    src_rect.width = nDstWidth;
    src_rect.height = nDstHeight;

    src = wrapbuffer_handle(src_handle, vidFrame.cols, vidFrame.rows, RK_FORMAT_RGB_888);
    dst = wrapbuffer_handle(dst_handle, nDstWidth, nDstHeight, RK_FORMAT_RGB_888);

    // Step 1: crop the centre region into pScaleYUV.
    IM_STATUS STATUS = imcrop(src, dst,src_rect);
    if(IM_STATUS_SUCCESS != STATUS)
    {
       log_i("alg rga scale1 fail");
    }


    // Step 2: scale the cropped region back up into resizeFrame.
    param.width = vidFrame.cols;
    param.height = vidFrame.rows;
    param.format = RK_FORMAT_RGB_888;
    dst_handle2 = importbuffer_virtualaddr(resizeFrame.data, &param);

    dst2 = wrapbuffer_handle(dst_handle2, vidFrame.cols, vidFrame.rows, RK_FORMAT_RGB_888);
    STATUS = imresize_t(dst, dst2,2.0,2.0,INTER_LINEAR,1);
    if(IM_STATUS_SUCCESS != STATUS)
    {
       log_i("alg rga scale2 fail");
    }
    releasebuffer_handle(src.handle);
    releasebuffer_handle(dst.handle);
    releasebuffer_handle(dst2.handle);
#endif
}


// Post-process one visible-light frame: run the brightness/contrast LUT
// initialization and copy the captured data into the output frame.
// pScaleYUV/pTmpRGB are kept for interface compatibility; they are only
// needed by the (not yet implemented) electronic-zoom path.
// Historical note: earlier revisions swapped B/R channels in-place and used
// the RGA for format conversion; that code was disabled and the unused
// locals it needed (tmpBlue/tmpRed/tmpGreen/pDataTmp/i, pSrc/pDest) have
// been removed here.
void process_visframe(uint8_t* pV4l2Data, stVFrame*  pFrame,uint8_t nDispCtrl,uint8_t* pScaleYUV,uint8_t*  pTmpRGB)
{
    // Initializes colorTab on first use; see LineBrightAndContrast — the LUT
    // is not applied to the pixels here.
    LineBrightAndContrast(pFrame->ucData, 30, 0);

    if (IS_VIS_CHN(nDispCtrl) && GET_BIT(nDispCtrl, 6))
    {
        // Visible-light channel with electronic zoom requested: not implemented.
    }
    else
    {
        // NOTE(review): the frame size is hard-coded to 1920x1080 RGB24 —
        // confirm this always matches pFrame's real dimensions.
        memcpy(pFrame->ucData, pV4l2Data, 1920*1080*3);
    }
}



// Post-process one infrared frame: crop the 720-wide capture down to 640
// columns, probe whether the camera is actually delivering data, and apply
// the optional electronic zoom. Returns LD_RES_OK when real data was seen,
// LD_RES_CAMERA_ERR when every probed row looked empty.
// NOTE(review): pV4l2Data is unused — the source pixels come from
// pChannel->m_pTmpFrame; confirm that is intended.
enResVal  process_infraframe(stChnV4L2*  pChannel,uint8_t* pV4l2Data, stVFrame*  pFrame,uint8_t nDispCtrl,uint8_t*  pScaleYUV)
{
    int k;
    pFrame->nWidth  = pChannel->m_nAlgWidth;
    pFrame->nHeight = pChannel->m_nAlgHeight;

    // Crop the 720-wide source: skip 48 lines at the top and 40 pixels on the
    // left (RGB24, hence the *3). Layout sketch from the original author:
    //   |---------------------720--------------------------------|
    //   |                      |                                 |
    //   |                      30                                |
    //   |                      |                                 |
    //   |---40---|-------640---------------------------|----40---|
    //   |        |             |                       |         |
    //   |        |            512                      |         |
    uint8_t*  pStartData = pChannel->m_pTmpFrame + 48*720*3  + 40*3;
    uint8_t*  pDest  = pFrame->ucData;

    // No electronic zoom for the copy itself: move pFrame->nHeight rows of
    // 640 RGB pixels, advancing the source by the full 720-pixel stride.
    for(k=0; k<pFrame->nHeight; k++)
    {
        memcpy(pDest,pStartData,640*3);
        pDest += 640*3;
        pStartData += 720*3;
    }

    // Camera-health probe: scan one byte per row across 128 rows; 0x10 is the
    // "no data" fill value. Stop at the first row carrying real data.
    // (fix: removed the unused NoDataFrame counter.)
    for(k=0; k<128; k++)
    {
        if(pFrame->ucData[32 + k*640*3] != 0x10)
        {
            break;
        }
    }

    if(IS_INFRA_CHN(nDispCtrl) && GET_BIT(nDispCtrl,7))
    {   // Infrared channel with electronic zoom: upscale the centre quarter.
        cv::Mat  imgInfrared = cv::Mat(480,640, CV_8UC3, pFrame->ucData);

        // The infrared width is not a multiple of 8, which the RGA cannot
        // handle — use cv::resize instead.
        cv::Rect roiRect = cv::Rect(imgInfrared.cols/4,imgInfrared.rows/4,imgInfrared.cols/2,imgInfrared.rows/2);

        cv::Mat  imgScaled = cv::Mat(480, 640, CV_8UC3, pScaleYUV);
        cv::resize(imgInfrared(roiRect),imgScaled,cv::Size(imgInfrared.cols,imgInfrared.rows));
        memcpy(pFrame->ucData,pScaleYUV,640*480*3);
    }

    // k == 128 means every probed row was the fill value -> camera error.
    return (k < 128)?LD_RES_OK:LD_RES_CAMERA_ERR;
}


// Copy one captured frame into the output frame structure and timestamp it.
// The capture format is configured as RGB24 in start_cap_dev(), so a plain
// copy suffices; the earlier RGA conversion / per-pixel channel-swap paths
// are disabled. nLen/nTopField/nDispCtrl/pScaleYUV/pTempRGB are kept for
// interface compatibility. Returns LD_RES_OK.
int  process_image(stChnV4L2*  pChannel,uint8_t* pV4l2Data,int nLen, stVFrame*  pFrame,int  nTopField,uint8_t nDispCtrl,uint8_t* pScaleYUV,uint8_t* pTempRGB)
{
    // Stamp the frame with the capture time.
    gettimeofday(&pFrame->nTimeStamp, nullptr);

    // Assumes capture size equals the algorithm size (3 bytes per pixel).
    const int nCopyBytes = pChannel->nCapWidth * pChannel->nCapHeight * 3;
    memcpy(pFrame->ucData, pV4l2Data, nCopyBytes);

    pFrame->fMinScale = pChannel->m_fMinScale;

    return LD_RES_OK;
}

// Dequeue one filled capture buffer, hand it to process_image(), then
// re-queue the buffer. Returns 1 when a frame was produced, 0 otherwise
// (no buffer ready, processing failed, or re-queue failed).
int read_frame(stChnV4L2*  pChannel,stVFrame*  pFrame,uint8_t   nDispCtrl,uint8_t* pTmpRGB,uint8_t* pScaleYUV)
{
    int  nResOk = 0;
    static int nLastError = 0;           // rate-limits the dequeue error log
    struct v4l2_plane planes = {0};

    CLEAR(pChannel->one_buf);
    pChannel->one_buf.type = pChannel->buff_type;
    pChannel->one_buf.memory = V4L2_MEMORY_MMAP;
    pChannel->one_buf.length = 1;
    pChannel->one_buf.m.planes = &planes;

    /* dequeue a filled buffer; with O_NONBLOCK this fails when none is ready */
    if(0 > ioctl(pChannel->v4l2_fd,VIDIOC_DQBUF,&pChannel->one_buf)){
        if(nLastError == 0)
        {
            nLastError = -1;
            log_e("出队失败\n");
        }
        return 0;
    }
    // fix: re-arm the error log once dequeuing recovers — previously the
    // static flag stayed set forever, so failures after a recovery were
    // silently dropped.
    nLastError = 0;

    // Field selection for the interlaced infrared channel was handled here in
    // earlier revisions (based on one_buf.sequence parity); currently fixed at 0.
    int nTopField = 0;

    int nRes = process_image(pChannel,(uint8_t *)pChannel->buffer_infos[pChannel->one_buf.index].start,pChannel->one_buf.m.planes->bytesused,pFrame,nTopField,nDispCtrl,pScaleYUV,pTmpRGB);
    nResOk = (nRes < LD_RES_OK) ? 0 : 1;

    /* hand the buffer back to the driver */
    if (0 > ioctl(pChannel->v4l2_fd, VIDIOC_QBUF, &pChannel->one_buf)) {
        log_e("入队失败\n");
        return 0;
    }

    return nResOk;
}



// Stop streaming, unmap the capture buffers, and release the device.
// Safe to call when start_cap_dev() failed part-way, and safe to call twice.
void stop_capturing(stChnV4L2*  pChannel)
{
    if(pChannel->v4l2_fd < 0)
        return;

    enum v4l2_buf_type type = pChannel->buff_type;
    if (0 > ioctl(pChannel->v4l2_fd, VIDIOC_STREAMOFF, &type))
    {
        log_e("close stream failed\n");
    }

    // fix: guard against buffer_infos never having been allocated (the old
    // code dereferenced it unconditionally), skip entries that were never
    // mapped, and NULL the pointer after free to prevent a double free on a
    // second call.
    if (pChannel->buffer_infos)
    {
        for(int i = 0; i < MAX_FRAME_BUFF_CNT; i++)
        {
            if (pChannel->buffer_infos[i].start &&
                MAP_FAILED != pChannel->buffer_infos[i].start)
            {
                munmap(pChannel->buffer_infos[i].start,pChannel->buffer_infos[i].length);
            }
        }
        free(pChannel->buffer_infos);
        pChannel->buffer_infos = NULL;
    }

    if(pChannel->fOut)
    {
        fclose(pChannel->fOut);
        pChannel->fOut = NULL;  // fix: avoid a dangling FILE* on re-entry
    }

    close(pChannel->v4l2_fd);
    pChannel->v4l2_fd = -1;  // fix: matches the guard above; makes the call idempotent
}

// Open and configure one capture channel: open the device node, (on RK3588)
// set up the TVP5150 decoder and RGA scheduler, enumerate formats/sizes/fps,
// set the RGB24 capture format, request and mmap the frame buffers, and
// queue them all so streaming can start.
void start_cap_dev(stChnV4L2*  pChannel)
{
    /* open the capture device (non-blocking so read_frame can poll) */
    pChannel->v4l2_fd  = open(pChannel->cChnName, O_RDWR | O_NONBLOCK, 0);
    if(pChannel->v4l2_fd  < 0){
        log_e("open camera  %s failed\n",pChannel->cChnName);
        return;
    }


#ifdef LINUX_AARCH64

#ifdef HUI_SHI_RK3588_DEV_BRD
   /* the infrared channel sits behind a TVP5150 video decoder on i2c-5 */
   if(pChannel->nChannel == CHN_INFRARED)
   {
        tvp5150_i2c(pChannel,"/dev/i2c-5");
        tvp5150_Config(pChannel->m_I2cFd);
   }
#endif

    imconfig(IM_CONFIG_SCHEDULER_CORE,1);
#endif

    /* query device capabilities */
    ioctl(pChannel->v4l2_fd, VIDIOC_QUERYCAP, &pChannel->cap);
    /* must be a capture device; prefer single-plane, fall back to multi-plane */
    if (!(V4L2_CAP_VIDEO_CAPTURE & pChannel->cap.capabilities))
    {
        if(!(V4L2_CAP_VIDEO_CAPTURE_MPLANE & pChannel->cap.capabilities))
        {
           log_i("Error:dev %s is not capture video device!\n",pChannel->cChnName);
           return;
        }
        else
        {
            pChannel->buff_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        }
    }
    else
    {
        pChannel->buff_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    }

    log_i("driver name: %s card name: %s",pChannel->cap.driver,pChannel->cap.card);

    /* enumerate every pixel format the device supports */
    pChannel->fmtdesc.index = 0;
    pChannel->fmtdesc.type = pChannel->buff_type;
    CLEAR(pChannel->cam_fmts);
    while (0 == ioctl(pChannel->v4l2_fd, VIDIOC_ENUM_FMT, &pChannel->fmtdesc)) {
        // record each enumerated format and its description
        pChannel->cam_fmts[pChannel->fmtdesc.index].pixelformat = pChannel->fmtdesc.pixelformat;
        memcpy(pChannel->cam_fmts[pChannel->fmtdesc.index].description, pChannel->fmtdesc.description,sizeof(pChannel->fmtdesc.description));
        pChannel->fmtdesc.index++;
    }

    pChannel->frmsize.type = pChannel->buff_type;
    pChannel->frmival.type = pChannel->buff_type;

    for(int i = 0; pChannel->cam_fmts[i].pixelformat;i++){
        //log_i("format<0x%x>, description<%s>\n", pChannel->cam_fmts[i].pixelformat, pChannel->cam_fmts[i].description);

        /* enumerate the capture resolutions for this format */
        pChannel->frmsize.index = 0;
        pChannel->frmsize.pixel_format = pChannel->cam_fmts[i].pixelformat;
        pChannel->frmival.pixel_format = pChannel->cam_fmts[i].pixelformat;
        while (0 == ioctl(pChannel->v4l2_fd, VIDIOC_ENUM_FRAMESIZES, &pChannel->frmsize)) {

            pChannel->frmsize.index++;

            /* enumerate the frame intervals (fps) at this resolution */
            // NOTE(review): reads frmsize.stepwise.* regardless of frmsize.type;
            // for discrete sizes these alias the discrete width/height via the
            // union — confirm the devices used here report stepwise sizes.
            pChannel->frmival.index = 0;
            pChannel->frmival.width = pChannel->frmsize.stepwise.max_width;
            pChannel->frmival.height = pChannel->frmsize.stepwise.max_height;
            while (0 == ioctl(pChannel->v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &pChannel->frmival)) {
                log_i("size<%d*%d> <%dfps>", pChannel->frmsize.stepwise.max_width,pChannel->frmsize.stepwise.max_height,pChannel->frmival.discrete.denominator / pChannel->frmival.discrete.numerator);
                pChannel->frmival.index++;
            }
        }
    }

    /*set fps
    struct v4l2_streamparm param;
    memset(&param,0,sizeof(param));
    param.parm.capture.timeperframe.numerator = 1;
    param.parm.capture.timeperframe.denominator  = 30;
    param.type = pChannel->buff_type;
    //param.parm.capture.capturemode = 0;

    if(0 > ioctl(pChannel->v4l2_fd,VIDIOC_G_PARM,&param))
    {
        log_e("get fps failed\n");
        return;
    }

    if (0 > ioctl(pChannel->v4l2_fd, VIDIOC_S_PARM, &param)) {
        log_e("set fps failed\n");
        return;
    }
    */
    /* configure the capture format */
    CLEAR(pChannel->fmt);
    pChannel->fmt.type = pChannel->buff_type;

    /*
    pChannel->fmt.fmt.pix.width  = pChannel->frmival.width;
    pChannel->fmt.fmt.pix.height = pChannel->frmival.height;
    */

   if(pChannel->nChannel != 0)
   {
       // Non-zero channels deliver interlaced fields: capture at half height.
       pChannel->fmt.fmt.pix.field = V4L2_FIELD_INTERLACED_TB;
       pChannel->fmt.fmt.pix.width = pChannel->nCapWidth ;
       pChannel->fmt.fmt.pix.height = pChannel->nCapHeight/2;
   }
   else
   {
      pChannel->fmt.fmt.pix.width = pChannel->nCapWidth ;
      pChannel->fmt.fmt.pix.height = pChannel->nCapHeight;
   }


    // Both build variants currently request RGB24.
#ifdef HUI_SHI_RK3588_DEV_BRD
    pChannel->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
#else
    pChannel->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
#endif

    if (0 > ioctl(pChannel->v4l2_fd, VIDIOC_S_FMT, &pChannel->fmt)) {
        log_e("set format failed\n");
        return;
    }

    /* the driver may have substituted a different format — report what we got */
    if (V4L2_PIX_FMT_NV12 == pChannel->fmt.fmt.pix.pixelformat) {
        log_i("channel %d support V4L2_PIX_FMT_NV12 ",pChannel->nChannel);
    }

    if (V4L2_PIX_FMT_RGB24 == pChannel->fmt.fmt.pix.pixelformat) {
        log_i("channel %d support V4L2_PIX_FMT_RGB24 ",pChannel->nChannel);
    }

    /* log the frame size/colorspace actually in effect */
    log_i("当前视频帧大小<%d * %d>, 颜色空间:%d ", pChannel->fmt.fmt.pix.width, pChannel->fmt.fmt.pix.height,pChannel->fmt.fmt.pix.colorspace);

    /* request the driver-side frame buffers */
    CLEAR(pChannel->reqbuf);  // fix: zero reserved fields before VIDIOC_REQBUFS
    pChannel->reqbuf.count = MAX_FRAME_BUFF_CNT;
    pChannel->reqbuf.type = pChannel->buff_type;
    pChannel->reqbuf.memory = V4L2_MEMORY_MMAP;
    if (0 > ioctl(pChannel->v4l2_fd, VIDIOC_REQBUFS, &pChannel->reqbuf)) {
        log_e("request buffer failed\n");
        return;
    }


    /* calloc zero-initializes the buffer bookkeeping array */
    pChannel->buffer_infos = (struct buffer_info*)calloc(MAX_FRAME_BUFF_CNT,sizeof(struct buffer_info));

    /* map each driver buffer into our address space */
    // fix: `planes` must outlive both loops below — pChannel->buf.m.planes
    // keeps pointing at it when the buffers are queued. Previously it was a
    // fresh stack variable per loop iteration, so the enqueue loop
    // dereferenced a dangling pointer (undefined behavior).
    struct v4l2_plane planes;
    int n_buffers  = 0;
    for(n_buffers = 0;n_buffers < MAX_FRAME_BUFF_CNT;n_buffers++){
        CLEAR(pChannel->buf);
        CLEAR(planes);
        pChannel->buf.index = n_buffers;
        pChannel->buf.type = pChannel->buff_type;
        pChannel->buf.memory = V4L2_MEMORY_MMAP;
        pChannel->buf.m.planes = &planes;
        pChannel->buf.length = 1;

        if(0 > ioctl(pChannel->v4l2_fd, VIDIOC_QUERYBUF, &pChannel->buf)){
            log_e("VIDIOC_QUERYBUF failed\n");
            return;
        }

        // NOTE(review): reads plane 0's length/offset, which is only filled
        // in for the multi-plane buffer type — confirm the single-plane path
        // is never taken on the devices in use.
        pChannel->buffer_infos[n_buffers].length = pChannel->buf.m.planes->length;
        pChannel->buffer_infos[n_buffers].offset = (size_t)pChannel->buf.m.planes->m.mem_offset;
        pChannel->buffer_infos[n_buffers].start  = (unsigned char *)mmap(NULL, pChannel->buffer_infos[n_buffers].length ,PROT_READ | PROT_WRITE, MAP_SHARED,pChannel->v4l2_fd, pChannel->buffer_infos[n_buffers].offset);

        if (MAP_FAILED == pChannel->buffer_infos[n_buffers].start) {
            log_e("mmap error\n");
            return;
        }
    }


    /* queue every buffer so the driver can start filling them */
    for (pChannel->buf.index = 0; pChannel->buf.index < MAX_FRAME_BUFF_CNT; pChannel->buf.index++) {
        if (0 > ioctl(pChannel->v4l2_fd, VIDIOC_QBUF, &pChannel->buf)) {
            log_e("入队失败\n");
            return;
        }
    }


    //sprintf(cFileName,"./save_%d.yuv",pChannel->nChannel);
    //pChannel->fOut = fopen(cFileName, "w");
    //if (!pChannel->fOut) {
    //    log_e("fail to open file");
    //}
}



/* 开启视频流 */
// Turn on streaming for an already-configured channel.
// A blocked/failed VIDIOC_STREAMON typically means the device is not
// connected or is faulty (see the log message below).
void start_cap_stream(stChnV4L2*  pChannel)
{
    if (pChannel->v4l2_fd < 0)
        return;

    log_i("open %s stream prepare ==if block is deivce not connect or bad,please check ",pChannel->cChnName);

    if (ioctl(pChannel->v4l2_fd, VIDIOC_STREAMON, &pChannel->buff_type) < 0) {
        log_e("open %s stream failed !!!!\n",pChannel->cChnName);
        return;
    }

    log_i("open %s stream success ====",pChannel->cChnName);
}

/*
void  V4l2MainLoop(stChnCfg* pChnCfg,int nChnCnt)
{
    int i;
    fd_set fds;
    struct timeval tv;
    int r;
    int  fd_max = 0;


    FD_ZERO(&fds);


    for(i=0; i<nChnCnt; i++)
    {
       FD_SET(pChnCfg[i].fd, &fds);
       if(pChnCfg[i].fd > fd_max)
       {
           fd_max = pChnCfg[i].v4l2_fd;
       }
    }

    // Timeout
    tv.tv_sec = 2;
    tv.tv_usec = 0;

    r = select(fd_max + 1, &fds, NULL, NULL, &tv);
    if (-1 == r) {
        if (EINTR == errno)
            return;

    }

    if (0 == r) {
         return;
    }

    for(i=0; i<nChnCnt; i++)
    {
         if (FD_ISSET( pChnCfg[i].v4l2_fd, &fds ))
         {
             read_frame(&pChnCfg[i]);
         }
    }
}
*/
