/**
 * @file VideoCapture.cpp
 * @author your name (you@domain.com)
 * @brief V4L2 camera capture, hardware-timer frame/IMU synchronization and
 *        RGA/JPEG snapshot handling for the RV1126 platform.
 * @version 0.1
 * @date 2022-10-25
 *
 * @copyright Copyright (c) 2005-2022 XAG Co., Ltd. All Rights Reserved
 *
 */

#include "VideoCapture.h"
#include <poll.h>  // NOLINT
#include <chrono>
#include <fstream>
#include <thread>
#include "DataTransmitter.h"
#include "SystemCalls.h"
#include "TimeUtil.h"
#include "XagLog.h"

// Global variable, required for encoder synchronization
int g_hwtimer_sync_fd = -1;

// #define TEST_TIME_STOCK

namespace kapok_hardware_rv1126 {
// Definition of the static init-guard flag (declared in VideoCapture.h);
// makes Init()/Uninit() idempotent across calls.
std::atomic<bool> VideoCapture::m_bInit = false;

#ifdef TEST_TIME_STOCK
std::ofstream g_objSave("/tmp/time_save.txt");

/**
 * @brief Debug helper: append "<title> <value>" as one line to an open file.
 *
 * @param _ofsFile  output stream (must already be open).
 * @param _strTitle label written before the value.
 * @param _u64File  value (typically a millisecond timestamp) to record.
 */
void TestWriteToFile(std::ofstream& _ofsFile,
                     const std::string& _strTitle,
                     uint64_t _u64File) {
    if (!_ofsFile) {
        std::cerr << "Error opening file!" << std::endl;
        return;  // BUGFIX: previously fell through and wrote to a bad stream
    }
    /* write via the stream insertion operator */
    _ofsFile << _strTitle << " " << _u64File << std::endl;
}
#endif

bool VideoCapture::Init(void) {
    if (access(UPDATE_SYSTEM_MARK, F_OK) != 0) {
        system("touch " UPDATE_SYSTEM_MARK);
        XAG_LOG_D("update ok....\n");
    }

    if (m_bInit) {
        return true;
    }
    m_bInit = true;

    // init snap
    RkV4l2_Init_Device(&m_objCamSnapSource[CAMERA_BOTTOM_ID],
                       CAM_BOTTOM_SNAP_DEVNODE, CAMERA_BOTTOM_ID,
                       CAM_BOTTOM_SNAP_IMAGEWIDTH, CAM_BOTTOM_SNAP_IMAGEHEIGH);

    RkV4l2_Init_Device(&m_objCamSnapSource[CAMERA_FRONT_ID],
                       CAM_FRONT_SNAP_DEVNODE, CAMERA_FRONT_ID,
                       CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH);

    Start_Capturing(&m_objCamSnapSource[CAMERA_FRONT_ID],
                    CAM_FRONT_SNAP_DEVNODE, 0);

    // Start_Capturing(&m_objCamSnapSource[CAMERA_BOTTOM_ID],
    //                                CAM_BOTTOM_SNAP_DEVNODE, 1);

    m_s8Yuvbuffer = (unsigned char*)malloc(CAM_BOTTOM_SNAP_IMAGEWIDTH *
                                           CAM_BOTTOM_SNAP_IMAGEHEIGH * 4);
    if (m_s8Yuvbuffer == nullptr) {
        XAG_LOG_E("malloc error, capture error");
        return false;
    }

    vpu_encode_jpeg_init(&m_vpu_encode, CAM_FRONT_SNAP_IMAGEWIDTH,
                         CAM_FRONT_SNAP_IMAGEHEIGH, 10, MPP_FMT_YUV420SP);

    int ret = rga_control_buffer_init_nocache(&m_vpu_enc_bo, &m_vpu_jpeg_fd,
                                              CAM_FRONT_SNAP_IMAGEWIDTH,
                                              CAM_FRONT_SNAP_IMAGEHEIGH, 16);

    ret = rga_control_buffer_init(&m_vpu_rga_bo, &m_vpu_rga_fd,
                                  CAM_FRONT_SNAP_IMAGEWIDTH,
                                  CAM_FRONT_SNAP_IMAGEHEIGH, 24);

    if (ret != 0) {
        XAG_LOG_D("RGA  Init fail\n");
    }

    std::thread id3(std::bind(&VideoCapture::WorkThread_Snap, this));
    m_thdSnapWork = std::move(id3);  // NOLINT
    XAG_LOG_D("SnapWork Init success!\n");

#ifdef SEND_YUV_TODEAL_TEST
    DataTransmitterInit();
#endif
    mkdir(STD_IMG_SAVEPATH, 777);
    m_objStdFileNnamePro.SetMaxSize(100);
    m_objYuvFileNnamePro.SetMaxSize(100);

    std::thread id4(std::bind(&VideoCapture::WorkThread_SnapSync, this));
    m_thdSnapSyncWork = std::move(id4);  // NOLINT
    XAG_LOG_D("SnapWork Sync Init success!\n");

    return true;
}

/**
 * @brief Tear down everything Init() created: stop the worker threads,
 *        release the V4L2 devices and free the scratch buffer.
 *        Safe to call when not initialized and safe to call twice.
 * @return always true.
 */
bool VideoCapture::Uninit(void) {
    if (!m_bInit) {
        return true;
    }
    m_thdSnapRunFlg = false;
    // BUGFIX: join() on a non-joinable thread throws std::system_error;
    // guard both joins so Uninit() is safe even after a partial Init().
    if (m_thdSnapWork.joinable()) {
        m_thdSnapWork.join();
    }
    // NOTE(review): no stop flag is raised for the sync thread (the original
    // m_thdSnapSyncRunFlg line is disabled) — confirm WorkThread_SnapSync
    // terminates on its own, otherwise this join can block forever.
    if (m_thdSnapSyncWork.joinable()) {
        m_thdSnapSyncWork.join();
    }
    Rk_Ispv4l2_exit(&m_objCamSnapSource[CAMERA_FRONT_ID]);
    Rk_Ispv4l2_exit(&m_objCamSnapSource[CAMERA_BOTTOM_ID]);

    if (m_s8Yuvbuffer != nullptr) {
        free(m_s8Yuvbuffer);
        m_s8Yuvbuffer = nullptr;  // BUGFIX: avoid dangling ptr / double free
    }

    m_bInit = false;
#ifdef SEND_YUV_TODEAL_TEST
    DataTransmitterRelease();
#endif
    return true;
}

/**
 * @brief ioctl() wrapper that transparently retries when the call is
 *        interrupted by a signal (EINTR).
 * @return the final ioctl() result (-1 on a non-EINTR failure).
 */
int VideoCapture::xioctl(int fh, int request, void* arg) {
    int rc;
    for (;;) {
        rc = ioctl(fh, request, arg);
        if (rc != -1 || errno != EINTR) {
            break;
        }
    }
    return rc;
}

/**
 * @brief Queue the mmap'ed buffers and switch the device to streaming.
 *
 * @param objCamSoure capture source prepared by Init_Mmap().
 * @param node device node path (not used inside this function).
 * @param sync not used inside this function.
 * @return 0 on success, -1 on QBUF/STREAMON failure.
 *
 * NOTE(review): the queue loop starts at index 1, leaving buffer 0
 * un-queued — callers of Rk_Ispv4l2_Get_Frame also treat index 0 as
 * invalid, so this looks deliberate, but confirm against the driver.
 */
int VideoCapture::Start_Capturing(CamSource_S* objCamSoure,
                                  const char* node,
                                  int sync) {
    unsigned int i, n_buffers;
    enum v4l2_buf_type type;
    struct v4l2_plane planes[FMT_NUM_PLANES];

    n_buffers = objCamSoure->bufcnt;
    for (i = 1; i < n_buffers; ++i) {
        struct v4l2_buffer buf;

        CLEAR(buf);
        buf.type = objCamSoure->type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        // Multi-planar buffers need a plane array attached before QBUF.
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == objCamSoure->type) {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }
        if (-1 == xioctl(objCamSoure->fd, VIDIOC_QBUF, &buf)) {
            XAG_LOG_D("VIDIOC_QBUF fail\n");
            return -1;
        }
    }

#if 1
    /* start streaming on the device */
    /**/ type = objCamSoure->type;
    if (-1 == xioctl(objCamSoure->fd, VIDIOC_STREAMON, &type)) {
        XAG_LOG_D("VIDIOC_STREAMON falil\n");
        return -1;
    }
#endif

    return 0;
}

/**
 * @brief Open the hardware-timer sync device and issue the sensor sync
 *        ioctls (optionally enabling HDR mode when the HDR marker file
 *        exists).  Leaves g_hwtimer_sync_fd at -1 on failure to open.
 */
void VideoCapture::Sync_Init() {
    int type = 0;
    g_hwtimer_sync_fd = -1;
    g_hwtimer_sync_fd = open(HRTIMER_SYNC_DEV, O_RDWR, 0);

    if (g_hwtimer_sync_fd < 0) {
        XAG_LOG_D("SC230AI_SYNC Fail\n");
        return;
    }

    // BUGFIX(cleanup): the old `if (g_hwtimer_sync_fd != -1)` wrapper was
    // dead code — the early return above already guarantees a valid fd.
    if (access(HDR_SYSTEM_MARK, F_OK) == 0) {
        type = 1;  // HDR marker present: enable HDR sync mode
        if (-1 == xioctl(g_hwtimer_sync_fd, SC230AI_SYNC_HDR_CMD, &type)) {
            XAG_LOG_D("SC230AI_SYNC_HDR_CMD Fail\n");
            return;
        }
        m_hdr_mode = 1;
    }

    if (-1 == xioctl(g_hwtimer_sync_fd, SC230AI_SYNC_CMD, &type)) {
        XAG_LOG_D("SC230AI_SYNC_CMD Fail\n");
        return;
    }

    XAG_LOG_D("Sync_Init Ok\n");
}

/**
 * @brief Ask the driver to stop streaming on this capture source.
 */
void VideoCapture::Stop_Capturing(CamSource_S* objCamSoure) {
    enum v4l2_buf_type bufType = objCamSoure->type;
    if (xioctl(objCamSoure->fd, VIDIOC_STREAMOFF, &bufType) == -1) {
        XAG_LOG_D("VIDIOC_STREAMOFF\n");
    }
}

/**
 * @brief Request BUFFER_COUNT mmap buffers from the driver, map each one
 *        into this process and export it as a dma-buf fd for the RGA/MPP
 *        zero-copy paths.
 * @param objCamSoure capture source (fd and buffer type already set).
 * @param node device node path, used for logging only.
 * @return 0 on success, -1 on any failure.
 */
int VideoCapture::Init_Mmap(CamSource_S* objCamSoure, const char* node) {
    struct v4l2_requestbuffers req;

    CLEAR(req);

    req.count = BUFFER_COUNT;
    req.type = objCamSoure->type;
    req.memory = V4L2_MEMORY_MMAP;

    // BUGFIX(cleanup): both arms of the old EINVAL test returned -1;
    // collapsed to a single return.
    if (-1 == xioctl(objCamSoure->fd, VIDIOC_REQBUFS, &req)) {
        return -1;
    }

    // The driver may grant fewer buffers than requested.
    if (req.count < 2) {
        return -1;
    }

    objCamSoure->bufcnt = req.count;

    for (int n_buffers = 0; n_buffers < objCamSoure->bufcnt; ++n_buffers) {
        struct v4l2_buffer buf;
        struct v4l2_plane planes[FMT_NUM_PLANES];
        CLEAR(buf);
        CLEAR(planes);

        buf.type = objCamSoure->type;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;

        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == objCamSoure->type) {
            buf.m.planes = planes;
            buf.length = FMT_NUM_PLANES;
        }

        if (-1 == xioctl(objCamSoure->fd, VIDIOC_QUERYBUF, &buf)) {
            XAG_LOG_D("VIDIOC_QUERYBUF  fail\n");
            return -1;
        }

        // Map the buffer; multi-planar devices report size/offset per plane.
        if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == objCamSoure->type) {
            objCamSoure->fbuf[n_buffers].length = buf.m.planes[0].length;
            objCamSoure->fbuf[n_buffers].start =
                mmap(NULL /* start anywhere */, buf.m.planes[0].length,
                     PROT_READ | PROT_WRITE /* required */,
                     MAP_SHARED /* recommended */, objCamSoure->fd,
                     buf.m.planes[0].m.mem_offset);
        } else {
            objCamSoure->fbuf[n_buffers].length = buf.length;
            objCamSoure->fbuf[n_buffers].start = mmap(
                NULL /* start anywhere */, buf.length,
                PROT_READ | PROT_WRITE /* required */,
                MAP_SHARED /* recommended */, objCamSoure->fd, buf.m.offset);
        }

        if (MAP_FAILED == objCamSoure->fbuf[n_buffers].start) {
            XAG_LOG_D("mmap falil\n");
            return -1;
        }

        // Export the buffer as a dma-buf fd so RGA/MPP can use it directly.
        // (Replaced the C99 compound literal with CLEAR() — the file's
        // idiom — for C++ portability.)
        struct v4l2_exportbuffer expbuf;
        CLEAR(expbuf);
        expbuf.type = buf.type;
        expbuf.index = n_buffers;
        expbuf.flags = O_CLOEXEC;
        if (xioctl(objCamSoure->fd, VIDIOC_EXPBUF, &expbuf) < 0) {
            XAG_LOG_D("get dma buf failed\n");
        } else {
            XAG_LOG_D("get dma buf(%d)-fd: %d\n", n_buffers, expbuf.fd);
            // Import the dma-buf into MPP for zero-copy encoding.
            // NOTE(review): size/index are unpacked from bitfields of
            // buf.length — confirm this matches the driver's packing; for
            // MPLANE devices buf.length holds the plane count, not bytes.
            MppBufferInfo info;
            memset(&info, 0, sizeof(MppBufferInfo));
            info.type = MPP_BUFFER_TYPE_EXT_DMA;
            info.fd = expbuf.fd;
            info.size = buf.length & 0x07ffffff;
            info.index = (buf.length & 0xf8000000) >> 27;
            mpp_buffer_import(&objCamSoure->fbuf[n_buffers].buffer, &info);
        }
        objCamSoure->fbuf[n_buffers].export_fd = expbuf.fd;
    }

    XAG_LOG_D("%s: v4l2 mmp ok %s !\n", __func__, node);

    return 0;
}

/**
 * @brief Open a capture node (and its sensor sub-device), verify capture +
 *        streaming capability, set the NV12 format and map the buffers.
 *
 * @param objCamSoure camera state to fill in.
 * @param node main video device node path.
 * @param index 0 selects the front sub-device, otherwise the bottom one.
 * @param width/height requested capture resolution.
 * @return 0 on success, -1 on failure.
 */
int VideoCapture::RkV4l2_Init_Device(CamSource_S* objCamSoure,
                                     const char* node,
                                     int index,
                                     int width,
                                     int height) {
    struct v4l2_capability cap;
    struct v4l2_format fmt;
    objCamSoure->fd = open(node, O_RDWR /* required */ /*| O_NONBLOCK*/, 0);

    if (-1 == objCamSoure->fd) {
        XAG_LOG_D("%s: open device error %s !\n", __func__, node);
        return -1;
    }

    // Sub-device used later for direct sensor controls (exposure/gain).
    if (index == 0) {
        objCamSoure->subfd = open(CAM_FRONT_SUBDEVNODE,
                                  O_RDWR /* required */ /*| O_NONBLOCK*/, 0);
    } else {
        objCamSoure->subfd = open(CAM_BOTTOM_SUBDEVNODE,
                                  O_RDWR /* required */ /*| O_NONBLOCK*/, 0);
    }
    // BUGFIX: this used to re-test objCamSoure->fd (always valid here)
    // instead of the sub-device fd that was just opened.
    if (-1 == objCamSoure->subfd) {
        XAG_LOG_D("%s: open sub device error %s !\n", __func__, node);
    }

    // BUGFIX(cleanup): the old EINVAL branch returned -1 on both arms.
    if (-1 == xioctl(objCamSoure->fd, VIDIOC_QUERYCAP, &cap)) {
        XAG_LOG_D("%s: querycap error %s !\n", __func__, node);
        return -1;
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
        !(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)) {
        return -1;  // not a capture device
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        return -1;  // mmap streaming I/O is required
    }

    // Prefer single-planar capture; fall back to multi-planar.
    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
        objCamSoure->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    } else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
        objCamSoure->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    }
    CLEAR(fmt);
    fmt.type = objCamSoure->type;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    fmt.fmt.pix.quantization = V4L2_QUANTIZATION_FULL_RANGE;
    if (-1 == xioctl(objCamSoure->fd, VIDIOC_S_FMT, &fmt)) {
        XAG_LOG_D("VIDIOC_S_FMT FAIL %s \n", node);
        return -1;
    }

    // BUGFIX: the mmap result used to be ignored; propagate failure.
    if (-1 == Init_Mmap(objCamSoure, node)) {
        return -1;
    }

    XAG_LOG_D("%s: v4l2 device ok %s !\n", __func__, node);
    /* streaming itself is started separately via Start_Capturing() */

    return 0;
}

/**
 * @brief Initialize and start the ISP pipeline for both cameras
 *        (front first, then bottom) in normal (non-HDR) working mode.
 * @return 0 when both cameras start, -1 when either init fails.
 */
int VideoCapture::RkIspRoutine() {
    const char* iq_dir = IQFILES_PATH;
    const int cam_ids[2] = {CAMERA_FRONT_ID, CAMERA_BOTTOM_ID};
    int ret = 0;

    for (int cam : cam_ids) {
        ret = SAMPLE_COMM_ISP_Init(cam, RK_AIQ_WORKING_MODE_NORMAL, RK_TRUE,
                                   iq_dir);
        if (ret) {
            XAG_LOG_W("SAMPLE_COMM_ISP_Init %d FAIL\n", cam);
            return -1;
        }
        SAMPLE_COMM_ISP_Run(cam);
    }

    //  SAMPLE_COMM_ISP_SetFrameRate(CAMERA_FRONT_ID, 30);
    // SAMPLE_COMM_ISP_SetFrameRate(CAMERA_BOTTOM_ID, 30);

    return ret;
}

/**
 * @brief Stop streaming and release all per-buffer resources of a camera:
 *        MPP buffer references, memory mappings, dma-buf fds and finally
 *        the device fd itself.
 *
 * BUGFIX: the previous version indexed fbuf[n_buffers] (one past the last
 * buffer) inside the loop, closed the dma-buf fd only when munmap FAILED,
 * called free() on an MPP-imported buffer handle, and reset fd to -1 only
 * when close() failed.
 */
void VideoCapture::Rk_Ispv4l2_exit(CamSource_S* objCamSoure) {
    /* stream off first so the driver no longer touches the buffers */
    Stop_Capturing(objCamSoure);

    /* release buffers */
    unsigned int n_buffers = objCamSoure->bufcnt;
    for (unsigned int i = 0; i < n_buffers; ++i) {
        if (objCamSoure->fbuf[i].buffer != nullptr) {
            // Imported with mpp_buffer_import(); release the reference with
            // mpp_buffer_put(), not free().
            mpp_buffer_put(objCamSoure->fbuf[i].buffer);
            objCamSoure->fbuf[i].buffer = nullptr;
        }
        if (-1 == munmap(objCamSoure->fbuf[i].start,
                         objCamSoure->fbuf[i].length)) {
            XAG_LOG_D("munmap fail\n");
        }
        close(objCamSoure->fbuf[i].export_fd);
    }

    /* close device */
    if (objCamSoure->fd != -1) {
        close(objCamSoure->fd);
        objCamSoure->fd = -1;
    }
}

/**
 * @brief Dequeue one filled frame from the device.
 *
 * Polls the capture fd for up to 100 ms, DQBUFs the ready buffer and
 * records its timestamp (in ms) and sequence number in fbuf[].
 *
 * @return the dequeued buffer index (>= 0) on success,
 *         -1 on poll timeout or DQBUF failure, -2 on poll error.
 *         NOTE(review): 0 is also a valid V4L2 buffer index, but callers
 *         treat <= 0 as failure — confirm buffer 0 is deliberately unused
 *         (Start_Capturing queues from index 1).
 */
int VideoCapture::Rk_Ispv4l2_Get_Frame(CamSource_S* objCamSoure) {
    struct v4l2_buffer buf;
    enum v4l2_buf_type type;

    struct pollfd pfds;
    int ret = -1;

    /* wait (poll) for a frame event */
    pfds.fd = objCamSoure->fd;
    pfds.events = POLLIN;
    pfds.revents = 0;
    ret = poll(&pfds, 1, 100);

    if (ret == 0) {
        return -1;  // timeout: no frame within 100 ms
    } else if (ret < 0) {
        return -2;  // poll error
    } else {
        // XAG_LOG_I("Frame ready (ret = %d)", ret);
    }

    type = objCamSoure->type;
    buf = (struct v4l2_buffer){0};
    buf.type = type;
    buf.memory = V4L2_MEMORY_MMAP;

    // Multi-planar capture needs a plane array attached to the buffer.
    struct v4l2_plane planes[FMT_NUM_PLANES];
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type) {
        buf.m.planes = planes;
        buf.length = FMT_NUM_PLANES;
    }

    if (-1 == xioctl(objCamSoure->fd, VIDIOC_DQBUF, &buf)) {
        return -1;
    }

    // For multi-planar, the payload size lives in the first plane.
    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == type)
        buf.bytesused = buf.m.planes[0].bytesused;

    // Convert the driver timestamp (sec/usec) to milliseconds.
    objCamSoure->fbuf[buf.index].timestamp = /* NOLINT */
        1000 * buf.timestamp.tv_sec + buf.timestamp.tv_usec / 1000.0;

    //  objCamSoure->bufcnt = buf.sequence;
    objCamSoure->fbuf[buf.index].sequence = buf.sequence;
    objCamSoure->s32CurIndex = buf.index;
    return buf.index;
}

/**
 * @brief Re-queue buffer `idx` so the driver may fill (overwrite) it again.
 * @return 1 on success, -1 when idx is negative or QBUF fails.
 */
int VideoCapture::Rk_Ispv4l2_Set_frame(CamSource_S* objCamSoure, RK_S32 idx) {
    if (idx < 0) {
        return -1;
    }

    struct v4l2_plane planeArr[FMT_NUM_PLANES];
    struct v4l2_buffer qbuf = {};
    qbuf.type = objCamSoure->type;
    qbuf.memory = V4L2_MEMORY_MMAP;
    qbuf.index = idx;

    if (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE == objCamSoure->type) {
        qbuf.m.planes = planeArr;
        qbuf.length = FMT_NUM_PLANES;
    }

    // Tell the kernel it is ok to overwrite this frame.
    if (xioctl(objCamSoure->fd, VIDIOC_QBUF, &qbuf) == -1) {
        return -1;
    }
    return 1;
}

#if 0
/* Release callback for the async transfer path (currently compiled out):
 * frees the heap copy of the frame once the transport layer is done.
 * _pArgs carries a ReleaseInfo_ST describing the buffer to release. */
static int32_t _callBackRelease(void* _pArgs) {
    if (nullptr != _pArgs) {
        ReleaseInfo_ST* pInfo = reinterpret_cast<ReleaseInfo_ST*>(_pArgs);

        if (nullptr != pInfo->m_pu8Data) {
            free(pInfo->m_pu8Data);
            pInfo->m_pu8Data = nullptr;  // avoid double free on reuse
        }
    }
    return 0;
}
#endif

/*
 * DATA_FILL_TOSEND(info, deviceId, type, width, height, size, time, data)
 * Populate a DataInfo_ST in place before handing it to TransferSyncData().
 * m_u32Reserve is always cleared and m_bFill marks the entry as valid.
 */
#define DATA_FILL_TOSEND(a, b, c, d, e, f, g, h) \
    {                                            \
        a.m_bFill = true;                        \
        a.m_s16DeviceId = b;                     \
        a.m_eType = c;                           \
        a.m_s32Width = d;                        \
        a.m_s32Height = e;                       \
        a.m_u32DataSize = f;                     \
        a.m_u64Time = g;                         \
        a.m_u32Reserve = 0;                      \
        a.m_pu8Data = h;                         \
    }

/**
 * @brief Debug helper: log when a camera's inter-frame gap jumps.
 *
 * @param _s32Index camera index: 0 = bottom/down, 1 = front.
 * @param _u64Time  frame timestamp in milliseconds.
 *
 * A gap inside (47, 123) ms is reported as a jump; in every case the
 * last-seen timestamp is updated.  NOTE(review): the magic bounds 47/123
 * are not explained here — presumably derived from the sensor frame
 * period; confirm against the configured frame rates.
 */
void VideoCapture::_GapDebug(int32_t _s32Index, uint64_t _u64Time) {
    if (0 == _s32Index && _u64Time - m_u64Gap > 47 &&
        _u64Time - m_u64Gap < 123) {
        XAG_LOG_W("[down] jump ,gap[%llu]\n", _u64Time - m_u64Gap);
        m_u64Gap = _u64Time;
    } else if (0 == _s32Index) {
        m_u64Gap = _u64Time;
    }

    if (1 == _s32Index && _u64Time - m_u64GapFront > 47 &&
        _u64Time - m_u64GapFront < 123) {
        XAG_LOG_W("[front] jump ,gap[%llu]\n", _u64Time - m_u64GapFront);
        m_u64GapFront = _u64Time;
    } else if (1 == _s32Index) {
        m_u64GapFront = _u64Time;
    }
}

/**
 * @brief Downscale one camera frame with the RGA and forward it, together
 *        with the accompanying IMU block, through TransferSyncData().
 *
 * Frames are decimated: a full YUV frame is sent only every
 * m_s32DownGap / m_s32FrontGap calls; the other calls forward only the
 * IMU data.  Gap/resolution fields are guarded by m_GapMutex.
 *
 * @param _s32Index camera index: 0 = bottom/down, 1 = front.
 * @param _s32Buf   V4L2 buffer index of the captured frame.
 * @param _pImu     pointer to the raw IMU sync block for this frame.
 * @param _u32Size  size of the IMU block in bytes.
 * @param _u64Time  frame system timestamp in ms.
 * @return always 0.
 *
 * NOTE(review): both branches reuse the single m_vpu_rga_bo staging
 * buffer; TransferSyncData() is presumably synchronous (the data is
 * consumed before the next call overwrites it) — confirm.
 */
int32_t VideoCapture::SendSyncData(int32_t _s32Index,
                                   int32_t _s32Buf,
                                   uint8_t* _pImu,
                                   uint32_t _u32Size,
                                   uint64_t _u64Time) {
    DataInfo_ST stInfo[2];

    _GapDebug(_s32Index, _u64Time);

    std::lock_guard<std::mutex> uLock(m_GapMutex);
    if (0 == _s32Index && 0 < m_s32DownGap) {
        if (m_s32DownCount >= (m_s32DownGap - 1)) {
            // Time to send a full frame: resize into the RGA staging buffer.
            RK_Rga_Resize((unsigned char*)m_objCamSnapSource[_s32Index]
                              .fbuf[_s32Buf]
                              .start,
                          m_objCamSnapSource[_s32Index].fbuf[_s32Buf].export_fd,
                          (unsigned char*)m_vpu_rga_bo.ptr, m_vpu_rga_fd,
                          CAM_BOTTOM_SNAP_IMAGEWIDTH,
                          CAM_BOTTOM_SNAP_IMAGEHEIGH, m_s32DownW, m_s32DownH,
                          1);

            // Entry 0: YUV frame (device 0); entry 1: IMU block (device 4).
            DATA_FILL_TOSEND(stInfo[0], 0, E_TRANS_DATA_YUV420, m_s32DownW,
                             m_s32DownH, m_s32DownW * m_s32DownH * 3 / 2,
                             _u64Time, (unsigned char*)m_vpu_rga_bo.ptr);
            DATA_FILL_TOSEND(stInfo[1], 4, E_TRANS_DATA_IMU, _u32Size, 1,
                             _u32Size, _u64Time, _pImu);
#ifdef TEST_TIME_STOCK
            uint64_t u64Time = 0;
            kapok_common_basic::TimeUtility::GetClockTToMs(u64Time);
            TestWriteToFile(g_objSave, std::string("recv-st"), u64Time);
            TestWriteToFile(g_objSave, std::string("recv-gap"),
                            u64Time - _u64Time);
#endif
            TransferSyncData(stInfo, 2);
#ifdef TEST_TIME_STOCK
            kapok_common_basic::TimeUtility::GetClockTToMs(u64Time);
            TestWriteToFile(g_objSave, std::string("send-end"), u64Time);
            TestWriteToFile(g_objSave, std::string("send-gap"),
                            u64Time - _u64Time);
#endif
            m_s32DownCount = 0;
        } else {
            // Decimated frame: forward only the IMU block.
            m_s32DownCount++;
            DATA_FILL_TOSEND(stInfo[0], 4, E_TRANS_DATA_IMU, _u32Size, 1,
                             _u32Size, _u64Time, _pImu);
            TransferSyncData(stInfo, 1);
        }
    } else if (1 == _s32Index && 0 < m_s32FrontGap) {
        if (m_s32FrontCount >= (m_s32FrontGap - 1)) {
            ReleaseInfo_ST stReInfo;

            stReInfo.m_pu8Data = (unsigned char*)m_vpu_rga_bo.ptr;
            //     (uint8_t*)malloc(m_s32FrontW * m_s32FrontH * 3); /* NOLINT */
            RK_Rga_Resize((unsigned char*)m_objCamSnapSource[_s32Index]
                              .fbuf[_s32Buf]
                              .start,
                          m_objCamSnapSource[_s32Index].fbuf[_s32Buf].export_fd,
                          (unsigned char*)m_vpu_rga_bo.ptr, m_vpu_rga_fd,
                          CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH,
                          m_s32FrontW, m_s32FrontH, 1);
            // Entry 0: YUV frame (device 1); entry 1: IMU block (device 5).
            DATA_FILL_TOSEND(stInfo[0], 1, E_TRANS_DATA_YUV420, m_s32FrontW,
                             m_s32FrontH, m_s32FrontW * m_s32FrontH * 3 / 2,
                             _u64Time, (unsigned char*)m_vpu_rga_bo.ptr);
            DATA_FILL_TOSEND(stInfo[1], 5, E_TRANS_DATA_IMU, _u32Size, 1,
                             _u32Size, _u64Time, _pImu);
            //   s32Ret = TranferAsyncData(stInfo, 1, &stReInfo,
            //                           sizeof(ReleaseInfo_ST),
            //                           _callBackRelease);
            TransferSyncData(stInfo, 2);
            // if (XAG_SUCCESS != s32Ret)
            //       free(stReInfo.m_pu8Data);
            m_s32FrontCount = 0;
        } else {
            m_s32FrontCount++;
            DATA_FILL_TOSEND(stInfo[0], 5, E_TRANS_DATA_IMU, _u32Size, 1,
                             _u32Size, _u64Time, _pImu);
            TransferSyncData(stInfo, 1);
        }
    }
    return 0;
}

/**
 * @brief Main capture loop: services snapshot triggers, dequeues frames
 *        from both cameras, queries the hw-timer/IMU sync info and hands
 *        frames to the send/save paths.  Runs until m_thdSnapRunFlg
 *        becomes false (set by Uninit()).
 * @return always 0.
 */
int VideoCapture::WorkThread_Snap(void) {
    XAG_LOG_D("WorkThread_Snap\n");
    int i;
    int buf_index;
    m_thdSnapRunFlg = 1;
    m_strImgPath = STD_IMG_SAVEPATH;
    bool bSyncSend = false;
    time_t timep;
    kapok_common_basic::SetThreadName("YuvSnap");

    while (m_thdSnapRunFlg) {
        // A snapshot is requested either by the marker file (test path) or
        // by Snap() setting m_SnapFlag to SNAPTRIGETR.
        if ((access(SNAPFLAG_PATH, 0) == 0) || (m_SnapFlag == SNAPTRIGETR)) {
            m_SnapFlag = SNAPING;

            if (g_hwtimer_sync_fd != -1) {
                if ((access(SNAPFLAG_PATH, 0) == 0)) {  // test sanp cmd
                    // Bump the wrap-around snapshot sequence id and trigger
                    // the hardware snap for both cameras.
                    m_snap_count++;
                    m_triger_flag[0] = 1;
                    m_triger_flag[1] = 1;
                    if (m_snap_count >= 5000)
                        m_snap_count = 0;
                    xioctl(g_hwtimer_sync_fd, SC230AI_SANP_CMD,
                           (void*)m_snap_count); /* NOLINT */
                    XAG_LOG_D("recv_test sanp cmd Snap  Sync %d \n",
                              m_snap_count);
                }
            } else {
                XAG_LOG_D("recv_Snap :%d\n", m_snap_count);
            }
        }

        // Service each camera once per pass.
        for (i = 0; i < CMAERA_NUMNER; i++) {
            int try_count = 0;
            APP_FRAME_IMU_SYNC_INFO app_syncifo = {0};
        try_again:
            // NOTE(review): buf_index <= 0 also discards valid index 0 —
            // consistent with Start_Capturing queuing from index 1, but
            // confirm buffer 0 is really never in circulation.
            buf_index = Rk_Ispv4l2_Get_Frame(&m_objCamSnapSource[i]);
            if (buf_index <= 0) {
                Rk_Ispv4l2_Set_frame(&m_objCamSnapSource[i], buf_index);
                if (++try_count <= 3) {
                    goto try_again;  // retry up to 3 times per camera
                } else {
                    continue;
                }
            }

#if SYNC_INFO
            // Ask the hw-timer driver for the IMU samples matching this
            // frame's sequence/timestamp.
            if (g_hwtimer_sync_fd != -1) {
                //   XAG_LOG_D("SC230AI_SYNC_QUERY_CMD fsize:%d\n",
                //        sizeof(APP_FRAME_IMU_SYNC_INFO));
                app_syncifo.frame_sequence =
                    m_objCamSnapSource[i].fbuf[buf_index].sequence;
                app_syncifo.frame_timestamp =
                    m_objCamSnapSource[i].fbuf[buf_index].timestamp;
                app_syncifo.type = i;
                time(&timep);
                //     printf("%d\n",timep);
                app_syncifo.frame_systimestamp = last_frame_timestamp[i];
                if (-1 == xioctl(g_hwtimer_sync_fd,
                                 SC230AI_IMU_TIMESTAMP_QUERY_CMD,
                                 &app_syncifo)) {
                    XAG_LOG_D("SC230AI_SYNC_QUERY_CMD fail\n");
                    // NOTE(review): "= +40" assigns the constant 40 — was
                    // "+= 40" (advance by one frame period) intended? confirm.
                    last_frame_systimestamp[i] = +40;  // fix
                    app_syncifo.frame_systimestamp =
                        last_frame_systimestamp[i];  // fix
                } else {
                    unsigned long long timems = 0; /* NOLINT */
                    kapok_common_basic::TimeUtility::GetClockTToMs(timems);
                    // Deferred start of the bottom camera once the front
                    // camera reaches frame 100 (see Init()).
                    if (i == 1 && app_syncifo.frame_sequence == 100) {
                        Start_Capturing(&m_objCamSnapSource[CAMERA_BOTTOM_ID],
                                        CAM_BOTTOM_SNAP_DEVNODE, 1);
                    }

                    /* if (i == 0 || i == 1) {
                     XAG_LOG_D(
                         "cam :[%d],frame "
                         " sys time:%llu \n",
                          i,app_syncifo.frame_systimestamp);
                     }*/
                    // if ( i == 1 || i == 0) {
                    //          XAG_LOG_D(
                    //              "cam :[%d],frame "
                    //               "sys time:%llu\n",
                    //              i,  app_syncifo.frame_systimestamp);
                    // XAG_LOG_D("imu f count :%d\n",
                    //  app_syncifo.imu_count);
                    //     for(int i = 0; i<app_syncifo.imu_count;i++) {

                    //         XAG_LOG_D( "imu f time :%llu\n",
                    //         app_syncifo.imu_frontdata[i].sys_timestamp);
                    //     }
                    //          //    last_time[i] = timems;
                    // }
                    /*   XAG_LOG_D("sizeof:%d\n",sizeof(APP_FRAME_IMU_SYNC_INFO));
                         XAG_LOG_D(
                                "cam :[%d],frame "
                             "id:%d,v4times:%lld,timesptame:%lld,sys_"
                              "timesptame:%llu imu time :%llu,imu front gyro"
                            "x:0x%x:0x%x,z:0x%x, imu bottom gyrox:0x%x "
                              ",y: 0x%x, z:0x%x,imu front acc x "
                              ":0x%x,y:0x%x, z:0x%x,imubottom "
                               "accx:0x%x,y:0x%x,z:0x%x,sizeof:%d\n",
                                i, app_syncifo.frame_sequence,
                            m_objCamSnapSource[i].fbuf[buf_index].timestamp,
                               app_syncifo.frame_timestamp,
                              app_syncifo.frame_systimestamp,
                            app_syncifo.imu_frontdata[10].sys_timestamp,
                              app_syncifo.imu_frontdata[10].gyro.x,
                              app_syncifo.imu_frontdata[10].gyro.y,
                               app_syncifo.imu_frontdata[10].gyro.z,
                              app_syncifo.imu_bottomdata[10].gyro.x,
                           app_syncifo.imu_bottomdata[10].gyro.y,
                              app_syncifo.imu_bottomdata[10].gyro.z,
                               app_syncifo.imu_frontdata[10].acc.x,
                             app_syncifo.imu_frontdata[10].acc.y,
                            app_syncifo.imu_frontdata[10].acc.z,
                             app_syncifo.imu_bottomdata[10].acc.x,
                                app_syncifo.imu_bottomdata[10].acc.y,
                              last_frame_systimestamp[i],
                               app_syncifo.imu_bottomdata[10].acc.z,
                                sizeof(APP_FRAME_IMU_SYNC_INFO));*/
                    last_frame_systimestamp[i] = app_syncifo.frame_systimestamp;
                    last_frame_timestamp[i] = app_syncifo.frame_timestamp;
                }
            }
#endif

#ifdef SEND_YUV_TODEAL_TEST
            // Forward frame + IMU block to the transfer path.
            SendSyncData(i, buf_index, reinterpret_cast<uint8_t*>(&app_syncifo),
                         sizeof(APP_FRAME_IMU_SYNC_INFO),
                         app_syncifo.frame_systimestamp);
#endif

#if RK_SAVE_JPEG
            ProcessSaveImg(i, buf_index, m_snap_count, app_syncifo);
#endif
            /*   XAG_LOG_D(
                     "WorkThread_Snap_Get_Frame cam id:
               %d,frmid:%u,index:%d,timesp "
                     ": % lld \n ",
                     i,
               m_objCamSnapSource[i].fbuf[buf_index].sequence,
               buf_index,
               m_objCamSnapSource[i].fbuf[buf_index].timestamp);*/
            // Re-queue the buffer unless a sync-send still owns it.
            if (!bSyncSend) {
                int ret =
                    Rk_Ispv4l2_Set_frame(&m_objCamSnapSource[i], buf_index);
                if (ret < 0) {
                    // XAG_LOG_D("WorkThread_Snap_Get_Frame fail %d\n",
                    // i);
                }
            }
            bSyncSend = false;
        }
#if RK_SAVE_JPEG
        // Snapshot serviced: clear the marker file and flush to disk.
        if (m_SnapFlag == SNAPING) {
            m_SnapFlag = SNAPWAIT; /* NOLINT */
            remove(SNAPFLAG_PATH);
            sync();
        }
#endif
    }
    XAG_LOG_W("work thread exit!");
    return 0;
}

/**
 * @brief Trigger a snapshot and block until it completes or ~20 s elapse.
 *
 * @param strImgPath destination directory; empty selects /userdata/pic/.
 * @param mode SANP_ASYNCMODE queues an async request; any other value is
 *             treated as a synchronous snapshot.
 * @return true (1) on completion, false (0) on timeout.
 */
int VideoCapture::Snap(const std::string& strImgPath, int mode) {
    if (strImgPath.empty()) {
        m_strImgPath = "/userdata/pic/";
        // BUGFIX: mode was decimal 777; 0777 is the intended permission
        // mask (still filtered by the process umask).
        mkdir(m_strImgPath.c_str(), 0777);
    } else {
        m_strImgPath = strImgPath;
    }

    m_SnapFlag = SNAPTRIGETR;

    if (g_hwtimer_sync_fd != -1) {
        m_hwtimer_snap_flag = 0;

        // Bump the wrap-around snapshot sequence id.
        m_snap_count++;
        m_triger_flag[0] = 1;
        m_triger_flag[1] = 1;
        if (m_snap_count >= 5000)
            m_snap_count = 0;
        // NOTE(review): the SC230AI_SANP_CMD ioctl is deliberately left
        // disabled here — the test path in WorkThread_Snap issues it;
        // confirm that is the intended trigger flow.
        if (mode == SANP_ASYNCMODE) {
            // Remember the async request so the worker can match it by id.
            m_MapAsyncSnapInfomode.insert(
                std::make_pair(m_snap_count, m_AsyncSnapInfmode)); /* NOLINT */
            XAG_LOG_D("recv_Snap ASync %d \n", m_snap_count);
        } else {
            XAG_LOG_D("recv_Snap Sync %d \n", m_snap_count);
        }
    }

    // Poll for completion once per second, up to 20 s.
    int timeout = 0;
    while (1) {
        // m_hwtimer_snap_flag >= 2: presumably both cameras captured.
        if (g_hwtimer_sync_fd != -1 && m_hwtimer_snap_flag >= 2) {
            break;
        } else if (m_SnapFlag == SNAPWAIT) {
            break;  // worker finished servicing the snapshot
        }

        if (timeout >= 20) {
            XAG_LOG_D("sanp timeout\n");
            return false;
        }
        usleep(1000000);
        timeout++;
    }
    return true;
}

/**
 * @brief Record an asynchronous snapshot request (clearing its completion
 *        flag) and trigger a snapshot in async mode with the default path.
 * @return the result of Snap().
 */
bool VideoCapture::AsyncSnap(AsyncSnapInfo_ST& _rInfo) {
    m_AsyncSnapInfmode.async_flag = 0;
    m_AsyncSnapInfmode.async_Info = _rInfo;

    return Snap(std::string(), SANP_ASYNCMODE);
}

/**
 * @brief Apply a sensor/pipeline control command: sync decimation gap,
 *        per-sensor exposure/gain via the V4L2 sub-device, YUV save rate,
 *        or the output resolution used by SendSyncData().
 * @param _rProperty command selector plus a union of per-command payloads.
 * @return always 0.
 */
int32_t VideoCapture::ControlSensorProperty(SensorProperty_ST& _rProperty) {
    struct v4l2_control ctrl;
    int id = 0, ret = -1;
    switch (_rProperty.m_eCmd) {
        case PropertyCtrl::E_CTRL_SYNCGAP: {
            // Frame decimation gaps for the down/front send paths.
            std::lock_guard<std::mutex> uLock(m_GapMutex);
            m_s32DownGap = _rProperty.unProperty.m_stGap[0].m_s32Down;
            m_s32FrontGap = _rProperty.unProperty.m_stGap[0].m_s32Front;
        } break;

        case PropertyCtrl::E_SET_SENSOR_EXP: {
            //   XAG_LOG_D("\n_rProperty.m_s32Size:%d,\n",
            //   _rProperty.m_s32Size);
            for (int i = 0; i < _rProperty.m_s32Size; i++) {
                if (i >= CMAERA_NUMNER)
                    break;
                // Set the exposure upper bound on the sensor sub-device.
                ctrl.id = V4L2_CID_EXPOSURE_NEWMAX;
                ctrl.value = _rProperty.unProperty.m_stInfo[i].m_s32Exp;
                id = _rProperty.unProperty.m_stInfo[i].m_s32SensorId;
                if (ctrl.value > 0)
                    ret = ioctl(m_objCamSnapSource[id].subfd, VIDIOC_S_CTRL,
                                &ctrl);
                if (ret < 0) {
                    ;  // XAG_LOG_D("set exposure auto Type failed\n"); /* // NOLINT */
                }
                if (_rProperty.unProperty.m_stInfo[i].m_s32ExpType >=
                    1) {  // manual exposure: pin min == max and fix the gain
                    ctrl.id = V4L2_CID_EXPOSURE_NEWMIN;
                    ctrl.value = _rProperty.unProperty.m_stInfo[i].m_s32Exp;
                    id = _rProperty.unProperty.m_stInfo[i].m_s32SensorId;
                    ret = ioctl(m_objCamSnapSource[id].subfd, VIDIOC_S_CTRL,
                                &ctrl);

                    ctrl.id = V4L2_CID_GAIN_NEWMAX;
                    ctrl.value = 1;
                    id = _rProperty.unProperty.m_stInfo[i].m_s32SensorId;
                    ret = ioctl(m_objCamSnapSource[id].subfd, VIDIOC_S_CTRL,
                                &ctrl);

                    XAG_LOG_D("\nid :%d Set Exposure manunl Type:[%d]\n", id,
                              ctrl.value);
                } else {
                    XAG_LOG_D("\nid :%d Set Exposure Auto Type:[%d]\n", id,
                              ctrl.value);
                }
            }
        } break;
        case PropertyCtrl::E_SET_GAIN_EXP: {
            for (int i = 0; i < _rProperty.m_s32Size; i++) {
                if (i >= CMAERA_NUMNER)
                    break;
                ctrl.id = V4L2_CID_GAIN_NEWMAX;
                ctrl.value = _rProperty.unProperty.m_stInfo[i].m_s32AGain;
                id = _rProperty.unProperty.m_stInfo[i].m_s32SensorId;
                // BUGFIX: the VIDIOC_S_CTRL call was missing here, so `ret`
                // was tested without the gain ever being written to the
                // sub-device.  Guarded like the exposure case above.
                if (ctrl.value > 0)
                    ret = ioctl(m_objCamSnapSource[id].subfd, VIDIOC_S_CTRL,
                                &ctrl);
                if (ret < 0) {
                    XAG_LOG_D("set gain auto Type failed\n");
                }
                XAG_LOG_D("\nid : %d Set Gain Auto Type:[%d]\n", i, ctrl.value);
            }
        } break;
        case PropertyCtrl::E_SET_DATA_SAVE: {
            XAG_LOG_D("\nSet Save Type:\n");
            // Per-sensor YUV save decimation used by the save path.
            for (int i = 0; i < _rProperty.m_s32Size; i++) {
                if (i >= CMAERA_NUMNER)
                    break;
                id = _rProperty.unProperty.m_stSave[i].m_s32SensorId;
                m_snapyuv_savefps[id] =
                    _rProperty.unProperty.m_stSave[i].m_s32Gap;
                XAG_LOG_D("\nid : %d Set Save Type:[%d]\n", id,
                          m_snapyuv_savefps[id]);
            }
        } break;

        case PropertyCtrl::E_SET_RESOLUTION: {
            // Output resolution for SendSyncData(), clamped to the capture
            // resolution of the respective camera.
            std::lock_guard<std::mutex> uLock(m_GapMutex);
            for (int i = 0; i < _rProperty.m_s32Size; i++) {
                if (i >= CMAERA_NUMNER)
                    break;
                if (0 == _rProperty.unProperty.m_stRes[i].m_s32SensorId) {
                    m_s32DownW = (_rProperty.unProperty.m_stRes[i].m_s32W <
                                          CAM_BOTTOM_SNAP_IMAGEWIDTH
                                      ? _rProperty.unProperty.m_stRes[i].m_s32W
                                      : CAM_BOTTOM_SNAP_IMAGEWIDTH);
                    m_s32DownH = (_rProperty.unProperty.m_stRes[i].m_s32H <
                                          CAM_BOTTOM_SNAP_IMAGEHEIGH
                                      ? _rProperty.unProperty.m_stRes[i].m_s32H
                                      : CAM_BOTTOM_SNAP_IMAGEHEIGH);
                    XAG_LOG_D("Down Set resolution:[%d x %d]\n", m_s32DownW,
                              m_s32DownH);
                } else if (1 ==
                           _rProperty.unProperty.m_stRes[i].m_s32SensorId) {
                    m_s32FrontW = (_rProperty.unProperty.m_stRes[i].m_s32W <
                                           CAM_FRONT_SNAP_IMAGEWIDTH
                                       ? _rProperty.unProperty.m_stRes[i].m_s32W
                                       : CAM_FRONT_SNAP_IMAGEWIDTH);
                    m_s32FrontH = (_rProperty.unProperty.m_stRes[i].m_s32H <
                                           CAM_FRONT_SNAP_IMAGEHEIGH
                                       ? _rProperty.unProperty.m_stRes[i].m_s32H
                                       : CAM_FRONT_SNAP_IMAGEHEIGH);
                    XAG_LOG_D("Front Set resolution:[%d x %d]\n", m_s32FrontW,
                              m_s32FrontH);
                }
            }
        } break;

        default:
            break;
    }

    return 0;
}

// NOTE(review): dead code, compiled out via `#if 0`. Presumably superseded by
// the PropertyCtrl::E_SET_RESOLUTION path above (which takes the same
// m_GapMutex) — confirm before deleting for good.
#if 0
int32_t VideoCapture::SetSyncGap(int32_t _s32Down, int32_t _s32Front) {
    std::lock_guard<std::mutex> uLock(m_GapMutex);
    m_s32DownGap = _s32Down;
    m_s32FrontGap = _s32Front;
    return 0;
}
#endif

/**
 * @brief Resize an NV12 (YCbCr 4:2:0 SP) frame with the RGA hardware engine.
 *
 * @param src_buf  Virtual address of the source frame. Currently unused —
 *                 the source is always wrapped via its dma-buf fd; kept for
 *                 interface compatibility with existing callers.
 * @param fd       dma-buf fd of the source frame.
 * @param dst_buf  Virtual address of the destination buffer (used when
 *                 warpfd == 1).
 * @param dst_fd   dma-buf fd of the destination buffer (used otherwise).
 * @param width    Source width in pixels.
 * @param height   Source height in pixels.
 * @param _s32DestW Destination width in pixels.
 * @param s32DestH  Destination height in pixels.
 * @param warpfd   1 -> wrap destination by virtual address, else by fd.
 * @return 1 on success, -1 on wrap/imcheck failure.
 */
int VideoCapture::RK_Rga_Resize(unsigned char* src_buf,
                                int fd,
                                unsigned char* dst_buf,
                                int dst_fd,
                                int width,
                                int height,
                                int _s32DestW,
                                int s32DestH,
                                int warpfd) {
    (void)src_buf;  // source is wrapped by fd below; see @param note

    int ret;
    im_rect src_rect;
    im_rect dst_rect;

    rga_buffer_t src;
    rga_buffer_t dst;

    // Zero rects => operate on the full frame.
    // BUG FIX: the original zeroed dst_rect twice (once with
    // sizeof(tmp_rect)) and left an unused tmp_rect local behind.
    memset(&src_rect, 0, sizeof(src_rect));
    memset(&dst_rect, 0, sizeof(dst_rect));

    src = wrapbuffer_fd(fd, width, height, RK_FORMAT_YCbCr_420_SP);
    if (warpfd == 1) {
        dst = wrapbuffer_virtualaddr(dst_buf, _s32DestW, s32DestH,
                                     RK_FORMAT_YCbCr_420_SP);
    } else {
        dst =
            wrapbuffer_fd(dst_fd, _s32DestW, s32DestH, RK_FORMAT_YCbCr_420_SP);
    }
    // wrapbuffer_* reports failure through a zero-width buffer.
    if (src.width == 0 || dst.width == 0) {
        XAG_LOG_D("%s\n", imStrError());
        return -1;
    }

    src.format = RK_FORMAT_YCbCr_420_SP;
    dst.format = RK_FORMAT_YCbCr_420_SP;
    ret = imcheck(src, dst, src_rect, dst_rect);
    if (IM_STATUS_NOERROR != ret) {
        XAG_LOG_D("%d, check error! %s", __LINE__, imStrError((IM_STATUS)ret));
        return -1;
    }

    imresize(src, dst);

    return 1;
}

/**
 * @brief Persist the captured frame of camera `i` (buffer slot `buf_index`).
 *
 * RK_SAVE_YUV path: every m_snapyuv_savefps[i]-th IMU-synced frame is resized
 * with RGA to the configured resolution and dumped as raw NV12 under
 * m_strImgPath. RK_SAVE_JPEG path: when a snapshot is pending
 * (m_SnapFlag == SNAPING or a per-camera trigger flag is set), the frame is
 * converted NV12->RGB, JPEG-compressed to disk and — in hardware-timer sync
 * mode — queued on m_SnapSyncList for downstream EXIF tagging.
 *
 * @param i          Camera index (0 = bottom, 1 = front — per the size
 *                   selection below).
 * @param buf_index  V4L2 buffer index within m_objCamSnapSource[i].
 * @param snap_count Sequence number used to build the JPEG/YUV file names.
 * @param syncinfo   IMU/frame sync record; syncinfo.sync == 0 marks an
 *                   unsynced frame that must be skipped.
 * @return 1 normally; 0 when an unsynced frame is skipped on the YUV path.
 */
int VideoCapture::ProcessSaveImg(int i,
                                 int buf_index,
                                 unsigned int snap_count,
                                 APP_FRAME_IMU_SYNC_INFO& syncinfo) {  // NOLINT
#ifdef RK_SAVE_YUV
    // A save-fps of 0 disables periodic YUV dumping for this camera.
    if (m_snapyuv_savefps[i] != 0) {
        if (syncinfo.sync == 0) {
            XAG_LOG_I("skip frame %ld\n",
                      m_objCamSnapSource[i].fbuf[buf_index].sequence);
            return 0;
        }
        // Dump one frame out of every m_snapyuv_savefps[i].
        if (m_objCamSnapSource[i].fbuf[buf_index].sequence %
                m_snapyuv_savefps[i] ==
            0) {
            int32_t s32W = (i == 0 ? m_s32DownW : m_s32FrontW);
            int32_t s32H = (i == 0 ? m_s32DownH : m_s32FrontH);

            // Scale into m_s8Yuvbuffer (warpfd==1 -> destination wrapped by
            // virtual address). NOTE(review): source dimensions are always
            // the FRONT camera's — assumes both sensors share a resolution;
            // confirm for the bottom camera.
            RK_Rga_Resize((unsigned char*)m_objCamSnapSource[i]
                              .fbuf[buf_index]
                              .start,  // NOLINT
                          m_objCamSnapSource[i].fbuf[buf_index].export_fd,
                          m_s8Yuvbuffer, m_vpu_rga_fd,
                          CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH,
                          s32W, s32H, 1);

            strYuvImgPath[i] = m_strImgPath;
            strYuvImgPath[i] = strYuvImgPath[i] + "XAG_" +
                               std::to_string(syncinfo.frame_systimestamp) +
                               "_" + std::to_string(i) + ".yuv";
            FILE* fp_yuv = fopen(strYuvImgPath[i].c_str(), "w");
            if (fp_yuv) {
                // NV12 frame size = W * H * 3/2 bytes.
                fwrite(
                    //  (unsigned
                    //  char*)m_objCamSnapSource[i].fbuf[buf_index].start,
                    m_s8Yuvbuffer, s32W * s32H * 3 / 2, 1, fp_yuv);
                fflush(fp_yuv);
                XAG_LOG_I("snap yuv ok, save to %s\n",
                          strYuvImgPath[i].c_str());
                fclose(fp_yuv);
            }
        }
    }
#endif

#if RK_SAVE_JPEG
    // Check whether this frame is a snapshot frame.
    if (m_SnapFlag == SNAPING || m_triger_flag[i] != 0) {
        // (disabled) Command-triggered path: in sync mode, only act on
        // frames flagged as snapshot frames.
        /*if (g_hwtimer_sync_fd != -1) {
            if (syncinfo.framemode == 0) {
                if (m_triger_flag[i] == 0) {
                    return 0;
                }
            }
        }*/
        // strImgPath[i].clear();
        // strYuvImgPath[i].clear();
        strImgPath[i] = m_strImgPath;
        strYuvImgPath[i] = m_strImgPath;
        // Generate the photo file name.
        /*if (i == 0) {
            m_objStdFileNnamePro.ProduceFileName(
                0, kapok_functionals::SaveFileType::E_SAVE_JPG, strImgPath[0],
                STD_IMG_SAVEPATH);
            m_objYuvFileNnamePro.ProduceFileName(
                0, kapok_functionals::SaveFileType::E_SAVE_YUV, strImgPath[0],
                STD_IMG_SAVEPATH);
            m_objStdFileNnamePro.ProduceFileName(
                1, kapok_functionals::SaveFileType::E_SAVE_JPG, strImgPath[1],
                STD_IMG_SAVEPATH);
            m_objYuvFileNnamePro.ProduceFileName(
                1, kapok_functionals::SaveFileType::E_SAVE_YUV, strImgPath[1],
                STD_IMG_SAVEPATH);
        }*/

        XAG_LOG_D(
            "WorkThread_Snap cam id: %d,frmid:%u,index:%d "
            "timesp :%lu,systime:%lu,\n",
            i, m_objCamSnapSource[i].fbuf[buf_index].sequence, buf_index,
            1000 * m_objCamSnapSource[i].fbuf[buf_index].timestamp,
            syncinfo.frame_systimestamp);
        strImgPath[i] = strImgPath[i] + "XAG_" + std::to_string(snap_count) +
                        "_" + std::to_string(i) + ".jpg";

        strYuvImgPath[i] = strYuvImgPath[i] + "XAG_" +
                           std::to_string(snap_count) + "_" +
                           std::to_string(i) + ".yuv";
        /* RK_Rga((unsigned char*)m_objCamSnapSource[i].fbuf[buf_index].start,
                 m_s8Jpegbuffer, CAM_FRONT_SNAP_IMAGEWIDTH,
                 CAM_FRONT_SNAP_IMAGEHEIGH);*/

        /*        int ret = kapok_functionals::TurboJpeg::CompressFromMem(
                   strImgPath[i].c_str(), m_s8Jpegbuffer, 0, 100,
                   CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH);
                   if (ret != 0) {
                        XAG_LOG_E("snap jpeg error, save %s error\n",
                           strImgPath[i].c_str());
                     } else {
                          XAG_LOG_I("snap jpeg ok, save to %s\n",
     strImgPath[i].c_str());
                     }*/
        // NOTE(review): `size` (per-camera NV12 byte count) is computed but
        // never used below — the convert/compress calls hard-code the FRONT
        // dimensions for both cameras; confirm that is intended.
        int size = 0;
        if (i == 0) {
            size =
                CAM_BOTTOM_SNAP_IMAGEWIDTH * CAM_BOTTOM_SNAP_IMAGEHEIGH * 3 / 2;

        } else {
            size =
                CAM_FRONT_SNAP_IMAGEWIDTH * CAM_FRONT_SNAP_IMAGEHEIGH * 3 / 2;
        }

        /*    RK_Rga((unsigned
           char*)m_objCamSnapSource[i].fbuf[buf_index].start, m_s8Yuvbuffer,
           CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH);*/
        // CPU-side NV12 -> RGB into m_s8Yuvbuffer, then JPEG-compress (q=90).
        kapok_functionals::TurboJpeg::NV12_T_RGB(
            CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH,
            (unsigned char*)m_objCamSnapSource[i].fbuf[buf_index].start,
            (unsigned char*)m_s8Yuvbuffer);
        int ret = kapok_functionals::TurboJpeg::CompressFromMem(
            strImgPath[i].c_str(), m_s8Yuvbuffer, 0, 90,
            CAM_FRONT_SNAP_IMAGEWIDTH, CAM_FRONT_SNAP_IMAGEHEIGH);
        if (ret != 0) {
            XAG_LOG_E("snap jpeg error, save %s error\n",
                      strImgPath[i].c_str());
        } else {
            XAG_LOG_I("snap jpeg ok, save to %s\n", strImgPath[i].c_str());
        }
        sync();  // flush page cache so the JPEG survives a power cut
        // vpu_encode_jpeg_done(&m_vpu_encode);
        if (g_hwtimer_sync_fd != -1) {
            // Hardware-timer sync mode: queue this shot for the SnapSync
            // worker (EXIF tagging / async upload).
            SanpSyncInfo_S sanpsync_info;  // NOLINT
            bzero(&sanpsync_info, sizeof(sanpsync_info));
            // NOTE(review): no bound check against sizeof(filepath) — a long
            // m_strImgPath would overflow; confirm path lengths are bounded.
            memcpy(sanpsync_info.filepath, strImgPath[i].c_str(),
                   strlen(strImgPath[i].c_str()));

            sanpsync_info.filepath[strlen(strImgPath[i].c_str())] = 0;
            // NOTE(review): strlen() is size_t but logged with %d.
            XAG_LOG_I("%d snap jpeg ok, save to %s size:%d\n", i,
                      sanpsync_info.filepath, strlen(strImgPath[i].c_str()));
            sanpsync_info.imu_sync_info = syncinfo;
            sanpsync_info.imu_sync_info.snap_count = m_snap_count;
            m_SnapSyncList.push_back(sanpsync_info);
            XAG_LOG_D("snap sync  seq: %d size:%d m_triger_flag[i]:%d\n",
                      sanpsync_info.imu_sync_info.snap_count,
                      m_SnapSyncList.size(), m_triger_flag[i]);
            m_hwtimer_snap_flag++;
            m_triger_flag[i] = 0;
        }
    }
#endif

    return 1;
}

/**
 * @brief Read the SoC temperature from the thermal-zone sysfs node and log it.
 *
 * The node reports milli-degrees Celsius; the value is scaled to degrees
 * before logging.
 *
 * @return 1 on success, -1 if the node cannot be opened or parsed.
 */
int VideoCapture::Thermal_Update() {
    FILE* fp = fopen(THERMAL_ZONE_MARK, "r");
    if (fp == NULL) {
        return -1;
    }

    // BUG FIX: the original ignored the fscanf result and logged an
    // uninitialized value when the node was empty or unreadable.
    int buff = 0;
    if (fscanf(fp, "%d", &buff) != 1) {
        fclose(fp);
        return -1;
    }
    fclose(fp);

    float thermal = buff / 1000.0;  // milli-degrees C -> degrees C

    XAG_LOG_D("thermal:%f\n", thermal);

    return 1;
}

/**
 * @brief Background worker that post-processes queued snapshots.
 *
 * Drains m_SnapSyncList: for each entry it waits (bounded retries) for the
 * JPEG to appear on disk, prepares the static EXIF tags, and — when every
 * camera for a given snap sequence number has reported — fires the async
 * snapshot callback registered in m_MapAsyncSnapInfomode. Also kicks a
 * periodic thermal read (every 120 ticks of the 500 ms idle sleep).
 *
 * Runs until m_thdSnapSyncRunFlg is cleared.
 * @return 1 on thread exit.
 */
int VideoCapture::WorkThread_SnapSync(void) {
    m_thdSnapSyncRunFlg = 1;
    int i = 0, thermal_count = 0;
    SanpSyncInfo_S snap_syncinfo;
    std::list<SanpSyncInfo_S>::iterator iter_Sync;
    kapok_functionals::EXIF2_EXIF_GPS_S stGPS;
    kapok_functionals::EXIF2_XMP_S stXMP;
    kapok_functionals::EXIF2_EXIF_IFD0_S stIFD0;
    // kapok_functionals::EXIF2_EXIF_IFD1_S stIFD1;
    kapok_functionals::EXIF2_EXIF_EXIFIFD_S stExifIFD;
    std::map<int, ASanpSyncInfoFlag_S>::iterator asyncino_iter;

    kapok_common_basic::SetThreadName("SnapSync");

    while (m_thdSnapSyncRunFlg) {
        if (!m_SnapSyncList.empty()) {
            iter_Sync = m_SnapSyncList.begin();
            snap_syncinfo = *iter_Sync;

            // Wait for the JPEG file to land on disk: up to 3 retries,
            // 500 ms apart. BUG FIX: the original `goto try_c` loop never
            // incremented try_count, so a missing file spun here forever.
            int try_count = 0;
            bool bFileOnDisk = (access(snap_syncinfo.filepath, F_OK) == 0);
            while (!bFileOnDisk && try_count < 3) {
                usleep(500000);
                ++try_count;
                bFileOnDisk = (access(snap_syncinfo.filepath, F_OK) == 0);
            }

            if (bFileOnDisk) {
                // Static EXIF tags for this device/lens.
                sprintf(stIFD0.ImageDescription, "%s",              // NOLINT
                        "XAG Geography");                           // NOLINT
                sprintf(stIFD0.Make, "%s", "XAG");                  // NOLINT
                sprintf(stIFD0.Model, "%s", "Xcam2000DC02");        // NOLINT
                sprintf(stIFD0.Software, "%s", "Version-6.0.2");    // NOLINT
                sprintf(stIFD0.Copyright, "%s", "Copyright 2023");  // NOLINT
                sprintf(stExifIFD.FocalLength, "%s",                // NOLINT
                        (const char*)"2.3");                        // NOLINT
                sprintf(stExifIFD.FNumber, "%s", "2.8");            // NOLINT
                stExifIFD.FocalLength35mmFilm = 13;
                stExifIFD.ExifImageWidth = 960;
                stExifIFD.ExifImageHeight = 540;
                sprintf(stIFD0.Copyright, "%s", "Copyright 2023");  // NOLINT
                stIFD0.Orientation = 1;       // 1:horizontal(normal)\2:mirror
                                              // horizontal \3:rotate 180
                stIFD0.ResolutionUnit = 2;    // 1:none \2:inches \3:cm
                stIFD0.YCbCrPositioning = 1;  // 1:centered \2:co-sited

                // m_SnapSyncInfo.Exiv2Adapter_EXIFandXmp_save(
                //   i, snap_syncinfo.filepath, &stIFD0, &stExifIFD, &stGPS,
                // NULL, &stXMP, NULL);
                XAG_LOG_D("snap sync ok%s\n",
                          snap_syncinfo.filepath);  // NOLINT
            } else {
                XAG_LOG_D("snap sync timeout %d\n", i);
            }
            // Look up whether this snap sequence number has a registered
            // async return (callback) request.
            asyncino_iter = m_MapAsyncSnapInfomode.find(
                snap_syncinfo.imu_sync_info.snap_count);
            if (asyncino_iter != m_MapAsyncSnapInfomode.end()) {
                int id = snap_syncinfo.imu_sync_info.type;
                int seq = snap_syncinfo.imu_sync_info.snap_count;

                XAG_LOG_D("find async [%d] snap info seq:%d\n", id,
                          snap_syncinfo.imu_sync_info.snap_count);

                m_MapAsyncSnapInfomode[seq].async_flag += 1;

                m_MapAsyncSnapInfomode[seq].async_filepath[id] =
                    std::string(snap_syncinfo.filepath);
                // Fire the callback only once all cameras have reported.
                if (m_MapAsyncSnapInfomode[seq].async_flag >= CMAERA_NUMNER) {
                    std::vector<CalibSnapData_S> Calibsnap; /* NOLINT */
                    CalibSnapData_S calib;
                    calib.m_s32SensorId = 0;
                    calib.strJpegPath =
                        m_MapAsyncSnapInfomode[seq].async_filepath[0];
                    Calibsnap.push_back(calib);
                    calib.m_s32SensorId = 1;
                    calib.strJpegPath =
                        m_MapAsyncSnapInfomode[seq].async_filepath[1];
                    Calibsnap.push_back(calib);
                    XAG_LOG_D(
                        "push snap info seq:%d filepath0:%s filepath1:%s \n",
                        seq,
                        m_MapAsyncSnapInfomode[seq].async_filepath[0].c_str(),
                        m_MapAsyncSnapInfomode[seq].async_filepath[1].c_str());

                    XAG_LOG_D("\nm_u32Id: %d:bReply%d,\n",
                              m_MapAsyncSnapInfomode[seq].async_Info.m_u32Id,
                              m_MapAsyncSnapInfomode[seq].async_Info.m_bReply);
                    asyncino_iter->second.async_Info.m_pCall(
                        m_MapAsyncSnapInfomode[seq].async_Info.m_u32Id,
                        m_MapAsyncSnapInfomode[seq].async_Info.m_bReply,
                        Calibsnap);
                    XAG_LOG_D(
                        "push snap info seq:%d filepath0:%s filepath1:%s \n",
                        seq,
                        m_MapAsyncSnapInfomode[seq].async_filepath[0].c_str(),
                        m_MapAsyncSnapInfomode[seq].async_filepath[1].c_str());
                    m_MapAsyncSnapInfomode.erase(asyncino_iter);
                }
            }

            usleep(5000);

            m_SnapSyncList.pop_front();
        }
        usleep(500000);
        // Roughly once a minute at the 500 ms tick, refresh the SoC
        // temperature reading.
        if (++thermal_count >= 120) {
            thermal_count = 0;
            Thermal_Update();
        }
    }

    return 1;
}

/**
 * @brief Convert an NV12 (YCbCr 4:2:0 SP) frame to packed RGB888 via RGA.
 *
 * @param src_buf Virtual address of the NV12 source frame.
 * @param dst_buf Virtual address of the RGB888 destination buffer
 *                (must hold width * height * 3 bytes).
 * @param width   Frame width in pixels.
 * @param height  Frame height in pixels.
 * @return 1 on success, -1 on wrap/imcheck failure.
 *
 * BUG FIXES vs the original:
 *  - the wrap-failure path used `goto exit` and still returned 1 (success);
 *    it now returns -1;
 *  - dst_rect was memset twice (once with sizeof(tmp_rect) — a copy-paste
 *    typo);
 *  - a width*height*4 scratch buffer was malloc'd and wrapped on every call
 *    but never used by the actual imcvtcolor(src, dst) conversion (it only
 *    served a commented-out imrotate); it has been removed.
 */
int VideoCapture::RK_Rga(unsigned char* src_buf,
                         unsigned char* dst_buf,
                         int width,
                         int height) {
    XAG_LOG_D("RGA handle\n");
    int ret;
    im_rect src_rect;
    im_rect dst_rect;

    rga_buffer_t src;
    rga_buffer_t dst;

    // Zero rects => operate on the full frame.
    memset(&src_rect, 0, sizeof(src_rect));
    memset(&dst_rect, 0, sizeof(dst_rect));

    src =
        wrapbuffer_virtualaddr(src_buf, width, height, RK_FORMAT_YCbCr_420_SP);
    dst = wrapbuffer_virtualaddr(dst_buf, width, height, RK_FORMAT_RGB_888);
    // wrapbuffer_* reports failure through a zero-width buffer.
    if (src.width == 0 || dst.width == 0) {
        XAG_LOG_D("%s\n", imStrError());
        return -1;
    }

    src.format = RK_FORMAT_YCbCr_420_SP;
    dst.format = RK_FORMAT_RGB_888;
    ret = imcheck(src, dst, src_rect, dst_rect);
    if (IM_STATUS_NOERROR != ret) {
        XAG_LOG_D("%d, check error! %s", __LINE__, imStrError((IM_STATUS)ret));
        return -1;
    }

    imcvtcolor(src, dst, src.format, dst.format);

    // imrotate(tmp, dst, 180);  // (disabled) would need a scratch buffer

    return 1;
}

}  // namespace kapok_hardware_rv1126
