#include "MlCamera.h"

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_4 {
namespace implementation {

// Per-position singleton instances (top / slide / face cameras).
// Defined here, assigned elsewhere — no owner is visible in this file;
// NOTE(review): lifetime/ownership of these pointers should be verified
// against the code that sets them.
MlCamera*  MlCamera::topcam = nullptr;
MlCamera*  MlCamera::slidecam = nullptr;
MlCamera*  MlCamera::facecam = nullptr;


MlCamera::MlCamera() {
    // Preview surface / Java callback state.
    mPreviewWindow = NULL;
    mFrameCallbackObj = NULL;
    mRequestBandwidth = DEFAULT_BANDWIDTH;
    mFrameWidth = DEFAULT_PREVIEW_WIDTH;
    mFrameHeight = DEFAULT_PREVIEW_HEIGHT;
    frameBytes = DEFAULT_PREVIEW_WIDTH * DEFAULT_PREVIEW_HEIGHT * 2;	// YUYV: 2 bytes/pixel

    /** preview-window related state **/
    previewBytes = DEFAULT_PREVIEW_WIDTH * DEFAULT_PREVIEW_HEIGHT * PREVIEW_PIXEL_BYTES;
    previewFormat = WINDOW_FORMAT_RGBA_8888;
    mIsRunning = false;
    mPreviewCnt = 0;
    mMaxWaitTime = 0;
    mMaxIOCTLTime = 0;
    mJNICALLTime = 0;

    /**
     * init mutexes and condition variable used by the preview pipeline
     */
    pthread_cond_init(&preview_sync, NULL);
    pthread_mutex_init(&preview_mutex, NULL);
    pthread_mutex_init(&callback_mutex, NULL);

    /** Prepare the hardware MJPEG decoder.
     * Failure is only logged here; MJPEG preview will fail later if this
     * did not succeed.
     */
    if (!mDecoder.prepareDecoder()) {
        LOGE("failed to prepare JPEG decoder");
    }
}


MlCamera::~MlCamera() {
    // Clear the running flag so worker loops can observe shutdown.
    mIsRunning = false;

    // Drop our reference on the preview surface, if any.
    if (mPreviewWindow)
        ANativeWindow_release(mPreviewWindow);
    mPreviewWindow = NULL;

    // Free any frames still queued for preview, then the frame pool.
    clearPreviewFrame();
    clear_pool();

    // NOTE(review): the preview/frame threads are not joined here — this
    // assumes stopRecord() already ran; destroying these primitives while
    // a thread still uses them is undefined behavior. Verify callers.
    pthread_mutex_destroy(&preview_mutex);
    pthread_cond_destroy(&preview_sync);
    pthread_mutex_destroy(&callback_mutex);

}

int MlCamera::cameraInit(int camSeq) {
    char *ptr = NULL;
    DIR *dp;
    struct dirent *direntp;
    int found = 0;
    int rv = -1;
    FILE *fd;
    char buf[32];
    char videoname[64];
    char temp_path[64]="/sys/class/video4linux/";
    int pos = -1;


    if(camSeq <1 || camSeq >4)
    {
        LOGE("invalid camera Seq!!!");
        return -1;
    }

    switch (camSeq) {
        case TOPCAM:
            mProductName = TOPCAMNAME;
            break;
        case SLIDECAM:
            mProductName = SLIDECAMNAME;
            break;
        case FACECAM:
            mProductName = FACECAMNAME;
            break;
        case MICROCAM:
            mProductName = MICROCAMNAME;
            break;
        default:
            break;
    }

    dp= opendir(MLCAMNODEPATH);

    if(dp == NULL)
    {
        LOGE("open path error:%s\n",strerror(errno));
        return -1;
    }

    LOGD("open file dir:%s success,dp:%p\n",MLCAMNODEPATH,dp);

    do{
        direntp = readdir(dp);

        if(direntp ==NULL)
            continue;

        /**
         * 读取/sys/class/video4linux/ 此目录下的文件名  先匹配文件名 匹配文件名中包含 “video” 的文件名 通过found 表示 找到文件
         * 拼接文件，然后打开文件 读取文件内容  找摄像头名字  同时小的节点是capture  大的节点是out
         */

        if(strstr(direntp->d_name,"video")) {
            LOGD("contain video  PATH :%s \n",direntp->d_name);
            strcpy(videoname, direntp->d_name);
            LOGD("videoname :%s \n",videoname);

            CLEAR(temp_path);
            memcpy(temp_path,MLCAMNODEPATH,sizeof(MLCAMNODEPATH) );
            LOGD("MLCAMNODEPATH  temp_path :%s \n",temp_path);
            strncat(temp_path, videoname, sizeof(temp_path));
            LOGD("add videoname temp_path :%s \n",temp_path);
            strncat(temp_path, "/name", sizeof(temp_path));
            LOGD("add /name temp_path :%s \n",temp_path);

            LOGD("after cat  file name:%s\n", temp_path);
            fd = fopen(temp_path, "rb");

            if(fd){
                rv = fread(buf, sizeof(buf),1,fd);
                LOGD("fread contest %s  size:%d",buf,rv);
            }else{
                LOGE("fread failed");
                return -1;
            }

            if (rv < 0) {
                LOGE("read error:%s\n", strerror(errno));
                return -1;
            }

            fclose(fd);

            LOGD("find camera/class  node name :%s\n", buf);
            string bufstr(buf);
            string pname(mProductName);
            //ptr = strstr(buf, mProductName);
            pos = bufstr.find(pname);

            if(pos >= 0) {
                char num[3]={};
                strncpy(num, videoname+5, strlen(videoname)-5);
                if(atoi(num)%2 ==1){
                    //sprintf(mCamDevName,"/dev/%s",videoname);
                    mCamDevName= "/dev/";
                    mCamDevName.append(videoname);
                    LOGD("camera node Name is  %s", mCamDevName.c_str());

                    if(camSeq == TOPCAM )
                    {
                        mTop = true;
                    } else if(camSeq ==SLIDECAM)
                    {
                        mSlide = true;
                    } else if(camSeq == FACECAM)
                    {
                        mFace = true;
                    }else
                    {

                    }

                    break;
                }
            } else{
                LOGD("not support %s", videoname);
            }

        }else{
            LOGD(" NO VIDEO   File node :%s \n",direntp->d_name);
        }

    }while(direntp != NULL);

    if(mTop|| mSlide || mFace)
    {
        return 0;
    }else
    {
        return -1;
    }

}

// Open the V4L2 device node previously discovered by cameraInit() and
// remember its descriptor in mCAMFD. Returns 0 on success, -1 otherwise.
int MlCamera::openCAM() {
    const int fd = open(mCamDevName.c_str(), O_RDWR);
    if (fd < 0) {
        LOGE("open %s failed,erro=%s", mCamDevName.c_str(), strerror(errno));
        return -1;
    }

    LOGD("open %s success,fd=%d", mCamDevName.c_str(), fd);
    mCAMFD = fd;

    return 0;
}

// Negotiate the capture format with the driver (VIDIOC_S_FMT) and cache
// the requested pixel format and dimensions in member state.
// Returns the ioctl result (>= 0 on success, -1 on failure).
int MlCamera::set_format(int pixelformat, int width, int height) {
    struct v4l2_format format;

    LOGD("width:height=%dx%d,pixelformat=0x%x,fd=%d", width, height, pixelformat, mCAMFD);

    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.width = width;
    format.fmt.pix.height = height;
    format.fmt.pix.field = V4L2_FIELD_ANY;
    format.fmt.pix.pixelformat = pixelformat;

    // Remember what was requested (the driver may adjust `format`, but the
    // cached values mirror the request, matching previous behavior).
    mFramePixelFormat = pixelformat;
    mWidth = width;
    mHeight = height;

    const int rc = ioctl(mCAMFD, VIDIOC_S_FMT, &format);
    if (rc < 0) {
        LOGE("ioctl(VIDIOC_S_FMT) failed %d(%s)", errno, strerror(errno));
        return -1;
    }

    return rc;
}



/**
 * Request, query, mmap, and queue BUFFER_COUNT capture buffers.
 * On success mVideoBuffer[] holds the mapped addresses and mBufferLength
 * the per-buffer size. Returns 0 on success, negative on failure.
 */
int MlCamera::init_buffer() {

    u8 i;
    int ret;
    struct v4l2_requestbuffers req;
    // Zero the whole struct: the original left req.reserved uninitialized,
    // which some kernels reject with EINVAL.
    memset(&req, 0, sizeof(req));
    req.count = BUFFER_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(mCAMFD, VIDIOC_REQBUFS, &req);
    if (ret < 0) {
        LOGE("ioctl(VIDIOC_REQBUFS) failed %d(%s)", errno, strerror(errno));
        return ret;
    }

    LOGD("req.count: %d", req.count);
    if (req.count < BUFFER_COUNT) {
        LOGE("request buffer failed");
        // The original returned `ret` here, which is 0 after a successful
        // VIDIOC_REQBUFS — i.e. it reported success on this failure path.
        return -1;
    }
    struct v4l2_buffer buffer;
    memset(mVideoBuffer, 0x00, sizeof(mVideoBuffer));
    for (i = 0; i < req.count; i++) {
        memset(&buffer, 0, sizeof(struct v4l2_buffer));
        buffer.type = req.type;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.flags = 0;
        buffer.index = i;

        ret = ioctl(mCAMFD, VIDIOC_QUERYBUF, &buffer);
        if (ret < 0) {
            LOGE("ioctl(VIDIOC_QUERYBUF) failed %d(%s)", errno, strerror(errno));
            return ret;
        }
        LOGD("buffer.length: %d, buffer.m.offset: %d", buffer.length, buffer.m.offset);
        // Read-only mapping: frames are only copied out, never written back.
        mVideoBuffer[i] = (u8*) mmap(NULL, buffer.length, PROT_READ, MAP_SHARED, mCAMFD, buffer.m.offset);
        if (mVideoBuffer[i] == MAP_FAILED) {
            LOGE("mmap() failed %d(%s)", errno, strerror(errno));
            return -1;
        }
        // Hand the buffer to the driver so capture can fill it.
        ret = ioctl(mCAMFD, VIDIOC_QBUF, &buffer);
        if (ret < 0) {
            LOGE("ioctl(VIDIOC_QBUF) failed %d(%s)", errno, strerror(errno));
            return ret;
        }
    }
    // All buffers share the same length; remember it for munmap later.
    mBufferLength = buffer.length;

    return 0;
}

// Toggle V4L2 streaming on the capture queue.
// @param on non-zero to start streaming, zero to stop.
// @return ioctl result (>= 0 on success, negative on failure).
int MlCamera::set_stream(int on) {
    enum v4l2_buf_type queue_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    const int request = on ? VIDIOC_STREAMON : VIDIOC_STREAMOFF;

    LOGD("set stream %s", (on ? "on" : "off"));

    const int rc = ioctl(mCAMFD, request, &queue_type);
    if (rc < 0) {
        LOGE("cam_set_stream failed %d(%s)", errno, strerror(errno));
    }
    return rc;
}

/**
 * Start the preview thread if it is not already running.
 * Resets the timing statistics, sets mIsRunning, and spawns
 * preview_thread_func. On thread-creation failure the running flag is
 * rolled back and any waiter on preview_sync is woken.
 *
 * @return 0 on success, non-zero on failure (also -1 when already running).
 */
int MlCamera::startRecord() {

    int result = -1;
    mMaxWaitTime = 0;
    mMaxIOCTLTime = 0;
    mJNICALLTime = 0;

    if (!isRunning()) {
        mIsRunning = true;
        pthread_mutex_lock(&preview_mutex);
        {
            // The original had identical then/else branches on mPreviewWindow:
            // the thread is started whether or not a window is attached.
            result = pthread_create(&preview_thread, NULL, preview_thread_func, (void *)this);
            LOGI("==> startPreview:    preview_thread:%d  ", preview_thread);
        }
        pthread_mutex_unlock(&preview_mutex);

        if (result != 0) {
            LOGW("V4L2Camera::window does not exist/already running/could not create thread etc.");
            mIsRunning = false;
            pthread_mutex_lock(&preview_mutex);
            {
                pthread_cond_signal(&preview_sync);
            }
            pthread_mutex_unlock(&preview_mutex);
        }
    }

    return result;
}

/**
 * Dequeue one captured buffer, copy its payload into `frame`, and
 * re-queue the buffer to the driver.
 *
 * @param frameNum caller's frame counter; only referenced by the
 *                 commented-out debug log, kept for interface stability.
 * @param frame    destination buffer; must hold at least one full frame
 *                 (up to mBufferLength bytes).
 * @return ioctl result (>= 0 on success), -1 on failure.
 */
int MlCamera::get_frame(int frameNum, u8 *frame) {

    int ret;
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(struct v4l2_buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.reserved = 0;

    (void)frameNum;  // silence unused-parameter warning; see doc comment

    // LOGD("frameNum=%d",frameNum);
    ret = ioctl(mCAMFD, VIDIOC_DQBUF, &buffer);
    if (ret < 0) {
        LOGE("ioctl(VIDIOC_DQBUF) failed %d(%s)", errno, strerror(errno));
        ret = -1;
        goto FAIL;
    }

    // Defensive: an index outside our mapping would make the memcpy read
    // from a NULL/garbage pointer.
    if (buffer.index >= BUFFER_COUNT) {
        LOGE("invalid buffer index: %d", buffer.index);
        ret = -1;
        goto FAIL;
    }
    memcpy(frame, (char*)mVideoBuffer[buffer.index], buffer.bytesused);

    // Give the buffer back to the driver for the next capture.
    ret = ioctl(mCAMFD, VIDIOC_QBUF, &buffer);
    if (ret < 0) {
        LOGE("ioctl(VIDIOC_QBUF) failed %d(%s)", errno, strerror(errno));
    }

    FAIL:

    return ret;
}


/**
 * Stop the preview thread and release the preview surface.
 * Clears the running flag and signals preview_sync under preview_mutex
 * (the original signaled without the lock, which can race the
 * check-then-wait in waitPreviewFrame and miss the wakeup), then joins
 * the preview thread and tears down the window reference.
 *
 * @return always 0.
 */
int MlCamera::stopRecord() {

    bool b = isRunning();
    LOGI("stopRecord:   isRunning :%d  ", b);
    if (b) {
        // Flip the flag and signal while holding the mutex so the preview
        // thread cannot miss the wakeup between its predicate check and wait.
        pthread_mutex_lock(&preview_mutex);
        mIsRunning = false;
        pthread_cond_signal(&preview_sync);
        pthread_mutex_unlock(&preview_mutex);

        LOGI("stopRecord:   pthread_cond_signal :%d  ", preview_thread);
        if (pthread_join(preview_thread, NULL) != EXIT_SUCCESS) {
            LOGW("V4L2::terminate preview thread: pthread_join failed");
        } else {
            LOGW("V4L2::terminate preview thread: pthread_join Sucess");
        }
        clearDisplay();
    } else {
        LOGW("V4L2::not running");
    }

    // Drop any frames still queued and release the preview surface.
    clearPreviewFrame();
    pthread_mutex_lock(&preview_mutex);
    if (mPreviewWindow) {
        ANativeWindow_release(mPreviewWindow);
        mPreviewWindow = NULL;
    } else {
        LOGW("V4L2::mPreviewWindow is NULL");
    }
    pthread_mutex_unlock(&preview_mutex);
    mPreviewCnt = 0;

    return 0;
}

/**
 * Unmap all capture buffers previously mapped by init_buffer().
 * Stops at the first NULL slot or munmap failure.
 *
 * @return 0 on success, -1 if a munmap failed (the original assigned
 *         ret = -1 AFTER `break`, so the error was never reported).
 */
int MlCamera::uninit_buffer() {
    int ret = 0;
    u8 i;
    for (i = 0; i < BUFFER_COUNT; i++) {
        if (mVideoBuffer[i] == NULL) {
            LOGE("mVideoBuffer[%d] is NULL", i);
            break;
        }
        if (munmap((void*)mVideoBuffer[i], mBufferLength) < 0) {
            LOGE("mVideoBuffer[%d] munmap failed : %s", i, strerror(errno));
            ret = -1;  // set BEFORE break — was unreachable in the original
            break;
        }
        LOGD("munmap mVideoBuffer[%d] success !", i);
        mVideoBuffer[i] = NULL;
    }
    return ret;
}

/**
 * Close the camera device descriptor if one is open.
 * @return always 0.
 */
int MlCamera::closeCAM() {

    LOGE("will close fd ");
    // >= 0: fd 0 is a valid descriptor; the original `> 0` would leak it.
    if (mCAMFD >= 0) {
        close(mCAMFD);
        mCAMFD = -1;
    }

    return 0;
}

/**
 * pthread entry point for the preview consumer thread.
 * @param vptr_args the MlCamera instance (passed by startRecord).
 * @return never returns a value; exits via pthread_exit.
 */
void *MlCamera::preview_thread_func(void *vptr_args) {
    MlCamera *mlcam = reinterpret_cast<MlCamera *>(vptr_args);
    if (mlcam) {
        mlcam->do_preview();
    }
    LOGI("===>thread exit !!!");
    pthread_exit(NULL);
}

// Whether the preview/record loop is active.
// NOTE(review): the top-level `const` on a by-value return is meaningless
// but must match the header declaration, so it is left unchanged. The flag
// is a plain bool read from multiple threads without synchronization —
// presumably tolerated here, but verify (an atomic would be safer).
const bool MlCamera::isRunning() const {
    return mIsRunning;
}

void MlCamera::do_preview() {



    LOGD("do_preview Streaming...");

    int PreviewCnt = 0;
    int ret =0;

    long jniDiffTime =0 ;
    long jniBeginTime =0 ;
    long jniEndTime =0 ;

//    FILE* fp =NULL;
//    time_t now = time(NULL);
//    char timestr[20]={0};
//    strftime(timestr, sizeof(timestr), "%Y-%m-%d-%H-%M", localtime(&now));
//    char filename[64] = {0};
//    snprintf(filename, 63, "/sdcard/exam/apH264/%s.h264", timestr);
//
//    LOGD("open native video file %s",filename );
//    fp = fopen(filename, "wb+");
//    if (fp == NULL) {
//        LOGD("Create %s failed(%s)",filename, strerror(errno));
//    }
    struct v4l2_buffer buffer;

    //取流 和处理流分开
    ret = pthread_create(&mFrame_thread, NULL,
                         reinterpret_cast<void *(*)(void *)>(preview_frame_callback), (void *)this);
    if(ret ==0)
    {
        LOGI("==> startpreview_frame_callback:    mFrame_thread:%d  ",mFrame_thread);
    }else
    {
        LOGE("==> startpreview_frame_callback:    Failed ");
    }

    //处理
    if(!ret) {
        if (mPixelFormat == V4L2_PIX_FMT_MJPEG) {
            // MJPEG mode
            LOGI("Streaming... MJPEG");

            clearPreviewFrame();
            bool decode_ret = false;

            V4L2_frame_t *frame_mjpeg = NULL;
            V4L2_frame_t *frame_nv12 = NULL;
            V4L2_frame_t *frame_rgb = NULL;
            V4L2_frame_t *frame_rgbx = NULL;

            for (; isRunning();) {
                frame_mjpeg = waitPreviewFrame();
                if (frame_mjpeg) {

                    mPreviewCnt++;
                    if (mPreviewCnt % 10 == 0) {
                        LOGI("do_preview  FrameNo:%d  frame_format = %d, data_bytes = %d,  width = %d, height = %d",
                             mPreviewCnt,
                             frame_mjpeg->frame_format,
                             frame_mjpeg->data_bytes,
                             frame_mjpeg->width,
                             frame_mjpeg->height);
                        if (mPreviewCnt > MAXCNT)
                            mPreviewCnt = 0;
                    }

                    memset(&mFrameOut, 0, sizeof(MpiJpegDecoder::OutputFrame_t));
                    decode_ret = mDecoder.decodePacket((char *) frame_mjpeg->data,
                                                       frame_mjpeg->data_bytes, &mFrameOut);
                    V4L2_freeFrame(frame_mjpeg);
                    if (decode_ret != true) {
                        LOGE("failed to decode packet");
                        continue;
                    }

                    /** 3/2 is the size of NV12 */
                    frame_nv12 = V4L2_allocateFrame(
                            frame_mjpeg->width * frame_mjpeg->height * 3 / 2);
                    if (frame_nv12 == NULL) {
                        LOGE("failed to allocate frame_nv12");
                        continue;
                    }

                    frame_nv12->width = frame_mjpeg->width;
                    frame_nv12->height = frame_mjpeg->height;
                    memcpy(frame_nv12->data, mFrameOut.MemVirAddr, mFrameOut.OutputSize);
                    mDecoder.deinitOutputFrame(&mFrameOut);
                    mDecoder.flushBuffer();

                    frame_rgb = V4L2_allocateFrame(
                            frame_nv12->width * frame_nv12->height * PIXEL_BYTES_RGB);
                    if (frame_rgb == NULL) {
                        LOGE("failed to allocate new frame_rgb");
                        V4L2_freeFrame(frame_nv12);
                        continue;
                    }
                    frame_rgb->width = frame_nv12->width;
                    frame_rgb->height = frame_nv12->height;
                    frame_rgb->frame_format = V4L2_PIX_FMT_RGB565;

                    V4L2_NV12TORGB(frame_rgb->width, frame_rgb->height,
                                 (unsigned char *) frame_nv12->data,
                                 (unsigned char *) frame_rgb->data);
                    V4L2_freeFrame(frame_nv12);

                    frame_rgbx = V4L2_allocateFrame(
                            frame_rgb->width * frame_rgb->height * PREVIEW_PIXEL_BYTES);
                    if (frame_rgbx == NULL) {
                        LOGE("failed to allocate new frame_rgbx");
                        V4L2_freeFrame(frame_rgb);
                        continue;
                    }

                    ret = V4L2_RGB2RGBX(frame_rgb, frame_rgbx);
                    V4L2_freeFrame(frame_rgb);
                    if (ret != 0) {
                        LOGE("failed to rgb2rgbx %d", ret);
                        V4L2_freeFrame(frame_rgbx);
                        continue;
                    }

                    pthread_mutex_lock(&preview_mutex);
                    copyToSurface(frame_rgbx, &mPreviewWindow);
                    pthread_mutex_unlock(&preview_mutex);
                    V4L2_freeFrame(frame_rgbx);
                } else {
                    LOGI("do_preview MJEPG  FrameNo:%d    ===>NULL ", mPreviewCnt);
                }
            }
        }
        else if(mPixelFormat==V4L2_PIX_FMT_NV12 ){

        }
        else{
            JavaVM *vm = getVM();
            JNIEnv *env;
            // attach to JavaVM
            vm->AttachCurrentThread(&env, NULL);

            // H264 H265  mode
            LOGI("Streaming... H264");
            clearPreviewFrame();

            V4L2_frame_t *frame_H264 = NULL;

            for (; isRunning();) {
                frame_H264 = waitPreviewFrame();

                if(frame_H264) {
                    /**
                    * callback
                    */
                    //LOGD(" seq:%d=====> callback start  ",PreviewCnt);
                    jniBeginTime = getCurrentTime();
                    pthread_mutex_lock(&callback_mutex);
                    jobject buf = env->NewDirectByteBuffer(frame_H264->data,
                                                           frame_H264->data_bytes);
                    if (mFrameCallbackObj) {
                        env->CallVoidMethod(mFrameCallbackObj, mOnFrame, buf);
                        env->ExceptionClear();
                    } else {
                        LOGI("do_preview  mFrameCallbackObj ===>NULL ");
                    }
                    env->DeleteLocalRef(buf);
                    pthread_mutex_unlock(&callback_mutex);
                    //LOGD(" seq:%d=====> callback end  ",PreviewCnt);
                    if (PreviewCnt % 600 == 0) {
                        LOGD("do_preview  FrameNo:%d  frame_format = %d, data_bytes = %d", PreviewCnt,
                             mFramePixelFormat, frame_H264->data_bytes);
                    }
                    jniEndTime = getCurrentTime();
                    jniDiffTime = jniEndTime - jniBeginTime;
                    if(jniDiffTime > mJNICALLTime)
                    {
                        LOGD("update Max JNICALL Time %d ===> %d ",mJNICALLTime,jniDiffTime);
                        mJNICALLTime = jniDiffTime;
                    }
                    if(frame_H264)
                    {
                        LOGD("free  H264 frame =======");
                        V4L2_freeFrame(frame_H264);
                    }

                }else{

                    LOGE(" frame_H264 is NuLL");
                }

                PreviewCnt++;
            }

            // detach from JavaVM
            vm->DetachCurrentThread();
        }
    }else{
        LOGE("failed MJEPG start_streaming");
    }

    LOGI("Streaming finished");

}

/**
 * Free every frame currently queued for preview and empty the queue.
 * (The pool-allocation description that previously sat here actually
 * describes get_frame(size_t), defined further below.)
 */
// Drop all queued preview frames, releasing their memory, under the
// preview mutex so the consumer never sees a half-cleared queue.
void MlCamera::clearPreviewFrame() {
    pthread_mutex_lock(&preview_mutex);
    const int count = previewFrames.size();
    for (int idx = 0; idx < count; idx++) {
        V4L2_freeFrame(previewFrames[idx]);
    }
    previewFrames.clear();
    pthread_mutex_unlock(&preview_mutex);
}

// Return a frame to the pool for reuse; when the pool is already at
// FRAME_POOL_SZ the frame is freed instead. Takes ownership of `frame`.
void MlCamera::recycle_frame(V4L2_frame_t *frame) {
    bool pooled = false;

    pthread_mutex_lock(&pool_mutex);
    if (mFramePool.size() < FRAME_POOL_SZ) {
        mFramePool.put(frame);
        pooled = true;
    }
    pthread_mutex_unlock(&pool_mutex);

    if (!pooled) {
        V4L2_freeFrame(frame);
    }
}

/**
 * Release a frame descriptor and, when the library owns it, its payload.
 * Safe to call with NULL (the original dereferenced the pointer
 * unconditionally and would crash on a NULL frame).
 */
void MlCamera::V4L2_freeFrame(V4L2_frame_t *frame) {
    if (frame == NULL)
        return;

    if (frame->data_bytes > 0 && frame->library_owns_data)
        free(frame->data);

    free(frame);
}

void MlCamera::init_pool(size_t data_bytes) {
    clear_pool();
    pthread_mutex_lock(&pool_mutex);
    {
        for (int i = 0; i < FRAME_POOL_SZ; i++) {
            mFramePool.put(V4L2_allocateFrame(data_bytes));
        }
    }
    pthread_mutex_unlock(&pool_mutex);

}

// Allocate a zeroed frame descriptor and, when data_bytes > 0, a payload
// buffer of that size. Returns NULL on any allocation failure; the
// returned frame is marked library-owned so V4L2_freeFrame releases it.
V4L2_frame_t *MlCamera::V4L2_allocateFrame(size_t data_bytes) {
    // calloc gives us the zeroed descriptor in one step.
    V4L2_frame_t *frame = static_cast<V4L2_frame_t *>(calloc(1, sizeof(*frame)));
    if (frame == NULL)
        return NULL;

    frame->library_owns_data = 1;

    if (data_bytes == 0)
        return frame;

    frame->data = malloc(data_bytes);
    if (frame->data == NULL) {
        free(frame);
        return NULL;
    }
    frame->data_bytes = data_bytes;

    return frame;
}

// Free every pooled frame and reset the pool to empty.
void MlCamera::clear_pool() {
    pthread_mutex_lock(&pool_mutex);
    {
        const int total = mFramePool.size();
        for (int idx = 0; idx < total; idx++) {
            V4L2_freeFrame(mFramePool[idx]);
        }
        mFramePool.clear();
    }
    pthread_mutex_unlock(&pool_mutex);
}

/**
 * Dequeue one captured V4L2 buffer, copy it into a newly allocated
 * V4L2_frame_t (tagged with timestamp/sequence/format/size), re-queue the
 * buffer, and return the frame. Caller owns the returned frame.
 *
 * Fixes vs. the original: on the invalid-index and allocation-failure
 * paths the dequeued buffer is now re-queued (it was previously lost to
 * the driver's queue forever), and a QBUF failure no longer leaks the
 * freshly allocated frame.
 *
 * @return the frame, or nullptr on failure.
 */
V4L2_frame_t *MlCamera::get_preview_frame( ) {

    int ret;
    struct v4l2_buffer buffer;

    V4L2_frame_t *frame = NULL;
    long  tDiffTime = 0;
    long  beginIOTime = 0, endIOTime = 0;

    memset(&buffer, 0, sizeof(struct v4l2_buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.reserved = 0;

    beginIOTime = getCurrentTime();

    ret = ioctl(mCAMFD, VIDIOC_DQBUF, &buffer);
    if (ret < 0) {
        LOGE("ioctl(VIDIOC_DQBUF) failed %d(%s)", errno, strerror(errno));
        goto FAILED;
    }

    if (buffer.index >= BUFFER_COUNT) {
        LOGE("invalid buffer index: %d", buffer.index);
        // Best-effort: hand the buffer back so it is not lost from the queue.
        ioctl(mCAMFD, VIDIOC_QBUF, &buffer);
        goto FAILED;
    }
    endIOTime = getCurrentTime();
    tDiffTime = endIOTime - beginIOTime;
    if (tDiffTime > mMaxIOCTLTime) {
        LOGD("update Max IOTCTL Time %d ===> %d ", mMaxIOCTLTime, tDiffTime);
        mMaxIOCTLTime = tDiffTime;
    }

    frame = V4L2_allocateFrame(buffer.bytesused);

    if (frame) {
        frame->timestamp = buffer.timestamp;
        frame->sequence = buffer.sequence;
        frame->frame_format = mPixelFormat;
        frame->width = mFrameWidth;
        frame->height = mFrameHeight;

        memcpy(frame->data, (char*)mVideoBuffer[buffer.index], buffer.bytesused);
    } else {
        LOGE("  frame is NULL ");
        // Re-queue the buffer before failing — it must go back to the driver.
        ioctl(mCAMFD, VIDIOC_QBUF, &buffer);
        goto FAILED;
    }

    ret = ioctl(mCAMFD, VIDIOC_QBUF, &buffer);
    if (ret < 0) {
        LOGE("ioctl(VIDIOC_QBUF) failed %d(%s)", errno, strerror(errno));
        V4L2_freeFrame(frame);  // was leaked by the original on this path
        goto FAILED;
    }

    return frame;

 FAILED:
    LOGE(" get frame failed");
    return nullptr;
}


/**
 * Get a V4L2_frame_t suitable for a payload of `data_bytes`.
 * Takes a frame from the pool when one is available, otherwise allocates
 * a fresh one.
 *
 * NOTE(review): this assumes mFramePool.last() also REMOVES the element
 * (as UVCCamera's ObjectArray::last() does); if it only peeks, the pool
 * and the caller would both own the same frame — verify the container.
 * Also note a pooled frame may be smaller than `data_bytes`; callers are
 * expected to ensure the size themselves (see V4L2_ensureFrameSize).
 */
V4L2_frame_t *MlCamera::get_frame(size_t data_bytes) {
    V4L2_frame_t *frame = NULL;
    pthread_mutex_lock(&pool_mutex);
    {
        if (!mFramePool.isEmpty()) {
            frame = mFramePool.last();
        }
    }
    pthread_mutex_unlock(&pool_mutex);

    if (!frame) {
        //LOGI("allocate new frame")
        frame = V4L2_allocateFrame(data_bytes);
    }
    return frame;
}
// Hand a captured frame to the preview queue and wake the consumer.
// Takes ownership of `frame`: when preview is not running or the queue is
// already at MAX_FRAME, the frame is dropped (freed) and the drop counted.
void MlCamera::addPreviewFrame(V4L2_frame_t *frame) {

    static int addPreviewDropCnt = 0;

    bool queued = false;
    pthread_mutex_lock(&preview_mutex);
    if (isRunning() && (previewFrames.size() < MAX_FRAME)) {
        previewFrames.put(frame);
        queued = true;
        pthread_cond_signal(&preview_sync);
    }
    pthread_mutex_unlock(&preview_mutex);

    // Queue full (or stopped): discard the frame.
    if (!queued) {
        addPreviewDropCnt++;
        LOGI(" addPreviewDropCnt=%d", addPreviewDropCnt);
        V4L2_freeFrame(frame);
    }
}

/**
 * Block until a preview frame is queued (or preview stops) and return it.
 * Ownership transfers to the caller. Returns NULL when preview is no
 * longer running.
 *
 * Fix vs. the original: the condition wait is now a predicate loop —
 * pthread_cond_wait may wake spuriously, and the original single `if`
 * could also block forever when called after preview had stopped.
 */
V4L2_frame_t *MlCamera::waitPreviewFrame() {
    V4L2_frame_t *frame = NULL;
    long diffTime = 0;
    long waitBeginTime = 0, waitEndTime = 0;
    waitBeginTime = getCurrentTime();
    pthread_mutex_lock(&preview_mutex);
    {
        // Re-check the predicate after every wakeup; exit promptly when
        // stopRecord() clears the running flag and signals us.
        while (isRunning() && previewFrames.size() == 0) {
            pthread_cond_wait(&preview_sync, &preview_mutex);
        }
        if (isRunning() && previewFrames.size() > 0) {
            frame = previewFrames.remove(0);
        }
    }
    pthread_mutex_unlock(&preview_mutex);
    waitEndTime = getCurrentTime();
    diffTime = waitEndTime - waitBeginTime;
    if (diffTime > mMaxWaitTime) {
        LOGD("update Max Wait Time %d ===> %d ", mMaxWaitTime, diffTime);
        mMaxWaitTime = diffTime;
    }

    return frame;
}

/**
 * Producer thread: pulls frames from the V4L2 driver and queues them for
 * the preview consumer. Exits when preview stops or a capture fails.
 *
 * Fix vs. the original: the periodic log dereferenced `frame` AFTER
 * addPreviewFrame(), which takes ownership and may free the frame — a
 * use-after-free. The log now runs before the hand-off.
 *
 * @param vptr_args the MlCamera instance.
 */
void MlCamera::preview_frame_callback(void *vptr_args) {

    MlCamera *mlCamera = reinterpret_cast<MlCamera *>(vptr_args);

    static int preview_frame_callbackcnt = 0;

    for (; mlCamera->isRunning();) {

        V4L2_frame_t *frame = mlCamera->get_preview_frame();
        if (!frame) {
            LOGE("=====NULL!");
            break;
        }

        // Log BEFORE handing the frame over — addPreviewFrame may free it.
        if (preview_frame_callbackcnt % 600 == 0) {
            LOGI(" cnt=%d format=%d  size: %d (%d X %d)",
                 preview_frame_callbackcnt,
                 frame->frame_format, frame->data_bytes, frame->width,
                 frame->height);
        }
        preview_frame_callbackcnt++;

        mlCamera->addPreviewFrame(frame);
    }
    LOGI("===>preview_frame_callback exit !!!");
    pthread_exit(NULL);
}

/**
 * Deep-copy `in` into `out`, growing out's buffer if necessary.
 * @return 0 on success, ENOMEM on allocation failure.
 *
 * Fix vs. the original: V4L2_ensureFrameSize returns ENOMEM (a positive
 * errno value) on failure, so the old `< 0` check never fired and the
 * memcpy below could write through a NULL data pointer on OOM.
 */
int MlCamera::V4L2_duplicateFrame(V4L2_frame_t *in, V4L2_frame_t *out) {
    if (V4L2_ensureFrameSize(out, in->data_bytes) != 0)
        return ENOMEM;

    out->width = in->width;
    out->height = in->height;
    out->frame_format = in->frame_format;
    out->step = in->step;
    out->sequence = in->sequence;
    out->timestamp = in->timestamp;

    memcpy(out->data, in->data, in->data_bytes);

    return 0;
}

/**
 * Make sure `frame` can hold `need_bytes` of payload.
 * For library-owned data the buffer is (re)allocated to the exact size;
 * for caller-owned data it only verifies capacity.
 * @return 0 on success, ENOMEM when the buffer cannot be provided.
 *
 * Fix vs. the original: realloc's result was assigned straight to
 * frame->data, so a failed realloc leaked the old buffer and left
 * data_bytes describing memory the frame no longer had.
 */
int MlCamera::V4L2_ensureFrameSize(V4L2_frame_t *frame, size_t need_bytes) {
    if (frame->library_owns_data) {
        if (!frame->data || frame->data_bytes != need_bytes) {
            void *grown = realloc(frame->data, need_bytes);
            if (!grown)
                return ENOMEM;  // old buffer and data_bytes stay intact
            frame->data = grown;
            frame->data_bytes = need_bytes;
        }
        return 0;
    } else {
        // Caller-owned buffer: we may not resize it, only check capacity.
        if (!frame->data || frame->data_bytes < need_bytes)
            return ENOMEM;
        return 0;
    }
}

// Wall-clock time in milliseconds (gettimeofday-based, so not monotonic;
// used only for coarse latency statistics).
long MlCamera::getCurrentTime() {
    struct timeval now;
    gettimeofday(&now, NULL);
    const long millis = now.tv_sec * 1000 + now.tv_usec / 1000;
    return millis;
}

}  // namespace implementation
}  // namespace V3_4
}  // namespace device
}  // namespace camera
}  // namespace hardware
}  // namespace android
