#include "JetsonDec.h"
#include "NvApplicationProfiler.h"
#include "NvUtils.h"
#include <iostream>
#include <fstream>
#include <pthread.h>
#include <string.h>
#include <errno.h>
#include <unistd.h>
#include <fcntl.h>
#include <poll.h>

#define CHUNK_SIZE 4000000
#define MICROSECOND_UNIT 1000000

/// Construct the decoder wrapper and start its two worker threads.
/// @param decoder_pixfmt V4L2 pixel format of the compressed input stream.
/// @param width/height   Expected decoded frame size; used only to size the
///                       internally allocated output buffer.
/// @param buffer         Caller-provided output buffer, or nullptr to have
///                       the object allocate (and own) one itself.
JetsonDec::JetsonDec(uint32_t decoder_pixfmt, int width, int height, unsigned char *buffer)
    : dec_pixfmt(decoder_pixfmt), dec_buffer(buffer), p_callback(nullptr), m_abort(false), flag_buffer(buffer == nullptr)
{
    /* flag_buffer records that we own the allocation and must free() it in
     * the destructor.  Size is two full frames at 3 bytes/pixel. */
    if (flag_buffer && width > 0 && height > 0) {
        size_t size = (size_t)width * (size_t)height * 3 * 2; // size_t avoids int overflow for large frames
        dec_buffer = (unsigned char *)malloc(size);
        if (dec_buffer == nullptr) {
            // Allocation failure: abort the pipeline before the threads touch dec_buffer.
            fprintf(stderr, "JetsonDec: failed to allocate %zu bytes\n", size);
            m_abort = true;
        }
        printf("JetsonDec w:%d h:%d\n", width, height);
    }
    // NOTE(review): if buffer == nullptr and width/height are not both > 0,
    // dec_buffer remains null and the capture loop would write through it — verify callers.
    job_thread = std::thread(&JetsonDec::decode_proc, this);      // feeds ES data to the decoder
    dec_thread = std::thread(&JetsonDec::dec_capture_loop_fcn, this); // pulls decoded frames out
}

/// Stop both worker threads, drain the pending packet queue, and release
/// the output buffer if this object allocated it.
JetsonDec::~JetsonDec()
{
    m_abort = true;
    /* Wake the feeder thread immediately instead of relying on the 10 ms
     * wait_for timeout in read_decoder_input_nalu() to notice m_abort. */
    cond_data.notify_all();
    job_thread.join();
    dec_thread.join();

    // Drain any ES packets that were queued but never consumed.
    std::lock_guard<std::mutex> lock(mutex_data);
    for (auto it = data_list.begin(); it != data_list.end(); ++it) {
        // NOTE(review): (*it)->data was malloc'd in AddEsData(); unless
        // MediaData's destructor frees it, it leaks here (and per-packet in
        // read_decoder_input_nalu) — verify MediaData in the header.
        delete *it;
    }
    data_list.clear();

    // Release the frame buffer only if the constructor allocated it.
    if (flag_buffer && dec_buffer != nullptr) {
        free(dec_buffer);
        dec_buffer = nullptr;
    }
    printf("~JetsonDec()\n");
}

/// Register the listener that will receive decoded-frame callbacks
/// (invoked from the capture thread with each converted frame).
void JetsonDec::SetDecCallBack(JetsonDecListener *call_func)
{
    p_callback = call_func;
}

/// Deregister the frame callback; subsequent frames are decoded but dropped.
void JetsonDec::UnSetDecCallBack()
{
    p_callback = nullptr;
}
/* Split a millisecond timestamp into the seconds / microseconds
 * fields of a struct timeval. */
static void convert_ms_to_timeval(uint64_t ms, struct timeval &tv) {
    const uint64_t whole_sec = ms / 1000;
    tv.tv_sec = whole_sec;
    tv.tv_usec = (ms - whole_sec * 1000) * 1000;
}

void JetsonDec::AddEsData(unsigned char *data, int len, uint64_t time_data)
{
    MediaData *pData = new MediaData();
    pData->data = (unsigned char *)malloc(len);
    memcpy(pData->data, data, len);
    pData->len = len;
    struct timeval tv;
    convert_ms_to_timeval(time_data, tv);
    pData->time = tv;

    {
        std::lock_guard<std::mutex> lock(mutex_data);
        data_list.push_back(pData);
    }
    cond_data.notify_one();
	 return;
}

/// Number of ES packets currently waiting to be fed to the decoder.
int JetsonDec::GetQueueSize()
{
    std::lock_guard<std::mutex> guard(mutex_data);
    return static_cast<int>(data_list.size());
}

/// Pop the next queued ES packet into a decoder output-plane buffer.
/// Blocks (polling every 10 ms) until data arrives or m_abort is set.
/// @param buffer   V4L2 output-plane buffer to fill.
/// @param parse_buffer / parse_buffer_size  Unused; kept for interface compatibility.
/// @param time     Receives the packet's timestamp.
/// @return Always 0; on abort, buffer->planes[0].bytesused is 0 (EOS marker).
int JetsonDec::read_decoder_input_nalu(NvBuffer *buffer, char *parse_buffer, std::streamsize parse_buffer_size, struct timeval &time)
{
    (void)parse_buffer;       // unused — legacy parameters from the sample code
    (void)parse_buffer_size;

    std::unique_lock<std::mutex> lock(mutex_data);
    while (data_list.empty() && !m_abort) {
        cond_data.wait_for(lock, std::chrono::milliseconds(10));
    }
    if (m_abort) {
        /* Signal EOS explicitly: without this the caller would re-queue the
         * stale bytesused left over from the buffer's previous use. */
        buffer->planes[0].bytesused = 0;
        return 0;
    }

    MediaData *node = data_list.front();
    data_list.pop_front();

    lock.unlock(); // copy outside the lock; the queue is no longer touched

    /* Clamp to the plane capacity so an oversized packet cannot overflow
     * the mmap'd V4L2 buffer. */
    size_t copy_len = (size_t)node->len;
    if (copy_len > buffer->planes[0].length)
        copy_len = buffer->planes[0].length;

    char *buffer_ptr = (char *)buffer->planes[0].data;
    memcpy(buffer_ptr, node->data, copy_len);
    buffer->planes[0].bytesused = copy_len;
    time = node->time;
    // NOTE(review): node->data was malloc'd in AddEsData(); if ~MediaData does
    // not free it, this leaks one allocation per packet — verify MediaData.
    delete node;
    proc_ready = true; // unblocks the capture thread's startup spin-wait
    return 0;
}

/// Copy one plane of a DMA buffer into CPU memory, de-pitching it
/// (pitch-linear rows are packed tightly into outbuffer).
/// @param dmabuf_fd  fd of the NvBufSurface to read.
/// @param plane      Plane index (0 = Y, 1 = UV for NV12).
/// @param len        Receives the number of bytes written to outbuffer.
/// @param outbuffer  Destination; must be large enough for width*height*bytesPerPix.
/// @return 0 on success, negative on failure.
int JetsonDec::dump_dmabuf_ptr(int dmabuf_fd, unsigned int plane, int *len, unsigned char *outbuffer)
{
    if (dmabuf_fd <= 0)
        return -1;

    NvBufSurface *nvbuf_surf = nullptr;
    int ret = NvBufSurfaceFromFd(dmabuf_fd, (void **)(&nvbuf_surf));
    if (ret != 0) {
        return -1;
    }
    ret = NvBufSurfaceMap(nvbuf_surf, 0, plane, NVBUF_MAP_READ_WRITE);
    if (ret < 0) {
        printf("NvBufSurfaceMap failed\n");
        return ret;
    }
    // Make any pending device writes visible to the CPU before reading.
    NvBufSurfaceSyncForCpu(nvbuf_surf, 0, plane);

    // Hoist the loop-invariant plane geometry out of the row-copy loop.
    const auto *surf = nvbuf_surf->surfaceList;
    const unsigned int rows = surf->planeParams.height[plane];
    const unsigned int pitch = surf->planeParams.pitch[plane];
    const unsigned int row_bytes = surf->planeParams.width[plane] * surf->planeParams.bytesPerPix[plane];
    const char *src = (const char *)surf->mappedAddr.addr[plane];

    int pos = 0;
    for (unsigned int i = 0; i < rows; ++i) {
        // Row stride in the surface is `pitch`; packed stride out is `row_bytes`.
        memcpy(outbuffer + pos, src + (size_t)i * pitch, row_bytes);
        pos += row_bytes;
    }
    // Set the output length before unmapping so the caller gets a valid
    // count even if the unmap below fails.
    *len = pos;

    ret = NvBufSurfaceUnMap(nvbuf_surf, 0, plane);
    if (ret < 0) {
        printf("NvBufSurfaceUnMap failed\n");
        return ret;
    }
    return 0;
}

/* Mark the pipeline as failed; both worker loops poll m_abort and will exit. */
void JetsonDec::abort()
{
    m_abort = true;
    printf("ctx error\n");
}

/* Reset the decoder context to a clean initial state before
 * decode_proc() configures it for this stream. */
void JetsonDec::set_defaults()
{
    memset(&ctx, 0, sizeof(context_t));

    ctx.fullscreen = false;
    ctx.window_x = 0;
    ctx.window_y = 0;
    ctx.window_width = 0;
    ctx.window_height = 0;
    ctx.out_pixfmt = 1;          // 1 selects NV12 output (see query_and_set_capture)
    ctx.nvosd_context = nullptr;
    ctx.dst_dma_fd = -1;         // -1 marks "no destination DMA buffer allocated yet"
}

/* Reconfigure the decoder capture plane after a V4L2 resolution-change event:
 * query the new coded format and crop, (re)allocate the pitch-linear
 * destination DMA buffer used for block-linear -> pitch-linear conversion,
 * re-setup and stream on the capture plane, and queue all of its buffers.
 * NOTE(review): TEST_ERROR presumably records the failure and jumps to the
 * `error:` label at the bottom — confirm against the macro in the header. */
void JetsonDec::query_and_set_capture()
{
    NvVideoDecoder *dec = ctx.dec;
    struct v4l2_format format;
    struct v4l2_crop crop;
    int32_t min_dec_capture_buffers;
    int ret = 0;
    int error = 0;
    NvBufSurf::NvCommonAllocateParams params;

    /* The decoder reports the coded format; the crop rectangle gives the
     * visible resolution of the stream. */
    ret = dec->capture_plane.getFormat(format);
    TEST_ERROR(ret < 0, "Error: Could not get format from decoder capture plane", error);

    ret = dec->capture_plane.getCrop(crop);
    TEST_ERROR(ret < 0, "Error: Could not get crop from decoder capture plane", error);

    std::cout << "Video Resolution: " << crop.c.width << "x" << crop.c.height << std::endl;

    ctx.dec_width = crop.c.width;
    ctx.dec_height = crop.c.height;

    /* Drop the previous destination buffer (if any) before allocating one
     * sized for the new resolution. */
    if (ctx.dst_dma_fd != -1) {
        ret = NvBufSurf::NvDestroy(ctx.dst_dma_fd);
        ctx.dst_dma_fd = -1;
        TEST_ERROR(ret < 0, "Error: Error in BufferDestroy", error);
    }

     /* Create PitchLinear output buffer for transform. */
    params.memType = NVBUF_MEM_SURFACE_ARRAY;
    params.width = crop.c.width;
    params.height = crop.c.height;
    params.layout = NVBUF_LAYOUT_PITCH;
    if (ctx.out_pixfmt == 1)
        params.colorFormat = NVBUF_COLOR_FORMAT_NV12;
    else if (ctx.out_pixfmt == 2)
        params.colorFormat = NVBUF_COLOR_FORMAT_YUV420;
    else if (ctx.out_pixfmt == 3)
        params.colorFormat = NVBUF_COLOR_FORMAT_NV16;
    else if (ctx.out_pixfmt == 4)
        params.colorFormat = NVBUF_COLOR_FORMAT_NV24;

    /* OSD text overlay requires an RGBA surface, overriding out_pixfmt. */
    if (ctx.enable_osd_text)
        params.colorFormat = NVBUF_COLOR_FORMAT_RGBA;

    params.memtag = NvBufSurfaceTag_VIDEO_CONVERT;

    ret = NvBufSurf::NvAllocate(&params, 3, &ctx.dst_dma_fd); // Allocate at most 3 buffers; the original author found that allocating more triggered unexplained failures (the program kept running, but the dec_buffer address was affected)
    TEST_ERROR(ret == -1, "create dmabuf failed", error);

    /* Tear down the old capture plane before reconfiguring it. */
    dec->capture_plane.deinitPlane();

    ret = dec->setCapturePlaneFormat(format.fmt.pix_mp.pixelformat,
                                     format.fmt.pix_mp.width,
                                     format.fmt.pix_mp.height);
    TEST_ERROR(ret < 0, "Error in setting decoder capture plane format", error);

    /* Ask the driver for its minimum and add headroom on top of it. */
    ret = dec->getMinimumCapturePlaneBuffers(min_dec_capture_buffers);
    TEST_ERROR(ret < 0, "Error while getting value of minimum capture plane buffers", error);

    ret = dec->capture_plane.setupPlane(V4L2_MEMORY_MMAP, min_dec_capture_buffers + 10, false, false);
    TEST_ERROR(ret < 0, "Error in decoder capture plane setup", error);

    ret = dec->capture_plane.setStreamStatus(true);
    TEST_ERROR(ret < 0, "Error in decoder capture plane streamon", error);

    /* Queue every capture buffer so the decoder can start filling them. */
    for (uint32_t i = 0; i < dec->capture_plane.getNumBuffers(); i++) {
        struct v4l2_buffer v4l2_buf;
        struct v4l2_plane planes[MAX_PLANES];

        memset(&v4l2_buf, 0, sizeof(v4l2_buf));
        memset(planes, 0, sizeof(planes));

        v4l2_buf.index = i;
        v4l2_buf.m.planes = planes;
        ret = dec->capture_plane.qBuffer(v4l2_buf, nullptr);
        TEST_ERROR(ret < 0, "Error Qing buffer at output plane", error);
    }

    std::cout << "Query and set capture successful" << std::endl;
    return;

error:
    if (error) {
        abort();
        std::cerr << "Error in " << __func__ << std::endl;
    }
}

/* Capture-plane thread: waits for the first V4L2 resolution-change event,
 * configures the capture plane, then repeatedly dequeues decoded frames,
 * converts them from block-linear to pitch-linear into dec_buffer, and
 * hands the packed Y+UV bytes to p_callback.
 * NOTE(review): unqualified cerr/endl imply a `using namespace std`
 * somewhere in the headers — verify. */
void JetsonDec::dec_capture_loop_fcn()
{
   
    unsigned char *ptr = dec_buffer; // Keep a local copy of dec_buffer: the original author found the member value could otherwise appear to change later — NvAllocate in query_and_set_capture allocating too many buffers corrupted the process address space (see the comment there)

    /* Spin until the feeder thread has queued its first packet
     * (proc_ready is set in read_decoder_input_nalu). */
    while (!m_abort && !proc_ready) {
      std::this_thread::sleep_for(std::chrono::milliseconds(1));
        continue;
    }
  
    NvVideoDecoder *dec = ctx.dec;
    struct v4l2_event ev;
    int ret;

    std::cout << "Starting decoder capture loop thread" << std::endl;
    prctl(PR_SET_NAME, "dec_cap", 0, 0, 0);

    /* Wait for the first Resolution change event as decoder needs
       to know the stream resolution for allocating appropriate
       buffers when calling REQBUFS */
    do {
        /* VIDIOC_DQEVENT, max_wait_ms = 2000ms */
        ret = dec->dqEvent(ev, 2000);

        if (ret < 0) {
            if (errno == EAGAIN) {
                cerr << "Timed out waiting for first V4L2_EVENT_RESOLUTION_CHANGE"
                     << endl;
            } else {
                cerr << "Error in dequeueing decoder event" << endl;
            }
            abort();
            break;
        }
    } while (!m_abort && ev.type != V4L2_EVENT_RESOLUTION_CHANGE);
    printf("query_and_set_capture bre dec_buffer:%p ptr:%p\n", dec_buffer, ptr);
    /* Received the resolution change event, now can do query_and_set_capture */
    if (!m_abort) {
        query_and_set_capture();
    }
    printf("query_and_set_capture after dec_buffer:%p ptr:%p\n", dec_buffer, ptr);
    /* Exit on error or EOS which is signalled in main() */
    while (!(m_abort || dec->isInError())) {
        // NOTE(review): this local shadows the dec_buffer member — it is the
        // NvBuffer dequeued from V4L2, not the output byte buffer. Consider renaming.
        NvBuffer *dec_buffer;

        /* Check for resolution change again */
        ret = dec->dqEvent(ev, false);
        if (ret == 0) {
            switch (ev.type) {
            case V4L2_EVENT_RESOLUTION_CHANGE:
             query_and_set_capture();
                continue;
            }
        }
        /* Decoder capture loop */
        while (!m_abort) {
            struct v4l2_buffer v4l2_buf;
            struct v4l2_plane planes[MAX_PLANES];

            memset(&v4l2_buf, 0, sizeof(v4l2_buf));
            memset(planes, 0, sizeof(planes));
            v4l2_buf.m.planes = planes;

            /* Dequeue a valid capture_plane buffer that contains YUV BL data */
            if (dec->capture_plane.dqBuffer(v4l2_buf, &dec_buffer, NULL, 0)) {
                if (errno == EAGAIN) {
                    /* No frame ready yet (non-blocking dequeue); back off briefly. */
                    printf("EAGAIN\n");
                      std::this_thread::sleep_for(std::chrono::milliseconds(1));
                } else {
                    abort();
                    cerr << "Error while calling dequeue at capture plane" << endl;
                }
                break;
            }

            /* Clip & Stitch can be done by adjusting rectangle. */
            NvBufSurf::NvCommonTransformParams transform_params;
            transform_params.src_top = 0;
            transform_params.src_left = 0;
            transform_params.src_width = ctx.dec_width;
            transform_params.src_height = ctx.dec_height;
            transform_params.dst_top = 0;
            transform_params.dst_left = 0;
            transform_params.dst_width = ctx.dec_width;
            transform_params.dst_height = ctx.dec_height;
            transform_params.flag = NVBUFSURF_TRANSFORM_FILTER;
            transform_params.flip = NvBufSurfTransform_None;
            transform_params.filter = NvBufSurfTransformInter_Nearest;

            /* Perform Blocklinear to PitchLinear conversion. */
            ret = NvBufSurf::NvTransform(&transform_params, dec_buffer->planes[0].fd, ctx.dst_dma_fd);
            if (ret == -1) {
                cerr << "Transform failed" << endl;
                break;
            }
            /* Copy plane 0 then plane 1 back-to-back into the output buffer. */
            int len = 0, len1 = 0;
            ret = dump_dmabuf_ptr(ctx.dst_dma_fd, 0, &len, ptr);
            ret = dump_dmabuf_ptr(ctx.dst_dma_fd, 1, &len1, ptr + len);
            if (p_callback) {
                /* Timestamp round-trips through V4L2 in timeval form; convert back to ms. */
                uint64_t v4l2_time = 1000 * v4l2_buf.timestamp.tv_sec + v4l2_buf.timestamp.tv_usec / 1000;
                p_callback->OnJetsonDecData((unsigned char *)ptr, len + len1, v4l2_time);
            }

            /* If not writing to file, Queue the buffer back once it has been used. */
       		 if (dec->capture_plane.qBuffer(v4l2_buf, nullptr) < 0) {
                std::cerr << "Error while queueing buffer at decoder capture plane" << std::endl;
                break;
            }
        }
    }

    std::cout << "Exiting decoder capture loop thread" << std::endl;
	   return;
}

/* Output-plane (feeder) thread: creates and configures the NvVideoDecoder,
 * then pumps ES packets from data_list into the decoder's output plane until
 * abort, decoder error, or EOS (a zero-length packet).
 * NOTE(review): TEST_ERROR presumably logs and jumps to the `cleanup:` label
 * on failure — confirm against the macro in the header. */
void JetsonDec::decode_proc()
{
    int ret = 0;
    int error = 0;
    char *nalu_parse_buffer = nullptr;

    set_defaults();

    ctx.decoder_pixfmt = dec_pixfmt;
    ctx.out_pixfmt = 1;
    ctx.input_nalu = true;

    ctx.dec = NvVideoDecoder::createVideoDecoder("dec0");
    TEST_ERROR(!ctx.dec, "Could not create decoder", cleanup);

    /* Subscribe before streaming so the first resolution-change event
     * (awaited in dec_capture_loop_fcn) is not missed. */
    ret = ctx.dec->subscribeEvent(V4L2_EVENT_RESOLUTION_CHANGE, 0, 0);
    TEST_ERROR(ret < 0, "Could not subscribe to V4L2_EVENT_RESOLUTION_CHANGE", cleanup);

    ret = ctx.dec->setOutputPlaneFormat(ctx.decoder_pixfmt, CHUNK_SIZE);
    TEST_ERROR(ret < 0, "Could not set output plane format", cleanup);

    /* Frame input mode 0 = one complete NALU per buffer; 1 = arbitrary chunks. */
    if (ctx.input_nalu) {
        nalu_parse_buffer = new char[CHUNK_SIZE];
        ret = ctx.dec->setFrameInputMode(0);
        TEST_ERROR(ret < 0, "Error in decoder setFrameInputMode", cleanup);
    } else {
        ret = ctx.dec->setFrameInputMode(1);
        TEST_ERROR(ret < 0, "Error in decoder setFrameInputMode", cleanup);
    }

    /* Low-latency settings: no decoded-picture buffering, max clock rates. */
    ret = ctx.dec->disableDPB();
    TEST_ERROR(ret < 0, "Error in decoder disableDPB", cleanup);
    ret = ctx.dec->setMaxPerfMode(1);
    TEST_ERROR(ret < 0, "Error while setting decoder to max perf", cleanup);

    ret = ctx.dec->output_plane.setupPlane(V4L2_MEMORY_MMAP, 10, true, false);
    TEST_ERROR(ret < 0, "Error while setting up output plane", cleanup);

    ret = ctx.dec->output_plane.setStreamStatus(true);
    TEST_ERROR(ret < 0, "Error in output plane stream on", cleanup);

   // proc_ready = true;

    /* Priming loop: fill every output-plane buffer once before entering the
     * steady-state dequeue/requeue loop below. */
    for (uint32_t i = 0; i < ctx.dec->output_plane.getNumBuffers(); i++) {
        struct v4l2_buffer v4l2_buf;
        struct v4l2_plane planes[MAX_PLANES];
        NvBuffer *buffer;

        memset(&v4l2_buf, 0, sizeof(v4l2_buf));
        memset(planes, 0, sizeof(planes));

        buffer = ctx.dec->output_plane.getNthBuffer(i);

        /* Blocks until an ES packet arrives or m_abort is set. */
        struct timeval time_now;
        read_decoder_input_nalu(buffer, nullptr, CHUNK_SIZE, time_now);

        v4l2_buf.index = i;
        v4l2_buf.m.planes = planes;
        v4l2_buf.m.planes[0].bytesused = buffer->planes[0].bytesused;

        v4l2_buf.timestamp = time_now;
        ret = ctx.dec->output_plane.qBuffer(v4l2_buf, nullptr);
        TEST_ERROR(ret < 0, "Error Qing buffer at output plane", cleanup);
        /* A zero-length packet is the EOS marker. */
        if (v4l2_buf.m.planes[0].bytesused == 0) {
            std::cout << "Input file read complete" << std::endl;
            break;
        }
    }

    /* Steady state: dequeue a consumed buffer (blocking, timeout -1),
     * refill it with the next ES packet, and queue it again. */
    while (!m_abort && !ctx.dec->isInError()) {
        struct v4l2_buffer v4l2_buf;
        struct v4l2_plane planes[MAX_PLANES];
        NvBuffer *buffer;

        memset(&v4l2_buf, 0, sizeof(v4l2_buf));
        memset(planes, 0, sizeof(planes));

        v4l2_buf.m.planes = planes;

        ret = ctx.dec->output_plane.dqBuffer(v4l2_buf, &buffer, nullptr, -1);
        TEST_ERROR(ret < 0, "Error DQing buffer at output plane", cleanup);

        struct timeval time_now;
        read_decoder_input_nalu(buffer, nullptr, CHUNK_SIZE, time_now);

        v4l2_buf.m.planes[0].bytesused = buffer->planes[0].bytesused;
        v4l2_buf.timestamp = time_now;
        ret = ctx.dec->output_plane.qBuffer(v4l2_buf, nullptr);
        TEST_ERROR(ret < 0, "Error Qing buffer at output plane", cleanup);
        if (v4l2_buf.m.planes[0].bytesused == 0) {
            std::cout << "Input file read complete" << std::endl;
            break;
        }
    }

    /* Drain: wait for the decoder to release all still-queued buffers. */
    while (ctx.dec->output_plane.getNumQueuedBuffers() > 0 && !ctx.dec->isInError()) {
        struct v4l2_buffer v4l2_buf;
        struct v4l2_plane planes[MAX_PLANES];

        memset(&v4l2_buf, 0, sizeof(v4l2_buf));
        memset(planes, 0, sizeof(planes));

        v4l2_buf.m.planes = planes;
        ret = ctx.dec->output_plane.dqBuffer(v4l2_buf, nullptr, nullptr, -1);
        TEST_ERROR(ret < 0, "Error DQing buffer at output plane", cleanup);
    }

cleanup:
    delete ctx.dec;
    if (ctx.dst_dma_fd != -1) {
        ret = NvBufSurf::NvDestroy(ctx.dst_dma_fd);
        ctx.dst_dma_fd = -1;
        if (ret < 0) {
            std::cerr << "Error in BufferDestroy" << std::endl;
            // NOTE(review): `error` is set here but never propagated to the caller.
            error = 1;
        }
    }
    delete[] nalu_parse_buffer;
}
