//
// Created on 2025/1/23.
//
// Node APIs are not fully supported. To resolve the "interface cannot be found"
// compilation error, please include "napi/native_api.h".

#include "camera_factory_v4l2_impl.h"
#include "Log.h"
#include <signal.h>
#include <sys/mman.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include "camera_constants.h"
#include "img_util.h"
#include "linux/v4l2-subdev.h"
#include "video_v4l2_utils.h"

// Build a camera backend around an already-opened V4L2 device descriptor.
// Both the preview and capture paths default to 640x480 YUY2 until callers
// change them via setPreviewSize().
CameraFactoryV4L2Impl::CameraFactoryV4L2Impl(int fd)
    : mVideoFd(fd), mSurfaceId(nullptr), func(nullptr), mIsPreviewRunning(false), previewWidth(640),
      previewHeight(480), previewFormat(VIDEO_FORMAT_YUY2), captureWidth(640), captureHeight(480),
      captureFormat(VIDEO_FORMAT_YUY2), captureBuffers(nullptr), captureBufferLength(0) {
    // Synchronization primitives for the capture worker thread.
    pthread_mutex_init(&captureMutex, nullptr);
    pthread_cond_init(&captureCond, nullptr);
    // Synchronization primitives for the callback worker thread.
    pthread_mutex_init(&callbackMutex, nullptr);
    pthread_cond_init(&callbackCond, nullptr);
}

// Tear down synchronization primitives, release NAPI resources, and close the
// V4L2 descriptor.
// NOTE(review): this destructor does not call stopPreview(); destroying the
// object while the capture/callback threads are still running would destroy
// mutexes that are in use — confirm callers always stop preview first.
CameraFactoryV4L2Impl::~CameraFactoryV4L2Impl() {
    pthread_mutex_destroy(&captureMutex);
    pthread_cond_destroy(&captureCond);

    pthread_mutex_destroy(&callbackMutex);
    pthread_cond_destroy(&callbackCond);

    free_napi(mSurfaceId); // presumably frees the owned surface-id string — project helper
    if (func) {
        // Drop this object's reference to the thread-safe callback function.
        napi_release_threadsafe_function(func, napi_tsfn_release);
    }
    func = nullptr;
    if (mVideoFd != -1) {
        close(mVideoFd);
    }
    mVideoFd = -1;
}

// Take ownership of a new display surface id, freeing the previous one.
// The swap happens under captureMutex so the capture thread never observes a
// half-updated pointer. Always reports success.
bool CameraFactoryV4L2Impl::setDisplaySurface(char *surfaceId) {
    pthread_mutex_lock(&captureMutex);
    SAFE_FREE(mSurfaceId);  // release the previously owned id, if any
    mSurfaceId = surfaceId; // adopt the caller-provided pointer
    pthread_mutex_unlock(&captureMutex);
    return true;
}

// Setting individual parameters is not supported by this backend; always
// reports failure.
bool CameraFactoryV4L2Impl::setParameter(int type, char *value) {
    (void)type;  // unused until parameter support is implemented
    (void)value;
    return false;
}

// Ask the driver to switch the preview stream to the given geometry/format.
// Returns false when the device is not open or the driver rejects the request.
// Fixed: the original cached width/height/format even when the driver call
// failed, letting the cached values drift from the actual stream state; they
// are now updated only on success.
bool CameraFactoryV4L2Impl::setPreviewSize(int width, int height, int format) {
    if (mVideoFd == -1) {
        return false;
    }
    bool result = VideoV4L2Utils::setStreamPreviewSize(mVideoFd, format, width, height);
    if (result) {
        // Only record the new geometry once the driver accepted it.
        previewWidth = width;
        previewHeight = height;
        previewFormat = format;
    }
    return result;
}

// Start the preview pipeline: query the negotiated frame geometry, size the
// render window, map the kernel capture buffers, start streaming, then spawn
// the capture and callback worker threads.
// Returns false when the device is closed or any setup step fails.
bool CameraFactoryV4L2Impl::startPreview() {
    if (mVideoFd == -1) {
        return false;
    }
    // 1. Ask the driver for the format actually in effect; fall back to the
    //    values cached by setPreviewSize() if the query fails.
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(mVideoFd, VIDIOC_G_FMT, &fmt) == 0) {
        captureWidth = fmt.fmt.pix.width;
        captureHeight = fmt.fmt.pix.height;
        captureFormat = VideoV4L2Utils::v4l2FormatToInt(fmt.fmt.pix.pixelformat);
    } else {
        captureWidth = previewWidth;
        captureHeight = previewHeight;
        captureFormat = previewFormat;
    }
    LOG_D("当前使用的分辨率信息:%{public}d*%{public}d ,format:%{public}d", captureWidth, captureHeight, captureFormat);
    // Size the output window to match the capture resolution.
    PluginWindowRender *render = loadRender();
    if (render) {
        render->initSize(captureWidth, captureHeight);
    }
    // 2. Allocate and mmap the kernel capture buffers.
    if (!prepare_mmap()) {
        LOG_E("缓冲区准备失败");
        return false;
    }
    // 3. Turn streaming on.
    if (!startCameraStream()) {
        stopPreview();
        return false;
    }
    // 4. Spawn the capture thread that drains the driver's buffer queue.
    mIsPreviewRunning = true;
    int result = pthread_create(&captureThread, nullptr, capture_thread_func, (void *)this);
    if (result != 0) {
        stopPreview();
        return false;
    }
    // 5. Spawn the callback thread. Fixed: the original discarded this return
    //    value; a failure is now logged (preview keeps running on the capture
    //    thread alone, matching the original's observable behavior).
    //    TODO(review): stopPreview() still probes callbackThread even if this
    //    create failed — guard with a "created" flag when refactoring.
    result = pthread_create(&callbackThread, nullptr, callback_thread_func, (void *)this);
    if (result != 0) {
        LOG_E("callbackThread 创建失败, 错误码: %{public}d", result);
    }
    return true;
}


bool CameraFactoryV4L2Impl::prepare_mmap() {
    // 请求缓冲区，
    struct v4l2_requestbuffers req;
    memset(&req, 0, sizeof(req));
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if (ioctl(mVideoFd, VIDIOC_REQBUFS, &req) < 0) {
        LOG_E("请求缓冲区失败,错误码:%{public}d", errno);
        return false;
    }
    captureBufferLength = req.count;
    captureBuffers = new Buffer[req.count];
    // 初始化缓冲区
    for (unsigned int i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if (ioctl(mVideoFd, VIDIOC_QUERYBUF, &buf) < 0) {
            LOG_E("获取缓冲区数据,错误码:%{public}d", errno);
            return false;
        }
        captureBuffers[i].length = buf.length;
        captureBuffers[i].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, mVideoFd, buf.m.offset);
        if (captureBuffers[i].start == MAP_FAILED) {
            LOG_E("获取缓冲区数据,错误码:%{public}d", errno);
            return false;
        }

        // Queue the buffer
        if (ioctl(mVideoFd, VIDIOC_QBUF, &buf) < 0) {
            LOG_E("放回缓冲区数据失败,错误码:%{public}d", errno);
            return false;
        }
    }
    return true;
}

// Kick off streaming on the capture queue (VIDIOC_STREAMON).
// Returns false and logs errno if the driver refuses.
bool CameraFactoryV4L2Impl::startCameraStream() {
    enum v4l2_buf_type streamType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    bool ok = ioctl(mVideoFd, VIDIOC_STREAMON, &streamType) >= 0;
    if (!ok) {
        LOG_E("视频流启动失败, 错误码: %{public}d", errno);
    }
    return ok;
}

// Stop the preview pipeline: turn streaming off, wake and join both worker
// threads, then release the mmap'ed buffers and clear pending-frame state.
// Returns true unconditionally.
bool CameraFactoryV4L2Impl::stopPreview() {
    if (mIsPreviewRunning) {
        mIsPreviewRunning = false; // tells the capture loop to exit
        LOG_D("开始停止线程。。。");
        // STREAMOFF also unblocks a capture thread waiting inside VIDIOC_DQBUF.
        enum v4l2_buf_type bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(mVideoFd, VIDIOC_STREAMOFF, &bufType) < 0) {
            LOG_E("停止视频流失败, 错误码: %{public}d", errno);
        }
        pthread_cond_signal(&captureCond);
        // pthread_kill(thread, 0) probes whether the thread still exists
        // (ESRCH: already gone) before attempting the join.
        // NOTE(review): this probe-then-join is inherently racy; a plain
        // pthread_join on a joinable thread would suffice — confirm intent.
        if (pthread_kill(captureThread, 0) == ESRCH || pthread_join(captureThread, nullptr) != 0) {
            LOG_E("captureThread::当前线程已经结束，或者等待结束线程失败");
        }
        pthread_cond_signal(&callbackCond);
        // Same liveness probe + join for the callback thread.
        if (pthread_kill(callbackThread, 0) == ESRCH || pthread_join(callbackThread, nullptr) != 0) {
            LOG_E("callbackThread::当前线程已经结束，或者等待结束线程失败");
        }
        LOG_D("停止线程结束。。。");
    }
    cleanup_buffers();    // unmap and release the V4L2 buffers
    clearCallbackFrame(); // currently a no-op stub in this file
    clearCaptureFrame();  // currently a no-op stub in this file
    return true;
}

// Unmap and free the mmap'ed capture buffers, then ask the driver to drop its
// allocation (VIDIOC_REQBUFS with count == 0). Safe to call repeatedly.
// Fixed: each entry's mapping is now checked before munmap — the original
// unconditionally unmapped entries whose mmap had failed or never run.
void CameraFactoryV4L2Impl::cleanup_buffers() {
    if (captureBufferLength > 0 && captureBuffers) {
        for (int i = 0; i < captureBufferLength; i++) {
            // Skip entries that were never successfully mapped.
            if (captureBuffers[i].start && captureBuffers[i].start != MAP_FAILED) {
                munmap(captureBuffers[i].start, captureBuffers[i].length);
            }
        }
        delete[] captureBuffers;
    }
    // VIDIOC_REQBUFS with count 0 releases the driver-side buffers.
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = 0;
    ioctl(mVideoFd, VIDIOC_REQBUFS, &req_buffers);
    captureBuffers = nullptr;
    captureBufferLength = 0;
}

// Reading individual parameters is not implemented by this backend; always
// yields null.
char *CameraFactoryV4L2Impl::getParameter(int type) {
    (void)type; // unused until parameter queries are supported
    return nullptr;
}

// Enumerate supported values for a parameter type. Only preview-size
// enumeration is implemented; any other type (or a closed device) yields null.
char *CameraFactoryV4L2Impl::getSupportParameters(int type) {
    char *supported = nullptr;
    if (mVideoFd != -1 && type == CAMERA_PARAMETER_PREVIEW_SIZE) {
        supported = VideoV4L2Utils::getSupportPreviewSize(mVideoFd);
    }
    return supported;
}

// TODO(review): stub — preview-frame listener registration is not implemented;
// env/value/mode are accepted and ignored.
void CameraFactoryV4L2Impl::setPreviewDataListener(napi_env env, napi_value value, int mode) {}


// Capture-thread entry point: repeatedly dequeues a filled frame from the
// V4L2 queue, renders it, and re-queues the buffer until preview stops.
// `vptr_args` is the owning CameraFactoryV4L2Impl (passed by startPreview).
// Fixed: the v4l2_buffer descriptor is now zeroed before every VIDIOC_DQBUF
// (V4L2 requires reserved fields to be cleared; the original left them as
// stack garbage), and the tautological `buf.index >= 0` check on the
// unsigned index field was dropped.
void *CameraFactoryV4L2Impl::capture_thread_func(void *vptr_args) {
    CameraFactoryV4L2Impl *cameraFactory = static_cast<CameraFactoryV4L2Impl *>(vptr_args);
    while (cameraFactory->mIsPreviewRunning && cameraFactory->mVideoFd != -1) {
        // Re-initialize the descriptor each iteration so reserved fields are zero.
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // Dequeue a filled frame (blocks until one is ready or streaming stops).
        if (ioctl(cameraFactory->mVideoFd, VIDIOC_DQBUF, &buf) < 0) {
            LOG_E("从缓冲区队列中获取帧失败, 错误码: %{public}d", errno);
            continue;
        }
        if (!(buf.flags & V4L2_BUF_FLAG_ERROR)) {
            // buf.index is unsigned, so only the upper bound needs checking.
            if (cameraFactory->captureBuffers &&
                buf.index < static_cast<unsigned int>(cameraFactory->captureBufferLength)) {
                uint8_t *frameData = (uint8_t *)cameraFactory->captureBuffers[buf.index].start;
                if (frameData) {
                    //                     cameraFactory->putCaptureFrame(frameData, buf.bytesused);
                    cameraFactory->drawFrame(frameData, buf.bytesused); // render the frame
                }
            }
        } else {
            LOG_E("当前获取到的帧数据错误,flags:%{public}d 错误码: %{public}d", buf.flags, errno);
        }
        // Hand the buffer back to the driver so it can be refilled.
        if (ioctl(cameraFactory->mVideoFd, VIDIOC_QBUF, &buf) < 0) {
            LOG_E("将缓冲区放回队列失败, 错误码: %{public}d", errno);
        }
    }
    return nullptr;
}

// Convert a raw capture frame to BGR and hand it to the window renderer.
// `frame`/`length` describe the dequeued V4L2 buffer contents; the conversion
// uses the cached capture geometry and format.
void CameraFactoryV4L2Impl::drawFrame(uint8_t *frame, int length) {
    auto bgr = ImgUtils::any2BGR(frame, captureFormat, length, captureWidth, captureHeight);
    if (bgr.empty()) {
        LOG_E("转换的格式错误，是空的数据");
        return;
    }
    int byteCount = static_cast<int>(bgr.total() * bgr.elemSize());
    LOG_E("转换后的大小，%{public}d", byteCount);
    if (!bgr.data || byteCount <= 0) {
        return;
    }
    // Rendering shares captureMutex with setDisplaySurface().
    pthread_mutex_lock(&captureMutex);
    PluginWindowRender *render = loadRender();
    if (render && frame) {
        render->drawFrame(bgr.data, byteCount, captureWidth, captureHeight);
    }
    pthread_mutex_unlock(&captureMutex);
}

// Callback-thread entry point. Currently a stub: no callback delivery is
// implemented, so the thread terminates immediately. Returning from a
// pthread start routine is equivalent to pthread_exit(nullptr).
void *CameraFactoryV4L2Impl::callback_thread_func(void *vptr_args) {
    (void)vptr_args;
    return nullptr;
}

// TODO(review): stub — intended to discard any buffered capture frame; no
// capture-frame queue exists yet in this implementation.
void CameraFactoryV4L2Impl::clearCaptureFrame() {}

// TODO(review): stub — intended to discard any frame queued for the JS
// callback; no callback-frame queue exists yet in this implementation.
void CameraFactoryV4L2Impl::clearCallbackFrame() {}

// TODO(review): stub — intended as the napi_threadsafe_function call-JS
// trampoline for preview data; currently does nothing.
void CameraFactoryV4L2Impl::preview_thread_call(napi_env env, napi_value jsCb, void *context, void *data) {}

