#include "camera.h"

#include <QThread>
#include <QBuffer>

#include "rga/RgaUtils.h"
#include "rga/RgaApi.h"
#include "drm/drm_fourcc.h"

#include "rga/rga.h"

#include <stdlib.h>

#include "utils/image_drawing.h"





camera::camera(QString url,QObject *parent) :QObject(parent) {
    // Stream source (e.g. an RTSP URL); consumed later by initCamera().
    this->url = url;
    this->fmtCtx = nullptr;     // input format context
    this->codecCtx = nullptr;   // decoder context
    this->codec = nullptr;      // decoder
    this->hwDeviceCtx = nullptr;// hardware device context (RKMPP)
    this->videoStreamIdx = -1;  // index of the video stream, -1 = not found yet

    //SWS上下文

    // Packet/frame slots; allocated inside the decode loop, not here.
    this->pkt = nullptr;        // holds compressed input data
    this->drmFrame = nullptr;    // holds hardware-decoded frames (DRM memory)
    // this->drmPrt = nullptr;    // drm frame descriptor pointer

    this->running = true;       // decode-loop flag; cleared by closeCamera()
    this->exitSuccess = false;  // set once the decode loop has fully exited
    this->isGetFrame = false;   // true after at least one frame was produced

    // RGB output buffer filled by RGA color conversion (see checkFrameSize()).
    this->dst_buf = nullptr;
    this->dst_buf_size = 0;

    // NOTE(review): model path is hard-coded to a developer machine layout —
    // consider making it configurable before deployment.
    std::string model_path = "/home/cat/c_lib/rknn_model_zoo/examples/yolov8/model/rknn_yolov8_guanfang.rknn";

    // Raw owning pointer, released in the destructor. 80 = COCO class count
    // (presumably — confirm against the model's label set).
    this->test_yolov8 = new rknn::yolov8::yolov8(80);
    // test_yolov8->init_common_yolov8_model(model_path);
    this->test_yolov8->init_zero_copy_yolov8_model(model_path);
    this->is_init_yolov8 = true;
}


camera::~camera() {

    // Resource cleanup. Order matters: packet/frame first, then the decoder
    // context, then the input, and finally the hardware device context that
    // the decoder context held a reference to.


    if (this->pkt != nullptr) {
        av_packet_free(&this->pkt);        // release the packet
        //qDebug() << "pkt";
        this->pkt = nullptr;
    }


    if (this->drmFrame != nullptr) {
        av_frame_free(&this->drmFrame);    // release the hardware frame
        //qDebug() << "hwFrame";
        this->drmFrame = nullptr;
    }


    if (this->codecCtx != nullptr) {
        avcodec_free_context(&this->codecCtx);    // release the decoder context
        //qDebug() << "codecCtx";
        this->codecCtx = nullptr;
    }


    if (this->fmtCtx != nullptr) {
        avformat_close_input(&this->fmtCtx);     // close the input stream
        //qDebug() << "fmtCtx";
        this->fmtCtx = nullptr;
    }


    if (this->hwDeviceCtx != nullptr) {
        av_buffer_unref(&this->hwDeviceCtx);    // release the hw device context
        //qDebug() << "hwDeviceCtx";
        this->hwDeviceCtx = nullptr;
    }

    if (rga_manager) {
        rga_manager->release();
    }
    if (dst_buf) {
        free(dst_buf);
    }

    if (test_yolov8) {
        // NOTE(review): the constructor initializes the model via
        // init_zero_copy_yolov8_model(), but this releases via
        // release_common_yolov8_model() — confirm the release path matches
        // the init path, otherwise zero-copy resources may leak.
        test_yolov8->release_common_yolov8_model();
        delete test_yolov8;
        test_yolov8 = nullptr;
    }

    this->running = false;
    this->isGetFrame = false;


}

/**
 * @brief Opens the RTSP stream, sets up RKMPP hardware decoding, and runs the
 *        blocking decode loop until closeCamera() clears `running`.
 *
 * Per decoded frame: the RGA resources are (re)initialized, the DRM frame is
 * converted NV12 -> RGB888, `frameReady` is emitted, and (if the model is
 * loaded) YOLOv8 inference runs and `detectionsReady` is emitted.
 *
 * Emits `connFailMsg` on any setup/read failure and `clearFrame` on exit.
 */
void camera::initCamera() {
    // Initialize FFmpeg's network layer (required for RTSP input).
    avformat_network_init();

    // Create the RKMPP hardware device context used for decoding.
    if (av_hwdevice_ctx_create(&this->hwDeviceCtx, AV_HWDEVICE_TYPE_RKMPP, nullptr, nullptr, 0) < 0) {
        qDebug() << "无法创建RKMPP硬件上下文";
        emit this->connFailMsg("无法创建RKMPP硬件上下文");
        return;
    }

    // Input options: force RTSP over TCP to avoid UDP packet loss artifacts.
    AVDictionary* opts = nullptr;
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);

    qDebug() << this->url;
    // Open the input stream.
    if (avformat_open_input(&this->fmtCtx, this->url.toUtf8().constData(), nullptr, &opts) != 0) {
        av_dict_free(&opts);
        emit this->connFailMsg("打开输入流失败");
        return;
    }
    av_dict_free(&opts);  // release the options dictionary

    // Probe the stream for codec parameters.
    if (avformat_find_stream_info(this->fmtCtx, nullptr) < 0) {
        avformat_close_input(&this->fmtCtx);
        this->fmtCtx = nullptr;
        qDebug() << "无法获取流信息";
        emit this->connFailMsg("无法获取流信息");
        return;
    }

    // Locate the first video stream.
    for (unsigned int i = 0; i < this->fmtCtx->nb_streams; i++) {
        if (this->fmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            this->videoStreamIdx = i;
            break;
        }
    }

    if (this->videoStreamIdx == -1) {
        avformat_close_input(&this->fmtCtx);
        this->fmtCtx = nullptr;
        qDebug() << "未找到视频流";
        emit this->connFailMsg("未找到视频流");
        return;
    }

    // Use the Rockchip MPP hardware H.264 decoder.
    this->codec = avcodec_find_decoder_by_name("h264_rkmpp");
    if (!this->codec) {
        avformat_close_input(&this->fmtCtx);
        this->fmtCtx = nullptr;
        qDebug() << "找不到rkmpp解码器";
        emit this->connFailMsg("找不到rkmpp解码器");
        return;
    }

    // Create the decoder context, copy the stream parameters into it, and
    // bind it to the hardware device so decoding happens on the VPU.
    this->codecCtx = avcodec_alloc_context3(this->codec);
    if (!this->codecCtx ||
        avcodec_parameters_to_context(this->codecCtx, this->fmtCtx->streams[this->videoStreamIdx]->codecpar) < 0) {
        if (this->codecCtx) {
            avcodec_free_context(&this->codecCtx);
            this->codecCtx = nullptr;
        }
        avformat_close_input(&this->fmtCtx);
        this->fmtCtx = nullptr;
        qDebug() << "无法打开解码器";
        emit this->connFailMsg("无法打开解码器");
        return;
    }
    this->codecCtx->hw_device_ctx = av_buffer_ref(this->hwDeviceCtx);

    // Open the decoder.
    if (avcodec_open2(this->codecCtx, this->codec, nullptr) < 0) {
        avcodec_free_context(&this->codecCtx);
        this->codecCtx = nullptr;

        avformat_close_input(&this->fmtCtx);
        this->fmtCtx = nullptr;
        qDebug() << "无法打开解码器";
        emit this->connFailMsg("无法打开解码器");
        return;
    }

    // Allocate the packet and frame ONCE and reuse them across iterations.
    // The previous per-iteration alloc/free also leaked both objects on the
    // av_read_frame failure path (unref + nullptr without av_packet_free).
    this->pkt = av_packet_alloc();      // compressed input data
    this->drmFrame = av_frame_alloc();  // hardware-decoded output (DRM memory)
    if (this->pkt == nullptr || this->drmFrame == nullptr) {
        if (this->pkt) { av_packet_free(&this->pkt); this->pkt = nullptr; }
        if (this->drmFrame) { av_frame_free(&this->drmFrame); this->drmFrame = nullptr; }
        qDebug() << "内存分配失败";
        emit this->connFailMsg("内存分配失败");
        return;
    }

    // Main decode loop.
    while (this->running) {

        // Read one packet from the input.
        if (av_read_frame(this->fmtCtx, this->pkt) < 0) {
            qDebug() << "读取数据包失败";
            av_packet_unref(this->pkt);  // drop any partial payload, keep the struct
            emit this->connFailMsg("读取数据包失败");
            continue;
        }

        // Only feed packets that belong to the selected video stream.
        if (this->pkt->stream_index == this->videoStreamIdx) {
            if (avcodec_send_packet(this->codecCtx, this->pkt) == 0) {
                // Drain every frame the decoder has ready.
                while (avcodec_receive_frame(this->codecCtx, this->drmFrame) == 0) {
                    // (Re)initialize RGA import/output resources for this frame.
                    // Previously these two were also called unchecked just
                    // before this check — one checked call is sufficient.
                    if (!initRgaHandle() || !checkFrameSize()) {
                        qWarning("RGA初始化失败");
                        continue;
                    }

                    const char *out_buf = nullptr;
                    int out_width = 0;   // was uninitialized before
                    int out_height = 0;

                    if (this->yuv420ToRgb(this->drmFrame, &out_buf, &out_width, &out_height)) {
                        emit frameReady(out_buf, out_width, out_height);

                        if (this->is_init_yolov8) {
                            rknn::utils::image_buffer_t src_image;
                            src_image.width = out_width;
                            src_image.height = out_height;
                            src_image.size = out_width * out_height * 3;
                            src_image.virt_addr = reinterpret_cast<unsigned char*>(const_cast<char*>(out_buf));
                            src_image.format = rknn::utils::image_format_t::IMAGE_FORMAT_RGB888;

                            // Run inference synchronously. The previous code
                            // used std::async with a discarded future whose
                            // destructor blocks until completion, so it was
                            // effectively synchronous already — while also
                            // capturing the local `src_image` by reference
                            // (a dangling-capture hazard). This is equivalent
                            // and safe.
                            test_yolov8->inference_zero_copy_yolov8_model(&src_image, 0.5, 0.45);
                            auto det_result = test_yolov8->get_det_object();

                            std::vector<QRectF> detections;
                            detections.reserve(det_result.count);
                            for (int i = 0; i < det_result.count; i++) {
                                auto &det_data = det_result.results_list[i];
                                detections.emplace_back(
                                    det_data.box.left,
                                    det_data.box.top,
                                    det_data.box.right - det_data.box.left,
                                    det_data.box.bottom - det_data.box.top
                                    );
                            }

                            // Hand the detection results to the render thread.
                            emit detectionsReady(detections);
                        }
                    }

                    this->isGetFrame = true;
                }
            }
        }

        // Drop this packet's payload; the struct is reused next iteration.
        av_packet_unref(this->pkt);
    }

    // Loop exited: release the reusable packet/frame.
    av_packet_free(&this->pkt);
    this->pkt = nullptr;
    av_frame_free(&this->drmFrame);
    this->drmFrame = nullptr;

    this->exitSuccess = true;
    emit clearFrame();
    return;
}

// Requests the decode loop in initCamera() to stop, then (if any frame was
// ever produced) spin-waits until the loop confirms it has fully exited.
// NOTE(review): `running` / `exitSuccess` / `isGetFrame` appear to be plain
// bools shared across threads without synchronization — consider making them
// std::atomic<bool> in the header to avoid a data race; the 1µs spin-wait
// could then become a condition-variable wait.
void camera::closeCamera()
{
    this->running = false;

    if (this->isGetFrame == true) {
        while (!this->exitSuccess) {
            QThread::usleep(1);
        }
    }

}


/**
 * @brief Converts the current RGA-imported frame from NV12 (YCbCr 4:2:0
 *        semi-planar) to packed RGB888 using the RGA hardware blitter.
 *
 * The pixel data comes from the RGA buffers prepared by initRgaHandle() /
 * checkFrameSize(); `frame` is only consulted for its dimensions.
 *
 * @param frame      Decoded frame; supplies width/height for the caller.
 * @param out_buf    On success, points at the internal RGB buffer (dst_buf).
 *                   The pointer stays owned by this object — do not free it.
 * @param out_width  On success, set to frame->width.
 * @param out_height On success, set to frame->height.
 * @return true on successful conversion; on failure the out-params are left
 *         untouched (previously they were populated even on failure).
 */
bool camera::yuv420ToRgb(AVFrame* frame, const char** out_buf, int* out_width, int* out_height)
{
    std::lock_guard<std::mutex> lock(rga_mutex);

    // Both the RGA manager and the destination buffer must be ready;
    // dst_buf was previously handed out without a null check.
    if (!rga_manager || !dst_buf) {
        return false;
    }

    IM_STATUS ret = imcvtcolor(*rga_manager->getSrcBuffer(),
                               *rga_manager->getDstBuffer(),
                               RK_FORMAT_YCbCr_420_SP,
                               RK_FORMAT_RGB_888,
                               1);
    if (ret != IM_STATUS_SUCCESS) {
        return false;
    }

    *out_buf = dst_buf;
    *out_width = frame->width;
    *out_height = frame->height;

    return true;
}

/**
 * @brief Validates the current DRM-prime frame and binds its dma-buf fd to
 *        the RGA resource manager.
 *
 * @return true if the frame is a usable DRM-prime frame and the RGA manager
 *         initialized successfully; false otherwise.
 */
bool camera::initRgaHandle()
{

    std::lock_guard<std::mutex> lock(rga_mutex);

    // Validate the frame BEFORE constructing the manager — previously an
    // RgaResourceManager was allocated even when the frame was unusable.
    if (drmFrame->format != AV_PIX_FMT_DRM_PRIME) {
        return false;
    }

    // The DRM frame descriptor (and its dma-buf fd) lives in data[0].
    AVDRMFrameDescriptor* desc = reinterpret_cast<AVDRMFrameDescriptor*>(drmFrame->data[0]);
    if (!desc || desc->nb_objects < 1 || desc->objects[0].fd <= 0) {
        return false;
    }

    // Lazily create the manager once we know the frame can be imported.
    if (!rga_manager) {
        rga_manager = std::make_unique<RgaResourceManager>();
    }

    return rga_manager->initialize(desc->objects[0].fd,
                                   drmFrame->width,
                                   drmFrame->height,
                                   dst_buf);

}




/**
 * @brief Reallocates the RGB output buffer (and re-binds the RGA resources)
 *        whenever the decoded frame dimensions change.
 *
 * @return true if the buffer/RGA state matches the current frame size;
 *         false if allocation or RGA re-initialization failed.
 */
bool camera::checkFrameSize()
{
    std::lock_guard<std::mutex> lock(rga_mutex);

    if (drmFrame->width != frameWidth || drmFrame->height != frameHeight) {
        const int newWidth = drmFrame->width;
        const int newHeight = drmFrame->height;

        // Drop the old buffer before allocating the new one.
        if (dst_buf) {
            free(dst_buf);
            dst_buf = nullptr;
        }

        // aligned_alloc requires `size` to be a multiple of the alignment
        // (C11 7.22.3.1); w*h*3 for RGB888 usually is not, which was
        // undefined behavior (glibc may return NULL). Round up to 64 bytes.
        const size_t needed = static_cast<size_t>(newWidth) * static_cast<size_t>(newHeight) * 3;
        dst_buf_size = (needed + 63) & ~static_cast<size_t>(63);
        dst_buf = static_cast<char*>(aligned_alloc(64, dst_buf_size));
        if (!dst_buf) {
            // Leave the cached dimensions unchanged so the next frame
            // retries the allocation (previously they were updated first,
            // so a failed allocation was never retried).
            dst_buf_size = 0;
            return false;
        }

        frameWidth = newWidth;
        frameHeight = newHeight;

        // Re-bind the RGA resources to the new buffer/dimensions.
        if (rga_manager) {
            AVDRMFrameDescriptor* desc = reinterpret_cast<AVDRMFrameDescriptor*>(drmFrame->data[0]);
            if (desc && desc->nb_objects >= 1 && desc->objects[0].fd > 0) {
                return rga_manager->initialize(desc->objects[0].fd,
                                               frameWidth,
                                               frameHeight,
                                               dst_buf);
            }
        }
    }
    return true;
}
