#include "mainwindow.h"
#include "ui_mainwindow.h" 
#include "ui_imageviewer.h" 
#include "ui_videoplayer.h"
#include  <QImage>
#include  <QDebug>
#include  <QDateTime>
#include  <QThread>
#include  <QDir>
#include <QFileDialog>
#include "v4l2.h"
#include "mpp.h"
#include <alibabacloud/oss/OssClient.h>
#include <QPropertyAnimation>
#include <QLabel>

#define OSS_OPEN 0

extern "C"{
    #include <stdlib.h>
    #include <stdio.h>
    #include <string.h>
    #include <math.h>
    #include <time.h>
    #include <libavutil/avassert.h>
    #include <libavutil/channel_layout.h>
    #include <libavutil/opt.h>
    #include <libavutil/mathematics.h>
    #include <libavutil/timestamp.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <libswresample/swresample.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>
    #include <string.h>
    #include <sys/types.h>
    #include <sys/stat.h>
    #include <fcntl.h>
    #include <sys/mman.h>
    #include <poll.h>
    #include <stdlib.h>
    #include <stdio.h>
    #include <sys/types.h>
    #include <sys/socket.h>
    #include <arpa/inet.h>
    #include <pthread.h>
    #include <signal.h>
    #include <unistd.h>
    #include <string.h>
    #include <stdio.h>
}

// YOLO overlay palette: 19 per-detection colors, cycled via
// `color_index % 19` in processYoloInference(). Each triple is consumed
// as (r, g, b) through QColor(color[0], color[1], color[2]).
static const unsigned char colors[19][3] = {
    {54, 67, 244}, {99, 30, 233}, {176, 39, 156}, {183, 58, 103}, {181, 81, 63},
    {243, 150, 33}, {244, 169, 3}, {212, 188, 0}, {136, 150, 0}, {80, 175, 76},
    {74, 195, 139}, {57, 220, 205}, {59, 235, 255}, {7, 193, 255}, {0, 152, 255},
    {34, 87, 255}, {72, 85, 121}, {158, 158, 158}, {139, 125, 96}
};


using namespace AlibabaCloud::OSS;

// Construct the main window: build the three stacked pages (camera /
// image viewer / video player), enable touch input on every interactive
// widget, create the save directories, open the V4L2 camera, load the
// YOLOv11 RKNN model, and wire up all signal/slot connections plus the
// ~25 FPS capture timer.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
    , timer(new QTimer(this))
    , isRecording(false)
    , isStreaming(false)
    , isYoloRunning(false)
    , isCameraActive(true)
    , yolo_initialized(false)
    , decoderThread(new VideoDecoderThread(this))
{
    // Set up the main (camera) page from the generated UI.
    ui->setupUi(this);

    // Build the image-viewer page from its generated UI.
    QWidget *imageViewerWidget = new QWidget(this);
    imageUi = new Ui::ImageViewer;
    imageUi->setupUi(imageViewerWidget);
    // Cache the three preview labels (previous / current / next image).
    prevImageLabel = imageUi->prevImageLabel;
    currentImageLabel = imageUi->currentImageLabel;
    nextImageLabel = imageUi->nextImageLabel;

    // Build the video-player page from its generated UI.
    QWidget *videoPlayerWidget = new QWidget(this);
    videoUi = new Ui::VideoPlayer;
    videoUi->setupUi(videoPlayerWidget);
    videoLabel = videoUi->videoLabel;

    // Stack the three pages; these indices are referenced throughout
    // this file (handleTouchEvent, updateButtonStates, ...).
    stackedWidget = new QStackedWidget(this);
    stackedWidget->addWidget(ui->centralwidget); // index 0: camera view
    stackedWidget->addWidget(imageViewerWidget); // index 1: image viewer
    stackedWidget->addWidget(videoPlayerWidget); // index 2: video player
    setCentralWidget(stackedWidget);

    // Enable touch events (WA_AcceptTouchEvents) on all interactive widgets.
    // Main page:
    setAttribute(Qt::WA_AcceptTouchEvents);
    ui->label->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->snapshotButton->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->recordButton->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->stopButton->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->startStreamButton->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->stopStreamButton->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->startYoloButton->setAttribute(Qt::WA_AcceptTouchEvents);
    ui->stopYoloButton->setAttribute(Qt::WA_AcceptTouchEvents);

    // Image-viewer page:
    ui->selectImageButton->setAttribute(Qt::WA_AcceptTouchEvents);
    prevImageLabel->setAttribute(Qt::WA_AcceptTouchEvents);
    currentImageLabel->setAttribute(Qt::WA_AcceptTouchEvents);
    nextImageLabel->setAttribute(Qt::WA_AcceptTouchEvents);
    imageUi->prevButton->setAttribute(Qt::WA_AcceptTouchEvents);
    imageUi->nextButton->setAttribute(Qt::WA_AcceptTouchEvents);
    imageUi->returnButton->setAttribute(Qt::WA_AcceptTouchEvents);

    // Video-player page:
    ui->selectVideoButton->setAttribute(Qt::WA_AcceptTouchEvents);
    videoLabel->setAttribute(Qt::WA_AcceptTouchEvents);

    videoUi->playButton->setAttribute(Qt::WA_AcceptTouchEvents);
    videoUi->pauseButton->setAttribute(Qt::WA_AcceptTouchEvents);
    videoUi->stopButton->setAttribute(Qt::WA_AcceptTouchEvents);
    videoUi->returnButton->setAttribute(Qt::WA_AcceptTouchEvents);
    videoUi->prevVideoButton->setAttribute(Qt::WA_AcceptTouchEvents);
    videoUi->nextVideoButton->setAttribute(Qt::WA_AcceptTouchEvents);

    // Shared stylesheet covering enabled, disabled, hover and pressed states.
    QString buttonStyle = "QPushButton {"
                         "    background-color: rgb(119, 160, 210);"
                         "    color: white;"
                         "    border-radius: 5px;"
                         "    border: 1px solid rgb(90, 120, 160);"
                         "    font-size: 12px;"
                         "    font-weight: bold;"
                         "}"
                         "QPushButton:disabled {"
                         "    background-color: rgb(150, 150, 150);"
                         "    color: rgb(200, 200, 200);"
                         "    border: 1px solid rgb(120, 120, 120);"
                         "}"
                         "QPushButton:hover {"
                         "    background-color: rgb(100, 140, 190);"
                         "}"
                         "QPushButton:pressed {"
                         "    background-color: rgb(80, 120, 170);"
                         "}";
    // Main page buttons:
    ui->snapshotButton->setStyleSheet(buttonStyle);
    ui->recordButton->setStyleSheet(buttonStyle);
    ui->stopButton->setStyleSheet(buttonStyle);
    ui->startStreamButton->setStyleSheet(buttonStyle);
    ui->stopStreamButton->setStyleSheet(buttonStyle);
    ui->startYoloButton->setStyleSheet(buttonStyle);
    ui->stopYoloButton->setStyleSheet(buttonStyle);
    ui->selectImageButton->setStyleSheet(buttonStyle);
    ui->selectVideoButton->setStyleSheet(buttonStyle);

    // Image-viewer buttons:
    imageUi->prevButton->setStyleSheet(buttonStyle);
    imageUi->nextButton->setStyleSheet(buttonStyle);
    imageUi->returnButton->setStyleSheet(buttonStyle);

    // Video-player buttons:
    videoUi->playButton->setStyleSheet(buttonStyle);
    videoUi->pauseButton->setStyleSheet(buttonStyle);
    videoUi->stopButton->setStyleSheet(buttonStyle);
    videoUi->returnButton->setStyleSheet(buttonStyle);
    videoUi->prevVideoButton->setStyleSheet(buttonStyle);
    videoUi->nextVideoButton->setStyleSheet(buttonStyle);

    // Make sure the snapshot directory exists.
    QDir pictureDir("/home/orangepi/saved_picture");
    if (!pictureDir.exists()) {
        if (pictureDir.mkpath("/home/orangepi/saved_picture")) {
            qDebug() << "创建目录：/home/orangepi/saved_picture";
        } else {
            qDebug() << "无法创建目录：/home/orangepi/saved_picture";
        }
    } else {
        qDebug() << "目录已存在：/home/orangepi/saved_picture";
    }

    // Make sure the recording directory exists.
    QDir videoDir("/home/orangepi/saved_video");
    if(!videoDir.exists()){
        videoDir.mkpath("/home/orangepi/saved_video");
    }
    // Open and configure the V4L2 capture device; fatal if unavailable.
    if( v4l2_init(v4l2_ctx) == -1 ){
        qDebug() << "Initialization v4l2  failed";
        exit(-1);
    }

    // Initialize YOLOv11 (RKNN). Failure is non-fatal: the app runs
    // without inference and yolo_initialized stays false.
    const char* model_path = "./yolo11.rknn"; // replace with your model path
    init_post_process();
    if (init_yolo11_model(model_path, &rknn_app_ctx) != 0) {
        qDebug() << "Failed to initialize YOLOv11 model:" << model_path;
    } else {
        yolo_initialized = true;
        qDebug() << "YOLOv11 model initialized successfully";
    }

    updateButtonStates();

    // Set up timer for frame capture (approx. 25 FPS)
    connect(timer, &QTimer::timeout, this, &MainWindow::captureAndDisplayFrame);
    timer->start(1000 / FPS);

    // Main-page connections; Qt::UniqueConnection guards against duplicates.
    connect(ui->snapshotButton, &QPushButton::clicked, this, &MainWindow::snapshot, Qt::UniqueConnection);
    connect(ui->recordButton, &QPushButton::clicked, this, &MainWindow::startRecording, Qt::UniqueConnection);
    connect(ui->stopButton, &QPushButton::clicked, this, &MainWindow::stopRecording, Qt::UniqueConnection);
    connect(ui->startStreamButton, &QPushButton::clicked, this, &MainWindow::startStreaming, Qt::UniqueConnection);
    connect(ui->stopStreamButton, &QPushButton::clicked, this, &MainWindow::stopStreaming, Qt::UniqueConnection);
    connect(ui->startYoloButton, &QPushButton::clicked, this, &MainWindow::startYolo, Qt::UniqueConnection);
    connect(ui->stopYoloButton, &QPushButton::clicked, this, &MainWindow::stopYolo, Qt::UniqueConnection);
    connect(ui->selectImageButton, &QPushButton::clicked, this, &MainWindow::selectImage, Qt::UniqueConnection);
    connect(ui->selectVideoButton, &QPushButton::clicked, this, &MainWindow::selectVideo, Qt::UniqueConnection);


    // Image-viewer connections:
    connect(imageUi->prevButton, &QPushButton::clicked, this, &MainWindow::showPreviousImage, Qt::UniqueConnection);
    connect(imageUi->nextButton, &QPushButton::clicked, this, &MainWindow::showNextImage, Qt::UniqueConnection);
    connect(imageUi->returnButton, &QPushButton::clicked, this, &MainWindow::returnToCamera, Qt::UniqueConnection);

    // Video-player connections:
    connect(videoUi->playButton, &QPushButton::clicked, this, &MainWindow::playVideo, Qt::UniqueConnection);
    connect(videoUi->pauseButton, &QPushButton::clicked, this, &MainWindow::togglePause, Qt::UniqueConnection);
    connect(videoUi->stopButton, &QPushButton::clicked, this, &MainWindow::stopVideo, Qt::UniqueConnection);
    connect(videoUi->returnButton, &QPushButton::clicked, this, &MainWindow::returnToMain, Qt::UniqueConnection);
    connect(videoUi->prevVideoButton, &QPushButton::clicked, this, &MainWindow::showPreviousVideo, Qt::UniqueConnection);
    connect(videoUi->nextVideoButton, &QPushButton::clicked, this, &MainWindow::showNextVideo, Qt::UniqueConnection);

    // Decoder-thread connections (video playback page):
    connect(decoderThread, &VideoDecoderThread::frameReady, this, &MainWindow::displayFrame);
    connect(decoderThread, &VideoDecoderThread::errorOccurred, this, &MainWindow::handleDecoderError);
    connect(decoderThread, &VideoDecoderThread::playbackFinished, this, &MainWindow::stopVideo);
}

// BUG FIX: the guard was `#if OSS`, but every call site in this file is
// gated by OSS_OPEN (defined at the top of the file). With the old guard
// this definition was always compiled out, so enabling OSS_OPEN produced
// an unresolved-symbol link error for uploadToOss().
#if OSS_OPEN
// Upload a local file to Alibaba Cloud OSS.
// @param localFile  absolute path of the file to upload
// @param objectKey  destination object key inside the bucket
// @return true on success, false on failure (the error is logged)
bool MainWindow::uploadToOss(const QString& localFile, const QString& objectKey){
    // SECURITY: hard-coded live credentials. Move the AccessKey pair to a
    // config file or environment variables and rotate these keys.
    std::string accessKeyId = "LTAI5tQkVXZi4F1nUNdLTBcZ"; // your AccessKey ID
    std::string accessKeySecret = "WfduboKKRv0rdgbEKsdEKljvK8ujMa"; // your AccessKey Secret
    std::string endpoint = "oss-cn-beijing.aliyuncs.com"; // bucket region endpoint
    std::string bucketName = "rk3566-picture-video"; // bucket name

    ClientConfiguration conf;
    OssClient client(endpoint, accessKeyId, accessKeySecret, conf);

    // Upload the file.
    auto outcome = client.PutObject(bucketName, objectKey.toStdString(), localFile.toStdString());

    if (outcome.isSuccess()) {
        qDebug() << "File uploaded to OSS:" << objectKey;
        return true;
    } else {
        qDebug() << "Upload failed:" << QString::fromStdString(outcome.error().Message());
        return false;
    }
}
#endif

// Tear down in a safe order: stop the capture timer first so no slot
// fires during destruction, end any in-flight activity (each stop* call
// is a no-op when idle), release the YOLO model, then close the camera
// and encoder before deleting the UI objects.
MainWindow::~MainWindow()
{
    timer->stop();
    stopRecording();
    stopStreaming();
    stopVideo();
    stopYolo();
    if (yolo_initialized) {
        // Free RKNN model resources and post-processing tables.
        release_yolo11_model(&rknn_app_ctx);
        deinit_post_process();
        yolo_initialized = false;
    }
    v4l2_cleanup(v4l2_ctx);
    // NOTE(review): stopRecording() above already calls mpp_cleanup() when a
    // recording was active — presumably mpp_cleanup() is idempotent; confirm.
    mpp_cleanup();
    // Explicit deletes of QObject children are safe: deleting a QObject
    // removes it from its parent's child list.
    delete decoderThread;
    delete ui;
    delete imageUi;
    delete videoUi;
    delete stackedWidget;
}

//触摸事件发生时： Qt 事件系统调用 MainWindow::event()。
bool MainWindow::event(QEvent *event){
    if (event->type() == QEvent::TouchBegin ||
        event->type() == QEvent::TouchUpdate ||
        event->type() == QEvent::TouchEnd) {
        QTouchEvent *touchEvent = static_cast<QTouchEvent*>(event);
        handleTouchEvent(touchEvent);
        return true;
    }
    return QMainWindow::event(event);
}

// Dispatch a completed touch to whichever enabled button the touch point
// landed on, for the currently visible page of the stacked widget. The
// raw touch-controller coordinates live in a small rotated space and are
// remapped to the 480x320 screen before hit-testing button geometries.
void MainWindow:: handleTouchEvent(QTouchEvent *event)
{
    static QTime lastTouchTime = QTime::currentTime();
    static const int debounceMs = 500; // debounce window: 500 ms

    // Only act on TouchEnd so one gesture cannot fire a button repeatedly.
    if (event->type() != QEvent::TouchEnd) {
        return;
    }

    // Debounce: ignore touches arriving too soon after the previous one.
    QTime currentTime = QTime::currentTime();
    if (lastTouchTime.msecsTo(currentTime) < debounceMs) {
        qDebug() << "忽略重复触摸事件，时间间隔：" << lastTouchTime.msecsTo(currentTime) << "ms";
        return;
    }
    lastTouchTime = currentTime;

    // QString statusText = QString("触摸结束：%1个触点").arg(event->touchPoints().count());
    const QList<QTouchEvent::TouchPoint> touchPoints = event->touchPoints();
    // Single-point touch only: hit-test the buttons of the current page.
    if(touchPoints.count() == 1){
        const QTouchEvent::TouchPoint &point = touchPoints.first();
        QPointF rawPos = point.pos();
        

        // Raw controller ranges (empirical values from the touch data).
        const float touchMinX = 0.0, touchMaxX = 95.0; // controller X range
        const float touchMinY = 0.0, touchMaxY = 70.0; // controller Y range
        const float screenWidth = 480.0, screenHeight = 320.0;

        // Controller Y -> screen X (same direction).
        float mappedX = (rawPos.y() / touchMaxY) * screenWidth;
        // Controller X -> screen Y (axis inverted).
        float mappedY = ((touchMaxX - rawPos.x()) / touchMaxX) * screenHeight;
        
        QPointF pos(mappedX, mappedY);
        qDebug() << "原始触摸坐标：" << rawPos;
        qDebug() << "映射后触摸坐标：" << pos;

        if (stackedWidget->currentIndex() == 0) { // camera page
            qDebug() << "拍照按钮区域：" << ui->snapshotButton->geometry();
            qDebug() << "开始录制按钮区域：" << ui->recordButton->geometry();
            qDebug() << "停止录制按钮区域：" << ui->stopButton->geometry();
            qDebug() << "开始推流按钮区域：" << ui->startStreamButton->geometry();
            qDebug() << "停止推流按钮区域：" << ui->stopStreamButton->geometry();
            qDebug() << "开始物体识别按钮区域：" << ui->startYoloButton->geometry();
            qDebug() << "停止物体识别按钮区域：" << ui->stopYoloButton->geometry();
            qDebug() << "选择图片按钮区域：" << ui->selectImageButton->geometry();
            qDebug() << "选择视频按钮区域：" << ui->selectVideoButton->geometry();

            // First enabled button whose geometry contains the mapped point wins.
            if (ui->snapshotButton->geometry().contains(pos.toPoint()) && ui->snapshotButton->isEnabled()) {
                snapshot();
                 // statusText += " - 触发拍照";
            } else if (ui->recordButton->geometry().contains(pos.toPoint()) && ui->recordButton->isEnabled()) {
                startRecording();
                 // statusText += " - 触发开始录制";
            } else if (ui->stopButton->geometry().contains(pos.toPoint()) && ui->stopButton->isEnabled()) {
                stopRecording();
                 // statusText += " - 触发停止录制";
            } else if (ui->startStreamButton->geometry().contains(pos.toPoint()) && ui->startStreamButton->isEnabled()) {
                startStreaming();
                 // statusText += " - 触发开始推流";
            } else if (ui->stopStreamButton->geometry().contains(pos.toPoint()) && ui->stopStreamButton->isEnabled()) {
                stopStreaming();
                 // statusText += " - 触发停止推流";
            } else if (ui->startYoloButton->geometry().contains(pos.toPoint()) && ui->startYoloButton->isEnabled()) {
                startYolo();
                 // statusText += " - 触发开始物体识别";
            } else if (ui->stopYoloButton->geometry().contains(pos.toPoint()) && ui->stopYoloButton->isEnabled()) {
                stopYolo();
                 // statusText += " - 触发停止物体识别";
            } else if (ui->selectImageButton->geometry().contains(pos.toPoint()) && ui->selectImageButton->isEnabled()) {
                selectImage();
                 // statusText += " - 触发选择图片";
            } else if (ui->selectVideoButton->geometry().contains(pos.toPoint()) && ui->selectVideoButton->isEnabled()) {
                selectVideo();
                //statusText += " - 触发选择视频";
            }
        } else if (stackedWidget->currentIndex() == 1) {  // image-viewer page
            qDebug() << "上一张按钮区域：" << imageUi->prevButton->geometry();
            qDebug() << "下一张按钮区域：" << imageUi->nextButton->geometry();
            qDebug() << "返回按钮区域：" << imageUi->returnButton->geometry();

            if (imageUi->prevButton->geometry().contains(pos.toPoint()) && imageUi->prevButton->isEnabled()) {
                showPreviousImage();
                 // statusText += " - 触发上一张";
            } else if (imageUi->nextButton->geometry().contains(pos.toPoint()) && imageUi->nextButton->isEnabled()) {
                showNextImage();
                 // statusText += " - 触发下一张";
            } else if (imageUi->returnButton->geometry().contains(pos.toPoint()) && imageUi->returnButton->isEnabled()) {
                returnToCamera();
                 // statusText += " - 触发返回";
            }
        } else if (stackedWidget->currentIndex() == 2) {  // video-player page
            qDebug() << "播放按钮区域：" << videoUi->playButton->geometry();
            qDebug() << "暂停按钮区域：" << videoUi->pauseButton->geometry();
            qDebug() << "停止按钮区域：" << videoUi->stopButton->geometry();
            qDebug() << "返回按钮区域：" << videoUi->returnButton->geometry();

            if (videoUi->playButton->geometry().contains(pos.toPoint()) && videoUi->playButton->isEnabled()) {
                playVideo();
                //statusText += " - 触发播放";
            } else if (videoUi->pauseButton->geometry().contains(pos.toPoint()) && videoUi->pauseButton->isEnabled()) {
                togglePause();
                //statusText += " - 触发暂停";
            } else if (videoUi->stopButton->geometry().contains(pos.toPoint()) && videoUi->stopButton->isEnabled()) {
                stopVideo();
                //statusText += " - 触发停止";
            } else if (videoUi->returnButton->geometry().contains(pos.toPoint()) && videoUi->returnButton->isEnabled()) {
                returnToMain();
                //statusText += " - 触发返回";
            } else if (videoUi->prevVideoButton->geometry().contains(pos.toPoint()) && videoUi->prevVideoButton->isEnabled()) {
                showPreviousVideo();
                //statusText += " - 触发播放上一视频";
            } else if (videoUi->nextVideoButton->geometry().contains(pos.toPoint()) && videoUi->nextVideoButton->isEnabled()) {
                showNextVideo();
                //statusText += " - 触发播放下一视频";
            }
        }
    }
    // Optionally surface the touch info on the status bar (disabled).
    // statusBar()->showMessage(statusText);
    // qDebug() << statusText;
}

// Refresh the enabled/disabled state (and pause-button caption) of every
// button according to the current activity flags and the visible page.
void MainWindow::updateButtonStates() {
    // Camera page: each start/stop pair mirrors one activity flag.
    ui->recordButton->setEnabled(!isRecording);
    ui->stopButton->setEnabled(isRecording);
    ui->startStreamButton->setEnabled(!isStreaming);
    ui->stopStreamButton->setEnabled(isStreaming);
    ui->startYoloButton->setEnabled(!isYoloRunning);
    ui->stopYoloButton->setEnabled(isYoloRunning);

    const int page = stackedWidget->currentIndex();

    if (page == 1) {
        // Image-viewer page: navigation needs at least two images.
        // NOTE(review): displayImage() allows wrap-around (index > 0 ||
        // size > 1) while this helper does not — confirm which policy
        // is intended.
        const bool canBrowse = imageList.size() > 1;
        imageUi->prevButton->setEnabled(canBrowse && currentImageIndex > 0);
        imageUi->nextButton->setEnabled(canBrowse && currentImageIndex < imageList.size() - 1);
    }

    if (page == 2) {
        // Video-player page.
        videoUi->playButton->setEnabled(!isVideoPlaying && !videoFilePath.isEmpty());
        videoUi->stopButton->setEnabled(isVideoPlaying);
        videoUi->pauseButton->setText(isVideoPaused ? "继续" : "暂停");
        videoUi->prevVideoButton->setEnabled(currentVideoIndex > 0 || videoList.size() > 1);
        videoUi->nextVideoButton->setEnabled(currentVideoIndex < videoList.size() - 1 || videoList.size() > 1);
    }
}


void MainWindow::captureAndDisplayFrame(){
    QMutexLocker locker(&mutex);
    
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.m.planes = v4l2_ctx.planes.data(); //数组的大小
    buf.length = v4l2_ctx.planes.size();   //平面数组中的元素数

    // 取
    if (ioctl(v4l2_ctx.fd, VIDIOC_DQBUF, &buf) < 0) {
        qDebug() << "Failed to dequeue buffer:" << strerror(errno);
        return;
    }

    // Verify buffer data size
    if (buf.m.planes[0].bytesused != WIDTH * HEIGHT * 2) {
        qDebug() << "Warning: Buffer data size mismatch (" << buf.m.planes[0].bytesused << " bytes, expected " << WIDTH * HEIGHT * 2 << ")";
    }

    // Convert UYVY to RGB for display(qt)
    SwsContext *rgb_sws_ctx = sws_getContext(WIDTH, HEIGHT, AV_PIX_FMT_UYVY422,
                                             WIDTH, HEIGHT, AV_PIX_FMT_RGB24,
                                             SWS_BILINEAR, NULL, NULL, NULL);
    if (!rgb_sws_ctx) {
        qDebug() << "Failed to initialize RGB conversion context";
        return;
    }

    uint8_t *frame_data = static_cast<uint8_t*>(v4l2_ctx.buffers[buf.index].start);
    int src_linesize[1] = { WIDTH * 2 };
    uint8_t *dst_data[1] = { new uint8_t[WIDTH * HEIGHT * 3] };
    int dst_linesize[1] = { WIDTH * 3 };

    //UYVY422 --> RGB24
    sws_scale(rgb_sws_ctx, &frame_data, src_linesize, 0, HEIGHT, dst_data, dst_linesize);

    /*
    用于qt显示
    */
    // Create QImage 对象 from RGB(24) data 
    QImage image(dst_data[0], WIDTH, HEIGHT, QImage::Format_RGB888);//图像数据 宽度 高度 格式
    
    QPainter painter(&image);
    QDateTime now = QDateTime::currentDateTime();
    QString datetime1 = now.toString("yyyy-MM-dd HH:mm:ss");

    // QLabel *timelabel = new QLabel(this);
    // timelabel->setFixedSize(200, 30);
    // timelabel->setText(datetime1);
    // qDebug() << "time" << datetime1;
    painter.setPen(Qt::red);
    painter.drawText(10,20, datetime1);
    // 运行 YOLO 推理
    if (isYoloRunning && yolo_initialized) {
        processYoloInference(frame_data, image);
    }

    //将图像显示在 QLabel 中               将图像缩放到指定的大小  获取 QLabel 的大小 在缩放时保持图像的宽高比
    ui->label->setPixmap(QPixmap::fromImage(image.scaled(ui->label->size(), Qt::KeepAspectRatio)));

    /*
    用于MP4显示
    */
    // Encode and write frame to MP4 if recording
    if (isRecording) {
        encode_and_write_frame(frame_data, frameCount);
    }

    //stream 
    if (isStreaming) {
        encode_and_stream_frame(frame_data, frameCount);
    }

    // Clean up
    sws_freeContext(rgb_sws_ctx);
    delete[] dst_data[0];

    // Re-enqueue
    if (ioctl(v4l2_ctx.fd, VIDIOC_QBUF, &buf) < 0) {
        qDebug() << "Failed to re-enqueue buffer:" << strerror(errno);
        return;
    }
    frameCount++;
}

// yolo  绘制 yolo需要RGB888
void MainWindow::processYoloInference(uint8_t* frame_data, QImage& image)
{
    // 转换为 YOLO 模型输入（RGB888，调整尺寸）
    const int input_width = 320; // 假设模型输入尺寸
    const int input_height = 240;

    SwsContext* sws_ctx = sws_getContext(WIDTH, HEIGHT, AV_PIX_FMT_UYVY422,
        input_width, input_height, AV_PIX_FMT_RGB24,
        SWS_BILINEAR, NULL, NULL, NULL);
    
    if (!sws_ctx) {
        qDebug() << "Failed to initialize YOLO sws context";
        return;
    }

    uint8_t* rgb_data = new uint8_t[input_width * input_height * 3];
    int src_linesize[1] = { WIDTH * 2 };
    int dst_linesize[1] = { input_width * 3 };
    uint8_t* dst_data[1] = { rgb_data };
    sws_scale(sws_ctx, &frame_data, src_linesize, 0, HEIGHT, dst_data, dst_linesize);
    sws_freeContext(sws_ctx);

    //yolo输入
    image_buffer_t src_image;
    memset(&src_image, 0, sizeof(image_buffer_t));
    src_image.width  = input_width;
    src_image.height = input_height;
    src_image.format = IMAGE_FORMAT_RGB888;
    src_image.virt_addr = rgb_data;

    // rknn推理和处理
    object_detect_result_list od_results;
    if (inference_yolo11_model(&rknn_app_ctx, &src_image, &od_results) != 0) {
        qDebug() << "YOLOv11 inference failed";
        delete[] rgb_data;
        return;
    }

    //画框和概率
    QPainter painter(&image);
    int color_index = 0;
    for(int i = 0; i < od_results.count; i++){
        const unsigned char* color = colors[color_index % 19];
        color_index++;
        object_detect_result *det_result = &(od_results.results[i]);
        
        // 缩放边界框到原始图像尺寸
        float scale_x = (float)WIDTH / input_width;
        float scale_y = (float)HEIGHT / input_height;
        int left = det_result->box.left * scale_x;
        int top = det_result->box.top * scale_y;
        int right = det_result->box.right * scale_x;
        int bottom = det_result->box.bottom * scale_y;

        painter.setPen(QPen(QColor(color[0], color[1], color[2]), 2));
        painter.drawRect(left, top, right - left, bottom - top);

        QString text = QString("%1 %2%").arg(coco_cls_to_name(det_result->cls_id)).arg(det_result->prop * 100, 0, 'f', 1);
        QFont font("Arial", 10);
        painter.setFont(font);
        QRect textRect(left, top - 20, 100, 20);
        painter.fillRect(textRect, QColor(color[0], color[1], color[2]));
        painter.setPen(Qt::white);
        painter.drawText(textRect, Qt::AlignLeft | Qt::AlignVCenter, text);

        qDebug() << text << "@ (" << left << "," << top << "," << right << "," << bottom << ")";
    }
}

/*************************************************拍照*******************************************************/

void MainWindow::snapshot(){
    QMutexLocker locker(&mutex);

    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.m.planes = v4l2_ctx.planes.data();
    buf.length = v4l2_ctx.planes.size();

    //取
    if (ioctl(v4l2_ctx.fd, VIDIOC_DQBUF, &buf) < 0) {
        qDebug() << "Failed to dequeue buffer for snapshot:" << strerror(errno);
        return;
    }

    //本地临时保存
    QDir dir("/home/orangepi/saved_picture");
    QDateTime now = QDateTime::currentDateTime();  // 获取当前时间
    QString timestamp = now.toString("yyyyMMdd_HHmmss");  // 生成时间戳字符串
    
    QString filename = dir.filePath(QString("snapshot_%1.jpg").arg(timestamp));
    //                                    内存映射的缓冲区
    if(save_snapshot(static_cast<uint8_t *>(v4l2_ctx.buffers[buf.index].start), filename.toStdString().c_str()) == 0){
        qDebug() << "Snapshot saved as" << filename;
        #if OSS_OPEN
        // 上传到 OSS
        QString objectKey = QString("saved_picture/snapshot_%1.jpg").arg(timestamp);
        if (uploadToOss(filename, objectKey)) {
            qDebug() << "Snapshot uploaded to OSS:" << objectKey;
        } else {
            qDebug() << "Failed to upload snapshot to OSS";
        }
        #endif
    } else {
        qDebug() << "Failed to save snapshot";
    }

    // Re-enqueue buffer
    if (ioctl(v4l2_ctx.fd, VIDIOC_QBUF, &buf) < 0) {
        qDebug() << "Failed to re-enqueue buffer:" << strerror(errno);
    }
}

/*************************************************录制视频*******************************************************/

void MainWindow::startRecording(){
    QMutexLocker locker(&mutex);

    if(isRecording){
        qDebug() << "Already recording";
        return ;
    }

    QDir videoDir("/home/orangepi/saved_video");
    QDateTime now = QDateTime::currentDateTime();  // 获取当前时间
    QString timestamp = now.toString("yyyyMMdd_HHmmss");  // 生成时间戳字符串

    video_filename = videoDir.filePath(QString("video_%1.mp4")
                                        .arg(timestamp));
    if(rkmpp_init(video_filename.toStdString().c_str()) == 0){
        qDebug() << "initialize FFmpeg successfully";
    }


    isRecording = true;
    frameCount = 0;
    ui->recordButton->setEnabled(false);
    ui->stopButton->setEnabled(true);
    qDebug()<< "Started recording to" <<video_filename;

    #if OSS_OPEN
    // 连接信号到上传逻辑 当点击停止录制，触发上传逻辑
    connect(this, &MainWindow::recordingStopped, this, [this]() {
        QString objectKey = QString("saved_video/%1").arg(QFileInfo(video_filename).fileName());
        QFile videoFile(video_filename);
        if (videoFile.exists()) {
            qDebug() << "Video file exists, size:" << videoFile.size() << "bytes";
            if (uploadToOss(video_filename, objectKey)) {
                qDebug() << "Video uploaded to OSS:" << objectKey;
            } else {
                qDebug() << "Failed to upload video to OSS";
            }
        } else {
            qDebug() << "Video file does not exist:" << video_filename;
        }
    }, Qt::UniqueConnection); // 避免重复连接
    #endif
}

void MainWindow::stopRecording(){
    QMutexLocker locker(&mutex);

    if(!isRecording){
        qDebug() << "Not recording";
        return ;
    }

    isRecording = false;
    mpp_cleanup();
    frameCount = 0;
    ui->recordButton->setEnabled(true);
    ui->stopButton->setEnabled(false);
    qDebug()<< "Stop recording ";

    emit recordingStopped(); // 触发信号
}

/************************************************RTMP推流*******************************************************/

void MainWindow::startStreaming(){
    QMutexLocker locker(&mutex);

    if(isStreaming){
        qDebug() << "已在推流中";
        return;
    }

    const char *output_url = "rtmp://47.110.53.247:1935/live/stream"; 
    if(rkmpp_init_stream(output_url) == -1){
        qDebug() << "无法初始化推流";
        return;
    }

    isStreaming = true;
    frameCount = 0;
    updateButtonStates();
    qDebug() << "开始推流到" << output_url;
}

void MainWindow::stopStreaming(){
    {
        QMutexLocker locker(&mutex);
        if(!isStreaming){
            qDebug() << "未在推流";
            return;
        }
        isStreaming = false;
        frameCount = 0;
    }

    mpp_cleanup_stream();
    updateButtonStates();
    qDebug() << "停止推流";
}

/*************************************************Yolo*******************************************************/
// Enable per-frame YOLO inference. Refuses to start twice or when the
// model failed to load in the constructor.
void MainWindow::startYolo()
{
    QMutexLocker guard(&mutex);

    const char *reason = isYoloRunning     ? "YOLO already running"
                       : !yolo_initialized ? "YOLO model not initialized"
                                           : nullptr;
    if (reason) {
        qDebug() << reason;
        return;
    }

    isYoloRunning = true;
    updateButtonStates();
    qDebug() << "Started YOLO inference";
}

// Disable per-frame YOLO inference (no-op with a log line when idle).
void MainWindow::stopYolo()
{
    QMutexLocker guard(&mutex);

    if (isYoloRunning) {
        isYoloRunning = false;
        updateButtonStates();
        qDebug() << "Stopped YOLO inference";
    } else {
        qDebug() << "YOLO not running";
    }
}

/*************************************************图片****************************************************/
// Open the image-viewer page: ensure the snapshot directory exists and is
// readable, pause the live camera feed, load the image list and show the
// first image (or a "no images" placeholder).
void MainWindow::selectImage(){
    // Make sure the snapshot directory exists before browsing.
    QDir pictureDir("/home/orangepi/saved_picture");
    if (!pictureDir.exists()) {
        if (pictureDir.mkpath("/home/orangepi/saved_picture")) {
            qDebug() << "创建目录：/home/orangepi/saved_picture";
        } else {
            qDebug() << "无法创建目录：/home/orangepi/saved_picture";
            statusBar()->showMessage("无法创建图片目录");
            return;
        }
    }

    // Bail out if the directory cannot be read.
    if (!QFileInfo("/home/orangepi/saved_picture").isReadable()) {
        qDebug() << "目录不可读：/home/orangepi/saved_picture";
        statusBar()->showMessage("图片目录不可读");
        return;
    }

    loadImageList();

    // Pause the camera feed and switch to the viewer page (index 1).
    timer->stop();
    isCameraActive = false;
    ui->label->clear();
    stackedWidget->setCurrentIndex(1);
    updateButtonStates();

    if (imageList.isEmpty()) {
        imageUi->currentImageLabel->setText("没有找到图片");
        prevImageLabel->setText("");
        nextImageLabel->setText("");
        qDebug() << "没有找到图片";
    } else {
        currentImageIndex = 0;
        displayImage(currentImageIndex);
    }

    qDebug() << "切换到图片查看界面";
    statusBar()->showMessage("已打开图片查看器");
}

// Leave the image viewer: drop the cached image list, switch back to the
// camera page (index 0) and restart the capture timer.
void MainWindow::returnToCamera()
{
    stackedWidget->setCurrentIndex(0);
    ui->label->clear();
    imageList.clear();
    currentImageIndex = -1;
    isCameraActive = true;
    timer->start(1000 / FPS);
    updateButtonStates();
    qDebug() << "已恢复摄像头画面";
    statusBar()->showMessage("已恢复摄像头画面");
}

void MainWindow::loadImageList()
{
    imageList.clear();
    QDir dir("/home/orangepi/saved_picture");
    if (!dir.exists()) {
        qDebug() << "目录 /home/orangepi/saved_picture 不存在";
        return;
    }
    dir.setNameFilters(QStringList() << "*.jpg" << "*.png" << "*.bmp");
    dir.setSorting(QDir::Time ); //以时间正序排序 若要按时间倒序排序 --> | QDir::Reversed
    imageList = dir.entryList();
    for (int i = 0; i < imageList.size(); ++i) {
        imageList[i] = dir.filePath(imageList[i]);
    }
    qDebug() << "加载图片列表：" << imageList;
}

void MainWindow::displayImage(int index)
{
    if (index < 0 || index >= imageList.size()) {
        currentImageLabel->setText("没有图片");
        prevImageLabel->setText("");
        nextImageLabel->setText("");
        qDebug() << "无效图片索引：" << index;
        imageUi->prevButton->setEnabled(false);
        imageUi->nextButton->setEnabled(false);
        return;
    }

    // 显示当前图片
    QImage currentImage(imageList[index]);
    if (!currentImage.isNull()) {
        currentImageLabel->setPixmap(QPixmap::fromImage(currentImage.scaled(currentImageLabel->size(), Qt::KeepAspectRatio)));
    } else {
        currentImageLabel->setText("无法加载图片");
    }

    // 显示上一张图片
    int prevIndex = (index - 1 + imageList.size()) % imageList.size();
    QImage prevImage(imageList[prevIndex]);
    if (!prevImage.isNull()) {
        prevImageLabel->setPixmap(QPixmap::fromImage(prevImage.scaled(prevImageLabel->size(), Qt::KeepAspectRatio)));
    } else {
        prevImageLabel->setText("");
    }

    // 显示下一张图片
    int nextIndex = (index + 1) % imageList.size();
    QImage nextImage(imageList[nextIndex]);
    if (!nextImage.isNull()) {
        nextImageLabel->setPixmap(QPixmap::fromImage(nextImage.scaled(nextImageLabel->size(), Qt::KeepAspectRatio)));
    } else {
        nextImageLabel->setText("");
    }

    // 更新按钮状态
    imageUi->prevButton->setEnabled(index > 0 || imageList.size() > 1);
    imageUi->nextButton->setEnabled(index < imageList.size() - 1 || imageList.size() > 1);

    currentImageLabel->update();
    prevImageLabel->update();
    nextImageLabel->update();
    qDebug() << "显示图片：当前" << imageList[index] << ", 上一张" << imageList[prevIndex] << ", 下一张" << imageList[nextIndex];
}
void MainWindow::showPreviousImage()
{
    if (imageList.isEmpty()) return;

    int newIndex = (currentImageIndex - 1 + imageList.size()) % imageList.size();
    if (newIndex == currentImageIndex) return;

    // 使用动画平滑过渡
    QPropertyAnimation *anim = new QPropertyAnimation(currentImageLabel, "pos");
    anim->setDuration(500);
    anim->setStartValue(currentImageLabel->pos());
    anim->setEndValue(currentImageLabel->pos() + QPoint(currentImageLabel->width(), 0));
    anim->setEasingCurve(QEasingCurve::InOutQuad);

    connect(anim, &QPropertyAnimation::finished, this, [=]() {
        currentImageIndex = newIndex;
        displayImage(currentImageIndex);
        currentImageLabel->move(prevImageLabel->pos()); // 重置位置
    });

    anim->start(QAbstractAnimation::DeleteWhenStopped);

    qDebug() << "显示上一张图片，索引：" << newIndex;
}

void MainWindow::showNextImage()
{
    if (imageList.isEmpty()) return;

    int newIndex = (currentImageIndex + 1) % imageList.size();
    if (newIndex == currentImageIndex) return;

    // 使用动画平滑过渡
    QPropertyAnimation *anim = new QPropertyAnimation(currentImageLabel, "pos");
    anim->setDuration(500);
    anim->setStartValue(currentImageLabel->pos());
    anim->setEndValue(currentImageLabel->pos() + QPoint(-currentImageLabel->width(), 0));
    anim->setEasingCurve(QEasingCurve::InOutQuad);

    connect(anim, &QPropertyAnimation::finished, this, [=]() {
        currentImageIndex = newIndex;
        displayImage(currentImageIndex);
        currentImageLabel->move(nextImageLabel->pos()); // 重置位置
    });

    anim->start(QAbstractAnimation::DeleteWhenStopped);

    qDebug() << "显示下一张图片，索引：" << newIndex;
}


/*******************************************视频***************************************************/
void MainWindow::loadVideoList(){
    videoList.clear();
    QDir videoDir("/home/orangepi/saved_video");
    if (!videoDir.exists()) {
        qDebug() << "目录 /home/orangepi/saved_video 不存在";
        return;
    }

    videoDir.setNameFilters(QStringList() << "*.mp4" << "*.avi" << "*.mov");
    videoDir.setSorting(QDir::Time);
    videoList = videoDir.entryList();
    for (int i = 0; i < videoList.size(); ++i) {
        videoList[i] = videoDir.filePath(videoList[i]);
    }
    qDebug() << "加载视频列表：" << videoList;
}

void MainWindow::displayVideo(int index){
    // Select videoList[index] as the current video and update the nav/play
    // buttons. An out-of-range index clears the selection and disables all
    // playback controls.
    if (index < 0 || index >= videoList.size()) {
        videoLabel->setText("没有视频");
        videoUi->prevVideoButton->setEnabled(false);
        videoUi->nextVideoButton->setEnabled(false);
        videoUi->playButton->setEnabled(false);
        // Fixed: previously logged "无效图片索引" (invalid *picture* index),
        // a copy-paste leftover from displayImage().
        qDebug() << "无效视频索引：" << index;
        return;
    }
    videoFilePath = videoList[index];
    videoLabel->setText("已选择视频");
    // Navigation is possible whenever there is more than one video,
    // or when the index is not already at the respective end.
    videoUi->prevVideoButton->setEnabled(index > 0 || videoList.size() > 1);
    videoUi->nextVideoButton->setEnabled(index < videoList.size() - 1 || videoList.size() > 1);
    videoUi->playButton->setEnabled(true);
    qDebug() << "显示视频：" << videoFilePath;
}

void MainWindow::selectVideo(){
    // Make sure the video directory exists, creating it on first use.
    QDir videoDir("/home/orangepi/saved_video");
    if (!videoDir.exists()) {
        if (!videoDir.mkpath("/home/orangepi/saved_video")) {
            qDebug() << "无法创建目录：/home/orangepi/saved_video";
            //statusBar()->showMessage("无法创建视频目录");
            return;
        }
        qDebug() << "创建目录：/home/orangepi/saved_video";
    }

    // Bail out if the directory is not readable.
    QFileInfo dirInfo("/home/orangepi/saved_video");
    if (!dirInfo.isReadable()) {
        qDebug() << "目录不可读：/home/orangepi/saved_video";
        //statusBar()->showMessage("视频目录不可读");
        return;
    }

    // Refresh the list of playable files.
    loadVideoList();

    // Suspend the camera feed and switch to the video player page.
    timer->stop();
    isCameraActive = false;
    ui->label->clear();
    stackedWidget->setCurrentIndex(2); // index 2 = video player page
    updateButtonStates();

    // Preselect the first video, or show a placeholder when none exist.
    if (videoList.isEmpty()) {
        videoLabel->setText("没有找到视频");
        videoUi->prevVideoButton->setEnabled(false);
        videoUi->nextVideoButton->setEnabled(false);
        videoUi->playButton->setEnabled(false);
        qDebug() << "没有找到视频";
    } else {
        currentVideoIndex = 0;
        displayVideo(currentVideoIndex);
    }

    qDebug() << "切换到视频播放界面";
    //statusBar()->showMessage("已打开图片查看器");
}

void MainWindow::returnToMain()
{
    // Stop playback, forget the current selection, and hand the screen
    // back to the live camera.
    stopVideo();

    videoFilePath.clear();
    videoLabel->clear();
    videoList.clear();
    currentVideoIndex = -1;

    stackedWidget->setCurrentIndex(0);
    timer->start(1000 / FPS);
    isCameraActive = true;

    updateButtonStates();
    qDebug() << "已恢复摄像头画面";
    //statusBar()->showMessage("已恢复摄像头画面");
}

void MainWindow::playVideo()
{
    if (videoFilePath.isEmpty()) {
        qDebug() << "没有选择视频文件";
        //statusBar()->showMessage("请先选择视频文件");
        return;
    }

    stopVideo();

    // 重新连接 frameReady 信号，确保使用队列连接
    connect(decoderThread, &VideoDecoderThread::frameReady, this, &MainWindow::displayFrame, Qt::QueuedConnection);
    if (decoderThread->isRunning) {
        qDebug() << "playVideo: Decoder thread still running, waiting";
        decoderThread->wait();
    }

    decoderThread->setVideoFile(videoFilePath);
    decoderThread->start();
    isVideoPlaying = true;
    isVideoPaused = false;
    updateButtonStates();
    qDebug() << "开始播放视频：" << videoFilePath;
    //statusBar()->showMessage("正在播放视频");
}

void MainWindow::stopVideo()
{
    // Stop video playback: shut down the decoder thread, detach its frame
    // signal, and clear the display. Safe to call when nothing is playing.
    if (!isVideoPlaying) {
        qDebug() << "stopVideo: Not playing, returning";
        return;
    }

    // NOTE(review): `mutex` is held across decoderThread->wait().
    // displayFrame() locks the same mutex but is queued onto this (GUI)
    // thread, so the two never run concurrently — confirm the decoder
    // thread itself never locks `mutex`, or this wait() could deadlock.
    QMutexLocker locker(&mutex);
    qDebug() << "stopVideo: Stopping decoderThread";
    decoderThread->stop();
    qDebug() << "stopVideo: Waiting for decoderThread to finish";
    decoderThread->wait(); // block until the thread has fully exited
    isVideoPlaying = false;
    isVideoPaused = false;

    // Detach frameReady so no lingering signal delivers a frame after stop.
    disconnect(decoderThread, &VideoDecoderThread::frameReady, this, &MainWindow::displayFrame);

    qDebug() << "stopVideo: Clearing videoLabel";
    videoLabel->clear();
    videoLabel->update();

    updateButtonStates();
    qDebug() << "停止播放视频";
    //statusBar()->showMessage("视频播放已停止");
}

void MainWindow::showPreviousVideo()
{
    // Step backwards through the playlist (wrapping around) and play.
    if (videoList.isEmpty()) return;

    stopVideo();
    const int count = videoList.size();
    currentVideoIndex = (currentVideoIndex - 1 + count) % count;
    displayVideo(currentVideoIndex);
    playVideo();
    qDebug() << "播放上一视频，索引：" << currentVideoIndex;
}

void MainWindow::showNextVideo()
{
    // Step forwards through the playlist (wrapping around) and play.
    if (videoList.isEmpty()) return;

    stopVideo();
    const int count = videoList.size();
    currentVideoIndex = (currentVideoIndex + 1) % count;
    displayVideo(currentVideoIndex);
    playVideo();
    qDebug() << "播放下一视频，索引：" << currentVideoIndex;
}

void MainWindow::togglePause()
{
    // Flip the pause state of the running decoder; no-op when nothing plays.
    if (!isVideoPlaying) {
        qDebug() << "未播放视频，无法暂停";
        //statusBar()->showMessage("请先播放视频");
        return;
    }

    const bool nowPaused = !isVideoPaused;
    isVideoPaused = nowPaused;
    decoderThread->pause(nowPaused);
    //statusBar()->showMessage(nowPaused ? "视频已暂停" : "视频继续播放");
    if (nowPaused) {
        qDebug() << "视频暂停";
    } else {
        qDebug() << "视频恢复播放";
    }
    updateButtonStates();
}

void MainWindow::displayFrame(const QImage &image)
{
    // Slot for VideoDecoderThread::frameReady (queued, so it runs on the
    // GUI thread). Scales the decoded frame into videoLabel.
    QMutexLocker locker(&mutex);
    if (!isVideoPlaying) {
        qDebug() << "displayFrame: Video not playing, ignoring frame";
        return;
    }
    if (image.isNull() || image.width() <= 0 || image.height() <= 0) {
        qDebug() << "displayFrame: Invalid image, isNull=" << image.isNull()
                 << ", width=" << image.width() << ", height=" << image.height();
        return;
    }
    if (!videoLabel) {
        qDebug() << "displayFrame: videoLabel is null";
        return;
    }

    // `image` was already validated above, so the previous redundant
    // !image.isNull() re-check has been removed.
    videoLabel->setPixmap(QPixmap::fromImage(image.scaled(videoLabel->size(), Qt::KeepAspectRatio)));
    videoLabel->update();
}

void MainWindow::handleDecoderError(const QString &error)
{
    // Log the decoder failure and tear playback down cleanly.
    qDebug() << "解码错误：" << error;
    //statusBar()->showMessage(error);
    stopVideo();
}