#include "camera.h"
#include "ui_camera.h"
#include "showpic.h"

#include <QDateTime> // timestamped snapshot filenames
#include <vector>    // RAII frame buffers in updateFrame()/captureImage()

// 自定义clamp函数
// Constrain `value` to the inclusive range [min, max].
inline int clamp(int value, int min, int max) {
    if (value < min) {
        return min;
    }
    if (value > max) {
        return max;
    }
    return value;
}

camera::camera(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::camera)
{
    ui->setupUi(this);

    // Every control button shares the same background image.
    const QString buttonStyle = QStringLiteral("border-image: url(./btn1.jpg);");
    ui->pushButton->setStyleSheet(buttonStyle);
    ui->pushButton_2->setStyleSheet(buttonStyle);
    ui->pushButton_3->setStyleSheet(buttonStyle);
    ui->pushButton_4->setStyleSheet(buttonStyle);
    ui->pushButton_5->setStyleSheet(buttonStyle);

    // Paint the window background from an image file.
    QPalette pal = palette();
    pal.setBrush(QPalette::Background, QBrush(QPixmap("./bg.jpg")));
    setPalette(pal);

    // Make sure the snapshot output directory exists.
    QDir dir;
    if (!dir.exists(captureDir)) {
        dir.mkdir(captureDir);
    }

    // Frame-refresh timer; started/stopped by the open/close toggle button.
    timer = new QTimer(this);
    connect(timer, &QTimer::timeout, this, &camera::updateFrame);
}

camera::~camera()
{
    // Release V4L2 resources (buffer mappings + device fd) before tearing
    // down the Qt widget tree.
    destroyCamera();
    delete ui;
}

void camera::on_pushButton_clicked()
{
    if (QWidget *parent = this->parentWidget()) {
        parent->show();
        parent->raise();
    }
    this->close();
}

// Open/close toggle for the camera preview.
void camera::on_pushButton_2_clicked()
{
    if (isCameraRunning) {
        // Tear the running stream down.
        stopCamera();
        destroyCamera();
        isCameraRunning = false;
        ui->pushButton_2->setText("开启");
        timer->stop();
        return;
    }

    // Bring the stream up only if device initialisation succeeds
    // (initCamera() reports success/failure as a bool).
    if (!initCamera()) {
        return;
    }
    startCamera();
    isCameraRunning = true;
    ui->pushButton_2->setText("关闭");
    timer->start(33); // ~30 fps refresh
}

void camera::on_pushButton_3_clicked()
{
    // Delegates to the open/close button. NOTE(review): this TOGGLES the
    // camera state rather than strictly closing it (the original comment
    // said "close the camera") — pressing it while the camera is off will
    // turn it on.
    on_pushButton_2_clicked();
}

// Snapshot button: only meaningful while the preview is running.
void camera::on_pushButton_4_clicked()
{
    if (!isCameraRunning) {
        return;
    }
    captureImage();
}

void camera::on_pushButton_5_clicked()
{
    // Open the picture-browser window and hide this one.
    // NOTE(review): picWindow is parented to `this`, so Qt only frees it when
    // the camera window is destroyed; repeated clicks accumulate hidden
    // windows until then — consider Qt::WA_DeleteOnClose.
    showpic *picWindow = new showpic(this);
    this->hide();
    picWindow->show();
}

bool camera::initCamera() // 改为返回bool
{
    const char *deviceName = "/dev/video7";
    fd = open(deviceName, O_RDWR);
    if (fd == -1) {
        perror("打开摄像头失败");
        return false;
    }

    // 1. 枚举支持的格式
    struct v4l2_fmtdesc fmtDesc;
    memset(&fmtDesc, 0, sizeof(fmtDesc));
    fmtDesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmtDesc.index = 0;

    qDebug() << "支持的摄像头格式:";
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtDesc) == 0) {
        qDebug() << "格式索引:" << fmtDesc.index
                 << "描述:" << reinterpret_cast<const char*>(fmtDesc.description)
                 << "格式码:" << QString().sprintf("%c%c%c%c",
                                                  fmtDesc.pixelformat & 0xFF,
                                                  (fmtDesc.pixelformat >> 8) & 0xFF,
                                                  (fmtDesc.pixelformat >> 16) & 0xFF,
                                                  (fmtDesc.pixelformat >> 24) & 0xFF);
        fmtDesc.index++;
    }

    // 2. 获取当前格式
    struct v4l2_format curFmt;
    memset(&curFmt, 0, sizeof(curFmt));
    curFmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(fd, VIDIOC_G_FMT, &curFmt) == 0) {
        qDebug() << "当前格式: 宽度=" << curFmt.fmt.pix.width
                 << "高度=" << curFmt.fmt.pix.height
                 << "格式=" << QString().sprintf("%c%c%c%c",
                                                curFmt.fmt.pix.pixelformat & 0xFF,
                                                (curFmt.fmt.pix.pixelformat >> 8) & 0xFF,
                                                (curFmt.fmt.pix.pixelformat >> 16) & 0xFF,
                                                (curFmt.fmt.pix.pixelformat >> 24) & 0xFF);
    }

    // 3. 设置格式
    struct v4l2_format vfmt = {};
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // 尝试使用当前格式
    vfmt.fmt.pix.width = curFmt.fmt.pix.width ? curFmt.fmt.pix.width : 640;
    vfmt.fmt.pix.height = curFmt.fmt.pix.height ? curFmt.fmt.pix.height : 480;
    vfmt.fmt.pix.pixelformat = curFmt.fmt.pix.pixelformat ?
                               curFmt.fmt.pix.pixelformat :
                               V4L2_PIX_FMT_YUYV;

    // 尝试设置格式
    if (ioctl(fd, VIDIOC_S_FMT, &vfmt) < 0) {
        perror("设置格式失败");

        // 尝试默认的YUYV格式
        qDebug() << "尝试使用默认YUYV格式";
        vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
        if (ioctl(fd, VIDIOC_S_FMT, &vfmt) < 0) {
            perror("再次设置格式失败");

            // 尝试MJPEG格式
            qDebug() << "尝试使用MJPEG格式";
            vfmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
            if (ioctl(fd, VIDIOC_S_FMT, &vfmt) < 0) {
                perror("设置MJPEG格式失败");
                ::close(fd);
                return false;
            }
        }
    }

    // 4. 获取实际设置的分辨率
    memset(&vfmt, 0, sizeof(vfmt));
    vfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(fd, VIDIOC_G_FMT, &vfmt) == 0) {
        qDebug() << "实际设置格式: 宽度=" << vfmt.fmt.pix.width
                 << "高度=" << vfmt.fmt.pix.height
                 << "格式=" << QString().sprintf("%c%c%c%c",
                                                vfmt.fmt.pix.pixelformat & 0xFF,
                                                (vfmt.fmt.pix.pixelformat >> 8) & 0xFF,
                                                (vfmt.fmt.pix.pixelformat >> 16) & 0xFF,
                                                (vfmt.fmt.pix.pixelformat >> 24) & 0xFF);

        // 保存实际分辨率和格式
        captureWidth = vfmt.fmt.pix.width;
        captureHeight = vfmt.fmt.pix.height;
        pixelFormat = vfmt.fmt.pix.pixelformat;
    } else {
        perror("获取实际格式失败");
        ::close(fd);
        return false;
    }

    // 申请内核缓冲区
    v4l2_requestbuffers reqbuffer = {};
    reqbuffer.count = 4;
    reqbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuffer.memory = V4L2_MEMORY_MMAP;

    if (ioctl(fd, VIDIOC_REQBUFS, &reqbuffer) < 0) {
        perror("申请缓冲区失败");
        ::close(fd);
        return false;
    }

    // 内存映射
    v4l2_buffer mapbuffer = {};
    for (int i = 0; i < 4; i++) {
        mapbuffer.index = i;
        mapbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

        if (ioctl(fd, VIDIOC_QUERYBUF, &mapbuffer) < 0) {
            perror("查询缓冲区失败");
            ::close(fd);
            return false;
        }

        mptr[i] = (unsigned char *)mmap(NULL, mapbuffer.length,
                                       PROT_READ | PROT_WRITE,
                                       MAP_SHARED, fd, mapbuffer.m.offset);
        size[i] = mapbuffer.length;

        if (ioctl(fd, VIDIOC_QBUF, &mapbuffer) < 0) {
            perror("入队缓冲区失败");
            ::close(fd);
            return false;
        }
    }
    return true;
}

void camera::startCamera()
{
    // Begin streaming: the driver starts filling the queued mmap buffers.
    // Assumes initCamera() succeeded (fd open, buffers queued).
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) < 0) {
        perror("启动摄像头失败");
    }
}

void camera::stopCamera()
{
    // Stop streaming; the driver abandons any in-flight captures.
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) < 0) {
        perror("停止摄像头失败");
    }
}

// Unmap the V4L2 buffers and close the device. Safe to call repeatedly:
// fd == -1 makes it a no-op, and each mapping pointer is cleared after
// munmap so a second pass cannot unmap the same region twice.
void camera::destroyCamera()
{
    if (fd == -1) {
        return;
    }

    for (int i = 0; i < 4; i++) {
        if (mptr[i]) {
            munmap(mptr[i], size[i]);
            mptr[i] = nullptr; // FIX: was left dangling -> double munmap risk
            size[i] = 0;
        }
    }

    ::close(fd); // global close, not QWidget::close
    fd = -1;
}

void camera::getFrame(unsigned char *buffer)
{
    v4l2_buffer readbuffer = {};
    readbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(fd, VIDIOC_DQBUF, &readbuffer) < 0) {
        perror("获取帧失败");
        return;
    }

    memcpy(buffer, mptr[readbuffer.index], readbuffer.length);

    if (ioctl(fd, VIDIOC_QBUF, &readbuffer) < 0) {
        perror("重新入队失败");
    }
}

// Convert a packed YUYV (YUV 4:2:2) frame to tightly packed RGB888.
// YUYV stores two horizontally adjacent pixels in 4 bytes [Y0 U Y1 V]:
// each pixel has its own luma while the chroma pair (U, V) is shared.
void camera::yuyvToRgb(unsigned char *yuyvdata, unsigned char *rgbdata, int width, int height)
{
    const int pairCount = width * height / 2;
    for (int p = 0; p < pairCount; p++) {
        const unsigned char *src = &yuyvdata[p * 4]; // one 2-pixel YUYV group
        unsigned char *dst = &rgbdata[p * 6];        // two RGB888 pixels

        const int luma0 = src[0];
        const int cb    = src[1] - 128; // U, centered on zero
        const int luma1 = src[2];
        const int cr    = src[3] - 128; // V, centered on zero

        // BT.601-style conversion, truncated to int and clamped to [0, 255].
        dst[0] = clamp(int(luma0 + 1.4075 * cr), 0, 255);
        dst[1] = clamp(int(luma0 - 0.3455 * cb - 0.7169 * cr), 0, 255);
        dst[2] = clamp(int(luma0 + 1.7790 * cb), 0, 255);
        dst[3] = clamp(int(luma1 + 1.4075 * cr), 0, 255);
        dst[4] = clamp(int(luma1 - 0.3455 * cb - 0.7169 * cr), 0, 255);
        dst[5] = clamp(int(luma1 + 1.7790 * cb), 0, 255);
    }
}

void camera::updateFrame()
{
    int bufferSize = captureWidth * captureHeight * 2; // 默认YUYV的大小

    if (pixelFormat == V4L2_PIX_FMT_MJPEG) {
        // MJPEG格式需要更大的缓冲区
        bufferSize = captureWidth * captureHeight * 3;
    }

    unsigned char *yuyvBuffer = new unsigned char[bufferSize];
    getFrame(yuyvBuffer);

    // 根据格式处理图像
    if (pixelFormat == V4L2_PIX_FMT_YUYV) {
        unsigned char *rgbBuffer = new unsigned char[captureWidth * captureHeight * 3];
        yuyvToRgb(yuyvBuffer, rgbBuffer, captureWidth, captureHeight);

        QImage img(rgbBuffer, captureWidth, captureHeight, QImage::Format_RGB888);
        ui->label->setPixmap(QPixmap::fromImage(img).scaled(ui->label->size(), Qt::KeepAspectRatio));

        delete[] rgbBuffer;
    }
    else if (pixelFormat == V4L2_PIX_FMT_MJPEG) {
        // 直接加载MJPEG图像
        QImage img;
        if (img.loadFromData(yuyvBuffer, bufferSize, "JPEG")) {
            ui->label->setPixmap(QPixmap::fromImage(img).scaled(ui->label->size(), Qt::KeepAspectRatio));
        } else {
            qDebug() << "加载MJPEG图像失败";
        }
    }

    delete[] yuyvBuffer;
}

void camera::captureImage()
{
    int bufferSize = captureWidth * captureHeight * 2; // 默认YUYV的大小

    if (pixelFormat == V4L2_PIX_FMT_MJPEG) {
        bufferSize = captureWidth * captureHeight * 3;
    }

    unsigned char *yuyvBuffer = new unsigned char[bufferSize];
    getFrame(yuyvBuffer);

    QString fileName = captureDir + "/capture_" + QDateTime::currentDateTime().toString("yyyyMMdd_hhmmss");

    if (pixelFormat == V4L2_PIX_FMT_YUYV) {
        unsigned char *rgbBuffer = new unsigned char[captureWidth * captureHeight * 3];
        yuyvToRgb(yuyvBuffer, rgbBuffer, captureWidth, captureHeight);

        QImage img(rgbBuffer, captureWidth, captureHeight, QImage::Format_RGB888);
        fileName += ".bmp";
        img.save(fileName);

        delete[] rgbBuffer;
    }
    else if (pixelFormat == V4L2_PIX_FMT_MJPEG) {
        fileName += ".jpg";
        QFile file(fileName);
        if (file.open(QIODevice::WriteOnly)) {
            file.write((const char*)yuyvBuffer, bufferSize);
            file.close();
        } else {
            qDebug() << "保存MJPEG图像失败";
        }
    }

    qDebug() << "抓拍保存到:" << fileName;
    delete[] yuyvBuffer;
}
