#include "MainWindow.h"
#include "ui_MainWindow.h"

#include <QDebug>
#include <QFileDialog>
#include <QMessageBox>
#include <QPixmap>

#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <algorithm>
#include <cfloat>
#include <cmath>
#include <complex>
#include <iostream>
#include <limits>
#include <vector>

// Fast Fourier Transform (recursive Cooley-Tukey) with a direct-DFT fallback.
//
// The previous version assumed a power-of-two length: an odd-length
// sub-array was split into two N/2 halves, silently dropping the last
// sample and leaving result[N/2..] partly value-initialized — wrong
// spectra for arbitrary contour sizes.  Odd lengths now fall back to the
// O(N^2) direct DFT, so the transform is correct for every N.  M_PI is
// replaced by a computed constant because it is not standard C++ (MSVC
// only exposes it with _USE_MATH_DEFINES).
//
// @param in  input samples
// @return    the N-point discrete Fourier transform of `in`
std::vector<std::complex<double>> fft(const std::vector<std::complex<double>>& in) {
    const int N = static_cast<int>(in.size());
    if (N <= 1) return in;

    const double PI = std::acos(-1.0);

    // Odd length: radix-2 splitting is impossible — use the direct DFT.
    if (N % 2 != 0) {
        std::vector<std::complex<double>> result(N);
        for (int k = 0; k < N; ++k) {
            std::complex<double> sum(0.0, 0.0);
            for (int n = 0; n < N; ++n) {
                sum += in[n] * std::polar(1.0, -2.0 * PI * k * n / N);
            }
            result[k] = sum;
        }
        return result;
    }

    // Even length: split into even- and odd-indexed halves.
    std::vector<std::complex<double>> even(N / 2);
    std::vector<std::complex<double>> odd(N / 2);
    for (int i = 0; i < N / 2; ++i) {
        even[i] = in[2 * i];
        odd[i]  = in[2 * i + 1];
    }

    const std::vector<std::complex<double>> q = fft(even);
    const std::vector<std::complex<double>> r = fft(odd);

    // Combine: X[k] = E[k] + w^k * O[k], X[k + N/2] = E[k] - w^k * O[k].
    std::vector<std::complex<double>> result(N);
    for (int k = 0; k < N / 2; ++k) {
        const std::complex<double> t = std::polar(1.0, -2.0 * PI * k / N) * r[k];
        result[k] = q[k] + t;
        result[k + N / 2] = q[k] - t;
    }
    return result;
}

// Build a Fourier descriptor for a contour: treat each point as a complex
// number x + iy, transform the resulting 1-D signal, and keep the
// magnitudes of the leading coefficients as the shape signature.
//
// @param contour  contour points (may be empty)
// @return         up to 10 spectral magnitudes; empty for an empty contour
std::vector<double> calculateFourierDescriptor(const std::vector<cv::Point>& contour) {
    std::vector<double> descriptor;
    if (contour.empty()) {
        return descriptor;
    }

    // Map (x, y) -> x + iy so the contour becomes a complex-valued signal.
    std::vector<std::complex<double>> signal;
    signal.reserve(contour.size());
    for (const cv::Point& p : contour) {
        signal.emplace_back(static_cast<double>(p.x), static_cast<double>(p.y));
    }

    // Keep at most the first 10 coefficients' magnitudes.
    const std::vector<std::complex<double>> spectrum = fft(signal);
    const int keep = std::min(10, static_cast<int>(spectrum.size()));
    descriptor.reserve(keep);
    for (int k = 0; k < keep; ++k) {
        descriptor.push_back(std::abs(spectrum[k]));
    }
    return descriptor;
}

// Compare two Fourier descriptors using the L1 (sum of absolute
// differences) distance; smaller means more similar.
//
// @param desc1  first descriptor
// @param desc2  second descriptor
// @return  L1 distance, or the largest representable double when the
//          sizes differ (incomparable descriptors can never win a match)
double matchFourierDescriptors(const std::vector<double>& desc1, const std::vector<double>& desc2) {
    // The original returned DBL_MAX, a <cfloat> macro this file never
    // included; std::numeric_limits is the properly-declared equivalent
    // with the identical value.
    if (desc1.size() != desc2.size()) {
        return std::numeric_limits<double>::max();
    }

    double distance = 0.0;
    for (size_t i = 0; i < desc1.size(); ++i) {
        distance += std::abs(desc1[i] - desc2[i]);
    }
    return distance;
}

MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent), ui(new Ui::MainWindow) {
    // Instantiate the widget tree designed in Qt Designer.
    ui->setupUi(this);

    // The timer tick drives frame grabbing for both camera and video input.
    connect(&timer, &QTimer::timeout, this, &MainWindow::updateFrame);

    // Wire each control button to its handler slot.
    connect(ui->openCameraButton,  &QPushButton::clicked, this, &MainWindow::openCamera);
    connect(ui->closeCameraButton, &QPushButton::clicked, this, &MainWindow::closeCamera);
    connect(ui->openVideoButton,   &QPushButton::clicked, this, &MainWindow::openVideo);
    connect(ui->pauseButton,       &QPushButton::clicked, this, &MainWindow::pauseVideo);
    connect(ui->captureButton,     &QPushButton::clicked, this, &MainWindow::captureVideo);

    // Pre-load the gesture template images used for matching.
    loadGestureTemplates();
}

// Destructor: release the Designer-generated UI object; child widgets are
// reclaimed through Qt's parent-child ownership.
MainWindow::~MainWindow() {
    delete ui;
}

void MainWindow::loadGestureTemplates() {
    gestureTemplates.clear();
    for (int i = 1; i <= 5; ++i) { // 只加载1到5的模板
        for (int j = 1; j <= 5; ++j) {
            QString templatePath = QString("C:/Users/70403/Desktop/hand/hand_number/gesture_templates/%1_%2.bmp").arg(i).arg(j);
            cv::Mat templateImage = cv::imread(templatePath.toStdString(), cv::IMREAD_GRAYSCALE);
            if (templateImage.empty()) {
                qDebug() << "无法加载手势模板:" << templatePath;
                continue;
            }
            gestureTemplates.push_back(templateImage);
        }
    }
    qDebug() << "手势模板加载完成，共加载" << gestureTemplates.size() << "个模板";
}

// Segment skin-colored pixels: threshold in YCrCb space, mask the source
// image, then clean the result with an opening and a closing pass.
//
// @param input  BGR frame
// @return       frame with non-skin pixels zeroed out
cv::Mat MainWindow::extractSkin(const cv::Mat& input) {
    // YCrCb separates luma from chroma, making skin thresholds more stable.
    cv::Mat ycrcb;
    cv::cvtColor(input, ycrcb, cv::COLOR_BGR2YCrCb);

    // Empirical Cr/Cb bounds for skin tones.
    cv::Mat mask;
    cv::inRange(ycrcb, cv::Scalar(0, 138, 67), cv::Scalar(255, 173, 133), mask);

    // Copy only the masked (skin) pixels from the original frame.
    cv::Mat result;
    input.copyTo(result, mask);

    // Morphological cleanup: open removes speckles, close fills small holes.
    const cv::Mat element = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
    cv::morphologyEx(result, result, cv::MORPH_OPEN, element);
    cv::morphologyEx(result, result, cv::MORPH_CLOSE, element);

    return result;
}

// Start live capture and kick off the frame-update timer.
// NOTE(review): device index 1 usually selects a *secondary* camera even
// though the original comment said "default" (index 0) — confirm which
// device is intended.
void MainWindow::openCamera() {
    capture.open(1);
    if (!capture.isOpened()) {
        qDebug() << "无法打开摄像头";
        return;
    }
    timer.start(30); // fire updateFrame() every 30 ms (~33 fps)
    isPaused = false;
}

// Shut down the active capture source and reset the UI to its idle state.
void MainWindow::closeCamera() {
    // Nothing to do when no capture device/file is open.
    if (!capture.isOpened()) {
        return;
    }

    capture.release();                        // free the device
    timer.stop();                             // stop the frame ticker
    ui->videoLabel->clear();                  // blank the preview area
    ui->gestureLabel->setText("手势识别结果"); // reset the result label
    qDebug() << "摄像头已关闭";
}

void MainWindow::openVideo() {
    QString videoPath = ui->videoPathLineEdit->text(); // 使用用户输入的路径
    if (videoPath.isEmpty()) {
        videoPath = QFileDialog::getOpenFileName(this, "选择视频文件", "", "Video Files (*.mp4 *.avi)");
        if (videoPath.isEmpty()) {
            return;
        }
    }
    capture.open(videoPath.toStdString());
    if (!capture.isOpened()) {
        qDebug() << "无法打开视频文件";
        return;
    }
    timer.start(30); // 设置帧率为30fps
    isPaused = false;
}

// Toggle pause state and mirror it on the button caption
// ("继续" = resume, "暂停" = pause).
void MainWindow::pauseVideo() {
    isPaused = !isPaused;
    ui->pauseButton->setText(isPaused ? "继续" : "暂停");
}

// Snapshot the most recently decoded frame into a one-frame MJPG video
// file at the user-specified save path.
// NOTE(review): only a single frame is written — if continuous recording
// was intended, the writer needs to live across updateFrame() calls.
void MainWindow::captureVideo() {
    if (!capture.isOpened()) {
        qDebug() << "没有打开视频源";
        return;
    }

    savePath = ui->savePathLineEdit->text();
    if (savePath.isEmpty()) {
        qDebug() << "未指定保存路径";
        return;
    }

    // Guard against capturing before the first frame has been decoded:
    // a 0x0 frame size would make VideoWriter::open fail or misbehave.
    if (frame.empty()) {
        qDebug() << "没有可保存的视频帧";
        return;
    }

    cv::VideoWriter writer;
    const int fourcc = cv::VideoWriter::fourcc('M', 'J', 'P', 'G');
    const double fps = 25.0;
    writer.open(savePath.toStdString(), fourcc, fps, frame.size());
    if (!writer.isOpened()) {
        qDebug() << "无法创建视频文件";
        return;
    }

    writer.write(frame);
    writer.release();
}

void MainWindow::updateFrame() {
    if (!isPaused && capture.read(frame)) {
        // 提取皮肤颜色
        cv::Mat skinFrame = extractSkin(frame);

        // 显示视频帧
        cv::cvtColor(skinFrame, skinFrame, cv::COLOR_BGR2RGB);
        QImage qimage(skinFrame.data, skinFrame.cols, skinFrame.rows, static_cast<int>(skinFrame.step), QImage::Format_RGB888);
        ui->videoLabel->setPixmap(QPixmap::fromImage(qimage).scaled(ui->videoLabel->size(), Qt::KeepAspectRatio));

        medianBlur(skinFrame, skinFrame, 5);

        // 转换为灰度图
        cv::Mat grayFrame;
        cv::cvtColor(skinFrame, grayFrame, cv::COLOR_BGR2GRAY);

        // 提取轮廓
        std::vector<std::vector<cv::Point>> contours;
        cv::findContours(grayFrame, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

        if (!contours.empty()) {
            // 找到最大轮廓
            std::vector<cv::Point> largestContour = *std::max_element(contours.begin(), contours.end(), [](const std::vector<cv::Point>& a, const std::vector<cv::Point>& b) {
                return cv::contourArea(a) < cv::contourArea(b);
            });

            // 检测手指
            std::vector<cv::Point> hullPoints;
            cv::convexHull(largestContour, hullPoints);

            std::vector<int> hullIndices;
            cv::convexHull(largestContour, hullIndices, false);

            std::vector<cv::Vec4i> defects;
            cv::convexityDefects(largestContour, hullIndices, defects);

            int fingerCount = 0;
            for (const auto& defect : defects) {
                int startIdx = defect[0];
                int endIdx = defect[1];
                int farIdx = defect[2];
                int depth = defect[3];

                if (depth > 10000) { // 阈值可以根据实际情况调整
                    fingerCount++;
                }
            }

            // 显示手指数量
            ui->gestureLabel->setText(QString("手势数字: %1").arg(fingerCount));
        } else {
            ui->gestureLabel->setText("未识别到手势");
        }
    }
}
