#include "Work.h"
#include <QDebug>

// OpenPose-related constants.
// Skeleton limb connections: each pair holds two indices into
// BODY_PARTS_COCO; a line is drawn between the two keypoints when both
// were detected (see processOpenPoseOutput).
const std::vector<std::pair<int, int>> Worker::POSE_PAIRS = {
    {1, 2}, {1, 5}, {2, 3}, {3, 4}, {5, 6}, {6, 7},
    {1, 8}, {8, 9}, {9, 10}, {1, 11}, {11, 12}, {12, 13},
    {1, 0}, {0, 14}, {14, 16}, {0, 15}, {15, 17}
};

// COCO 18-keypoint body-part names. Index n corresponds to heatmap
// channel n of the network output and to the indices in POSE_PAIRS.
const std::vector<std::string> Worker::BODY_PARTS_COCO = {
    "Nose", "Neck", "RShoulder", "RElbow", "RWrist", "LShoulder", "LElbow", "LWrist",
    "RHip", "RKnee", "RAnkle", "LHip", "LKnee", "LAnkle", "REye", "LEye", "REar", "LEar"
};

// Constructs the worker with a non-owning pointer to an already-loaded
// DNN. The caller retains ownership of `net`; it must outlive this Worker.
Worker::Worker(cv::dnn::Net* net, QObject* parent)
    : QObject(parent)
    , openPoseNet(net)
{
}

// Nothing to release: openPoseNet is not owned by this class.
Worker::~Worker()
{
}

// Entry point for one camera frame: optionally overlays the OpenPose
// skeleton, converts the result to a QImage, and emits frameProcessed.
// The mutex serializes frame processing across invocations.
void Worker::processFrame(cv::Mat frame, bool openPoseLoaded)
{
    QMutexLocker locker(&mutex);

    // Deep-copy before drawing: cv::Mat copies are shallow, so working
    // on `frame` directly would mutate the caller's pixel buffer.
    cv::Mat canvas = frame.clone();

    const bool netReady =
        openPoseLoaded && openPoseNet != nullptr && !openPoseNet->empty();
    if (netReady) {
        processFrameWithOpenPose(canvas);
    }

    emit frameProcessed(convertToQImage(canvas));
}

// Runs OpenPose inference on `frame` and draws the detected skeleton
// onto it in place. Callers guarantee openPoseNet is non-null and
// loaded (checked in processFrame). Any OpenCV failure is caught and
// reported as an on-frame banner instead of propagating.
void Worker::processFrameWithOpenPose(cv::Mat& frame)
{
    try {
        // Small input size keeps inference cheap; the network accepts
        // non-native resolutions at some accuracy cost.
        cv::Mat inputBlob = cv::dnn::blobFromImage(
            frame,
            1.0 / 255,              // scale pixel values to [0, 1]
            cv::Size(184, 184),
            cv::Scalar(0, 0, 0),    // no mean subtraction
            false,                  // keep BGR channel order
            false                   // no center crop
        );

        // BUG FIX: these calls were commented out, so the blob above was
        // computed and discarded every frame and no pose was ever drawn.
        openPoseNet->setInput(inputBlob);
        cv::Mat output = openPoseNet->forward();

        // Parse heatmaps and draw keypoints/limbs onto the frame.
        processOpenPoseOutput(output, frame, 0.1f);
    }
    catch (const std::exception& e) {
        // cv::Exception derives from std::exception, so this catches
        // OpenCV failures too.
        qDebug() << "OpenPose处理失败:" << e.what();
        // Annotate the frame so the UI makes the failure visible.
        cv::putText(frame, "OpenPose Error", cv::Point(10, 30),
            cv::FONT_HERSHEY_SIMPLEX, 0.6, cv::Scalar(0, 0, 255), 2);
    }
}

// Converts the raw network output (a 1 x C x H x W heatmap tensor) into
// one pixel coordinate per body part, scaled back to `originalSize`.
// Parts whose best heatmap response does not exceed `threshold` remain
// at the (-1, -1) "not detected" sentinel.
std::vector<cv::Point> Worker::parseOpenPoseOutput(const cv::Mat& output,
    const cv::Size& originalSize,
    float threshold)
{
    std::vector<cv::Point> keypoints(BODY_PARTS_COCO.size(), cv::Point(-1, -1));

    // Heatmap resolution. NOTE(review): H and W are not declared locally,
    // so they appear to be class members — confirm against the header.
    H = output.size[2];
    W = output.size[3];

    // BUG FIX: this loop was commented out, so every keypoint stayed at
    // (-1, -1) and nothing was ever drawn. Also fixed from the disabled
    // draft: maxVal is now initialized before use and the loop index is
    // size_t to avoid a signed/unsigned comparison.
    for (size_t n = 0; n < BODY_PARTS_COCO.size(); ++n) {
        // Non-owning view of the confidence map for part n. The const_cast
        // is safe: probMap is only read (cv::Mat's data ctor takes void*).
        cv::Mat probMap(H, W, CV_32F,
            const_cast<float*>(output.ptr<float>(0, static_cast<int>(n))));

        double maxVal = 0.0;
        cv::Point maxLoc;
        cv::minMaxLoc(probMap, nullptr, &maxVal, nullptr, &maxLoc);

        if (maxVal > threshold) {
            // Map the heatmap-space peak back to original image pixels.
            keypoints[n] = cv::Point(
                static_cast<int>((maxLoc.x * originalSize.width) / W),
                static_cast<int>((maxLoc.y * originalSize.height) / H)
            );
        }
    }

    return keypoints;
}

// Draws detected keypoints (red dots labeled with their part index) and
// skeleton limbs (green lines) onto `image`, using the heatmaps in
// `output` thresholded at `threshold`.
void Worker::processOpenPoseOutput(const cv::Mat& output, cv::Mat& image, float threshold)
{
    const cv::Size originalSize = image.size();
    const std::vector<cv::Point> keypoints =
        parseOpenPoseOutput(output, originalSize, threshold);

    // A part counts as detected when its point is in-bounds and not the
    // (-1, -1) sentinel (points on row/column 0 are also skipped, as in
    // the original logic).
    const auto detected = [&keypoints](size_t idx) {
        return idx < keypoints.size() &&
               keypoints[idx].x > 0 && keypoints[idx].y > 0;
    };

    // Draw each detected keypoint with its index label.
    for (size_t i = 0; i < keypoints.size(); ++i) {
        if (detected(i)) {
            cv::circle(image, keypoints[i], 4, cv::Scalar(0, 0, 255), -1);
            cv::putText(image, std::to_string(i),
                cv::Point(keypoints[i].x + 5, keypoints[i].y - 5),
                cv::FONT_HERSHEY_SIMPLEX, 0.3, cv::Scalar(255, 0, 0), 1);
        }
    }

    // Connect limb pairs when both endpoints were detected.
    // FIX: indices are compared as size_t, avoiding the signed/unsigned
    // mismatch of the previous `partA < keypoints.size()` check.
    for (const auto& limb : POSE_PAIRS) {
        const size_t partA = static_cast<size_t>(limb.first);
        const size_t partB = static_cast<size_t>(limb.second);

        if (detected(partA) && detected(partB)) {
            cv::line(image, keypoints[partA], keypoints[partB],
                cv::Scalar(0, 255, 0), 2);
        }
    }
}

// Converts a BGR cv::Mat into a deep-copied 640x480 RGB QImage suitable
// for display in the Qt UI.
QImage Worker::convertToQImage(const cv::Mat& frame)
{
    // OpenCV stores pixels as BGR; Qt's Format_RGB888 expects RGB.
    cv::Mat rgb;
    cv::cvtColor(frame, rgb, cv::COLOR_BGR2RGB);

    // Normalize to the fixed display resolution.
    cv::Mat scaled;
    cv::resize(rgb, scaled, cv::Size(640, 480));

    // Wrap the Mat's buffer, then .copy() to detach: `scaled` and its
    // pixel data are destroyed when this function returns.
    const QImage view(scaled.data,
        scaled.cols,
        scaled.rows,
        scaled.step,
        QImage::Format_RGB888);
    return view.copy();
}