﻿// main.cpp
#include <algorithm>
#include <array>
#include <cmath>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

#include <Windows.h>

#include <onnxruntime_cxx_api.h>
#include <opencv2/opencv.hpp>

using namespace std;

// One final detection after decoding and NMS.
struct OutputDet {
    int id;            // class index into the COCO name table (V8::_className)
    float confidence;  // best class score for this detection
    cv::Rect box;      // bounding box mapped back to original-image coordinates
};

// Runtime configuration passed to V8::detect.
struct Net_config
{
    string imagepath;            // path of the image to run inference on
    const ORTCHAR_T* modelpath;  // path of the ONNX model file (wide string on Windows)
    float confThreshold;         // class-confidence threshold for keeping a prediction
    float iouThreshold;          // IoU threshold used by NMS
};

// Minimal YOLOv8 detector driven by ONNX Runtime + OpenCV.
// detect() performs the whole pipeline: load model, letterbox + normalize
// the image, run the session, decode boxes, NMS, and display the result.
class V8
{
public:
    V8() { cout << "Infer start" << endl; }
    void detect(const Net_config& config);

private:
    // Network input size, longest image side, and letterbox paddings
    // (left/right/top/bottom), all filled in by letter_().
    int inpWidth{}, inpHeight{}, maxSide{}, Padwl{}, Padwr{}, Padht{}, Padhd{};
    float ratio{};  // image -> network scale factor set by letter_()

    vector<float> input_image_;         // CHW/RGB float tensor data in [0, 1]
    cv::Mat letter_(cv::Mat& img);      // aspect-preserving resize + gray padding
    void normalize_(cv::Mat img);       // HWC/BGR uchar -> CHW/RGB float

    Ort::Session* ort_session{};        // non-owning during detect(); see detect()
    // NOTE(review): these char* point into AllocatedStringPtr storage that is
    // local to detect(); they are only valid during that call.
    vector<const char*> input_names;
    vector<const char*> output_names;
    vector<vector<int64_t>> input_node_dims;
    vector<vector<int64_t>> output_node_dims;

    // COCO-80 class names; index matches the class-score channel order.
    std::vector<std::string> _className = {
            "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light",
            "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
            "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
            "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard",
            "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple",
            "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch",
            "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone",
            "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear",
            "hair drier", "toothbrush"
    };
};

// Letterbox preprocessing: scale the image uniformly so it fits inside the
// network input (inpWidth x inpHeight) while keeping its aspect ratio, then
// pad the borders with gray (127) to reach the exact input size.
// Side effects: sets maxSide, ratio and the four Pad* members used later by
// detect() to map boxes back to original-image coordinates.
cv::Mat V8::letter_(cv::Mat& img)
{
    maxSide = std::max(img.rows, img.cols);
    // Uniform scale so both dimensions fit. For the usual square input this
    // equals inpWidth / maxSide; for non-square inputs it is also correct.
    ratio = std::min(inpWidth / float(img.cols), inpHeight / float(img.rows));

    int fx = int(img.cols * ratio);
    int fy = int(img.rows * ratio);

    // The -0.1/+0.1 pair assigns an odd leftover pixel to one side only.
    Padwl = int(round((inpWidth - fx) * 0.5 - 0.1));
    Padwr = int(round((inpWidth - fx) * 0.5 + 0.1));
    // BUG FIX: vertical padding must be derived from inpHeight, not inpWidth.
    Padht = int(round((inpHeight - fy) * 0.5 - 0.1));
    Padhd = int(round((inpHeight - fy) * 0.5 + 0.1));

    cv::Mat dstimg;
    cv::resize(img, dstimg, cv::Size(fx, fy));
    cv::copyMakeBorder(dstimg, dstimg, Padht, Padhd, Padwl, Padwr,
                       cv::BORDER_CONSTANT, cv::Scalar(127, 127, 127));
    return dstimg;
}

// Convert the letterboxed HWC/BGR uchar image into the planar CHW/RGB float
// buffer (input_image_) expected by the network, scaling pixels to [0, 1].
void V8::normalize_(cv::Mat img)
{
    const int rows = img.rows;
    const int cols = img.cols;
    input_image_.resize(rows * cols * img.channels());

    for (int ch = 0; ch < 3; ++ch)
    {
        // Destination plane for this output channel.
        float* plane = input_image_.data() + ch * rows * cols;
        for (int r = 0; r < rows; ++r)
        {
            const uchar* src_row = img.ptr<uchar>(r);
            float* dst_row = plane + r * cols;
            for (int x = 0; x < cols; ++x)
            {
                // Index (2 - ch) reverses the channel order: BGR -> RGB.
                dst_row[x] = src_row[x * 3 + (2 - ch)] / 255.0f;
            }
        }
    }
}

void V8::detect(const Net_config& config)
{
    try {
        cout << "Model path: " << config.modelpath << endl;
        cout << "Conf Threshold: " << config.confThreshold << ", IoU Threshold: " << config.iouThreshold << endl;

        Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "yolov8_infer");
        Ort::SessionOptions session_options;
        session_options.SetIntraOpNumThreads(1);
        session_options.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_ALL);

        ort_session = new Ort::Session(env, config.modelpath, session_options);

        Ort::AllocatorWithDefaultOptions allocator;

        // 保存AllocatedStringPtr，确保字符串生命周期
        std::vector<Ort::AllocatedStringPtr> input_name_ptrs;
        std::vector<Ort::AllocatedStringPtr> output_name_ptrs;

        // 输入信息
        size_t numInputNodes = ort_session->GetInputCount();
        input_names.clear();
        input_node_dims.clear();
        for (size_t i = 0; i < numInputNodes; i++) {
            Ort::AllocatedStringPtr inputNamePtr = ort_session->GetInputNameAllocated(i, allocator);
            input_names.push_back(inputNamePtr.get());
            input_name_ptrs.push_back(std::move(inputNamePtr));

            Ort::TypeInfo input_type_info = ort_session->GetInputTypeInfo(i);
            auto input_tensor_info = input_type_info.GetTensorTypeAndShapeInfo();
            input_node_dims.push_back(input_tensor_info.GetShape());
        }

        // 输出信息
        size_t numOutputNodes = ort_session->GetOutputCount();
        output_names.clear();
        output_node_dims.clear();
        for (size_t i = 0; i < numOutputNodes; i++) {
            Ort::AllocatedStringPtr outputNamePtr = ort_session->GetOutputNameAllocated(i, allocator);
            output_names.push_back(outputNamePtr.get());
            output_name_ptrs.push_back(std::move(outputNamePtr));

            Ort::TypeInfo output_type_info = ort_session->GetOutputTypeInfo(i);
            auto output_tensor_info = output_type_info.GetTensorTypeAndShapeInfo();
            output_node_dims.push_back(output_tensor_info.GetShape());
        }

        inpHeight = input_node_dims[0][2];
        inpWidth = input_node_dims[0][3];

        cout << "Input dims: " << inpHeight << " x " << inpWidth << endl;

        cv::Mat frame = cv::imread(config.imagepath);
        if (frame.empty()) {
            cerr << "Failed to read image from path: " << config.imagepath << endl;
            return;
        }

        cv::Mat dstimg = letter_(frame);
        normalize_(dstimg);

        array<int64_t, 4> input_shape{1, 3, inpHeight, inpWidth};
        auto memory_info = Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU);
        Ort::Value input_tensor = Ort::Value::CreateTensor<float>(
                memory_info,
                input_image_.data(),
                input_image_.size(),
                input_shape.data(),
                input_shape.size());

        vector<const char *> input_node_names{input_names[0]};
        vector<const char *> output_node_names{output_names[0]};

        vector<Ort::Value> ort_outputs = ort_session->Run(
                Ort::RunOptions{nullptr},
                input_node_names.data(),
                &input_tensor,
                1,
                output_node_names.data(),
                1);

        float *output_data = ort_outputs[0].GetTensorMutableData<float>();
        auto output_shape = ort_outputs[0].GetTensorTypeAndShapeInfo().GetShape();

        // 假设输出shape: [1, 84, 8400]，这里84 = 4(box) + 80(classes)
        int num_pred = output_shape[2];
        int num_info = output_shape[1];

        cv::Mat output_mat(num_pred, num_info, CV_32F, output_data);
        // 转置成每行是一个预测框
        cv::Mat output = output_mat.t();

        vector<int> class_ids;
        vector<float> confidences;
        vector<cv::Rect> boxes;

        int net_width = (int) _className.size() + 4; // 80+4 = 84

        float *pdata = (float *) output.data;

        for (int i = 0; i < output.rows; ++i) {
            cv::Mat scores(1, (int) _className.size(), CV_32F, pdata + 4);
            cv::Point classIdPoint;
            double max_class_score;

            minMaxLoc(scores, nullptr, &max_class_score, nullptr, &classIdPoint);

            if (max_class_score > config.confThreshold) {
                confidences.push_back((float) max_class_score);
                class_ids.push_back(classIdPoint.x);

                float x = pdata[0];
                float y = pdata[1];
                float w = pdata[2];
                float h = pdata[3];

                int left = int((x - Padwl - 0.5 * w) / ratio);
                int top = int((y - Padht - 0.5 * h) / ratio);
                int width = int(w / ratio);
                int height = int(h / ratio);

                boxes.emplace_back(left, top, width, height);
            }
            pdata += net_width;
        }

        vector<int> nms_indices;
        cv::dnn::NMSBoxes(boxes, confidences, config.confThreshold, config.iouThreshold, nms_indices);

        cout << "Detected boxes after NMS: " << nms_indices.size() << endl;

        for (int idx: nms_indices) {
            OutputDet det{class_ids[idx], confidences[idx], boxes[idx]};
            string label = _className[det.id] + " " + to_string(det.confidence).substr(0, 4);

            cv::Scalar color = cv::Scalar(0, 255, 0);
            cv::rectangle(frame, det.box, color, 2);
            cv::putText(frame, label, cv::Point(det.box.x, det.box.y - 5),
                        cv::FONT_HERSHEY_SIMPLEX, 0.6, color, 2);
        }

        cv::imshow("Detection Result", frame);
        cout << "Press any key to exit..." << endl;
        cv::waitKey(0);
        cv::destroyAllWindows();

        delete ort_session;
        ort_session = nullptr;
    }
    catch (const Ort::Exception &e) {
        std::cerr << "ONNX Runtime exception: " << e.what() << std::endl;
    }
    catch (const std::exception &e) {
        std::cerr << "std::exception: " << e.what() << std::endl;
    }
}


// Entry point: build the configuration (hard-coded paths) and run detection.
int main()
{
    string model_path = "H:\\DWORKPLACE\\CWork\\YOLOv8ONNX\\checkpoints\\yolov8n.onnx";
    string image_path = "H:\\DWORKPLACE\\CWork\\YOLOv8ONNX\\test-img.jpg";

    // Convert the UTF-8 model path to UTF-16 for Windows, where ORTCHAR_T
    // is wchar_t. Check the conversion result instead of assuming success.
    int size_needed = MultiByteToWideChar(CP_UTF8, 0, model_path.c_str(),
                                          (int)model_path.size(), NULL, 0);
    if (size_needed <= 0) {
        std::cerr << "Failed to convert model path to a wide string." << std::endl;
        return 1;
    }
    std::wstring wmodel_path(size_needed, L'\0');
    MultiByteToWideChar(CP_UTF8, 0, model_path.c_str(), (int)model_path.size(),
                        &wmodel_path[0], size_needed);

    // ORTCHAR_T is wchar_t on Windows, so no cast is needed (the previous
    // C-style cast compiled even when the types did not match).
    const ORTCHAR_T* ort_model_path = wmodel_path.c_str();

    Net_config config{ image_path, ort_model_path, 0.5f, 0.5f };

    V8 model;
    model.detect(config);

    return 0;
}
