/**
 * @file model_infer_vino.h
 * @brief Header file for the YOLO OpenVINO inference class.
 *
 * This file defines the structure and class for performing object detection
 * using a YOLO model with OpenVINO backend.
 *
 * @author Zongyue
 * @date 2024-11-08
 * @version 1.0
 */

#ifndef MODEL_INFER_VINO_H
#define MODEL_INFER_VINO_H

// NOTE(review): redundant with the include guard above — harmless, but one
// mechanism (preferably the guard, which is fully portable) would suffice.
#pragma once  // Ensure the header file is included only once

#include <qglobal.h>

#include <vector>  // Standard library vector for dynamic arrays
#include "common/define.h"  // Common project definitions (st_dlm_data, st_encrypt_info_ovino, ...)
#include <openvino/openvino.hpp> // OpenVINO 2.0 runtime API (ov::Core, ov::CompiledModel, ...)
#include <opencv2/opencv.hpp>    // OpenCV umbrella header
#include "prep_img_ipp.h"  // IPP-based image preprocessing helpers
#include <utility>

#include "common/common.hpp"
#include "common/device_utility.h"
// NOTE(review): duplicate of <opencv2/opencv.hpp> included above (different
// quoting style only) — one of the two should eventually be removed.
#include "opencv2/opencv.hpp"

// NOTE(review): using-directives at header scope leak into every translation
// unit that includes this header (C++ Core Guidelines SF.7). They cannot be
// removed here without auditing all includers (e.g. the unqualified `Tensor`
// member below relies on `using namespace ov;`), but new code in this header
// should prefer explicit qualification.
using namespace ov; // Use the OpenVINO namespace
using namespace std;
using namespace cv;
using namespace dnn; // Use the OpenCV DNN namespace
// #define USE_RESIZE_IPP // Uncomment this to enable IPP preprocessing

namespace vino_v6 {  // Define the vino_v6 namespace for YOLO v6 implementation

    // Define the YOLO detector class for managing model loading, inference, and post-processing
    /**
     * @brief YOLOv6 detector backed by the OpenVINO runtime.
     *
     * Owns model loading/compilation, batched inference requests, and
     * post-processing of the raw output tensors into per-image detections.
     */
    class Yolo_VINO {
    public:

        /**
         * @brief Single detection result: class id plus an axis-aligned box.
         *
         * Members carry in-class zero initializers so that a
         * default-constructed result is well-defined instead of holding
         * indeterminate values (the original aggregate left them
         * uninitialized).
         */
        struct DetRes_VINO {
            int classes{0};    ///< Class ID
            float x{0.0f};     ///< Bounding box top-left x-coordinate
            float y{0.0f};     ///< Bounding box top-left y-coordinate
            float w{0.0f};     ///< Bounding box width
            float h{0.0f};     ///< Bounding box height
            float prob{0.0f};  ///< Confidence score
        };

        Yolo_VINO();    ///< Constructor for initializing the detector
        ~Yolo_VINO();   ///< Destructor for releasing resources

        /**
         * @brief Initialize the detector and load the model file.
         * @param hdats_path Path to the model package
         *                   (presumably encrypted, given st_encrypt_info_ovino
         *                   below — TODO confirm).
         * @return true on success, false otherwise.
         */
        bool Init(const char *hdats_path);

        /// @return The batch size the model was configured for.
        int BatchSize() const;

        /// Reset the detector state and release allocated resources.
        void Reset();

        /**
         * @brief Execute detection on a batch of images.
         * @param p_imgs      Array of input images (project image wrapper).
         * @param num         Number of images pointed to by @p p_imgs.
         * @param vv_results  Output: one vector of detections per input image.
         */
        void Detect(st_dlm_data *p_imgs,
                    int num,
                    std::vector<std::vector<DetRes_VINO>> &vv_results);

    private:

        /// Post-process one batch of output tensors into per-image detections.
        std::vector<std::vector<DetRes_VINO>> postProcess_batch(
            const std::vector<cv::Mat> &img_batch,
            const std::vector<ov::Tensor> &data_tensors,
            float prob_threshold_, float nms_threshold_);

        /// Post-process a single image's output tensor (YOLOv6 layout).
        std::vector<DetRes_VINO> postProcess_yolov6(
            const cv::Mat &src_img, const ov::Tensor &data_tensor,
            float prob_threshold_, float nms_threshold_);

        // Encrypted model/configuration metadata loaded by Init().
        st_encrypt_info_ovino o_m_info;

        ov::Core core;                                // OpenVINO core object
        ov::CompiledModel compiled_model;             // Compiled model
        std::vector<ov::InferRequest> infer_requests; // Inference requests

        // Raw device buffers. NOTE(review): ownership/lifetime is managed in
        // the .cpp (presumably released by Reset()/the destructor — verify);
        // the raw pointers here mean the implicit copy operations of this
        // class would alias them, so instances should not be copied.
        std::vector<void*> m_v_dev_buffer;      // Inference I/O buffers on the device
        std::vector<int64_t> m_v_dev_buf_size;  // Size of each device buffer
        std::vector<std::vector<int>> m_grids;  // Detection grid info for post-processing

        // Qualified explicitly (was unqualified `Tensor`, relying on the
        // header-scope `using namespace ov;`) — same type, no behavior change.
        std::vector<ov::Tensor> m_v_tensors;
        int CATEGORY = 1;     // Number of classes
        int m_out_size = 0;   // Size of the inference output

    };

    /**
     * @brief Compute the overlap (IoU-style) score between two detections.
     * @param det_a First detection box.
     * @param det_b Second detection box.
     * @return Overlap ratio — presumably intersection-over-union in [0, 1];
     *         confirm against the definition in the .cpp.
     */
    float IOUCalculate(
        std::vector<Yolo_VINO::DetRes_VINO>::const_reference det_a,
        std::vector<Yolo_VINO::DetRes_VINO>::const_reference det_b);

    /**
     * @brief Non-maximum suppression over a set of detections.
     * @param detections     In/out: detections to filter; suppressed entries
     *                       are removed (or marked — verify in the .cpp).
     * @param nms_threshold_ Overlap threshold above which boxes are suppressed.
     */
    void NmsDetect(
        std::vector<Yolo_VINO::DetRes_VINO> &detections,
        float nms_threshold_);

    /**
     * @brief Draw detection results onto a copy of the image and save it.
     * @param original_img      Source image (not modified).
     * @param detection_results Boxes/classes/scores to visualize.
     * @param output_file_path  Destination path for the rendered image.
     */
    void visualize_and_save_yolov6(const cv::Mat &original_img,
                                   const std::vector<Yolo_VINO::DetRes_VINO> &detection_results,
                                   const std::string &output_file_path);

}  // End of vino_v6 namespace

#endif //MODEL_INFER_VINO_H