#pragma once

#include "IInfer.h"
#include <opencv2/opencv.hpp>
#include "openvino/openvino.hpp"
#include <opencv2/dnn/utils/inference_engine.hpp>

// One detection result: an axis-aligned box, its confidence, and a class index.
struct DetResult {
	cv::Rect bbox;      // bounding box in source-image pixel coordinates
	float conf = 0.f;   // detection confidence; was previously left uninitialized by the default ctor (UB when read)
	int lable = -1;     // class-label index; spelling kept as "lable" for source compatibility with existing callers
	DetResult() = default;
	DetResult(cv::Rect bbox, float conf, int lable) : bbox(bbox), conf(conf), lable(lable) {}
};

// OpenVINO-backed implementation of the IInfer interface for YOLO-style models.
// Supports v5/v8 detection and segmentation post-processing (see the protected
// post_process_* overloads below). Definitions live in the corresponding .cpp;
// only trivial setters are defined inline here.
class InferOpenVino : public IInfer
{
public:
	// Defaults to object detection; change via setTaskType() before running.
	InferOpenVino() { taskType_ = task_type::infer_detect; };
	// NOTE(review): non-virtual-looking dtor declared here — assumes IInfer
	// declares a virtual destructor; confirm to avoid UB on delete-through-base.
	~InferOpenVino();


	// Selects which post-processing path runInfer() uses (detect/seg/etc.).
	void setTaskType(task_type type) { taskType_ = type; };
	// Runs inference on src and returns the accumulated detections.
	DetectionResults runInfer(cv::Mat &src);
	// Loads/compiles the model from modelPath; presumably sets suc_loadModel_ on success — verify in .cpp.
	bool loadModel(std::string modelPath);
	// Loads class names from labelFilePath into labels_; presumably sets suc_loadLabel_ — verify in .cpp.
	bool loadLabels(std::string labelFilePath);
	// Stores src for later use (e.g. visualization); see srcImage_.
	void setSrcImage(cv::Mat &src);
	// Sets the three post-processing thresholds in one call.
	// modelConfidence: minimum objectness/confidence to keep a candidate.
	// modelScore: minimum class score to keep a candidate.
	// modelNMS: IoU threshold used during non-maximum suppression.
	void setInferParams(float modelConfidence, float modelScore, float modelNMS)
	{
		modelConfidenceThreshold_ = modelConfidence;
		modelScoreThreshold_ = modelScore;
		modelNMSThreshold_ = modelNMS;
	}
	// Currently returns an empty string (no runtime info exposed).
	std::string getInferInfo() { return ""; }

protected:
	// Draws detections_ onto the stored source image (implementation in .cpp).
	void visualizeResult();
	// classesFile
	// Parses a class-name text file (wide path — likely for Windows paths) into labels_.
	void parseTextFile(std::wstring filePath);
	//void parseYamlFile(std::string filePath);

	// Draws the given detections onto img (boxes + labels).
	void draw_bbox(cv::Mat& img, std::vector<DetResult>& res);
	// Letterbox/resize img to a square of `length`; writes the scale factor
	// needed to map boxes back to the original image into *factor.
	cv::Mat pre_process(cv::Mat* img, int length, float* factor);
	// Decodes a YOLOv8 output tensor (raw float buffer) into detections.
	std::vector<DetResult> post_process_v8(float* result, float factor, int outputLength);
	// Decodes a YOLOv5 output tensor into detections.
	std::vector<DetResult> post_process_v5(float* result, float factor, int outputLength);
	// Async variant: pulls the output tensor from the infer request itself.
	std::vector<DetResult> post_process_v5_async(ov::InferRequest* infer, float factor);
	// Segmentation: output0 = detections, output1 = mask prototypes — TODO confirm against .cpp.
	std::vector<DetResult> post_process_seg(ov::Tensor output0, ov::Tensor output1, float* result, float factor, int outputLength);


private:
	task_type taskType_;                     // current inference task
	float modelConfidenceThreshold_ = 0.25;  // min confidence to keep a box
	float modelScoreThreshold_ = 0.25;       // min class score to keep a box
	float modelNMSThreshold_ = 0.45;         // NMS IoU threshold

	bool suc_loadModel_ = false;  // true once a model is loaded successfully
	bool suc_loadLabel_ = false;  // true once labels are loaded successfully

	std::vector<std::string> labels_;  // class names indexed by label id

	cv::Mat srcImage_;                          // last image set via setSrcImage()
	ov::Output<const ov::Node> input_port_;     // cached model input port
	ov::InferRequest infer_;                    // reusable OpenVINO infer request
	DetectionResults detections_;               // results of the last runInfer()
};

