//
// Created by pulsarv on 19-12-8.
//

#ifndef MOBILESEARCH_NET_WRAPPERS_H
#define MOBILESEARCH_NET_WRAPPERS_H

#include <map>
#include <list>
#include <string>
#include <utility>
#include <vector>
#include <base/common.hpp>
#include <inference_engine.hpp>
#include <base/ocv_common.hpp>


namespace MobileSearch {


    /// License Plate Recognition wrapper around an OpenVINO ExecutableNetwork.
    /// Typical usage: construct with a Core and model, then per frame call
    /// createInferRequest() -> setImage() -> (run inference) -> getResults().
    /// Method bodies live in the corresponding .cpp; only the contract is visible here.
    class Lpr {
    public:
        Lpr() = default;

        /// Loads and compiles the LPR model on the given device.
        /// @param ie            Inference Engine Core used to read/compile the network.
        /// @param deviceName    Target device (e.g. "CPU", "GPU").
        /// @param xmlPath       Path to the model's IR .xml file.
        /// @param autoResize    Whether to let the plugin resize inputs automatically.
        /// @param pluginConfig  Extra device/plugin configuration key-value pairs.
        /// @param model_lpr_path  NOTE(review): appears to duplicate xmlPath — confirm
        ///                        against the .cpp which one is actually consumed.
        /// @param auto_resize     NOTE(review): appears to duplicate autoResize — same
        ///                        concern; consider collapsing the pairs in a future
        ///                        interface revision.
        Lpr(InferenceEngine::Core &ie, const std::string &deviceName, const std::string &xmlPath, bool autoResize,
            const std::map<std::string, std::string> &pluginConfig, std::string model_lpr_path, bool auto_resize);

        /// Creates a fresh InferRequest bound to the compiled network.
        InferenceEngine::InferRequest createInferRequest();

        /// Copies the plate region of `img` (given by `plateRect`) into the
        /// request's input blob(s) ahead of inference.
        void setImage(InferenceEngine::InferRequest &inferRequest, const cv::Mat &img, cv::Rect plateRect);

        /// Decodes the completed request's output into the recognized plate text.
        /// Presumably requires inference to have finished — verify in the .cpp.
        std::string getResults(InferenceEngine::InferRequest &inferRequest);

    private:
        // Maximum number of decoded characters per plate; value-initialized to 0
        // here, presumably set from the model's output shape in the ctor — confirm.
        int maxSequenceSizePerPlate{};
        // Network tensor names resolved at load time (image input, sequence input, output).
        std::string LprInputName;
        std::string LprInputSeqName;
        std::string LprOutputName;
        InferenceEngine::Core ie_;  // The only reason to store a device as to assure that it lives at least as long as ExecutableNetwork
        InferenceEngine::ExecutableNetwork net;
    };
}
#endif
