/*
* Copyright (c) 2022 Shenzhen Kaihong Digital Industry Development Co., Ltd. 
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#ifndef NCNN_WRAPPER_H
#define NCNN_WRAPPER_H

#include <algorithm>
#include <dlfcn.h>
#include <map>
#include <vector>

#include "utils/log/infer_log.h"
#include "infer_framework.h"
#include "ncnn_config.h"
#include "net.h"

class NcnnWrapper : public InferFramework
{
public:
    NcnnWrapper();
    virtual ~NcnnWrapper();
    virtual AiRetCode Init(const AlgorithmInfo &algoConfig);
    virtual AiRetCode Load() override;
    virtual AiRetCode SynInfer(const std::vector<IOTensor> &inputs, std::vector<IOTensor> &outputs) override;
    virtual AiRetCode Unload() override;

protected:
    void ClearModelAndSession();
    ncnn::Extractor createNcnnExtracor(std::string model_dir, std::string weight_dir);
    void GetOutputData(ncnn::Mat ncnnOutputs_, IOTensor &outputs);
    void SetIOTensorType(ncnn::Mat const &ncnnTensor, IOTensor &tensor);
    void SetMnnTensorElemsize(IOTensor const &tensor, ncnn::Mat &ncnnTensor);

protected:
    NcnnConfig pdAlgoConfig_;
};

#endif // MINDSPORE_WRAPPER_H
