/*
* Copyright (c) 2022 Shenzhen Kaihong Digital Industry Development Co., Ltd. 
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#ifndef MNN_WRAPPER_H
#define MNN_WRAPPER_H

#include <dlfcn.h>

#include <algorithm>
#include <map>
#include <memory>
#include <string>
#include <vector>

#include <MNN/Interpreter.hpp>

#include "infer_framework.h"
#include "mnn_config.h"
#include "utils/log/infer_log.h"

// Tensor element types understood by the inference wrappers.
// NOTE(review): the names and numeric values appear to mirror TensorFlow's
// DataType enum (tensorflow/core/framework/types.proto) — confirm, and keep
// the numeric values stable so any type codes exchanged with other components
// stay compatible. Do not renumber or reorder.
enum DataType {
  DataType_DT_INVALID = 0,      // unset / unknown element type
  DataType_DT_FLOAT = 1,        // 32-bit IEEE-754 float
  DataType_DT_DOUBLE = 2,       // 64-bit IEEE-754 float
  DataType_DT_INT32 = 3,
  DataType_DT_UINT8 = 4,
  DataType_DT_INT16 = 5,
  DataType_DT_INT8 = 6,
  DataType_DT_STRING = 7,       // variable-length byte string
  DataType_DT_COMPLEX64 = 8,    // complex number: two 32-bit floats
  DataType_DT_INT64 = 9,
  DataType_DT_BOOL = 10,
  DataType_DT_QINT8 = 11,       // quantized signed 8-bit
  DataType_DT_QUINT8 = 12,      // quantized unsigned 8-bit
  DataType_DT_QINT32 = 13,      // quantized signed 32-bit
  DataType_DT_BFLOAT16 = 14,    // truncated 16-bit "brain" float
  DataType_DT_QINT16 = 15,      // quantized signed 16-bit
  DataType_DT_QUINT16 = 16,     // quantized unsigned 16-bit
  DataType_DT_UINT16 = 17,
  DataType_DT_COMPLEX128 = 18,  // complex number: two 64-bit floats
  DataType_DT_HALF = 19,        // 16-bit IEEE-754 half float
  DataType_DT_RESOURCE = 20,    // opaque resource handle
  DataType_DT_VARIANT = 21,     // dynamically typed value
  // Range markers for validation (e.g. bounds checks on incoming type codes).
  DataType_MIN = DataType_DT_INVALID,
  DataType_MAX = DataType_DT_VARIANT
};
/**
 * @brief InferFramework backend implemented on top of the MNN engine.
 *
 * Lifecycle (as suggested by the declared interface): Init() takes the
 * algorithm configuration, Load() builds the MNN interpreter/session,
 * SynInfer() runs one synchronous inference, Unload() tears everything down.
 * NOTE(review): actual sequencing lives in the .cpp — confirm there.
 */
class MnnWrapper : public InferFramework
{
public:
    MnnWrapper();
    virtual ~MnnWrapper();
    // Stores the algorithm configuration consulted by a later Load().
    // NOTE(review): the sibling virtuals are marked `override`; if the base
    // class also declares Init(), add `override` here too for consistency —
    // verify against infer_framework.h.
    virtual AiRetCode Init(const AlgorithmInfo &algoConfig);
    // Creates the MNN interpreter/session from the configured model.
    virtual AiRetCode Load() override;
    // Runs one synchronous inference: copies `inputs` into MNN tensors,
    // executes the session, and fills `outputs`.
    virtual AiRetCode SynInfer(const std::vector<IOTensor> &inputs, std::vector<IOTensor> &outputs) override;
    // Releases the interpreter/session and associated tensors.
    virtual AiRetCode Unload() override;

protected:
    // Releases the MNN model and session resources (presumably resetting the
    // pointer members below — confirm in the .cpp).
    void ClearModelAndSession();
    // Builds an MNN::Interpreter from the model at `model_dir`; returns the
    // raw pointer the caller takes ownership of.
    // TODO(review): takes std::string by value — prefer `const std::string&`,
    // but changing the signature requires updating the .cpp definition too.
    MNN::Interpreter* createMnnInterpreter(std::string model_dir);
    // Converts an MNN output tensor into the framework's IOTensor form.
    // TODO(review): takes MNN::Tensor by value (expensive deep copy) and the
    // parameter carries a member-style trailing underscore — consider
    // `const MNN::Tensor&` when the .cpp can be updated in lockstep.
    void GetOutputData(MNN::Tensor mnnOutput_, IOTensor &output);
    // Type/shape translation helpers between MNN tensors and IOTensor.
    void SetIOTensorType(MNN::Tensor const &mnnTensor,IOTensor &tensor);
    void SetMnnTensorType(IOTensor const &tensor,MNN::Tensor &mnnTensor);
    void SetIOTensorShape(MNN::Tensor const &mnnTensor,IOTensor &tensor);
    MNN::Tensor::DimensionType SetMnnTensorShape(IOTensor const &tensor);

protected:
    MnnConfig pdAlgoConfig_;                         // parsed algorithm/model configuration
    // Raw pointers are default-initialized to nullptr so that Unload()/
    // ClearModelAndSession() called before a successful Load() never sees an
    // indeterminate pointer. (In-class initializers are overridden by any
    // constructor member-init list, so this is a safe belt-and-braces fix.)
    MNN::Interpreter* pMnnInterpreter_ = nullptr;    // MNN model interpreter
    std::map<std::string, MNN::Tensor*> mnnOutputs_; // output tensors keyed by output name
    MNN::Tensor* mnnInputTensor = nullptr;           // input tensor of the active session
    MNN::Session *session = nullptr;                 // active MNN inference session
};

#endif // MNN_WRAPPER_H