//
// Created on 2025/10/15.
//
// Node APIs are not fully supported. To resolve "interface cannot be found"
// compilation errors, include "napi/native_api.h".

#ifndef MSLITEVIDEOENHANCE_MS_MODEL_H
#define MSLITEVIDEOENHANCE_MS_MODEL_H
#include <future>
#include <string>
#include <vector>

#include <mindspore/context.h>
#include <mindspore/model.h>
#include <mindspore/status.h>
#include <mindspore/tensor.h>
#include <mindspore/types.h>
/// Thin RAII wrapper around a MindSpore Lite `OH_AI_ModelHandle`.
///
/// The model can be created synchronously (static factory functions) or
/// asynchronously (the *Async methods, whose result is retrieved through
/// `future_load_model_` / `GetHandle()`). The wrapper owns `handle` and
/// destroys it in the destructor.
class MSModel {
public:
    MSModel() = default;

    // This class owns the underlying C handle and frees it in the destructor.
    // Copying (or moving) would leave two wrappers pointing at the same
    // handle and trigger a double OH_AI_ModelDestroy, so both are disabled.
    MSModel(const MSModel &) = delete;
    MSModel &operator=(const MSModel &) = delete;
    MSModel(MSModel &&) = delete;
    MSModel &operator=(MSModel &&) = delete;

    ~MSModel() {
        // Resolve any in-flight async load first so the future does not hand
        // back a handle after this owner is gone.
        // NOTE(review): if GetHandle() calls future_load_model_.get() on a
        // never-started future it may throw std::future_error — confirm the
        // implementation guards against an invalid future.
        GetHandle();
        if (handle != nullptr) {
            OH_AI_ModelDestroy(&handle);
        }
    }

    /// Synchronously build a MindSpore Lite model on the CPU backend.
    /// @param file_name path of the .ms model file to load.
    /// @return the created model handle (presumably null on failure — verify
    ///         against the .cpp implementation).
    static OH_AI_ModelHandle CreateMSLiteModelCPU(const std::string file_name);

    /// Synchronously build a MindSpore Lite model on the NNRT (NPU) backend.
    /// @param file_name path of the .ms model file to load.
    static OH_AI_ModelHandle CreateMSLiteModelNNRT(const std::string file_name);

    /// Start loading the model asynchronously on the NNRT backend; the result
    /// is expected to be delivered via `future_load_model_`.
    void CreateMsLiteModelNNRTAsync(const std::string file_name);

    /// Start loading the model asynchronously on the CPU backend.
    void CreateMsLiteModelCPUAsync(const std::string file_name);

    /// Reload the model with a different execution context.
    /// @param is_enable_npu true to target the NPU (NNRT), false for CPU.
    void SwitchPredictContext(const std::string file_name, bool is_enable_npu);

    /// Copy a 3-D float buffer into the model input at `tensor_index`.
    /// @return status code (semantics defined in the .cpp implementation).
    int FillInputTensor(size_t tensor_index, std::vector<std::vector<std::vector<float>>> &input_data);

    /// Fill the model input from a raw float array. The caller retains
    /// ownership of `input_arr`; it must stay valid for the duration of the
    /// call.
    int SetModelInput(float *input_arr);

    /// Run inference and return a pointer to the output data. The returned
    /// buffer is owned by the model; do not free it, and do not use it after
    /// the model is destroyed.
    const float *Predict();

    /// Resolve a pending async load and store the result into `handle`.
    void GetHandle();

    // Pending result of an asynchronous model load.
    std::future<OH_AI_ModelHandle> future_load_model_;
    // Owned MindSpore Lite model handle; destroyed in ~MSModel().
    OH_AI_ModelHandle handle = nullptr;
};

#endif // MSLITEVIDEOENHANCE_MS_MODEL_H
