//
// Created by wumingli on 2023/2/10.
//


#include <android/log.h>
#include <EGL/egl.h>

#include <cstddef>
#include <memory>
#include <string>

#include "tensorflow/lite/interpreter.h"
#include "tensorflow/lite/interpreter_builder.h"
#include "tensorflow/lite/model.h"
#include "tensorflow/lite/model_builder.h"
#include "tensorflow/lite/op_resolver.h"
#include "tensorflow/lite/create_op_resolver.h"
#include "tensorflow/lite/kernels/register.h"
#include "tensorflow/lite/delegates/gpu/api.h"
#include "tensorflow/lite/delegates/gpu/cl/api.h"
#include "tensorflow/lite/delegates/gpu/common/model.h"
#include "tensorflow/lite/delegates/gpu/common/model_builder.h"

#include "espcn.h"

using namespace std;
using namespace tflite;
using namespace tflite::gpu;
using namespace tflite::gpu::cl;

namespace espcn {

// Evaluates `expr` (an absl::Status-returning expression); on failure logs the
// status message under `tag` and makes the enclosing function return false.
//
// Notes:
//  - The original logged under the literal tag "#expr" (the stringize operator
//    does nothing inside a string literal) and ignored `tag` entirely.
//  - Status::message() returns a string_view that is NOT guaranteed to be
//    null-terminated, so the message is copied via the (data, size) aware
//    std::string(string_view) constructor, never via .data() alone.
//  - do/while(0) makes the macro behave as a single statement after `if`.
#define RETURN_ERROR(tag, expr)                                       \
    do {                                                              \
        auto status_ = (expr);                                        \
        if (!status_.ok()) {                                          \
            __android_log_print(ANDROID_LOG_ERROR, tag, "%s",         \
                std::string(status_.message()).c_str());              \
            return false;                                             \
        }                                                             \
    } while (0)

    class ModelHolderCLImpl : ModelHolder {
    public:
        ModelHolderCLImpl(const void *modelData, size_t modelBytes) :
                modelData(modelData),
                modelBytes(modelBytes),
                model(FlatBufferModel::VerifyAndBuildFromBuffer(
                        reinterpret_cast<const char *>(modelData), modelBytes)) {
        }

        ~ModelHolderCLImpl() {
            modelData = nullptr;
        }

        bool configure() {
            ObjectDef glTexDef;
            glTexDef.data_layout = DataLayout::DHWC4;
            glTexDef.data_type = DataType::FLOAT32;
            glTexDef.object_type = ObjectType::OPENGL_TEXTURE;
            glTexDef.user_provided = true;

            GraphFloat32 gpuModel;
            RETURN_ERROR("BuildFromFlatBuffer",
                         BuildFromFlatBuffer(std::move(*model), *CreateOpResolver(), &gpuModel));

            InferenceEnvironmentOptions clEnvOptions;
            clEnvOptions.
                    egl_context = eglGetCurrentContext();
            clEnvOptions.
                    egl_display = eglGetCurrentDisplay();
            RETURN_ERROR("NewInferenceEnvironment",
                         NewInferenceEnvironment(clEnvOptions, &clEnv, nullptr));

            cl::InferenceOptions clOptions;
            unique_ptr<InferenceBuilder> builder;
            RETURN_ERROR("NewInferenceBuilder",
                         clEnv->NewInferenceBuilder(clOptions, std::move(gpuModel), &builder));

            RETURN_ERROR("SetInputObjectDef", builder->SetInputObjectDef(0, glTexDef));
            RETURN_ERROR("SetOutputObjectDef", builder->SetOutputObjectDef(0, glTexDef));

            RETURN_ERROR("Build", builder->Build(&runner));

            return true;
        }

        bool runModel(int hInSsbo, int hOutSsbo) {
            RETURN_ERROR("SetInputObject", runner->SetInputObject(0, OpenGlBuffer(hInSsbo)));
            RETURN_ERROR("SetOutputObject", runner->SetOutputObject(0, OpenGlBuffer(hOutSsbo)));
            RETURN_ERROR("Run", runner->Run());
            return true;
        }

        const void *modelData = nullptr;
        size_t modelBytes;
        unique_ptr<FlatBufferModel> model;
        InterpreterOptions options;

        unique_ptr<InferenceEnvironment> clEnv;
        unique_ptr<InferenceRunner> runner;
    };

#ifdef MODEL_HOLDER_CL_IMPL

    // Factory for the OpenCL-backed implementation. `modelData` must point at
    // a serialized TFLite flatbuffer of `modelBytes` bytes and must outlive
    // the returned holder (the buffer is referenced, not copied). The caller
    // owns the result and must release it with deleteModelHolder().
    // (Fixes the "modalData" parameter-name typo.)
    ModelHolder *ModelHolder::createModelHolder(const void *modelData, size_t modelBytes) {
        return new ModelHolderCLImpl(modelData, modelBytes);
    }

    // Destroys a holder produced by createModelHolder(). The downcast to the
    // concrete type is required because ModelHolder's destructor may not be
    // virtual; static_cast (instead of the original C-style cast) lets the
    // compiler verify the hierarchy. Safe on nullptr (delete nullptr is a
    // no-op). Only pass pointers obtained from createModelHolder().
    void ModelHolder::deleteModelHolder(ModelHolder *holder) {
        delete static_cast<ModelHolderCLImpl *>(holder);
    }

#endif

};