#include "napi/native_api.h"

#include <hilog/log.h>
#include <stdio.h>

#include <algorithm>
#include <string>
#include <vector>

#include <MNN/ImageProcess.hpp>
#include <MNN/expr/Executor.hpp>
#include <MNN/expr/ExprCreator.hpp>
#include <MNN/expr/Module.hpp>

#include <cv/cv.hpp>

using namespace MNN;
using namespace MNN::Express;
using namespace MNN::CV;

// Globally shared inference state: created by load_model(), used by inference(),
// released by unload_model(). Not thread-safe; callers must serialize access.
static std::shared_ptr<Module> net;
static std::shared_ptr<Executor::RuntimeManager> rtmgr;

// COCO class labels; the model's class index (0-79) maps directly into this list.
static const std::vector<std::string> classes{"person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};

// Optional N-API finalize callback: invoked when the ArkTS object wrapping a
// native instance is garbage-collected. Currently only logs; nothing to free.
static void DerefItem(napi_env env, void *data, void *hint) {
    (void)env;
    (void)data;
    (void)hint;
    OH_LOG_INFO(LOG_APP, "Node-API DerefItem");
}

// Create the MNN CPU runtime and load the model at `model_path` into the
// global `net`. Must be called before inference(); calling it again replaces
// the previously loaded model.
void load_model(const char* model_path) {
    const int thread = 2;
    const int precision = 0;  // BackendConfig precision mode 0 (normal)
    const int forwardType = MNN_FORWARD_CPU;
    MNN::ScheduleConfig sConfig;
    sConfig.type = static_cast<MNNForwardType>(forwardType);
    sConfig.numThread = thread;
    BackendConfig bConfig;
    bConfig.precision = static_cast<BackendConfig::PrecisionMode>(precision);
    sConfig.backendConfig = &bConfig;
    rtmgr = std::shared_ptr<Executor::RuntimeManager>(Executor::RuntimeManager::createRuntimeManager(sConfig));
    if (rtmgr == nullptr) {
        MNN_ERROR("Empty RuntimeManager\n");
        return;  // original fell through and called Module::load with a null runtime manager
    }
//    rtmgr->setCache(".cachefile");

    net = std::shared_ptr<Module>(Module::load(std::vector<std::string>{}, std::vector<std::string>{}, model_path, rtmgr));
    if (net == nullptr) {
        MNN_ERROR("Failed to load model\n");
    }
}

std::vector<std::string> inference(const char* image_path) {
    std::vector<std::string> result;
    auto original_image = imread(image_path);
    auto dims = original_image->getInfo()->dim;
    int ih = dims[0];
    int iw = dims[1];
    int len = ih > iw ? ih : iw;
    float scale = len / 640.0;
    std::vector<int> padvals { 0, len - ih, 0, len - iw, 0, 0 };
    auto pads = _Const(static_cast<void*>(padvals.data()), {3, 2}, NCHW, halide_type_of<int>());
    auto image = _Pad(original_image, pads, CONSTANT);
    image = resize(image, Size(640, 640), 0, 0, INTER_LINEAR, -1, {0., 0., 0.}, {1./255., 1./255., 1./255.});
    auto input = _Unsqueeze(image, {0});
    input = _Convert(input, NC4HW4);
    auto outputs = net->onForward({input});
    auto output = _Convert(outputs[0], NCHW);
    output = _Squeeze(output);
    // output shape: [84, 8400]; 84 means: [cx, cy, w, h, prob * 80]
    auto cx = _Gather(output, _Scalar<int>(0));
    auto cy = _Gather(output, _Scalar<int>(1));
    auto w = _Gather(output, _Scalar<int>(2));
    auto h = _Gather(output, _Scalar<int>(3));
    std::vector<int> startvals { 4, 0 };
    auto start = _Const(static_cast<void*>(startvals.data()), {2}, NCHW, halide_type_of<int>());
    std::vector<int> sizevals { -1, -1 };
    auto size = _Const(static_cast<void*>(sizevals.data()), {2}, NCHW, halide_type_of<int>());
    auto probs = _Slice(output, start, size);
    // [cx, cy, w, h] -> [y0, x0, y1, x1]
    auto x0 = cx - w * _Const(0.5);
    auto y0 = cy - h * _Const(0.5);
    auto x1 = cx + w * _Const(0.5);
    auto y1 = cy + h * _Const(0.5);
    auto boxes = _Stack({x0, y0, x1, y1}, 1);
    auto scores = _ReduceMax(probs, {0});
    auto ids = _ArgMax(probs, 0);
    auto result_ids = _Nms(boxes, scores, 100, 0.45, 0.25);
    auto result_ptr = result_ids->readMap<int>();
    auto box_ptr = boxes->readMap<float>();
    auto ids_ptr = ids->readMap<int>();
    auto score_ptr = scores->readMap<float>();
    for (int i = 0; i < 100; i++) {
        auto idx = result_ptr[i];
        if (idx < 0) break;
        auto x0 = std::max(box_ptr[idx * 4 + 0] * scale, 0.0f);
        auto y0 = std::max(box_ptr[idx * 4 + 1] * scale, 0.0f);
        auto x1 = std::min(box_ptr[idx * 4 + 2] * scale, float(iw));
        auto y1 = std::min(box_ptr[idx * 4 + 3] * scale, float(ih));
        auto class_idx = ids_ptr[idx];
        auto score = score_ptr[idx];
        std::string class_label = classes[class_idx];
        result.push_back("["+std::to_string((int) x0) + ","+std::to_string((int) y0)+ ","+std::to_string((int) x1)+ ","+std::to_string((int) y1)+ "],"+class_label+ ","+std::to_string(score));
        rectangle(original_image, {x0, y0}, {x1, y1}, {0, 0, 255}, 2);
        resize(original_image, {512,512});
        
    }
    std::string save_path = image_path;
    save_path.replace(save_path.find_last_of("."), 4, "_res.jpg");
    if (imwrite(save_path, original_image)) {
         MNN_PRINT("result image write to `res.jpg`.\n");
     }
    return result;
}

void unload_model() {
    net = nullptr;
}

// N-API binding: nativeLoad(modelPath: string) -> undefined.
// Loads the MNN model at the given path via load_model().
static napi_value nativeLoad(napi_env env, napi_callback_info info) {
    size_t argc = 1;
    napi_value args[1] = {nullptr};

    napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
    if (argc < 1) {
        return nullptr;  // missing argument: nothing to load
    }
    // First argument is the model file path (UTF-8 string); query its length first.
    size_t filePathSize = 0;
    napi_get_value_string_utf8(env, args[0], nullptr, 0, &filePathSize);
    // RAII buffer: the original `new char[]` was never freed (leak on every call).
    std::vector<char> filePath(filePathSize + 1, '\0');
    size_t filePathLength = 0;
    napi_get_value_string_utf8(env, args[0], filePath.data(), filePath.size(), &filePathLength);

    load_model(filePath.data());

    return nullptr;  // the ArkTS caller receives `undefined`
}

// N-API binding: nativeDetect(imagePath: string) -> string[].
// Runs inference() on the image and returns one "[x0,y0,x1,y1],label,score"
// string per detection.
static napi_value nativeDetect(napi_env env, napi_callback_info info) {
    size_t argc = 1;
    napi_value args[1] = {nullptr};

    napi_get_cb_info(env, info, &argc, args, nullptr, nullptr);
    if (argc < 1) {
        return nullptr;  // missing argument: nothing to detect
    }
    // First argument is the image file path (UTF-8 string); query its length first.
    size_t image_pathSize = 0;
    napi_get_value_string_utf8(env, args[0], nullptr, 0, &image_pathSize);
    // RAII buffer: the original `new char[]` was never freed (leak on every call).
    std::vector<char> image_path(image_pathSize + 1, '\0');
    size_t image_pathLength = 0;
    napi_get_value_string_utf8(env, args[0], image_path.data(), image_path.size(), &image_pathLength);

    std::vector<std::string> res = inference(image_path.data());

    // Copy each result string into a JS array.
    napi_value jsArray = nullptr;
    napi_create_array(env, &jsArray);
    for (size_t i = 0; i < res.size(); i++) {
        napi_value element = nullptr;
        // res[i].size() avoids the redundant strlen() the original performed.
        napi_create_string_utf8(env, res[i].c_str(), res[i].size(), &element);
        napi_set_element(env, jsArray, i, element);
    }
    return jsArray;
}

// N-API binding: nativeUnload() -> undefined; releases the loaded model.
// The original declared unused argc/args locals; removed.
static napi_value nativeUnload(napi_env env, napi_callback_info info) {
    (void)env;
    (void)info;
    unload_model();
    return nullptr;
}


EXTERN_C_START
// Module initializer: attaches the three native entry points to `exports`.
static napi_value Init(napi_env env, napi_value exports) {
    const napi_property_descriptor props[] = {
        {"nativeLoad", nullptr, nativeLoad, nullptr, nullptr, nullptr, napi_default, nullptr},
        {"nativeDetect", nullptr, nativeDetect, nullptr, nullptr, nullptr, napi_default, nullptr},
        {"nativeUnload", nullptr, nativeUnload, nullptr, nullptr, nullptr, napi_default, nullptr},
    };
    const size_t propCount = sizeof(props) / sizeof(props[0]);
    napi_define_properties(env, exports, propCount, props);
    return exports;
}
EXTERN_C_END
    
// Module registration record passed to napi_module_register(); `nm_modname`
// is the name the module is registered under ("mnnyolo") — presumably this
// must match the ArkTS-side import name; verify against the ArkTS project.
static napi_module demoModule = {
    .nm_version = 1,
    .nm_flags = 0,
    .nm_filename = nullptr,
    .nm_register_func = Init,  // runtime calls this to populate the exports object
    .nm_modname = "mnnyolo",
    .nm_priv = ((void*)0),
    .reserved = { 0 },
};

// Shared-library constructor: runs automatically when the .so is loaded and
// registers this N-API module with the runtime.
extern "C" __attribute__((constructor)) void RegisterEntryModule(void)
{
    napi_module_register(&demoModule);
}
