/*
* Copyright (c) 2022 Shenzhen Kaihong Digital Industry Development Co., Ltd. 
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "include/test.h"

#include <getopt.h>
#include <stdio.h>
#include <unistd.h>

#include <cstdlib>
#include <cstring>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "protocol/retcode_inner/infer_retcode_inner.h"
#include "utils/log/infer_log.h"

using namespace OHOS::AI_STD;

// Byte width of a single element for each tensor data type.
// Used to convert an element count into a byte count when walking a
// tensor's raw output buffer (see ShowDataInfo).
std::map<TensorType, int> dataType2Size = {
    {UINT8, 1},
    {INT8, 1},
    {UINT16, 2},
    {INT16, 2},
    {UINT32, 4},
    {INT32, 4},
    {FLOAT16, 2},
    {FLOAT32, 4},
    {INT64, 8},
    {UINT64, 8}
};

// Free every tensor buffer held in datainfo, then drop all tensor entries.
// Buffers were allocated with malloc (see SetInputTensor), so they are
// released with free; already-null buffers are skipped.
void DestroyDataInfo(DataInfo &datainfo) {
    LOG_INFO("Start Release output buffer\n");
    for (auto &tensor : datainfo) {
        if (tensor.buffer.first == nullptr) {
            continue;
        }
        LOG_INFO("Release output buffer addr[%p]\n",
             tensor.buffer.first);
        free(tensor.buffer.first);
        tensor.buffer.first = nullptr;
    }

    datainfo.clear();
}

// Debug helper: hex-dump each tensor's raw buffer to stdout, 10 bytes per row.
// All call sites in this file are currently commented out (see SyncInferProcess).
void ShowDataInfo(DataInfo &datainfo) {
    std::vector<IOTensor>::iterator iter;
    for (iter = datainfo.begin(); iter != datainfo.end(); ++iter) {
        if ((*iter).buffer.first != nullptr) {
            LOG_INFO("sync infer output IOTensor addr[%p] buffer.addr[%p]\n",
                 &(*iter), (*iter).buffer.first);
            // Bytes per element for this tensor's data type; defaults to 1
            // when the type is missing from the dataType2Size table.
            int singleSize = 1;
            std::map<TensorType, int>::iterator it = dataType2Size.find((*iter).type);
            
            if (it != dataType2Size.end()) {
                singleSize = it->second;
            }
            LOG_INFO("sync infer output data:\n");
            int count = 0;
            // NOTE(review): this treats buffer.second as an ELEMENT count
            // (total bytes = second * singleSize). If the engine that fills
            // the output tensors stores a BYTE count in buffer.second, this
            // loop over-reads the buffer — confirm the producer's convention.
            for (int i = 0; i < (*iter).buffer.second * singleSize; i++) {
                count += 1;
                printf("%x ",
                    *((reinterpret_cast<uint8_t*>((*iter).buffer.first)) + i));
                if (count % 10 == 0) {
                    printf("\n");  // line break after every 10 bytes
                }
            }
            printf("\n");
        }
    }
}

/**
 * Fill inputTensor with the framework-specific shape/type and allocate a
 * dummy input buffer with every byte set to 1.
 *
 * @param framework     one of "ncnn", "paddlelite", "mnn"
 * @param inputTensor   output parameter; its buffer is malloc'd here and must
 *                      be released by the caller (see DestroyDataInfo)
 * @return AI_RETCODE_SUCCESS on success, AI_RETCODE_NULL_PARAM on unknown
 *         framework or allocation failure
 */
int SetInputTensor(const std::string &framework, IOTensor *inputTensor) {
    inputTensor->name = "input";
    inputTensor->type = UINT8;
    inputTensor->layout = NCHW;
    int32_t inputSize = 0;  // element count implied by the tensor shape
    if (framework == "ncnn") {
        inputTensor->type = FLOAT32;
        inputTensor->shape = {1, 3, 227, 227};
        inputSize = 227 * 227 * 3;
    } else if (framework == "paddlelite") {
        inputTensor->shape = {1, 3, 224, 224};
        inputSize = 224 * 224 * 3;
    } else if (framework == "mnn") {
        inputTensor->type = FLOAT32;
        inputTensor->shape = {1, 3, 300, 300};
        inputSize = 300 * 300 * 3;
    } else {
        // Previously an unknown framework fell through to malloc(0);
        // fail fast instead of handing the engine an empty buffer.
        return AI_RETCODE_NULL_PARAM;
    }

    // BUGFIX: the old code allocated inputSize BYTES even for FLOAT32
    // tensors, under-allocating 4x relative to the declared shape.
    const size_t bytesPerElement =
        (inputTensor->type == FLOAT32) ? sizeof(float) : sizeof(uint8_t);
    const size_t bufferBytes = static_cast<size_t>(inputSize) * bytesPerElement;

    void *data = malloc(bufferBytes);
    if (data == nullptr) {
        return AI_RETCODE_NULL_PARAM;
    }
    memset(data, 1, bufferBytes);
    // buffer.second keeps the original element-count value so downstream
    // consumers observe the same size as before this fix.
    inputTensor->buffer = std::make_pair(data, inputSize);

    return AI_RETCODE_SUCCESS;
}

/**
 * Run two consecutive synchronous inferences on pAiInfer, releasing the
 * output buffers after each run, then destroy the engine.
 *
 * @param pAiInfer  inference engine; must be non-null and already Create()d
 * @param inputs    input tensors (caller retains ownership of their buffers)
 * @param outputs   filled by SyncExecute; freed here via DestroyDataInfo
 * @return AI_RETCODE_SUCCESS on success, AI_RETCODE_FAILURE/NULL_PARAM otherwise
 */
int SyncInferProcess(AiInference *pAiInfer, IOTensors &inputs, IOTensors &outputs)
{
    if (pAiInfer == nullptr) {
        LOG_ERROR("sync inference failed, engine is null\n");
        return AI_RETCODE_NULL_PARAM;
    }
    int retcode = pAiInfer->SyncExecute(inputs, outputs);
    if (retcode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("sync inference failed, retcode=[%d]\n", retcode);
        return AI_RETCODE_FAILURE;
    }
    // BUGFIX: outputs.size() is size_t; printing it with %d is undefined
    // behavior on platforms where size_t and int differ — use %zu.
    LOG_INFO("First syncExecute success, output.size=%zu \n", outputs.size());
    //ShowDataInfo(outputs);
    DestroyDataInfo(outputs);
    
    LOG_INFO("Start second sync inference\n");
    retcode = pAiInfer->SyncExecute(inputs, outputs);
    if (retcode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("sync inference failed, retcode=[%d]\n", retcode);
        return AI_RETCODE_FAILURE;
    }
    LOG_INFO("Second SyncExecute success, output.size=%zu \n", outputs.size());

    //ShowDataInfo(outputs);
    DestroyDataInfo(outputs);
    
    retcode = pAiInfer->Destroy();
    if (retcode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("inference engine destroy failed, retcode=[%d]\n", retcode);
        return AI_RETCODE_FAILURE;
    }
    LOG_INFO("Inference engine has been destroyed \n");
    
    LOG_INFO("Ai inference task manager test success.\n");
    return AI_RETCODE_SUCCESS;
}

int TestPaddlelitInfer(const char *modelPath)
{
    LOG_INFO("start paddlelite framework inference, modelPath=%s\n", modelPath);
    ModelPathInfo modelPathInfo = {.type = NET_COMBINED_TYPE, .path = modelPath};
    std::vector<ModelPathInfo> modelPaths;
    modelPaths.emplace_back(modelPathInfo);

    IOTensor inputTensor;
    int retCode = SetInputTensor("paddlelite", &inputTensor);
    if (retCode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("set inputTensor failed, retCode=%d\n", retCode);
        return retCode;
    }

    IOTensors inputs;
    inputs.push_back(inputTensor);
    IOTensors outputs;

    AiInference* pAiInfer = new AiInference();
    if (pAiInfer == nullptr) {
        LOG_ERROR("new AiInference failed!\n");
        DestroyDataInfo(inputs);
        return AI_RETCODE_NULL_PARAM;
    }
    LOG_INFO("AiInference instance has been created\n");

    int32_t retcode = pAiInfer->Create(AI_FRAMEWORK_ID_PADDLELITE, resnet50, modelPaths, paddleliteVersion);
    if (retcode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("create paddlelite inference engine failed, retcode=[%d]\n", retcode);
        delete pAiInfer;
        DestroyDataInfo(inputs);
        return AI_RETCODE_FAILURE;
    }
    LOG_INFO("Inference engine has been created\n");

    retCode = SyncInferProcess(pAiInfer, inputs, outputs);
    delete pAiInfer;
    DestroyDataInfo(inputs);

    return retCode;
}

int TestNcnnInfer(const char *modelPath, const char *weightPath)
{
    LOG_INFO("start ncnn framework inference, modelPath=%s weightPath=%s\n", modelPath, weightPath);
    ModelPathInfo modelPathInfo = {.type = NET_TOPO_FILE_TYPE, .path = modelPath};
    ModelPathInfo weightPathInfo = {.type = WEIGHT_FILE_TYPE, .path = weightPath};
    std::vector<ModelPathInfo> modelPaths;
    modelPaths.emplace_back(modelPathInfo);
    modelPaths.emplace_back(weightPathInfo);
    
    IOTensor inputTensor;
    int retCode = SetInputTensor("ncnn", &inputTensor);
    if (retCode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("set inputTensor failed, retCode=%d\n", retCode);
        return retCode;
    }

    IOTensors inputs;
    inputs.push_back(inputTensor);
    IOTensors outputs;

    AiInference* pAiInfer = new AiInference();
    if (pAiInfer == nullptr) {
        LOG_ERROR("new AiInference failed!\n");
        DestroyDataInfo(inputs);
        return AI_RETCODE_NULL_PARAM;
    }
    LOG_INFO("AiInference instance has been created\n");

    int32_t retcode = pAiInfer->Create(AI_FRAMEWORK_ID_NCNN, squeezenet_v1, modelPaths, ncnnVersion);
    if (retcode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("create ncnn inference engine failed, retcode=[%d]\n", retcode);
        delete pAiInfer;
        DestroyDataInfo(inputs);
        return AI_RETCODE_FAILURE;
    }
    LOG_INFO("Inference engine has been created\n");

    retCode = SyncInferProcess(pAiInfer, inputs, outputs);
    delete pAiInfer;
    DestroyDataInfo(inputs);

    return retCode;
}

int TestMnnInfer(const char *modelPath)
{
    LOG_INFO("start mnn framework inference, modelPath=%s\n", modelPath);
    ModelPathInfo modelPathInfo = {.type = NET_COMBINED_TYPE, .path = modelPath};
    std::vector<ModelPathInfo> modelPaths;
    modelPaths.emplace_back(modelPathInfo);
    
    IOTensor inputTensor;
    int retCode = SetInputTensor("mnn", &inputTensor);
    if (retCode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("set inputTensor failed, retCode=%d\n", retCode);
        return retCode;
    }
    IOTensors inputs;
    inputs.push_back(inputTensor);

    IOTensors outputs;
    AiInference* pAiInfer = new AiInference();
    if (pAiInfer == nullptr) {
        LOG_ERROR("new AiInference failed!\n");
        DestroyDataInfo(inputs);
        return AI_RETCODE_NULL_PARAM;
    }
    LOG_INFO("AiInference instance has been created\n");

    int32_t retcode = pAiInfer->Create(AI_FRAMEWORK_ID_MNN, mobilenetssd, modelPaths, mnnVersion);
    if (retcode != AI_RETCODE_SUCCESS) {
        LOG_ERROR("create mnn inference engine failed, retcode=[%d]\n", retcode);
        delete pAiInfer;
        DestroyDataInfo(inputs);
        return AI_RETCODE_FAILURE;
    }
    LOG_INFO("Inference engine has been created\n");

    retCode = SyncInferProcess(pAiInfer, inputs, outputs);
    delete pAiInfer;
    DestroyDataInfo(inputs);

    return retCode;
}

// Command-line option tables for getopt_long:
//   -m/--model_path   model file path
//   -w/--weight_path  weight file path (ncnn only)
//   -f/--framework    backend name: "paddlelite" | "ncnn" | "mnn"
//   -l/--loop_count   number of test iterations (default 1)
const char* short_options = "m:w:f:l:";
const struct option long_options[] = {
    // use the named getopt constant instead of the magic value 1
    {"model_path", required_argument, nullptr, 'm' },
    {"weight_path", required_argument, nullptr, 'w' },
    {"framework", required_argument, nullptr, 'f' },
    {"loop_count", required_argument, nullptr, 'l' },
    {nullptr, 0, nullptr, 0 },  // terminator entry required by getopt_long
};

/**
 * Parse command-line options and run the selected framework's inference
 * test loop_count times (default 1). Returns the last test's return code
 * (AI_RETCODE_SUCCESS if no framework matched).
 */
int main(int argc, char **argv)
{
    LOG_INFO("start inference test, para count: %d\n", argc);
    int c;
    std::string modelPath = "";
    std::string weightPath = "";  // was misspelled "weightPaht"
    std::string framework = "";
    int loop_count = 1;
    while ((c = getopt_long(argc, argv, short_options, long_options, NULL)) != -1)
    {
        switch (c)
        {
        case 'm':
            modelPath.assign(optarg);
            break;
        case 'w':
            weightPath.assign(optarg);
            break;
        case 'f':
            framework.assign(optarg);
            break;
        case 'l':
            loop_count = atoi(optarg);
            break;
        default:
            // unknown option: getopt_long already printed a diagnostic
            break;
        }
    }
    
    int retcode = AI_RETCODE_SUCCESS;
    for (int i = 0; i < loop_count; i++) {
        // std::string comparison instead of strcmp on c_str()
        if (framework == "paddlelite") {
            retcode = TestPaddlelitInfer(modelPath.c_str());
        } else if (framework == "ncnn") {
            retcode = TestNcnnInfer(modelPath.c_str(), weightPath.c_str());
        } else if (framework == "mnn") {
            retcode = TestMnnInfer(modelPath.c_str());
        }

        usleep(20000);  // 20 ms pause between iterations
    }

    return retcode;
}

