#include "trt_predictor.h"

#include <cassert>
#include <chrono>
#include <iostream>
#include <vector>


// Model configuration. Typed constexpr constants replace the original
// #defines: they are scoped, debuggable, and the size expressions cannot
// be broken by operator precedence at a use site (the original macro
// bodies `3 * 464 * 464` were unparenthesized).
constexpr const char* MODEL_PATH  = "./ppliteseg.trt";
constexpr const char* INPUT_NAME  = "x";
constexpr const char* OUTPUT_NAME = "argmax_0.tmp_0";
constexpr int         DEVICE      = 0;                // CUDA device id
constexpr int         INPUT_SIZE  = 3 * 464 * 464;    // CHW float count expected by the model
constexpr int         OUTPUT_SIZE = 1 * 464 * 464;    // one int (argmax class id) per pixel

/// Driver: configures a TRTPredictor, benchmarks inference on a dummy
/// image, and prints the average latency plus the output tensor dims.
int main()
{
    TRTPredictor predictor;

    // Configure the predictor with explicit checks instead of assert().
    // The original wrapped these calls in assert(...), so under NDEBUG the
    // configuration calls themselves were compiled out and the model was
    // never loaded in a release build.
    if (!predictor.load_model(MODEL_PATH) ||
        !predictor.set_input_name(INPUT_NAME) ||
        !predictor.set_output_name(OUTPUT_NAME) ||
        !predictor.set_device_id(DEVICE) ||
        !predictor.set_input_typesize(sizeof(float)) ||
        !predictor.set_output_typesize(sizeof(int)))
    {
        std::cerr << "failed to configure predictor" << std::endl;
        return 1;
    }

    // init predictor
    if (!predictor.init())
    {
        std::cerr << "failed to init predictor" << std::endl;
        return 1;
    }

    // Dummy input image. NOTE: the model input is 3x464x464 (INPUT_SIZE);
    // the original built a 464x400 image, which makes blobFromImage()
    // produce fewer floats than INPUT_SIZE and lets inference() read past
    // the end of the blob. Use the full 464x464.
    cv::Mat img = cv::Mat::zeros(cv::Size(464, 464), CV_8UC3);

    float* input = blobFromImage(img);
    std::vector<int> output(OUTPUT_SIZE);   // owned buffer, no manual delete

    // warm up the predictor (first runs pay CUDA/engine setup cost)
    for (int i = 0; i < 5; i++)
    {
        predictor.inference(input, INPUT_SIZE, output.data());
    }

    // Benchmark: steady_clock is monotonic; system_clock (used originally)
    // can jump if the wall clock is adjusted mid-measurement.
    constexpr int kRuns = 10;
    const auto start = std::chrono::steady_clock::now();
    for (int i = 0; i < kRuns; i++)
    {
        predictor.inference(input, INPUT_SIZE, output.data());
    }
    const auto end = std::chrono::steady_clock::now();
    const auto total_ms = std::chrono::duration_cast<std::chrono::milliseconds>(end - start).count();
    std::cout << total_ms / static_cast<double>(kRuns) << "ms" << std::endl;

    // Print the output tensor dimensions reported by the engine.
    Dims out_dims = predictor.get_output_dims();
    std::cout << "output_size:";
    for (int j = 0; j < out_dims.nbDims; j++)
    {
        std::cout << out_dims.d[j] << ",";
    }
    std::cout << std::endl;

    // blobFromImage() presumably allocates with new[] (original code freed
    // it the same way) — TODO confirm ownership against its definition.
    delete[] input;

    // destroy predictor
    predictor.destroy();

    return 0;
}