#ifndef __BSNN_MODEL_LOAD__
#define __BSNN_MODEL_LOAD__

#include <string.h>  // memcpy

#include <chrono>    // std::chrono::milliseconds
#include <iostream>
#include <memory>    // std::shared_ptr / std::make_shared
#include <string>    // std::string
#include <thread>    // std::this_thread::sleep_for

#include "bsnn_user_api.h"

class BSNN_MODEL
{
public:
    BSNN_MODEL(const std::string& model_dir) : model_dir_(model_dir)
    {
        InitModel(model_dir_);
    }

    ~BSNN_MODEL()
    {
        ReleaseModel();
        
    }

    void InitModel(const std::string& model_dir)
    {
        if(bsnn_model_load(&bsnn_model_, model_dir.c_str()) != BSNN_SUCC)
        {
            printf("bsnn: load model failed!\n");
            ReleaseModel();
            model_init_succ_ = false;
            return;
        }

        printf("bsnn: load model done!\n");
        bsnn_user_instance_cfg_t u_ins_cfg;
        u_ins_cfg.queue_type = GET_IN_OUT_NON_BLOCKING;
        u_ins_cfg.in_type = USER_ADDR;
        u_ins_cfg.input_bufferq_size = 3;
        u_ins_cfg.output_bufferq_size = 3;
        u_ins_cfg.in_out_timeout_period = 5000;

        if(bsnn_instance_create(&bsnn_model_, &bsnn_instance_, &u_ins_cfg) != BSNN_SUCC)
        {
            printf("bsnn: creat model instance failed!\n");
            ReleaseModel();
            model_init_succ_ = false;
            return;
        }

        model_init_succ_ = true;   
    }


    void ReleaseModel()
    {
        bsnn_all_destroy();
    }

    bool Run(uint8_t* input_data, size_t len)
    {
        if(!model_init_succ_)
            return false;
        if(!output_buffer_released_)
            ReleaseOutputBuffer();

        //get input buffer
        bsnn_user_input_t bsnn_u_input;
        if(bsnn_input_get(&bsnn_instance_, &bsnn_u_input) != BSNN_SUCC)
        {
            printf("bsnn: input get failed\n");
            return false;
        }

 
        
        // 将输入数据数组拷贝到bsnn输入buffer中
        uint8_t* buf = (uint8_t*)(bsnn_u_input.frame_ts_usr_addr[0]);
        memcpy(buf, input_data, len);

        //set input data
        if(bsnn_input_set(&bsnn_instance_, &bsnn_u_input) != BSNN_SUCC)
        {
            printf("bsnn: input set failed!\n");
            return false;
        }

        
        // net inference
        int ret_bsnn = -1;
        
        do{
            ret_bsnn = bsnn_instance_run(&bsnn_instance_);
            // if queue is empty, sleep a while then try again.
            if(BSNN_INPUT_QUEUE_EMPTY == ret_bsnn || BSNN_OUTPUT_QUEUE_EMPTY == ret_bsnn)
            {
                std::this_thread::sleep_for(std::chrono::milliseconds(1));
            }
        } while(BSNN_INPUT_QUEUE_EMPTY == ret_bsnn || BSNN_OUTPUT_QUEUE_EMPTY == ret_bsnn);

        if(ret_bsnn != BSNN_SUCC) {
            printf("bsnn: run model failed!!\n");
            return false;
        }

        //get output
        do{
            ret_bsnn = bsnn_output_get(&bsnn_instance_, &bsnn_u_output_, &post_idx_);
            if(BSNN_SUCC != ret_bsnn)
            {
                printf("bsnn: failed to get net output!\n");
                return false;
            }
        }while(BSNN_OUTPUT_QUEUE_EMPTY == ret_bsnn);

        // 获取模型输出结果返回ture
        return true;

    }

    //返回指向输出的智能指针
    std::shared_ptr<bsnn_user_output_t> GetModelOutput()
    {
        return std::make_shared<bsnn_user_output_t>(bsnn_u_output_);
    }

    bool ReleaseOutputBuffer()
    {
        if(bsnn_output_release(&bsnn_instance_, post_idx_) != BSNN_SUCC)
        {
            printf("bsnn: release output buffer failed!\n");
            output_buffer_released_ = false;
            return false;
        }
        output_buffer_released_ = true;
        return true;
    }

public:
    std::string model_dir_;
    bool model_init_succ_;
    bool output_buffer_released_ = true;
    bsnn_user_instance_t bsnn_instance_;
    bsnn_user_model_t bsnn_model_;
    bsnn_user_output_t bsnn_u_output_;
    int post_idx_ = -1;

};


#endif