#ifndef __NNIE_SERVICE_H__
#define __NNIE_SERVICE_H__
#include "sample_comm_nnie.h"
#include "hw_cv_type.h"
#include "utils_service.h"
#include "types.h"

#include <cstdint>
#include <iostream>
#include <memory>
#include <mutex>
#include <string>
#include <vector>

#include <opencv2/opencv.hpp>

extern GC_APP_CONFIG gc_app_config;
extern std::vector<std::string> PLATE_CHAR_LIST;

#ifdef __cplusplus
extern "C" {
#endif
#define MAX_OUTPUT_NUM 5
#define BLOB_NUM 3
#define GRID_BIAS_NUM (BLOB_NUM * 2)
#define ANCHOR_NUM 3
#define EACH_GRID_BIAS_NUM 3
#define MAX_OUTPUT_BLOB_NUM 10 // maximum number of output blobs

namespace HWApp
{
// Parameters for creating/loading an NNIE network model (consumed by NNIE::LoadModel).
typedef struct {
    uint32_t batchNum;    // number of images processed per forward pass
    int8_t loadModeModel; // model-load mode flag; semantics defined by LoadModel() -- TODO confirm
    char* modelPath;      // path to the model file (non-owning pointer)
} NnCreateParam;  // fixed: stray second ';' after the typedef removed

// One tensor (blob): NCHW dimensions plus a pointer to the underlying data.
typedef struct NNTensor{
    int n;          // batch size
    int channel;    // number of channels
    int height;     // rows
    int width;      // columns
    uint64_t *data; // NOTE(review): uint64_t* suggests a device address/handle rather than element storage -- confirm against the .cpp
} NNTensor;

// Fixed-capacity set of tensors produced/consumed by one forward pass.
typedef struct Tensors{
    int blobNum;                          // number of valid entries in tensor[]; must be <= MAX_OUTPUT_BLOB_NUM
    NNTensor tensor[MAX_OUTPUT_BLOB_NUM];
} Tensors;

// One anchor-box prior. Units are presumably pixels at network input scale -- TODO confirm.
typedef struct Anchors {
    int16_t anchorW;
    int16_t anchorH;
} Anchors;

// Software post-processing parameters for the YOLO-style detector:
// image geometry, grid/anchor layout, thresholds, and NNIE result blobs.
typedef struct HiSAMPLE_SVP_NNIE_MODEL_PARAM_S {
     HI_U32 u32OriImHeight;       // original input image height (pixels)
     HI_U32 u32OriImWidth;        // original input image width (pixels)
     HI_U32 u32BboxNumEachGrid;   // boxes predicted per grid cell
     HI_U32 u32ClassNum;          // number of object classes
     HI_U32 au32GridNumHeight[3]; // grid rows per output scale; the literal 3 appears to duplicate BLOB_NUM -- TODO confirm and unify
     HI_U32 au32GridNumWidth[3];  // grid cols per output scale
     HI_U32 u32NmsThresh;         // NMS IoU threshold; integer type, presumably fixed-point -- confirm scale factor
     HI_U32 u32ConfThresh;        // confidence threshold; presumably fixed-point -- confirm scale factor
     HI_U32 u32MaxRoiNum;         // maximum ROIs kept after post-processing
     Anchors anchors[ANCHOR_NUM][EACH_GRID_BIAS_NUM]; // anchor priors, per scale/grid slot
     SVP_MEM_INFO_S stGetResultTmpBuf; // scratch buffer used while extracting results
     SVP_DST_BLOB_S stClassRoiNum;     // per-class ROI counts
     SVP_DST_BLOB_S stDstRoi;          // output ROI coordinates
     SVP_DST_BLOB_S stDstScore;        // output ROI scores
    //  SVP_DST_BLOB_S stDstLandmark;  // (disabled) landmark output blob
} SAMPLE_SVP_NNIE_MODEL_PARAM_S;

// Min/max index pair; presumably an explicit stack frame (range [min, max])
// for an iterative sort in result post-processing -- confirm against the .cpp.
typedef struct hiSAMPLE_SVP_NNIE_STACK {
    HI_S32 s32Min;
    HI_S32 s32Max;
} SAMPLE_SVP_NNIE_STACK_S;

// Addresses one node within one segment of a loaded NNIE network.
typedef struct NNIE_DATA_INDEX_S
{
	HI_U32 u32SegIdx;  // network segment index
	HI_U32 u32NodeIdx; // node index within that segment
}NNIE_DATA_INDEX_S;

// typedef struct GC_YOLO5_LANDMARKS {
// 	HI_FLOAT f32LmX0;
// 	HI_FLOAT f32LmY0;
// 	HI_FLOAT f32LmX1;
// 	HI_FLOAT f32LmY1;
// 	HI_FLOAT f32LmX2;
// 	HI_FLOAT f32LmY2;
// 	HI_FLOAT f32LmX3;
// 	HI_FLOAT f32LmY3;
// }GC_YOLO5_LANDMARKS;

// Wrapper around the HiSilicon NNIE inference engine: model load/unload,
// input blob filling, forward pass, and post-processing for object detection
// (YOLO) and license-plate recognition (LPR).
// NOTE(review): this class is declared inside an extern "C" block; class
// members keep C++ linkage regardless, so the wrapper is misleading at best --
// confirm whether extern "C" is intentional here.
class NNIE
{
public:
    NNIE();
    NNIE(GC_YOLO_VERSION yolo_version); // selects which YOLO decode PostProcess uses; NOTE(review): single-arg ctor, consider explicit
    ~NNIE();
public:
    void Init();                               // fd init
    
    int GetFd();                    // presumably returns m_fd_alg -- confirm
    int ServiceRegister();
    int SDCNnieForward(sdc_nnie_forward_s *p_sdc_nnie_forward);
    int Forward(NNIE_DATA_INDEX_S* pstInputDataIdx, NNIE_DATA_INDEX_S* pstProcSegIdx);

    // Model lifecycle: LoadModel/ParamInit at startup, UnLoadModel/ParamDeInit at shutdown.
    int LoadModel(NnCreateParam* param);
    int ParamInit();
    int FillSrcData(const ImageInfo& img);  // copy an image into the NNIE input blob
    int FillSrcData(HI_CHAR *pcSrcYUV);     // overload: raw YUV input
    void PostProcess(std::vector<ObjBox> &objBoxs); // decode detector outputs into boxes

    // void Create(NnCreateParam* param);       //
    // Detection entry points (file / raw YUV / in-memory image).
    int InferenceFile(const char* filePath, ImageInfo &img, std::vector<ObjBox> &objBoxs);
    int InferenceYuv(HI_CHAR *pcSrcYUV, std::vector<ObjBox> &objBoxs);
    void Inference(ImageInfo* img, NNTensor* inTenser, NNTensor* outTenser);            // TODO: consider multi-batch processing for speed-up
    int32_t UnLoadModel();
    int ParamDeInit();
    int NnieParamInit();
    
    int32_t SAMPLE_SVP_NNIE_Yolov3_ParamInit();
    int32_t Yolov3SoftwareInit();
    int32_t Yolov3Deinit();
    int32_t NNIE_Yolov3_Deinit();
    void Destroy();

    // License-plate recognition (LPR) path.
    int FillSrcData(HI_U8 *pData);
    void PostLprProcess(std::string &plate, float &plate_char_conf, std::vector<float>& char_conf);
    void decode_outputs(int *indexs, float *confs, int output_size, std::string &plate, float &plate_char_conf, std::vector<float>& char_conf);
    int InferenceFile(const char* filePath, std::string &plate, float &plate_char_conf, std::vector<float>& char_conf);
    int InferenceMat(const cv::Mat& img, std::string &plate, float &plate_char_conf, std::vector<float>& char_conf);

private:
    int m_fd_alg = -1;   // device/algorithm fd; -1 = not yet initialised
    std::mutex m_mutex;  // NOTE(review): <mutex> is not included directly by this header -- relies on transitive includes
    
    std::shared_ptr<UtilsService> m_utilsSrv;

    const char* m_modelPath; // non-owning; NOTE(review): not initialised in-class -- confirm it is set before use

    SAMPLE_SVP_NNIE_MODEL_S m_NnieModel;
    SAMPLE_SVP_NNIE_PARAM_S m_NnieParam;
    SAMPLE_SVP_NNIE_MODEL_PARAM_S m_SoftwareParam; // software post-processing params (struct above)
    SAMPLE_SVP_NNIE_CFG_S   m_NnieCfg;
    NnCreateParam m_NNParam = {};

    GC_YOLO_VERSION m_yolo_version; // which YOLO decode variant to apply
};

}

#ifdef __cplusplus
}
#endif /* __cplusplus */

#endif