/******************************************************************************

                  版权所有 (C), 2019-2029, SDC OS 开源软件小组所有

 ******************************************************************************
  文 件 名   : user_app.cpp
  版 本 号   : 初稿
  作    者   :  s30001871
  生成日期   : 2020年7月4日
  最近修改   :
  功能描述   : 用户自定义算法模型入口
  函数列表   :
  修改历史   :
  1.日    期   : 2020年7月4日
    作    者   :  s30001871
    修改内容   : 创建文件

******************************************************************************/
#include "user_app.h"

#include <math.h>
#include <chrono>
#include <iostream>
#include <fstream>
#include <codecvt>
#include "spdlog/stopwatch.h"

#include "sdc_def_ext.h"
#include "hw_app_log.h"
#include "hw_app_config.h"
#include "EventSubscriber.h"
#include "AppEpoll.h"
#include "cv_utils.h"
#include "lpr.h"
#include "CvxText.h"
#include "hw_ptz_service.hpp"
#include "illegal_parking_tracker.h"

#include "basic_mem.h"
#include "hw_app_metadata.h"

namespace HWApp
{

#define DEFAULT_CHANNEL_ID 101              //默认IPC通道

// Factory: build the concrete Yolov3App implementation for the requested
// application type. Returns nullptr for types with no implementation.
std::shared_ptr<Yolov3App> CreateYolov3App(AppType type)
{
    if (type == AppType::APP_TYPE_LPD) {
        return std::make_shared<AIPlus>();
    }
    // No other app types are supported yet.
    return nullptr;
}

// =====================

AIPlus::AIPlus(void) : Yolov3App() 
{
    // Default scale ratio until the real video/input sizes are known.
    m_mapRatio = 1.f;

    // Every worker-thread flag starts false; StartAllThread() flips the
    // relevant ones when it actually spawns the threads.
    b_event_subscriber_thread_run = false;
    b_upload_state_image_thread_run = false;
    b_track_thread_run = false;
    b_inout_thread_run = false;
    b_algv_thread_run = false;

    // Combine the build-time YOLO_VERSION flag with the YOLO_V5 bit.
    m_yolo_version = static_cast<GC_YOLO_VERSION>(YOLO_VERSION | GC_YOLO_VERSION::YOLO_V5);

    LOG_DEBUG("AIPlus constructor");
}

AIPlus::~AIPlus(void)
{
    // Release the per-camera tracker instances allocated in StartAllThread()
    // (and re-allocated in TrackThrd on camera-key changes).
    // NOTE(review): worker threads are not joined here — presumably they have
    // been stopped before destruction; confirm the shutdown sequence.
    for (auto &entry : m_byte_trackers) {
        delete entry.second;
    }
    m_byte_trackers.clear();

    for (auto &entry : m_inout_trackers) {
        delete entry.second;
    }
    m_inout_trackers.clear();

    LOG_DEBUG("AIPlus destructor");
}

int32_t AIPlus::ConvertForwardResult(std::vector<ObjBox> &objBoxs, SDC_SSD_RESULT_S &stResult)
{
    if (objBoxs.empty()) {
        return HW_OK;
    }
    stResult.numOfObject = objBoxs.size();
    int i = 0;
    for (auto &box: objBoxs) {
        // SDC_SSD_OBJECT_INFO_S
        stResult.pObjInfo[i].track_id = 0;
        stResult.pObjInfo[i].c_class = box.enClass;
        stResult.pObjInfo[i].confidence = box.fScore;
        stResult.pObjInfo[i].x_left = box.rect.x;
        stResult.pObjInfo[i].y_top = box.rect.y;
        stResult.pObjInfo[i].w = box.rect.width;
        stResult.pObjInfo[i].h = box.rect.height;
        stResult.pObjInfo[i].x_right = box.rect.x + box.rect.width;
        stResult.pObjInfo[i].y_bottom = box.rect.y + box.rect.height;
        if (box.landmark.x0 > 0 || box.landmark.x2 > 0) {
            stResult.pObjInfo[i].landmark.x0 = box.landmark.x0;
            stResult.pObjInfo[i].landmark.y0 = box.landmark.y0;
            stResult.pObjInfo[i].landmark.x1 = box.landmark.x1;
            stResult.pObjInfo[i].landmark.y1 = box.landmark.y1;
            stResult.pObjInfo[i].landmark.x2 = box.landmark.x2;
            stResult.pObjInfo[i].landmark.y2 = box.landmark.y2;
            stResult.pObjInfo[i].landmark.x3 = box.landmark.x3;
            stResult.pObjInfo[i].landmark.y3 = box.landmark.y3;
        }
        i++;
    }
    // LOG_DEBUG("convert result from objBoxs to stResult successfully !");
    return HW_OK;
}

int32_t AIPlus::ConvertTrackResult(std::vector<STrack> &output_stracks, SDC_SSD_RESULT_S &stResult)
{
    if (output_stracks.empty()) {
        return HW_OK;
    }
    stResult.numOfObject = output_stracks.size();
    int i = 0;
    for (auto &strack: output_stracks) {
        vector<float> tlwh = strack.tlwh;
        // SDC_SSD_OBJECT_INFO_S
        stResult.pObjInfo[i].track_id = strack.track_id;
        stResult.pObjInfo[i].c_class = 0;
        stResult.pObjInfo[i].confidence = strack.score;
        stResult.pObjInfo[i].x_left = tlwh[0];
        stResult.pObjInfo[i].y_top = tlwh[1];
        stResult.pObjInfo[i].w = tlwh[2];
        stResult.pObjInfo[i].h = tlwh[3];
        stResult.pObjInfo[i].x_right = tlwh[0] + tlwh[2];
        stResult.pObjInfo[i].y_bottom = tlwh[1] + tlwh[3];
        i++;
    }
    // LOG_DEBUG("convert result from output_stracks to stResult successfully !");
    return HW_OK;
}

static inline bool containRect(GCRect &src, GCRect &target)
{
    return target.x > src.x && (target.x + target.width) < (src.x + src.width) && target.y > src.y && (target.y + target.height) < (src.y + src.height);
}

void AIPlus::StartAllThread(void)
{
    std::map<std::string, GC_CAMERA_CONFIG>::iterator iter = gc_app_config.camera_config_map.begin();
    while (iter != gc_app_config.camera_config_map.end()) {
        m_byte_trackers[iter->first] = new BYTETracker(15, 5);
        switch (iter->second.alg_type)
        {
        case GC_ALG_TYPE::ROAD_PARKING:
            m_inout_trackers[iter->first] = new InoutTracker();
            break;
        case GC_ALG_TYPE::SEMI_CLOSED_PARKING:
        case GC_ALG_TYPE::CLOSED_PARKING:
            m_inout_trackers[iter->first] = new InoutTracker();
            break;
        case GC_ALG_TYPE::ILLEGALE_PARKING:
            m_inout_trackers[iter->first] = new IllegalParkingTracker();
            break;
        default:
            m_inout_trackers[iter->first] = new InoutTracker();
            break;
        }
        iter++;
    }

    auto funcAppEpoll = std::bind(&AIPlus::AppEpollThrd, this);
    std::thread thrAppEpoll(funcAppEpoll);
    m_appEpoll_thread = std::move(thrAppEpoll);

    b_track_thread_run = true;
    auto trackFunc = std::bind(&AIPlus::TrackThrd, this);
    std::thread trackThrDecode(trackFunc);
    m_track_thread = std::move(trackThrDecode);

    b_inout_thread_run = true;
    auto inoutFunc = std::bind(&AIPlus::InoutThrd, this);
    std::thread inoutThrDecode(inoutFunc);
    m_inout_thread = std::move(inoutThrDecode);

    b_upload_state_image_thread_run = true;
    auto funcRead = std::bind(&AIPlus::UploadStateImageThrd, this);
    std::thread thrDecode(funcRead);
    m_upload_state_image_thread = std::move(thrDecode);

    b_algv_thread_run = true;
    auto algvFunc = std::bind(&AIPlus::AlgvThrd, this);
    std::thread algvThrDecode(algvFunc);
    m_algv_thread = std::move(algvThrDecode);

}

void AIPlus::PreProcessYuvData(sdc_yuv_frame_s &frame, uint8_t *&yuv, cv::Mat &dst)
{
    // Refresh the video-to-model-input scale ratio, then resize the YUV frame
    // into the caller-provided buffer and Mat.
    m_mapRatio = GetScaleRatio(m_videoSize.width, m_videoSize.height, m_inputSize.width, m_inputSize.height);
    const bool ok = YUVResize(frame, m_inputSize.width, m_inputSize.height, (unsigned char *)yuv, dst);
    if (!ok) {
        LOG_WARN("AIPlus YUVResize error");
    }
}

void AIPlus::PreProcessYuvData(sdc_yuv_frame_s &frame, uint8_t *&yuv, cv::Mat &dst, std::string &save_path)
{
    // Same as the 3-argument overload, but also hands YUVResize a path so the
    // resized frame can be persisted.
    m_mapRatio = GetScaleRatio(m_videoSize.width, m_videoSize.height, m_inputSize.width, m_inputSize.height);
    const bool ok = YUVResize(frame, m_inputSize.width, m_inputSize.height, (unsigned char *)yuv, dst, save_path);
    if (!ok) {
        LOG_WARN("AIPlus YUVResize error");
    }
}

// Intentionally empty: AIPlus performs no per-object pretreatment.
void AIPlus::PretreatObjectData(SDC_SSD_OBJECT_INFO_S &data)
{
}

// Intentionally empty: target classification is not used by this app.
void AIPlus::ClassifyTargets(uint32_t uclass, uint8_t target[20])
{
}

// Intentionally empty: object-info printing is disabled for this app.
void AIPlus::PrintObjectInfo(const SDC_SSD_OBJECT_INFO_S &info) const
{
}

// Create the NNIE inference engine for the configured YOLO version and load
// the detection model from modelPath.
// Returns the LoadModel result (HW_OK on success).
// NOTE(review): `loadMode` is unused — param.loadModeModel is hard-coded to 1;
// confirm whether the argument should be honored.
// NOTE(review): calling this twice would leak the previous m_nnie — presumably
// it is only invoked once at startup; verify.
int32_t AIPlus::LoadModel(unsigned int loadMode, std::string &modelPath)
{
    m_nnie = new NNIE(m_yolo_version);
    m_nnie->Init(); // return value ignored — presumably Init cannot fail here; confirm
    NnCreateParam param = {};
    param.batchNum = 1; // single-image batches
    param.loadModeModel= 1;
    param.modelPath = (char *)modelPath.c_str();
    int ret = m_nnie->LoadModel(&param);
    LOG_IF_FAIL(ret == HW_OK, "nnie load model error");

    // Plate-detection model kept disabled for now:
    // m_nnie_plate = new NNIE(GC_YOLO_VERSION::YOLO_V5_ONLY_PLATE);    
    // m_nnie_plate->Init();
    // NnCreateParam nnie_param = {};
    // nnie_param.batchNum = 1;
    // nnie_param.loadModeModel= 1;
    // nnie_param.modelPath = (char*) "./model/lpd0616_yuv_rgb.wk"; // TODO configable
    // ret = m_nnie_plate->LoadModel(&nnie_param);
    // LOG_IF_FAIL(ret == HW_OK, "load plate model error");

    return ret;
}

int32_t AIPlus::NNieParamInit(void)
{
    int ret = m_nnie->ParamInit();
    LOG_IF_FAIL(ret == HW_OK, "nnie param init error");
    // m_nnie_plate->ParamInit();
    // LOG_IF_FAIL(ret == HW_OK, "nnie plate param init error");
    return ret;
}

// Frame-acquisition worker thread.
// Pulls YUV frames from the data source, stamps each with a timestamp and
// camera key (preset-aware), encodes an OSD-overlaid JPEG into the cache
// directory, copies a model-input-sized YUV buffer, and pushes the resulting
// GCTrackData into m_chan_yuv_data for ForwardThrd. Also periodically saves a
// "state image" for middleware upload. Aborts the process if frame handling
// stalls or the source keeps failing, relying on the container to restart.
void AIPlus::GetDataThrd(void)
{
    std::wstring_convert<std::codecvt_utf16<wchar_t>> converter;

    sdc_yuv_data_s stSdcYuvData;
    sdc_yuv_data_stru* stYuvData = NULL;  // pointer to the current yuv data
    sdc_yuv_frame_stru stYuvCombine;
    sdc_yuv_frame_stru* yuv4jpeg_frame;
    cv::Mat img;
    spdlog::stopwatch sw;
    int yuv_fail_time = 0;
    int wh = m_inputSize.width * m_inputSize.height;
    int buff_size = sizeof(uint8_t) * wh * 3 / 2; // YUV420: 1.5 bytes per pixel
    m_mapRatio = GetScaleRatio(m_videoSize.width, m_videoSize.height, m_inputSize.width, m_inputSize.height);

    while (bReadThreadRun == TRUE) {
        if (m_dataSrc->GetYuvData(stSdcYuvData)) {

            sw.reset();

            yuv_fail_time = 0;

            // LOG_DEBUG("get yuv data return true, size of m_chan_yuv_data=%d, size of IMG_CACHE=%d", m_chan_yuv_data.size(), IMG_CACHE.Size());

            // Build the timestamp string; RFC 3339 format when pushing to ALGV.
            auto tp=std::chrono::system_clock::now();
            auto time=std::chrono::system_clock::to_time_t(tp);
            std::stringstream ss;
            if (gc_app_config.push_to_algv) {
                ss<<std::put_time(std::localtime(&time),"%Y-%m-%dT%H:%M:%S+08:00"); // RFC 3339
            } else {
                ss<<std::put_time(std::localtime(&time),"%Y-%m-%d %H:%M:%S");
            }

            GCTrackData yuv_data;

            // Fetch the current preset position and derive the camera key from
            // it; the default key is the camera hardware id.
            sdc_preset_pos preset_pos;
            yuv_data.camera_key = gc_app_config.hardware_id;
            if (gc_app_config.has_preset) {
                int moving = 0;
                int ret = PtzService::GetInstance().GetCurrentLocation(preset_pos, &moving);
                if (moving == 1) {
                    // Camera is not stationary: drop this frame.
                    m_dataSrc->FreeYuvData(stSdcYuvData);
                    LOG_DEBUG("camera is moving, drop this frame");
                    usleep(100000);
                    continue;
                }
                if (ret == HW_OK) {
                    generateSn(&preset_pos, yuv_data.camera_key);
                } else {
                    m_dataSrc->FreeYuvData(stSdcYuvData);
                    LOG_ERROR("GetCurrentLocation failed for preset_pos=%f,%f", preset_pos.h_location, preset_pos.v_location);
                    usleep(100000);
                    continue;
                }
            }

            // yuv_data.objBoxs = objBoxs;
            yuv_data.pts = stSdcYuvData.pts_sys;
            strcpy(yuv_data.time_str, ss.str().c_str());

            // Buffer for the model-input-sized YUV copy; freed by ForwardThrd
            // (or below if the forward thread is not running).
            yuv_data.yuvForwardBuf = (uint8_t *)malloc(buff_size);
            if (yuv_data.yuvForwardBuf == nullptr) {
                m_dataSrc->FreeYuvData(stSdcYuvData);
                LOG_ERROR("yuvForwardBuf malloc failed");
                usleep(100000);
                continue;
            }

            // 1. Convert YUV to a JPEG stream directly through the Huawei service
            //    API (observed to fix corridor-mode image corruption).
            // 2. Save the JPEG binary stream straight to local storage (observed
            //    to roughly halve memory usage).
            std::string img_path(gc_app_config.base_config.alg_config.cache_dir);
            img_path += yuv_data.camera_key;
            img_path += "/";
            img_path += ss.str() + "_" + std::to_string(stSdcYuvData.pts_sys) + ".jpg";

            // PreProcessYuvData(stSdcYuvData.frame, (uint8_t *&)(yuv_data.yuvForwardBuf), img);
            // PreProcessYuvData(stSdcYuvData.frame, (uint8_t *&)(yuv_data.yuvForwardBuf), img, img_path);

            sdc_jpeg_frame_s jpeg_frame;
	        sdc_osd_region_s osd_region;

            // Compose the OSD overlay text: road/name/time on line 1, camera
            // id suffix + ip + model + alg type on line 2.
            // TODO cache the str for camera key
            std::string osd_str = gc_app_config.camera_config_map[yuv_data.camera_key].road
                + "-" + gc_app_config.camera_config_map[yuv_data.camera_key].name
                + "-" + std::string(yuv_data.time_str)
                + "\n"
                + ((yuv_data.camera_key.length() <= 6) ? yuv_data.camera_key : yuv_data.camera_key.substr(yuv_data.camera_key.length() - 6))
                + "-" + gc_app_config.camera_config_map[yuv_data.camera_key].ip
                + "-" + to_string(gc_app_config.camera_config_map[yuv_data.camera_key].model)
                + "-" + to_string(gc_app_config.camera_config_map[yuv_data.camera_key].alg_type)
            ;
            #ifdef BUILD_INFO
                osd_str += "-" + std::string(BUILD_INFO);
            #endif
            osd_str += "@" + app_start_time;

            // OSD content must be UTF-16 (wchar_t) for the encoder.
            std::wstring w_osd_str = converter.from_bytes(osd_str);

            /*
                fgColor=0xRRGGBB
                bgColor=0xRRGGBB
                fgAlpha=0~128 // auto-adjusted to the range the system supports (will become 0-100 in a later version)
                bgAlpha=0~128 // auto-adjusted to the range the system supports (will become 0-100 in a later version)
                fontSize=0~8 // 0 (16x16), 1 (24x24), 2 (32x32), 3 (48x48), 4
                (64x64), 5 (72x72), 6 (88x88), 7 (96x96), 8 (128x128)
            */
            const char format[] = "fgColor=0xffffff;bgColor=0x000000;fgAlpha=128;bgAlpha=70;fontSize=0";
            memset_s(&osd_region, sizeof(osd_region), 0, sizeof(osd_region));
            (void)memcpy_s(osd_region.osd.format, sizeof(osd_region.osd.format), format, sizeof(format));

            (void)memcpy_s(osd_region.osd.content, sizeof(osd_region.osd.content), w_osd_str.c_str(), w_osd_str.size() * sizeof(wchar_t));
            osd_region.osd.content_len = w_osd_str.size() * sizeof(wchar_t);

            // Overlay region covers the whole frame.
            osd_region.region.x=0;
            osd_region.region.y=0;
            osd_region.region.w=stSdcYuvData.frame.width;
            osd_region.region.h=stSdcYuvData.frame.height;

            int ret = -1;
            // On the M2141 camera, with m_yuvChnId hard-coded to 5, the frame
            // resolution requested at subscribe time has no effect — the
            // maximum live resolution is always delivered, so rescale here.
            yuv4jpeg_frame = &stSdcYuvData.frame;
            if (stSdcYuvData.frame.width != m_videoSize.width) {
                stYuvData = &stSdcYuvData;
                stYuvCombine = CombineYuvData(stYuvData, (uint32_t) (gc_app_config.base_config.alg_config.subscribe_yuv_img_w), (uint32_t) (gc_app_config.base_config.alg_config.subscribe_yuv_img_h), true);
                yuv4jpeg_frame = &stYuvCombine;
            }
            ret = encode_jpeg(*yuv4jpeg_frame, osd_region, jpeg_frame);
            if (ret == HW_OK) {
                ret = save_jpeg(jpeg_frame, img_path);
            }
            if (stSdcYuvData.frame.width != m_videoSize.width) {
                // Release the combined frame allocated above.
                combined_yuv_free(fd_codec, yuv4jpeg_frame);
            }

            if (ret == HW_OK) {
                GCImageCache::GetInstance().Put(img_path.c_str(), img, false); // local save set to false
                
                strcpy(yuv_data.img_path, img_path.c_str());

                {
                    // std::lock_guard<std::mutex> l(gc_app_config_mutex);
                    // Periodically persist a state image for middleware upload,
                    // at most once per middleware_upload_image_period and only
                    // when the previous one has been consumed.
                    if (gc_app_config.base_config.upload_state_image && b_upload_state_image_thread_run) {
                        std::time_t now = std::time(nullptr);
                        if (
                            (gc_app_config.camera_config_map[yuv_data.camera_key].last_status_image_save_time == 0
                                || now - gc_app_config.camera_config_map[yuv_data.camera_key].last_status_image_save_time > gc_app_config.base_config.middleware_upload_image_period
                            )
                            && !gc_app_config.camera_config_map[yuv_data.camera_key].has_new_state_image
                        ) {
                            gc_app_config.camera_config_map[yuv_data.camera_key].last_status_image_save_time = now;

                            std::string state_img_path(gc_app_config.base_config.alg_config.cache_dir);
                            state_img_path.append(yuv_data.camera_key);
                            state_img_path.append("/");
                            state_img_path.append(STATE_IMAGE_NAME);
                            ret = save_jpeg(jpeg_frame, state_img_path);
                            if (ret == HW_OK) {
                                gc_app_config.camera_config_map[yuv_data.camera_key].has_new_state_image = true;
                                gc_app_config.camera_config_map[yuv_data.camera_key].new_state_image_time_str = std::string(yuv_data.time_str);

                                LOG_INFO("save state image successful, camera_key=%s", yuv_data.camera_key.c_str());
                            } else {
                                LOG_ERROR("save state image failed, camera_key=%s, ret=%d", yuv_data.camera_key.c_str(), ret);
                            }
                        }
                    }
                }
                free_encoded_jpeg(jpeg_frame);
            }

            // Produce the model-input-sized YUV copy for inference, then
            // release the combined frame and the source frame.
            stYuvData = &stSdcYuvData;
            stYuvCombine = CombineYuvData(stYuvData, (uint32_t) (m_inputSize.width), (uint32_t) (m_inputSize.height), true);
            memcpy(yuv_data.yuvForwardBuf, (uint8_t *) stYuvCombine.addr_virt, buff_size);
            combined_yuv_free(fd_codec, &stYuvCombine);

            m_dataSrc->FreeYuvData(stSdcYuvData);

            if (bForwardThreadRun) {
                // Hand ownership of yuvForwardBuf to the forward thread.
                yuv_data >> m_chan_yuv_data;
            } else {
                free(yuv_data.yuvForwardBuf);
            }

            // logger->debug("ConvertYuv_time: {:.3} s", sw);

            if (sw.elapsed().count() > 2) {
                LOG_WARN("ConvertYuv_time too long: %.3f s, abort application after sleep, waiting container auto restart", sw.elapsed().count());
                // Temporary workaround: runtime behavior is abnormal — abort
                // directly and let the container restart the process.
                sleep(2); // sleep 2s so pending logs get flushed
                if (!LOG_DEBUG_LEVEL) {
                    abort();
                }
                continue;
            } else if (sw.elapsed().count() > 1) {
                LOG_WARN("ConvertYuv_time too long: %.3f s, try sleep for performance recover", sw.elapsed().count());
                sleep(5);
                continue;
            }
        } else {
            yuv_fail_time ++;
            if (yuv_fail_time >= 30) {
                LOG_WARN("yuv return false last %d times, abort application after sleep, waiting container auto restart", yuv_fail_time);
                sleep(2); // sleep 2s so pending logs get flushed
                if (!LOG_DEBUG_LEVEL) {
                    abort();
                }
            }
            LOG_WARN("get yuv data return false, sleep 1000 ms");
            usleep(1000000);
            continue;
        }
        usleep(200000);
    }

    LOG_ERROR("GetDataThrd has stopped!!!!!");

    return;
}

// Inference worker thread.
// Pops preprocessed YUV buffers from m_chan_yuv_data, runs the NNIE model on
// them, frees the buffer, and forwards the detections to the tracking thread
// via m_chan_track_data.
void AIPlus::ForwardThrd(void)
{
    if (m_dataSrc == nullptr) {
        LOG_ERROR("The data source is not given");
        return;
    }
    spdlog::stopwatch sw;

    while (bForwardThreadRun == TRUE) {

        // LOG_DEBUG("size of m_chan_yuv_data=%d", m_chan_yuv_data.size());

        GCTrackData yuv_data;
        yuv_data << m_chan_yuv_data; // pop next frame — presumably blocking; confirm channel semantics

        sw.reset();

        // Run inference on the YUV frame data.
        std::vector<ObjBox> objBoxs;
        if (m_nnie->InferenceYuv((char *)yuv_data.yuvForwardBuf, objBoxs) != HW_OK) {
            free(yuv_data.yuvForwardBuf); // buffer was malloc'd by GetDataThrd
            LOG_ERROR("NNIE Inference() failed");
            continue;
        }

        // std::vector<ObjBox> plateObjBoxs;
        // if (m_nnie_plate->InferenceYuv((char *)yuv_data.yuvForwardBuf, objBoxs) != HW_OK) {
        //     LOG_ERROR("NNIE plate Inference() failed");
        // }

        // This thread owns the forward buffer; release it after inference.
        free(yuv_data.yuvForwardBuf);
        yuv_data.yuvForwardBuf = nullptr;

        // logger->debug("Forward_time: {:.3} s", sw);
        sw.reset();
        
        #ifdef __DEBUG_SAVE_YUV__
            // NOTE(review): stResult is not declared in this function —
            // presumably a member; confirm this debug path still compiles.
            cv::Mat img;
            GCImageCache::GetInstance().Get(yuv_data.img_path, img);
            ConvertForwardResult(objBoxs, stResult);
            DrawImgBoxes(img, &stResult, m_mapRatio);
            // ConvertForwardResult(plateObjBoxs, stResult);
            // DrawImgBoxes(img, &stResult, m_mapRatio);
            string jpg_name = std::string(yuv_data.img_path) + "_yuv.jpg";
            cv::imwrite(jpg_name.c_str(), img);

            logger->debug("SaveYuv_time {:.3} s", sw);
            sw.reset();
        #endif

        yuv_data.objBoxs = objBoxs;

        // Send the snapshot and bounding boxes via TLV.
        // ConstructTlvMetaData(yuv_data);

        if (b_track_thread_run) {
            yuv_data >> m_chan_track_data;

            // LOG_DEBUG("size of m_chan_track_data=%d", m_chan_track_data.size());
        }

    }
    return;
}

// Run one tracking step on a frame's detections.
// Converts detections to tracker input (un-scaling by m_mapRatio, filtering
// plates and oversized boxes), updates the per-camera BYTETracker, builds
// GCTrackObj entries (anchor point, polygon, plate associations), then feeds
// them to the per-camera in/out tracker and pushes completed tracks to
// m_chan_inout_data.
void AIPlus::Track(GCTrackData &track_data)
{
    //----- byte tracking begin -----//
    // /*
    spdlog::stopwatch sw;

    {
        // Keep a copy of the latest frame data (minus the YUV buffer) for
        // consumers that need the most recent state.
        std::lock_guard<std::mutex> l(last_track_data_mutex);
        track_data.cloneExcludeYuvForwardBuf(last_track_data);
    }

    float rect_scale = 1.0; // TODO configurable
    vector<Object> objects;
    for (auto &box: track_data.objBoxs) {
        // Track vehicles only, not license plates.
        if (std::find(gc_app_config.base_config.alg_config.plate_classes.begin(), gc_app_config.base_config.alg_config.plate_classes.end(), box.enClass) != gc_app_config.base_config.alg_config.plate_classes.end()) {
            continue;
        }
        // Map model-input coordinates back to frame coordinates.
        float x = box.rect.x / m_mapRatio;
        float y = box.rect.y / m_mapRatio;
        float w = box.rect.width / m_mapRatio;
        float h = box.rect.height / m_mapRatio;
        Object obj;
        obj.rect.x = x - (w * (rect_scale - 1)) / 2;
        obj.rect.y = y - (h * (rect_scale - 1)) / 2;
        obj.rect.width = w * rect_scale;
        obj.rect.height = h * rect_scale;
        obj.label = box.enClass;
        obj.prob = box.fScore;
        if (gc_app_config.camera_config_map[track_data.camera_key].style != GC_CAMERA_STYLE::STYLE_BAN) {
            // Drop boxes wider/taller than 0.8x of the frame (width check
            // applies only in WK mode).
            if (gc_app_config.camera_config_map[track_data.camera_key].mode == GC_CAMERA_MODE::MODE_WK && obj.rect.width > gc_app_config.base_config.alg_config.subscribe_yuv_img_w * 0.8) {
                LOG_INFO("ignore the obj with too big width=%f", obj.rect.width);
                continue;
            }
            if (obj.rect.height > gc_app_config.base_config.alg_config.subscribe_yuv_img_h * 0.8) {
                LOG_INFO("ignore the obj with too big height=%f", obj.rect.height);
                continue;
            }
        }
        objects.emplace_back(obj);
    }

    if (objects.size() == 0) {
        return;
    }

    // Advance the BYTETracker for this camera and build track objects.
    vector<STrack> output_stracks = m_byte_trackers[track_data.camera_key]->update(objects);
    vector<GCTrackObj> gc_track_objs;
    for (int i = 0; i < output_stracks.size(); i++) {
        int track_id = output_stracks[i].track_id;
        // vector<float> tlwh = output_stracks[i]._tlwh;
        vector<float> tlwh = output_stracks[i].tlwh;

        // LOG_DEBUG("camera_key=%s track_id=%d tlwh=[%f, %f, %f, %f], area=%f m_mapRatio=%f, is on tracking", track_data.camera_key.c_str(), track_id, tlwh[0], tlwh[1], tlwh[2], tlwh[3], tlwh[2] * tlwh[3], m_mapRatio);

        int x = (int) tlwh[0];
        int y = (int) tlwh[1];
        int w = (int) tlwh[2];
        int h = (int) tlwh[3];
        GCTrackObj track_obj;
        track_obj.camera_key = track_data.camera_key;
        strcpy(track_obj.img_path, track_data.img_path);
        track_obj.pts = track_data.pts;
        strcpy(track_obj.time_str, track_data.time_str);
        // track_obj.img = img;
        track_obj.track_id = track_id;
        // Undo the rect_scale expansion applied before tracking.
        track_obj.rect.x = x + (w * (rect_scale - 1)) / 2;
        track_obj.rect.y = y + (h * (rect_scale - 1)) / 2;
        track_obj.rect.width = w / rect_scale;
        track_obj.rect.height = h / rect_scale;
        // The fields below depend on the rect values above — do not reorder.
        // Anchor point (cxby): bottom of the box; for angled ("XIE") cameras
        // it is shifted toward the far side of the vehicle.
        if (gc_app_config.camera_config_map[track_data.camera_key].style == GC_CAMERA_STYLE::STYLE_XIE) {
            switch (gc_app_config.camera_config_map[track_data.camera_key].pos)
            {
            case GC_CAMERA_POS::POS_LEFT:
                track_obj.cxby = { track_obj.rect.x + track_obj.rect.width * 2 / 3, track_obj.rect.y + track_obj.rect.height };
                break;
            case GC_CAMERA_POS::POS_RIGHT:
                track_obj.cxby = { track_obj.rect.x + track_obj.rect.width / 3, track_obj.rect.y + track_obj.rect.height };
                break;
            default:
                track_obj.cxby = { track_obj.rect.x + track_obj.rect.width / 2, track_obj.rect.y + track_obj.rect.height };
                break;
            }
        } else {
            track_obj.cxby = { track_obj.rect.x + track_obj.rect.width / 2, track_obj.rect.y + track_obj.rect.height };
        }
        track_obj.tlbr = { track_obj.rect.x, track_obj.rect.y, track_obj.rect.x + track_obj.rect.width, track_obj.rect.y + track_obj.rect.height };
        track_obj.trbl = { track_obj.rect.x + track_obj.rect.width, track_obj.rect.y, track_obj.rect.x, track_obj.rect.y + track_obj.rect.height };
        // NOTE(review): polygon mixes the scale-adjusted rect with the raw
        // x/y values — with rect_scale == 1.0 they are equal, but confirm
        // intent before changing rect_scale.
        track_obj.polygon = { track_obj.rect.x, track_obj.rect.y, track_obj.rect.x + track_obj.rect.width, y, track_obj.rect.x + track_obj.rect.width, track_obj.rect.y + track_obj.rect.height, x, track_obj.rect.y + track_obj.rect.height };
        track_obj.score = output_stracks[i].score;
        track_obj.state = output_stracks[i].state;
        track_obj.is_cross_frame = false;
        track_obj.is_proof = false;
        track_obj.frame_obj_count = output_stracks.size();
        // rect_scale not applied to these per-detection boxes.
        // Associate this frame's raw detections with the track: plates vs cars,
        // and plates fully contained inside this track's rect.
        for (auto &box: track_data.objBoxs) {
                GCRect rect = GCRect(box.rect.x / m_mapRatio, box.rect.y / m_mapRatio, box.rect.width / m_mapRatio, box.rect.height / m_mapRatio,
                    LandmarkInfo{
                        box.landmark.x0 / m_mapRatio, box.landmark.y0 / m_mapRatio,
                        box.landmark.x1 / m_mapRatio, box.landmark.y1 / m_mapRatio,
                        box.landmark.x2 / m_mapRatio, box.landmark.y2 / m_mapRatio,
                        box.landmark.x3 / m_mapRatio, box.landmark.y3 / m_mapRatio
                    },
                    box.fScore,
                    box.enClass
                );
            if (std::find(gc_app_config.base_config.alg_config.plate_classes.begin(), gc_app_config.base_config.alg_config.plate_classes.end(), box.enClass) != gc_app_config.base_config.alg_config.plate_classes.end()) {
                track_obj.plate_boxes.emplace_back(rect);
                if (containRect(track_obj.rect, rect)) {
                    track_obj.car_plate_boxes.emplace_back(rect);
                }
            } else {
                track_obj.car_boxes.emplace_back(rect);
            }
        }

        gc_track_objs.emplace_back(track_obj);
    }

    // logger->debug("Bytetrack_time: {:.3} s", sw);

    //----- byte tracking end -----//

    //----- inout tracking begin -----//
    sw.reset();
    vector<InoutTrack> track_with_inouts = m_inout_trackers[track_data.camera_key]->update(gc_track_objs);
    if (track_with_inouts.size() > 0 && b_inout_thread_run) {
        track_with_inouts >> m_chan_inout_data;

        LOG_DEBUG("size of track_with_inouts=%d", track_with_inouts.size());
    }

    // logger->debug("Inouttracker_time: {:.3} s", sw);
    //----- inout tracking end -----//
}

// Tracking worker thread: consumes GCTrackData from m_chan_track_data and
// feeds it into Track(). When the camera key changes (PTZ preset moved) the
// previous camera's tracker state is discarded.
void AIPlus::TrackThrd(void)
{
    spdlog::stopwatch sw;
    while (b_track_thread_run) {

        // LOG_DEBUG("size of m_chan_track_data=%d", m_chan_track_data.size());

        GCTrackData track_data;

        track_data << m_chan_track_data;

        // When the preset changes, end the previous preset's tracking and
        // finalize its behavior; resuming it later is pointless, and the
        // frames tracked earlier have already been purged.
        if (track_data.camera_key != m_curr_tracking_camera_key) {

            LOG_INFO("trackig camera key change, reset last track");

            // NOTE(review): on the first frame m_curr_tracking_camera_key is
            // presumably empty, so this deletes a null map entry (safe) and
            // allocates a tracker under the empty key — confirm intended.
            delete m_byte_trackers[m_curr_tracking_camera_key];
            m_byte_trackers[m_curr_tracking_camera_key] = new BYTETracker(15, 5);
            m_inout_trackers[track_data.camera_key]->End();
            m_curr_tracking_camera_key = track_data.camera_key;
        }

        sw.reset();

        Track(track_data);

        // logger->debug("Track_time: {:.3f} s", sw);

    }
}

void AIPlus::InoutThrd(void)
{
    // In/out worker: pops finished inout tracks from m_chan_inout_data, runs
    // license-plate recognition (LPR) on selected frames of each track (in
    // batches of up to ~30 crops, local engine or remote HTTP depending on
    // config), then finalizes the track via InoutTrack::End() which settles
    // the plate, entry/exit time and evidence images.
    while (b_inout_thread_run) {

        LOG_DEBUG("size of m_chan_inout_data=%d", m_chan_inout_data.size());

        vector<InoutTrack> track_with_inouts;
        track_with_inouts << m_chan_inout_data;  // blocking pop from channel

        LOG_DEBUG("size of track_with_inouts=%d", track_with_inouts.size());

        // lpr_host starting with "local" selects the on-device LPR engine.
        bool use_local_lpr = gc_app_config.base_config.lpr_host.rfind("local", 0) == 0;

        auto track_same_plate_iou_thresh = gc_app_config.base_config.alg_config.track_same_plate_iou_thresh;
        // NOTE(review): flag is named is_low_fps but the condition is
        // subscribe_yuv_fps >= 1 — confirm the intended threshold/direction.
        bool is_low_fps = gc_app_config.base_config.alg_config.subscribe_yuv_fps >= 1;
        if (is_low_fps) {
            // Loosen the duplicate-plate IoU gate when frames are sparse.
            track_same_plate_iou_thresh = track_same_plate_iou_thresh * 0.3;
        }

        spdlog::stopwatch sw;
        for (auto &inout_track: track_with_inouts) {

            sw.reset();

            std::vector<int> lpr_index_list;          // track_obj indices queued for LPR
            std::vector<std::string> img_base64_list; // remote-LPR payloads
            std::vector<cv::Mat> img_list;            // local-LPR crops
            std::vector<GC_PLATE_TYPE> plate_type_list; // one-to-one with img_list
            // int size = inout_track.m_track_objs.size();
            // int cross_idx = -1; // default: use the middle frame as the crossing frame
            // GCTrackObj obj_with_most_frame_obj;
            // obj_with_most_frame_obj.frame_obj_count = 0;
            // int idx = 0;
            // for (auto &track_obj: inout_track.m_track_objs) {
            //     if (track_obj.frame_obj_count > obj_with_most_frame_obj.frame_obj_count) {
            //         obj_with_most_frame_obj = track_obj;
            //     }
            //     if (track_obj.is_cross_frame) {
            //         cross_idx = idx;
            //     }
            //     idx ++;
            // }
            // // recognize plates within 15s before/after the crossing frame
            // int offset = 15 * gc_app_config.base_config.alg_config.subscribe_yuv_fps;
            // if (cross_idx < 0) {
            //     cross_idx = inout_track.m_track_result.inout == InoutType::IN ? std::min(offset, size / 2) : std::max(size - offset, size / 2);

            //     LOG_WARN("track_id=%d has no cross frame found, set cross_idx=%d", inout_track.m_track_id, cross_idx);
            // }
            // int start = std::max(0, cross_idx - offset);
            // int end = std::min(size, cross_idx + offset);
            int start = 0;
            int end = inout_track.m_track_objs.size();
            vector<int> last_plate_tlbr;  // last recognized plate box, for the IoU skip below
            cv::Mat frame_img;
            for (int i = start; i < end; i ++) {
                auto &track_obj = inout_track.m_track_objs[i];
                if (strlen(track_obj.img_path) == 0) {
                    LOG_ERROR("camera_key=%s track_id=%d ignore frame idx=%d/%d, track_obj.img_path is empty", inout_track.m_camera_key.c_str(), inout_track.m_track_id, i, end-1);
                    continue;
                }
                // Plate-aware YOLO variants: only frames with exactly one plate
                // attached to the car are eligible for plate recognition.
                if ((m_yolo_version == GC_YOLO_VERSION::YOLO_V5_ONLY_PLATE_CAR || m_yolo_version == GC_YOLO_VERSION::YOLO_V5_WITH_PLATE) && track_obj.car_plate_boxes.size() == 0) {
                    LOG_INFO("camera_key=%s track_id=%d ignore frame idx=%d/%d, has no plate on the car frame", inout_track.m_camera_key.c_str(), inout_track.m_track_id, i, end-1);
                    continue;
                }
                if ((m_yolo_version == GC_YOLO_VERSION::YOLO_V5_ONLY_PLATE_CAR || m_yolo_version == GC_YOLO_VERSION::YOLO_V5_WITH_PLATE) && track_obj.car_plate_boxes.size() > 1) {
                    // FIXME: cars whose plate count is not exactly 1 are skipped entirely for plate recognition
                    LOG_INFO("camera_key=%s track_id=%d ignore frame idx=%d/%d, has more than plate on the car frame: %d", inout_track.m_camera_key.c_str(), inout_track.m_track_id, i, end-1, track_obj.car_plate_boxes.size());
                    continue;
                }
                GCImageCache::GetInstance().Get(track_obj.img_path, frame_img);
                if (frame_img.empty()) {
                    LOG_ERROR("camera_key=%s track_id=%d image is empty for path=%s", inout_track.m_camera_key.c_str(), track_obj.track_id, track_obj.img_path);
                } else {

                    // LOG_DEBUG("camera_key=%s track_id=%d car xywh=%d,%d,%d,%d, img_path=%s", inout_track.m_camera_key.c_str(), track_obj.track_id, track_obj.rect.x, track_obj.rect.y, track_obj.rect.width, track_obj.rect.height, track_obj.img_path);

                    if (m_yolo_version == GC_YOLO_VERSION::YOLO_V5_ONLY_PLATE_CAR || m_yolo_version == GC_YOLO_VERSION::YOLO_V5_WITH_PLATE) {
                        // Clamp the plate box to the image (negative origin shrinks width/height).
                        GCRect rect = track_obj.car_plate_boxes[0];
                        int x = std::max(rect.x, 0);
                        int y = std::max(rect.y, 0);
                        int w = rect.x < 0 ? rect.width + rect.x : rect.width;
                        int h = rect.y < 0 ? rect.height + rect.y : rect.height;
                        // Skip recognition when the plate box barely moved since the last recognized frame.
                        if (!last_plate_tlbr.empty()) {
                            float iou_v = iou(last_plate_tlbr, { x, y, x + w, y + h }, 2);
                            if (iou_v >= track_same_plate_iou_thresh) {
                                last_plate_tlbr = { x, y, x + w, y + h };

                                LOG_INFO("camera_key=%s track_id=%d ignore frame idx=%d/%d, iou=%.2f with last plate rect", inout_track.m_camera_key.c_str(), track_obj.track_id, i, end-1, iou_v);
                                continue;
                            }
                        }
                        last_plate_tlbr = { x, y, x + w, y + h };
                        cv::Mat plate_img = frame_img(cv::Rect(x, y, std::min(w, frame_img.cols - x), std::min(h, frame_img.rows - y)));
                        // Rectify the plate crop to lpr_img_w x lpr_img_h using
                        // the four detected corner landmarks (plate-local coords).
                        std::vector<cv::Point2f> src_pts = {
                            cv::Point2f(rect.landmark.x0 - rect.x, rect.landmark.y0 - rect.y),
                            cv::Point2f(rect.landmark.x1 - rect.x, rect.landmark.y1 - rect.y),
                            cv::Point2f(rect.landmark.x2 - rect.x, rect.landmark.y2 - rect.y),
                            cv::Point2f(rect.landmark.x3 - rect.x, rect.landmark.y3 - rect.y)
                        };
                        std::vector<cv::Point2f> dst_pts = {
                            cv::Point2f(0, 0),
                            cv::Point2f(gc_app_config.base_config.alg_config.lpr_img_w, 0),
                            cv::Point2f(gc_app_config.base_config.alg_config.lpr_img_w, gc_app_config.base_config.alg_config.lpr_img_h),
                            cv::Point2f(0, gc_app_config.base_config.alg_config.lpr_img_h)
                        };
                        cv::Mat transform_mat;
                        cv::Mat perspective_mat;
                        if (is_low_fps) {
                            // affine transform (3 points, cheaper)
                            std::vector<cv::Point2f> affine_src_pts = {src_pts[0], src_pts[1], src_pts[2]};
                            std::vector<cv::Point2f> affine_dst_pts = {dst_pts[0], dst_pts[1], dst_pts[2]};
                            transform_mat = cv::getAffineTransform(affine_src_pts, affine_dst_pts);
                            cv::warpAffine(plate_img, perspective_mat, transform_mat, cv::Size(gc_app_config.base_config.alg_config.lpr_img_w, gc_app_config.base_config.alg_config.lpr_img_h));
                        } else {
                            // perspective transform (all 4 corners)
                            transform_mat = cv::getPerspectiveTransform(src_pts, dst_pts);
                            cv::warpPerspective(plate_img, perspective_mat, transform_mat, cv::Size(gc_app_config.base_config.alg_config.lpr_img_w, gc_app_config.base_config.alg_config.lpr_img_h));
                        }

                        if (use_local_lpr) {
                            img_list.emplace_back(perspective_mat);
                            plate_type_list.emplace_back(static_cast<GC_PLATE_TYPE>(rect.enClass | GC_PLATE_TYPE::SINGLE));
                        } else {
                            std::string base64 = Mat2Base64(perspective_mat, ".jpg");
                            base64.insert(0, "data:image/jpeg;base64,");
                            img_base64_list.emplace_back(base64);
                            plate_type_list.emplace_back(static_cast<GC_PLATE_TYPE>(rect.enClass | GC_PLATE_TYPE::SINGLE));
                            perspective_mat.release();
                        }
                        lpr_index_list.emplace_back(i);
                        transform_mat.release();
                        if (is_low_fps) {
                            usleep(50000); // warpAffine is CPU-heavy; sleep 50ms
                        } else {
                            usleep(10000); // warpPerspective is CPU-heavy; sleep 10ms (original comment said 5ms — value is 10ms)
                        }
                    } else {
                        // No plate detector: crop the whole car and let the LPR service find the plate.
                        int x = std::max(track_obj.rect.x, 0);
                        int y = std::max(track_obj.rect.y, 0);
                        int w = track_obj.rect.x < 0 ? track_obj.rect.width + track_obj.rect.x : track_obj.rect.width;
                        int h = track_obj.rect.y < 0 ? track_obj.rect.height + track_obj.rect.y : track_obj.rect.height;
                        cv::Mat car_img = frame_img(cv::Rect(x, y, std::min(w, frame_img.cols - x), std::min(h, frame_img.rows - y)));
                        if (!car_img.empty()) {
                            std::string base64 = Mat2Base64(car_img, ".jpg");
                            base64.insert(0, "data:image/jpeg;base64,");
                            img_base64_list.emplace_back(base64);
                            lpr_index_list.emplace_back(i);
                            plate_type_list.emplace_back(static_cast<GC_PLATE_TYPE>(track_obj.rect.enClass | GC_PLATE_TYPE::SINGLE));
                            car_img.release();
                        }
                        // First/last/crossing/most-objects frames used to run full-image LPR
                        // for the plate-filtering rules; kept for reference.
                        // if (i == start || i == end - 1 || track_obj.is_cross_frame || track_obj.img_path == obj_with_most_frame_obj.img_path) {
                        //     nlohmann::json frame_lpr_result = GCLpr::GetInstance().lpr_remote(frame_img);
                        //     track_obj.frame_lpr_result = frame_lpr_result;

                        //     LOG_INFO("track_id=%d idx=%d frame_lpr_result=%s", track_obj.track_id, i, frame_lpr_result.dump().c_str());
                        // }
                    }
                    frame_img.release();
                }
                // Flush a batch once more than 30 crops are queued.
                if (img_base64_list.size() > 30 || img_list.size() > 30) {
                    nlohmann::json lpr_batch_result = use_local_lpr ? GCLpr::GetInstance().lpr_local_batch(img_list, plate_type_list) : GCLpr::GetInstance().lpr_remote_batch(img_base64_list);
                    if (lpr_batch_result["errcode"] == 0) {
                        nlohmann::json result_list = lpr_batch_result["data"];
                        int result_index = 0;
                        for (auto obj_index: lpr_index_list) {
                            nlohmann::json lpr_result = result_list[result_index];
                            // Re-wrap each batch entry so it matches the single-recognition response shape.
                            inout_track.m_track_objs[obj_index].lpr_result["errcode"] = 0;
                            inout_track.m_track_objs[obj_index].lpr_result["data"] = lpr_result;
                            result_index++;
                        }

                        LOG_INFO("camera_key=%s track_id=%d size of img_base64_list=%d img_list=%d lpr_batch_result=%s", inout_track.m_camera_key.c_str(), inout_track.m_track_id, img_base64_list.size(), img_list.size(), lpr_batch_result.dump().c_str());

                        img_base64_list.clear();
                        img_list.clear();
                        plate_type_list.clear();
                        lpr_index_list.clear();
                    } else {
                        // NOTE(review): on batch failure the queues are NOT cleared, so the
                        // same crops are retried with the next flush — confirm intended.
                        LOG_INFO("camera_key=%s track_id=%d size of img_base64_list=%d img_list=%d lpr_batch_result=%s", inout_track.m_camera_key.c_str(), inout_track.m_track_id, img_base64_list.size(), img_list.size(), lpr_batch_result.dump().c_str());
                    }
                }
            }
            // Recognize whatever is left after the last batch of 30.
            if (img_base64_list.size() > 0 || img_list.size() > 0) {
                nlohmann::json lpr_batch_result = use_local_lpr ? GCLpr::GetInstance().lpr_local_batch(img_list, plate_type_list) : GCLpr::GetInstance().lpr_remote_batch(img_base64_list);
                if (lpr_batch_result["errcode"] == 0) {
                    nlohmann::json result_list = lpr_batch_result["data"];
                    int result_index = 0;
                    for (auto obj_index: lpr_index_list) {
                        nlohmann::json lpr_result = result_list[result_index];
                        // Re-wrap each batch entry so it matches the single-recognition response shape.
                        inout_track.m_track_objs[obj_index].lpr_result["errcode"] = 0;
                        inout_track.m_track_objs[obj_index].lpr_result["data"] = lpr_result;
                        result_index++;
                    }
                }

                LOG_INFO("camera_key=%s track_id=%d size of img_base64_list=%d img_list=%d lpr_batch_result=%s", inout_track.m_camera_key.c_str(), inout_track.m_track_id, img_base64_list.size(), img_list.size(), lpr_batch_result.dump().c_str());
                
                img_base64_list.clear();
                img_list.clear();
                plate_type_list.clear();
                lpr_index_list.clear();
            }
            // Behavior decided; settle the plate, entry/exit times and evidence images.
            inout_track.End();

            logger->info("Inoutend_time: {:.3} s", sw);

        }
    }
}

void AIPlus::UploadStateImageThrd(void)
{
    // Periodically draws the ROI and berth overlays onto each camera's cached
    // state image and uploads the annotated image through the middleware.
    // Skipped entirely when results are pushed to ALGV instead.
    while (b_upload_state_image_thread_run) {
        if (gc_app_config.push_to_algv) {
            sleep(5);
            continue;
        }
        std::map<std::string, GC_CAMERA_CONFIG>::iterator iter = gc_app_config.camera_config_map.begin();
        while (iter != gc_app_config.camera_config_map.end()) {
            {
                // std::lock_guard<std::mutex> l(gc_app_config_mutex);
                if (!iter->second.has_new_state_image) {
                    // BUGFIX: advance before skipping — the old code slept and
                    // retried the SAME camera forever, starving all the others.
                    iter ++;
                    continue;
                }
                iter->second.has_new_state_image = false;
            }
            std::string state_img_path(gc_app_config.base_config.alg_config.cache_dir);
            state_img_path.append(iter->first);
            state_img_path.append("/");
            state_img_path.append(STATE_IMAGE_NAME);
            cv::Mat img;
            GCImageCache::GetInstance().Get(state_img_path.c_str(), img);
            if (img.empty()) {
                // BUGFIX: pass c_str() — handing std::string to %s varargs is UB;
                // also advance the iterator instead of spinning on this camera.
                LOG_ERROR("state image is empty, sn=%s", iter->first.c_str());
                iter ++;
                continue;
            }
            vector<cv::Point2i> points;
            // ROI polygon in red; roi stores flat x,y pairs.
            for (int i = 0; i < iter->second.roi.size(); i += 2) {
                points.emplace_back((int) (iter->second.roi[i]), (int) (iter->second.roi[i + 1]));
            }
            cv::polylines(img, {points}, true, cv::Scalar(0, 0, 255), 4, 8, 0);
            // Berth polygons, each with a deterministic pseudo-random color.
            int idx = 3;
            for (auto &berth: iter->second.berthes) {
                points.clear();
                vector<int> x_list;
                vector<int> y_list;
                for (int i = 0; i < berth.roi.size(); i += 2) {
                    points.emplace_back((int) (berth.roi[i]), (int) (berth.roi[i + 1]));
                    x_list.emplace_back((int) (berth.roi[i]));
                    y_list.emplace_back((int) (berth.roi[i + 1]));
                }
                cv::Scalar scalar = cv::Scalar(37 * idx % 255, 17 * idx % 255, 29 * idx % 255);
                cv::polylines(img, {points}, true, scalar, 2, 8, 0);
                // Berth name anchored inside the polygon; shifted left for
                // left-position cameras. Assumes each berth has >= 2 points.
                std::sort(x_list.begin(), x_list.end());
                std::sort(y_list.begin(), y_list.end());
                cv::Point2i name_point((x_list[1] + x_list[0]) / 2, (y_list[y_list.size() - 1] + y_list[0]) / 2);
                if (iter->second.pos == GC_CAMERA_POS::POS_LEFT) {
                    name_point = cv::Point2i((x_list[x_list.size() - 1] + x_list[x_list.size() - 2]) / 2 - 150, (y_list[y_list.size() - 1] + y_list[0]) / 2);
                }
                // White outline under green text for readability on any background.
                cv::putText(img, berth.name, name_point, 3, 0.5, cv::Scalar(255, 255, 255), 3);
                cv::putText(img, berth.name, name_point, 3, 0.5, cv::Scalar(0, 255, 0), 1);
                idx += 10;
            }

            Middleware::GetInstance().Upload_parking_status_image(iter->first, img, iter->second.new_state_image_time_str.c_str());

            img.release();

            LOG_INFO("state image has been uploaded, sn=%s", iter->first.c_str());
            iter ++;
        }
        sleep(5);
    }
}

void AIPlus::ClearThrd(void)
{
    // Intentionally a no-op: the yuv-image cleanup loop is disabled. The
    // original implementation is kept below for reference — it drained
    // m_chan_yuv_image_data once the backlog exceeded 300 entries and removed
    // the corresponding cached image files from disk via `rm -rf`.
    // struct timespec time1 = {0, 0};
    // struct timespec time2 = {0, 0};
    // while (b_clear_thread_run) {
    //     if (m_chan_yuv_image_data.size() < 300) {
    //         usleep(100000);
    //         continue;
    //     }
    //     GCYuvImage yuv_image;
    //     yuv_image << m_chan_yuv_image_data;
    //     std::string cmd("rm -rf ");
    //     cmd.append(yuv_image.img_path);
    //     int ret = system(cmd.c_str());
    //     if (ret != 0) {
    //         LOG_ERROR("clear yuv image failed, cmd=%s", cmd.c_str());
    //     }
    // }
}

void AIPlus::AlgvThrd(void)
{
    // ALGV sync worker.
    // Phase 1: one initial per-camera config sync (with the cached state image
    //          when it exists on disk).
    // Phase 2: after a 120s settle, re-sync app-level and per-camera config
    //          every 600s while the thread is running.
    std::map<std::string, GC_CAMERA_CONFIG>::iterator iter = gc_app_config.camera_config_map.begin();
    std::ifstream fs;
    while (iter != gc_app_config.camera_config_map.end()) {
        sleep(10);
        std::string state_img_path(gc_app_config.base_config.alg_config.cache_dir);
        state_img_path.append(iter->first);
        state_img_path.append("/");
        state_img_path.append(STATE_IMAGE_NAME);
        fs.open(state_img_path);
        if (fs.good()) {
            m_algv.SyncCameraConfig(iter->first, state_img_path.c_str());
        } else {
            LOG_WARN("camera_key=%s state_img_path is empty.",  iter->first.c_str());
        }
        fs.close();
        // close() does not reset the error flags; clear them so the next open
        // on this reused stream object can succeed.
        fs.clear();
        iter ++;
    }
    sleep(120);
    while (b_algv_thread_run) {
        iter = gc_app_config.camera_config_map.begin();
        // BUGFIX: guard against an empty camera map before dereferencing begin().
        if (iter == gc_app_config.camera_config_map.end()) {
            sleep(600);
            continue;
        }
        // Use the first camera's area to sync app-level config (incl. patch updates).
        m_algv.GetAppConfig(iter->second.area.c_str());
        // Sync per-camera config.
        while (iter != gc_app_config.camera_config_map.end()) {
            std::string state_img_path(gc_app_config.base_config.alg_config.cache_dir);
            state_img_path.append(iter->first);
            state_img_path.append("/");
            state_img_path.append(STATE_IMAGE_NAME);
            fs.open(state_img_path);
            // BUGFIX: a missing file sets failbit, not badbit, so fs.bad()
            // wrongly treated absent images as present; use is_open() instead.
            // Also the stream was never closed in this loop, which made every
            // open() after the first one fail (open on an open stream fails).
            if (!fs.is_open() || !iter->second.debug) {
                m_algv.SyncCameraConfig(iter->first);
            } else {
                m_algv.SyncCameraConfig(iter->first, state_img_path.c_str());
            }
            fs.close();
            fs.clear();

            LOG_INFO("camera_key=%s state_img_path=%s", iter->first.c_str(), state_img_path.c_str());

            iter ++;
        }
        sleep(600);
    }
}

void AIPlus::StartEventSubscriberThread(void)
{
    // Intentionally a no-op: the event-subscriber thread is currently
    // disabled. The original C++11 thread startup is kept below for reference.
    // b_event_subscriber_thread_run = TRUE;
    // auto funcRead = std::bind(&AIPlus::EventSubscriberThrd, this);
    // std::thread thrDecode(funcRead);
    // m_event_subscriber_thread = std::move(thrDecode);
    // LOG_ERROR("Create the thread:EventSubscriberThrd() successfully");
    // return;
}

void AIPlus::EventSubscriberThrd(void)
{
    // Intentionally a no-op: event subscription is currently disabled. The
    // original loop (subscribe to channel "1" and poll for events) is kept
    // below for reference.
    // EventSubscriber subscriber;
    // subscriber.Init();
    // subscriber.Subscribe("1");
    // while (1) {
    //     subscriber.ReadEvent();
    // }
    // return;
}

void AIPlus::AppEpollThrd(void)
{
    // Register with the gateway first; only start the domain-socket
    // server when registration succeeds.
    if (!registerGateway()) {
        return;
    }
    LOG_INFO("registerGateway end begin start server\n");
    startServer();
}

struct sdc_yuv_frame_stru AIPlus::CombineYuvData(struct sdc_yuv_data_stru *data,uint32_t w,uint32_t h, bool keep_ratio)
{
    // Rescale/copy a single YUV frame to (w, h) through the codec's combine
    // service. keep_ratio preserves the source aspect ratio inside the target.
    // Removed: unused locals (buffer, filename, iov, heads) left over from an
    // earlier inline implementation — Combined() now builds the request.
    struct sdc_yuv_frame_stru frames[1];
    frames[0] = data->frame;

    struct sdc_resolution_stru combined_resolution;
    combined_resolution.width = w;
    combined_resolution.height = h;

    struct sdc_yuv_frame_stru yuv_img;
    struct sdc_yuv_frame_stru ret = Combined(fd_codec, frames, &combined_resolution, &yuv_img, keep_ratio);

    // Debug helper: dump the combined frame to disk for inspection.
    // FILE *fpYuv = NULL;
    // fpYuv = fopen("test_b.yuv","w+");
    // if(fpYuv != NULL){
    //     LOG_DEBUG("frame.width=%d,height=%d,addr_virt:%llu\n",ret.width,ret.height,ret.addr_virt);
    //     (void)fwrite((const void *)ret.addr_virt,ret.size,1,fpYuv);
    //     (void)fflush(fpYuv);
    //     (void)fclose(fpYuv);
    // }
    return ret;
}

struct sdc_yuv_frame_stru AIPlus::Combined(int fd,const struct sdc_yuv_frame_stru frames[4],
    const struct sdc_resolution_stru *combined_resolution,struct sdc_yuv_frame_stru *yuv_img, bool keep_ratio)
{
    int nret;
    uint32_t max_wh = std::max(combined_resolution->width, combined_resolution->height);
    float ratio = std::min(combined_resolution->width * 1.0f / frames[0].width, combined_resolution->height * 1.0f / frames[0].height);
    /*合成后图片分辨率由combined_resolution决定*/
    struct sdc_combined_yuv_param_stru param = {
        .width = combined_resolution->width,
        .height = combined_resolution->height,
        //.yuv_cnt = 4;可以将4张图片合成一张图
        .yuv_cnt = 1,
    };
    struct sdc_combined_yuv_stru combined_yuv[1] = {{0}};

    /*头部信息设置正确的head_length和content_length*/
    struct sdc_common_head_stru head;
    head.version = SDC_VERSION;
    head.url = SDC_URL_COMBINED_IMAGE;
    head.method = SDC_METHOD_CREATE;
    head.head_length = sizeof(head);
    head.content_length = sizeof(param) + sizeof(combined_yuv);

    combined_yuv[0].frame = frames[0];
    combined_yuv[0].origin_region.w = frames[0].width;
    combined_yuv[0].origin_region.h = frames[0].height;
    combined_yuv[0].combined_region.w = keep_ratio ? frames[0].width * ratio : combined_resolution->width;
    combined_yuv[0].combined_region.h = keep_ratio ? frames[0].height * ratio : combined_resolution->height;;
    combined_yuv[0].combined_region.x = 0;
    combined_yuv[0].combined_region.y = 0;

    // 以上数据组合发送
    struct iovec iov[] = {
        {.iov_base = &head,.iov_len = sizeof(head)},
        {.iov_base = &param,.iov_len = sizeof(param)},
        {.iov_base = combined_yuv,.iov_len = sizeof(combined_yuv)},
    };

    nret = writev(fd,iov,sizeof(iov) / sizeof(iov[0]));
    if (nret < 0){
        LOG_ERROR("write fd failed!\n");
        exit(1);
    }
    char buffer[1024];
    nret = read(fd_codec,buffer,sizeof(buffer));
    if (nret < 0){
    LOG_ERROR("read from fd_codec failed!\n");
    exit(1);
    }
    
    if(nret > sizeof(buffer)){
        LOG_ERROR("read from fd_codec truncated: %zu > %zu\n",nret,sizeof(buffer));
        exit(1);
    }
    struct sdc_common_head_stru *headEx = (struct sdc_common_head_stru *)buffer;
    if(headEx->code != SDC_CODE_200){
        LOG_ERROR("read from fd_codec err: code = %u\n",headEx->code);
        exit(1);
    }

    if (headEx->code != SDC_CODE_200 || headEx->content_length != sizeof(*yuv_img)){
        exit(1);
    }
    yuv_img = (struct sdc_yuv_frame_stru *)(buffer + headEx->head_length);
    // LOG_DEBUG("frame.width=%d,height=%d,addr_virt:%llu\n",yuv_img->width,yuv_img->height,yuv_img->addr_virt);
    return *yuv_img;

}

void AIPlus::combined_yuv_free(int fd, const struct sdc_yuv_frame_stru *yuv_frame)
{
    /** Release a combined frame: a DELETE request consisting of the common
     *  head, the extend head tagging the payload type, and the frame
     *  descriptor itself, sent in a single gathered write. */
    struct sdc_extend_head_stru yuv_head = { SDC_HEAD_COMBINED_CONTENT_TYPE, sizeof(yuv_head), 0 };

    /** head_length and content_length must describe the full message. */
    struct sdc_common_head_stru head;
    head.version = SDC_VERSION;
    head.url = SDC_URL_COMBINED_IMAGE;
    head.method = SDC_METHOD_DELETE;
    head.head_length = sizeof(head) + sizeof(yuv_head);
    head.content_length = sizeof(*yuv_frame);

    struct iovec iov[3];
    iov[0].iov_base = (void *)&head;
    iov[0].iov_len = sizeof(head);
    iov[1].iov_base = &yuv_head;
    iov[1].iov_len = sizeof(yuv_head);
    iov[2].iov_base = (void *)yuv_frame;
    iov[2].iov_len = sizeof(*yuv_frame);

    (void)writev(fd, iov, 3);
}

int AIPlus::encode_jpeg(sdc_yuv_frame_s &yuv_frame, sdc_osd_region_s &osd_region, sdc_jpeg_frame_s &jpeg_frame) const
{
	// Encode a YUV frame (with one OSD overlay region) to JPEG via the SDC
	// codec protocol over fd_codec: writev(head, param, osd_region) then readv
	// the reply head plus the jpeg frame. Returns HW_OK on success, errno on
	// an I/O failure, or EIO when the reply head sizes do not match.
	sdc_encode_jpeg_param_s param;
	memset_s(&param, sizeof(param), 0, sizeof(param));
	param.qf = 80; // TODO: take the quality factor from the camera tool configuration
	param.osd_region_cnt = 1;
	param.region = { 0, 0, yuv_frame.width, yuv_frame.height };
	param.frame = yuv_frame;

	sdc_common_head_s head;
	memset_s(&head, sizeof(head), 0, sizeof(head));
	head.version = SDC_VERSION; // 0x5331
	head.url = SDC_URL_ENCODED_JPEG; // 0x00
	head.method = SDC_METHOD_CREATE;
	head.head_length = sizeof(head);
	head.content_length = sizeof(param) + sizeof(osd_region);

	// std::cout << sizeof(param) << " " <<  sizeof(osd_region) << " " << head.content_length << " " << sizeof(jpeg_frame) << "\n";
	
	struct iovec iov[3];
	iov[0].iov_base = &head;
	iov[0].iov_len = sizeof(head);
	iov[1].iov_base = &param;
	iov[1].iov_len = sizeof(param);
	iov[2].iov_base = (void *)&osd_region;
	iov[2].iov_len = sizeof(osd_region);

	int32_t nret = writev(fd_codec, iov, 3);
	if(nret < 0) {
		LOG_ERROR("Write the iovec failed, errno: %d, errmsg: %s", errno, strerror(errno));
		return errno;
	}

	// The reply reuses iov[0] (overwriting `head`) and places the encoded
	// jpeg frame descriptor into iov[1].
	iov[1].iov_base = &jpeg_frame;
	iov[1].iov_len = sizeof(jpeg_frame);
	nret = readv(fd_codec, iov, 2);
	if(nret < 0) {
		LOG_ERROR("Read the jpeg frame failed, errno: %d, errmsg: %s", errno, strerror(errno));
		return errno;
	}

	// std::cout << head.head_length << " " << sizeof(head) << " " << head.content_length << " " << sizeof(jpeg_frame) << "\n";

	// Sanity-check the (now overwritten) reply head against expected sizes.
	if(head.head_length != sizeof(head) || head.content_length != sizeof(jpeg_frame)) {
		LOG_ERROR("Translate the Yuv frame to jpeg frame failed");
		return EIO;
	}
	// LOG_DEBUG("Translate the Yuv frame to jpeg frame successfully");
	return HW_OK;
}

int AIPlus::free_encoded_jpeg(sdc_jpeg_frame_s &jpeg_frame) const
{
	// Ask the codec to release a previously encoded jpeg frame: a DELETE
	// head followed by the frame descriptor, written to fd_codec in one
	// gathered write. Returns HW_OK on success, errno on write failure.
	sdc_common_head_s head;
	memset_s(&head, sizeof(head), 0, sizeof(head));
	head.version = SDC_VERSION; // 0x5331
	head.url = SDC_URL_ENCODED_JPEG; // 0x00
	head.method = SDC_METHOD_DELETE;
	head.head_length = sizeof(head);
	head.content_length = sizeof(jpeg_frame);

	struct iovec iov[] = {
		{ .iov_base = (void *)&head, .iov_len = sizeof(head) },
		{ .iov_base = (void *)&jpeg_frame, .iov_len = sizeof(jpeg_frame) },
	};

	const int32_t written = writev(fd_codec, iov, sizeof(iov) / sizeof(iov[0]));
	if (written < 0) {
		LOG_ERROR("Write the iovec failed, errno: %d, errmsg: %s", errno, strerror(errno));
		return errno;
	}
	return HW_OK;
}

int AIPlus::save_jpeg(sdc_jpeg_frame_s &jpeg_frame, string &jpegPath) const
{
    // Write the encoded jpeg buffer to jpegPath. Returns HW_OK on success or
    // EIO on open/write failure (the old version silently returned HW_OK even
    // when the file could not be opened or the write failed).
    std::ofstream fs(jpegPath.c_str(), std::fstream::binary);
    if (!fs.is_open()) {
        LOG_ERROR("Open the jpeg file failed, file_path: %s, errno: %d, errmsg: %s", jpegPath.c_str(), errno, strerror(errno));
        return EIO;
    }
    fs.write((const char*)jpeg_frame.addr_virt, jpeg_frame.size);
    fs.close();
    if (!fs.good()) {
        LOG_ERROR("Write the jpeg file failed, file_path: %s, errno: %d, errmsg: %s", jpegPath.c_str(), errno, strerror(errno));
        return EIO;
    }
    return HW_OK;
}

/* Build a TLV metadata buffer for each sufficiently-confident detected object
 * in yuv_data, and dump the first two buffers to ./bin/ for offline analysis.
 * NOTE(review): many field values below (uuid, pts, rects, object attributes)
 * are hard-coded placeholders — confirm/replace before production use. The
 * event-publish and event-delete calls are currently commented out.
 * @param yuv_data  track data carrying the detection boxes and source image path
 * @return true on completion; a TLV error code (via ITGT_RETURN_VAL_IF_FAIL)
 *         or false if the source image cannot be read
 */
bool AIPlus::ConstructTlvMetaData(GCTrackData &yuv_data)
{
    // Set up the detection-result container. numOfObject is the capacity of
    // objectArr handed to ConvertForwardResult — presumably that call rewrites
    // it to the real object count; TODO confirm.
    SDC_SSD_RESULT_S stResult;
    stResult.numOfObject = 20;                  // max number of inference objects
    // stResult.thresh = m_confidenceThres;  // confidence threshold
    SDC_SSD_OBJECT_INFO_S objectArr[20];        // inference result array
    stResult.pObjInfo = objectArr;        
    ConvertForwardResult(yuv_data.objBoxs, stResult);
    cv::Mat img = cv::imread(yuv_data.img_path);
    // Guard against a missing/unreadable image; CropMat below would otherwise
    // operate on an empty Mat.
    if (img.empty()) {
        LOG_ERROR("cv::imread failed for GCTrackData img_path");
        return false;
    }
    for (size_t i = 0; i < stResult.numOfObject; i++) {
        LOG_INFO("stResult.pObjInfo[i].confidence=%f", stResult.pObjInfo[i].confidence);
        if (stResult.pObjInfo[i].confidence > 0.5) {
            std::vector<unsigned char> jpgData;
            if(CropMat(img, &stResult.pObjInfo[i], jpgData) == -1) {
                LOG_ERROR("CropMat frame stResult error");
                continue;
            }

            const std::string ALGTYPE_DETDOG = "plate";
            const std::string APP_NAME= "AIPlus";
            const std::string Alarm_Info = "Alarm_Info";     // metadata alarm type
            const std::string uuidInfo = "uuid";             // metadata UUID type
            const uint64_t ptsTime = 1040001;                // placeholder pts — TODO use real frame pts
            auto tlvBuf = std::make_shared<TlvBuf>(1024);
            const int64_t snapofst = 1;

            // Algorithm header
            uint32_t metaData[1] ={0};
            INT64 ret = tlvBuf->AppandBufHead(METADATA_TYPE, sizeof(metaData));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // Algorithm category name
            ret = tlvBuf->AppandBuf(ALG_TYPE, ALGTYPE_DETDOG.c_str(), ALGTYPE_DETDOG.length());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // Metadata name
            ret = tlvBuf->AppandBuf(META_NAME, APP_NAME.c_str(), APP_NAME.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");
            
            // Metadata type mask // fix me
            UINT64 mask = 3;
            ret = tlvBuf->AppandBuf(METATYPE_MASK, (char *)&mask, sizeof(mask));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // UUID
            ret = tlvBuf->AppandBuf(ALARM_NAME, uuidInfo.c_str(), uuidInfo.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // CHANNEL_ID
            const int64_t ch_yuvChnId = DEFAULT_CHANNEL_ID;
            ret = tlvBuf->AppandBuf(CHANNEL_ID, (char *)&ch_yuvChnId, sizeof(ch_yuvChnId));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // pts
            ret = tlvBuf->AppandBuf(PTS, (char *)&ptsTime, sizeof(ptsTime));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // snap pts
            ret = tlvBuf->AppandBuf(PIC_SNAPSHOT_TIMEMS, (char *)&ptsTime, sizeof(ptsTime));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // snap zone
            const int64_t zone1 = 8;  
            ret = tlvBuf->AppandBuf(PIC_SNAPSHOT_TZONE, (char *)&zone1, sizeof(zone1));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");


            ret = tlvBuf->AppandBuf(PIC_SNAPSHOT_DSTOFFSET, (char *)&snapofst, sizeof(snapofst));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf"); 

            const std::string agent = "app_agent";
            ret = tlvBuf->AppandBuf(PRODUCER_NAME, agent.c_str(), agent.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            //----------------------------------------------------------------------------------//
            const uint32_t obj_id1 = 1;
            ret = tlvBuf->AppandBuf(OBJ_ID,  (char *)&obj_id1, sizeof(obj_id1));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const uint32_t g_id1 = 1;
            ret = tlvBuf->AppandBuf(GLOBAL_OBJID,  (char *)&g_id1, sizeof(g_id1));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const std::string info = "ren";
            ret = tlvBuf->AppandBuf(DESCRIBE_INFO, info.c_str(), info.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");
            
            // Placeholder rect — presumably should come from stResult.pObjInfo[i]; TODO confirm
            const META_RECT_S rect1 = {1, 2 ,3, 4};
            ret = tlvBuf->AppandBuf(OBJ_POS_R, (char *)&rect1, sizeof(rect1));
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // Close-up picture attributes and data
            const std::string attrInfo1 = "glass";
            const std::string attrInfoval1 = "yes";
            ret = tlvBuf->AppandPair(OBJ_ATTR, attrInfo1.c_str(), attrInfo1.size(),
                attrInfoval1.c_str(), attrInfoval1.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const std::string attrInfo11 = "hair";
            const std::string attrInfoval11 = "yes";
            ret = tlvBuf->AppandPair(OBJ_ATTR, attrInfo11.c_str(), attrInfo11.size(),
                attrInfoval11.c_str(), attrInfoval11.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");
            
            ret = tlvBuf->AppandBuf(CLOSEUP_PIC, (char *)jpgData.data(), jpgData.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const std::string piclenInfo1 = "piclen";
            const std::string piclenInfoVal1 = "large";
            ret = tlvBuf->AppandPair(PIC_ATTR, piclenInfo1.c_str(), piclenInfo1.size(),
                piclenInfoVal1.c_str(), piclenInfoVal1.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const std::string piclenInfo12 = "picwigh";
            const std::string piclenInfoVal12 = "small";
            ret = tlvBuf->AppandPair(PIC_ATTR, piclenInfo12.c_str(), piclenInfo12.size(),
                piclenInfoVal12.c_str(), piclenInfoVal12.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // Panorama picture attributes and data (currently reuses the crop data)
            ret = tlvBuf->AppandBuf(PANORAMA_PIC, (char *)jpgData.data(), jpgData.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const std::string piclenInfo2 = "piclen";
            const std::string piclenInfoVal2 = "large";
            ret = tlvBuf->AppandPair(PIC_ATTR, piclenInfo2.c_str(), piclenInfo2.size(),
                piclenInfoVal2.c_str(), piclenInfoVal2.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            const std::string piclenInfo22 = "picwigh";
            const std::string piclenInfoVal22 = "small";
            ret = tlvBuf->AppandPair(PIC_ATTR, piclenInfo22.c_str(), piclenInfo22.size(),
                piclenInfoVal22.c_str(), piclenInfoVal22.size());
            ITGT_RETURN_VAL_IF_FAIL(LOGDOGDET, ret == ITGT_SUCCESS, ret, "Faile to Append Buf");

            // // Clear all metadata
            // if(SDC_UtilsEventDel(fd_event, 0, 0, (char *)APP_NAME.c_str()) != 0){
            //     LOG_DEBUG("SDC_UtilsEventDel error"); 
            // };

            // // Clear all metadata
            // if(SDC_UtilsEventDel(fd_event, 0, 0, (char *)agent.c_str()) != 0){
            //     LOG_DEBUG("SDC_UtilsEventDel error"); 
            // };

            // // Clear all metadata
            // if(SDC_UtilsEventDel(fd_event, 0, 0, (char *)ALGTYPE_DETDOG.c_str()) != 0){
            //     LOG_DEBUG("SDC_UtilsEventDel error"); 
            // };

            // Debug dump of the raw TLV buffer (first two frames only).
            // fopen can fail (e.g. ./bin missing) — guard before fwrite/fclose,
            // which previously dereferenced a possibly-null FILE*.
            char buffer[50];
            if (file_num < 2) {
                snprintf(buffer, sizeof(buffer), "./bin/%d.bin", file_num++);
                FILE *fp = fopen(buffer, "wb");
                if (fp != nullptr) {
                    fwrite(tlvBuf->data(), tlvBuf->CurrentIndex(), 1, fp);
                    fclose(fp);
                } else {
                    LOG_ERROR("Open the tlv dump file failed, file_path: %s, errno: %d, errmsg: %s",
                              buffer, errno, strerror(errno));
                }
            }

            // if(SDC_UtilsEventPublish(fd_event, 0,(UINT64)(tlvBuf->CurrentIndex()), tlvBuf->data(), pts) != 0){
            //     LOG_DEBUG("SDC_UtilsEventPublish det error"); 
            // };
        }
    }

    return true;
}

}