﻿#include <QCoreApplication>
#include <mutex>

extern "C" {
#include <libavutil/time.h>
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
}
#ifdef _WIN32
#include <windows.h>
#define KEYDOWN(vk) ( 0x8000 & ::GetAsyncKeyState(vk) )
#endif
#include "Ipcmsg.h"
#include "yolo_v2_class.hpp"
#include "acceleratetransfer.h"
#ifdef OPENCV
#include <opencv2/opencv.hpp>
#endif

// Guards lazy construction of the shared detector (see init_yolo).
static std::mutex yolomu;
// Process-wide YOLO detector, created once on first use by init_yolo.
static std::shared_ptr<Detector> deepModelCommon;
// Network input width/height, filled in after the detector is loaded.
static int net_w,net_h;
// Main-loop exit flag, set from a SIGINT handler (non-Windows) or the ESC key (Windows).
// NOTE(review): a plain bool written from a signal handler is not
// async-signal-safe — consider volatile std::sig_atomic_t or std::atomic<bool>.
static bool quit=false;

/**
 * Allocate an AVFrame together with its pixel buffers for the given
 * format and dimensions.
 *
 * @return the allocated frame, or NULL on failure (callers already
 *         check the result for NULL).
 */
static AVFrame* alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
    AVFrame* picture = av_frame_alloc();
    if (!picture)
        return NULL;

    picture->format = pix_fmt;
    picture->width = width;
    picture->height = height;

    /* allocate the buffers for the frame data */
    int ret = av_frame_get_buffer(picture, 0);
    if (ret < 0) {
        // BUG FIX: the original passed the FILE* `stderr` to a %s format
        // specifier (undefined behavior) and then called exit(1), leaking
        // `picture` and killing the whole process from a helper.
        // Log the error code, free the frame, and let the caller handle NULL.
        av_log(NULL, AV_LOG_ERROR, "Could not allocate frame data (error %d).\n", ret);
        av_frame_free(&picture);
        return NULL;
    }
    return picture;
}

/**
 * Copy one video frame out of the shared-memory buffer into *tmp,
 * allocating *tmp on first use (its geometry/format then come from the
 * first valid header seen).
 *
 * @param pbuffer shared-memory block starting with a DATA_HEADER.
 * @param tmp     in/out frame pointer; allocated here when NULL.
 * @return true when a frame was copied, false on invalid input or
 *         allocation failure.
 */
static bool copy_array_frame(char* pbuffer,AVFrame** tmp){
    if (pbuffer == nullptr || tmp == nullptr)
        return false;
    LPDATA_HEADER lp = (LPDATA_HEADER)pbuffer;
    const int width = lp->width;
    const int height = lp->height;
    const long long pktdts = lp->pkt_dts;
    // Reject obviously invalid headers (the shared memory may not be
    // populated yet by the producer process).
    if (width <= 0 || height <= 0 || pktdts <= 0) {
        return false;
    }
    if (*tmp == NULL) {
        *tmp = alloc_picture((AVPixelFormat)lp->type, width, height);
        if (*tmp == NULL) {
            return false;
        }
        printf("tmp->linesize[0]: %d\n", (*tmp)->linesize[0]);
        printf("tmp->linesize[1]: %d\n", (*tmp)->linesize[1]);
        printf("tmp->linesize[2]: %d\n", (*tmp)->linesize[2]);
        printf("tmp->linesize[3]: %d\n", (*tmp)->linesize[3]);
    }
    // BUG FIX: the original unconditionally copied planes 0-3; for 3-plane
    // formats such as YUV420P, data[3] is NULL and passing it to
    // av_image_copy_plane is undefined behavior. Copy only the planes the
    // destination frame actually has, and skip zero-width planes.
    for (int plane = 0; plane < 4; ++plane) {
        if ((*tmp)->data[plane] == nullptr)
            continue;
        // Bytes per row of this plane, derived from the pixel format.
        const int bytewidth = av_image_get_linesize((AVPixelFormat)(*tmp)->format, (*tmp)->width, plane);
        if (bytewidth <= 0)
            continue;
        // Chroma planes (1, 2) are copied at half height — assumes 4:2:0
        // subsampling, matching the original code. TODO confirm if other
        // subsampled formats ever arrive here.
        const int plane_h = (plane == 1 || plane == 2) ? height / 2 : height;
        av_image_copy_plane((*tmp)->data[plane], (*tmp)->linesize[plane],
                            (const uint8_t*)pbuffer + lp->offset[plane], lp->linesize[plane],
                            bytewidth, plane_h);
    }

    (*tmp)->pkt_dts = pktdts;
    (*tmp)->pts = (*tmp)->pkt_dts;
    return true;
}
#ifdef OPENCV
/**
 * Convert an AVFrame into a BGR cv::Mat.
 *
 * Supports planar YUV420P and packed RGB24/BGR24; any other format
 * yields an empty Mat.
 */
static cv::Mat avframe_cvmat(AVFrame* frame){
    const int width = frame->width, height = frame->height;
    if(frame->format==AV_PIX_FMT_YUV420P){
        // Repack the three planes into one contiguous I420 buffer and let
        // OpenCV do the color conversion.
        // NOTE(review): assumes linesize == width on every plane (no row
        // padding) — TODO confirm against the producer.
        cv::Mat tmp_img = cv::Mat::zeros( height*3/2, width, CV_8UC1 );
        cv::Mat mat_uint8;
        memcpy(tmp_img.data, frame->data[0], width*height );
        memcpy(tmp_img.data + width*height, frame->data[1], width*height/4 );
        memcpy(tmp_img.data + width*height*5/4, frame->data[2], width*height/4);
        cv::cvtColor(tmp_img, mat_uint8,cv::COLOR_YUV2BGR_I420);
        return mat_uint8;
    }
    // BUG FIX: the original condition `format!=RGB24 || format!=BGR24` is a
    // tautology (always true), so every non-YUV420P format fell into this
    // branch. The flat w*h*3 memcpy below is only valid for packed 3-byte
    // formats, so the intended test is equality with RGB24 or BGR24.
    else if(frame->format==AV_PIX_FMT_RGB24||frame->format==AV_PIX_FMT_BGR24){
        const AVPixFmtDescriptor* desc = av_pix_fmt_desc_get((AVPixelFormat)frame->format);
        int channels = desc->nb_components;
        if(channels > 3)
            channels = 3;
        cv::Mat mat_uint8 = cv::Mat::zeros(height, width, CV_8UC3);
        // Packed data lives entirely in plane 0; assumes no row padding.
        memcpy(mat_uint8.data,frame->data[0],width*height*channels);
        return mat_uint8;
    }
    // Unsupported pixel format.
    return cv::Mat();
}

// Wrap a packed 8-bit 3-channel pixel buffer in a cv::Mat and, when a
// non-empty path is given, write it to disk. Returns the Mat either way.
static cv::Mat save_rgb_from_packet_planes(const char* path,const int w, const int h, const int type, const unsigned char* planes){
    assert(type == CV_8UC3);
    cv::Mat img = cv::Mat::zeros(h, w, type);
    const size_t nbytes = (size_t)w * (size_t)h * 3;
    memcpy(img.data, planes, nbytes);
    if (*path != '\0')
        cv::imwrite(path, img);
    return img;
}

// Convert planar float pixels in [0,1] (plane-major layout: channel*w*h)
// into an interleaved 8-bit 3-channel cv::Mat; optionally save it when a
// non-empty path is given. Returns the Mat either way.
static cv::Mat save_rgb_from_uniform_planes(const char* path,const int w, const int h, const int type, const float* planes){
    assert(type == CV_8UC3);
    cv::Mat img = cv::Mat::zeros(h, w, type);
    const int plane_sz = w * h;
    for (int row = 0; row < h; ++row) {
        unsigned char* dst_row = img.data + (size_t)row * w * 3;
        for (int col = 0; col < w; ++col) {
            for (int ch = 0; ch < 3; ++ch) {
                const float v = planes[ch * plane_sz + row * w + col];
                dst_row[col * 3 + ch] = (unsigned char)(255 * v);
            }
        }
    }
    if (*path != '\0')
        cv::imwrite(path, img);
    return img;
}
#endif

/**
 * Lazily construct the process-wide YOLO detector and cache its input
 * dimensions in net_w/net_h. Safe to call from multiple threads; only the
 * first caller actually builds the model.
 *
 * BUG FIX: the original used double-checked locking that read
 * deepModelCommon without synchronization (a data race on the shared_ptr)
 * and manual lock()/unlock() calls, which leak the mutex if Detector's
 * constructor throws. A lock_guard with a single check under the lock is
 * both exception-safe and race-free.
 */
static void init_yolo(const std::string cfg, const std::string weights){
    std::lock_guard<std::mutex> guard(yolomu);
    if (deepModelCommon != nullptr)
        return;
    std::shared_ptr<Detector> det = std::make_shared<Detector>(cfg, weights);
    det->nms = 0.1;
    // Preserve the original 2-second settle delay after model load —
    // TODO confirm why this is needed (GPU warm-up?).
    av_usleep(1000 * 2000);
    deepModelCommon = det;
    net_w = deepModelCommon->get_net_width();
    net_h = deepModelCommon->get_net_height();
}

#ifndef _WIN32

// Remove a leftover named POSIX semaphore (best effort) so a fresh run
// starts from a clean state; prints its current value before unlinking.
void clear_sem_t(const std::string id){
    if (id.empty())
        return;
    sem_t* sem = sem_open(id.c_str(), O_RDWR);
    if (sem == SEM_FAILED)
        return;
    int value = 0;
    sem_getvalue(sem, &value);
    printf("current semphore value is:%d\n", value);
    sem_close(sem);
    sem_unlink(id.c_str());
}
#endif

// Signal handler: request termination of the processing loop. The loop
// polls `quit` each iteration; no cleanup is done here.
static void on_exit(int code){
    (void)code; // signal number is not needed
    quit = true;
}

/**
 * Entry point: pulls YUV frames from a shared-memory IPC channel, converts
 * them to the network input via OpenGL (AccelerateTransfer), runs YOLO
 * detection, and writes the resulting boxes back into the shared header.
 */
int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);
    std::string cfg_path="/home/ysq/yolov3files/indoor/indoor.cfg";
    std::string weight_path="/home/ysq/yolov3files/indoor/indoor.weights";

    IPC_MSG msg;
    #ifdef _WIN32
    msg.Create("20211208",false,DEFAULT_MAPSIZE);
    CProcessMutex mutex("20211207_mutex");
    #else
    msg.Create(20211208,false,DEFAULT_MAPSIZE);
    CProcessMutex mutex("20211207_mutex",false);
    signal(SIGINT,on_exit);
    #endif
    // BUG FIX: `frame` was uninitialized; copy_array_frame dereferences
    // *tmp (checks it against NULL), which was undefined behavior.
    AVFrame *frame = nullptr;
    std::vector<bbox_t> objects;
    int objcount = 0;
    init_yolo(cfg_path, weight_path);
    AccelerateTransfer acc;
    AccelerateTransfer::opengl_init();
    bool isaccInit=false;
    float *data=nullptr;
    // Scale factors from network coordinates back to source-frame pixels.
    float wk = 1.0, hk = 1.0;
    // Detections written back per frame; the original broke after index 25
    // but only AFTER writing boxes[26] — an off-by-one past the apparent
    // capacity. TODO confirm the real capacity of lp->boxes.
    const int kMaxBoxes = 26;

    for(;!quit;){
        bool isLockAble = mutex.Lock();
        bool res_copy=false;
        if (isLockAble) {
            char* pbuffer = (char*)msg.GetBuffer();
            res_copy = copy_array_frame(pbuffer,&frame);
            // Publish the PREVIOUS iteration's detections into the shared header.
            if(objcount>0){
                LPDATA_HEADER lp = (LPDATA_HEADER)pbuffer;
                memset(lp->preserverd,'\0',sizeof(lp->preserverd));
                int i = 0;
                // BUG FIX: bound-check before writing (the original wrote
                // one entry past the limit before breaking).
                for (; i < objcount && i < kMaxBoxes; i++) {
                    struct bbox_t& obx = objects.at(i);
                    int box_topw = obx.x*wk;
                    int box_toph = obx.y*hk;
                    int box_width = obx.w*wk;
                    int box_height = obx.h*hk;
                    lp->boxes[i][0] = box_topw;
                    lp->boxes[i][1] = box_toph;
                    lp->boxes[i][2] = box_topw + box_width;
                    lp->boxes[i][3] = box_toph + box_height;
                    lp->boxes[i][4] = obx.obj_id;
                }
                lp->boxNum = i;
            }
            mutex.UnLock();
        }
        if(res_copy){
            // One-time GL setup sized to the first frame seen.
            if(!isaccInit){
                acc.transfer_init(net_w,net_h,"Wind");
                acc.prepare_context(frame->width,frame->height,4);
                data = new float[net_w*net_h*3];
                wk = (float)frame->width / net_w;
                hk = (float)frame->height / net_h;
                isaccInit = true;
            }
            // Upload Y/U/V planes, render, then read back the three RGB
            // channels as planar floats in darknet's B,G,R plane order.
            acc.update_texture((AccelerateTransfer::YUVCHANNEL)0,frame->data[0]);
            acc.update_texture((AccelerateTransfer::YUVCHANNEL)1,frame->data[1]);
            acc.update_texture((AccelerateTransfer::YUVCHANNEL)2,frame->data[2]);
            acc.draw_once();
            int _w = net_w,_h=net_h;
            image_t img={_w,_h,3,data};
            int size0 = acc.read_pixel_channel((AccelerateTransfer::RGBCHANNEL)2,_w*_h*4,_w,_h,(unsigned char*)data);//B
            int size1 = acc.read_pixel_channel((AccelerateTransfer::RGBCHANNEL)1,_w*_h*4,_w,_h,(unsigned char*)data+size0);//G
            acc.read_pixel_channel((AccelerateTransfer::RGBCHANNEL)0,_w*_h*4,_w,_h,(unsigned char*)data+size0+size1);//R
            objects = deepModelCommon->detect(img,0.25);
            objcount = objects.size();
        }
#ifdef _WIN32
        if (KEYDOWN(VK_ESCAPE))// exit the program when ESC is pressed
           quit=true;
#endif
    }
    // BUG FIX: release resources that were previously leaked.
    delete[] data;
    av_frame_free(&frame);
    // BUG FIX: the polling loop above IS the application's main loop;
    // calling a.exec() here would block forever in an empty Qt event loop
    // after quit was requested.
    return 0;
}
