/*
 * Copyright (c) 2022 HiSilicon (Shanghai) Technologies CO., LIMITED.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <errno.h>

#include "sample_comm_ive.h"
#include "sample_comm_nnie.h"
#include "sample_media_ai.h"
#include "ai_infer_process.h"
#include "yolov2_hand_detect.h"
#include "vgs_img.h"
#include "ive_img.h"
#include "misc_util.h"
#include "hisignalling.h"
#include "hand_classify.h"

#include <time.h>

/* ********************************************************************** */
// #include <opencv2/core.hpp>
// #include <opencv2/objdetect.hpp>

/* ********************************************************************** */

/*
Mat ImgProcess(Mat image)
{
	Mat imageGray, imageGuussian;
	Mat imageSobelX, imageSobelY, imageSobelOut;

	//1. 原图像大小调整，提高运算效率
	//resize(image, image, Size(800, 600));
#ifdef SHOW
	imshow("1.原图像", image);
#endif // SHOW

	//2. 转化为灰度图
	cvtColor(image, imageGray,COLOR_RGB2GRAY);
#ifdef SHOW
	imshow("2.灰度图", imageGray);
#endif // SHOW

	//3. 高斯平滑滤波
	GaussianBlur(imageGray, imageGuussian, Size(3, 3), 0);
#ifdef SHOW
	imshow("3.高斯平衡滤波", imageGuussian);
#endif // SHOW
	//4.求得水平和垂直方向灰度图像的梯度差,使用Sobel算子
	Mat imageX16S, imageY16S;
	Sobel(imageGuussian, imageX16S, CV_16S, 1, 0, 3, 1, 0, 4);
	Sobel(imageGuussian, imageY16S, CV_16S, 0, 1, 3, 1, 0, 4);
	convertScaleAbs(imageX16S, imageSobelX, 1, 0);
	convertScaleAbs(imageY16S, imageSobelY, 1, 0);
	imageSobelOut = imageSobelX - imageSobelY;
#ifdef SHOW
	imshow("4.X方向梯度", imageSobelX);
	imshow("4.Y方向梯度", imageSobelY);
	imshow("4.XY方向梯度差", imageSobelOut);
#endif // SHOW
	//5.均值滤波，消除高频噪声
	blur(imageSobelOut, imageSobelOut, Size(3, 3));
#ifdef SHOW
	imshow("5.均值滤波", imageSobelOut);
#endif // SHOW
	//6.二值化
	Mat imageSobleOutThreshold;
	threshold(imageSobelOut, imageSobleOutThreshold, 100, 255, THRESH_BINARY);
#ifdef SHOW
	imshow("6.二值化", imageSobleOutThreshold);
#endif // SHOW
	//7.闭运算，填充条形码间隙
	Mat  element = getStructuringElement(0, Size(12,12));
	morphologyEx(imageSobleOutThreshold, imageSobleOutThreshold, MORPH_CLOSE, element);
#ifdef SHOW
	imshow("7.闭运算", imageSobleOutThreshold);
#endif // SHOW
	//8. 腐蚀，去除孤立的点
	erode(imageSobleOutThreshold, imageSobleOutThreshold, element);
#ifdef SHOW
	imshow("8.腐蚀", imageSobleOutThreshold);
#endif // SHOW
	//9. 膨胀，填充条形码间空隙，根据核的大小，有可能需要2~3次膨胀操作
	dilate(imageSobleOutThreshold, imageSobleOutThreshold, element);
	dilate(imageSobleOutThreshold, imageSobleOutThreshold, element);
	dilate(imageSobleOutThreshold, imageSobleOutThreshold, element);
#ifdef SHOW
	imshow("9.膨胀", imageSobleOutThreshold);
#endif // SHOW
	vector<vector<Point>> contours;
	vector<Vec4i> hiera;

	//10.通过findContours找到条形码区域的矩形边界
	findContours(imageSobleOutThreshold, contours, hiera, RETR_EXTERNAL, CHAIN_APPROX_NONE);
	int areaMax = 0;
	Rect rectResult;
	for (int i = 0; i < contours.size(); i++)
	{
		Rect rect = boundingRect((Mat)contours[i]);
		if((rect.height*rect.width)>=areaMax)
		{
			rectResult = rect;
		}
		else
		{
			cout << "检测不到条形码" << endl;
			//exit(1);
		}
		areaMax = rectResult.height*rectResult.width;
		
	}
	rectangle(image, rectResult, Scalar(255), 2);
	waitKey(1000);
#ifdef SHOW
	imshow("10.找出二维码矩形区域", image);
#endif // SHOW
	return image;
}
*/

#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif
#endif /* End of #ifdef __cplusplus */

#define HAND_FRM_WIDTH     640
#define HAND_FRM_HEIGHT    384
#define DETECT_OBJ_MAX     32
#define RET_NUM_MAX        3
#define DRAW_RETC_THICK    8    // Draw the width of the line
#define WIDTH_LIMIT        32
#define HEIGHT_LIMIT       32
#define IMAGE_WIDTH        224  // The resolution of the model IMAGE sent to the classification is 224*224
#define IMAGE_HEIGHT       224
// #define MODEL_FILE_GESTURE    "/userdata/models/hand_classify/hand_gesture.wk" // darknet framework wk model
#define MODEL_FILE_GESTURE    "/userdata/models/hand_classify/face_classify.wk" // darknet framework wk model
#define USLEEP_TIME   100 // 100: usleep time, in microseconds
#define UART_MSG_LEN  16  // 16: room for a 15-char protocol message plus the NUL terminator

static int biggestBoxIndex;                           // index of the largest detected box in boxs[], -1 when none
static IVE_IMAGE_S img;                               // source image converted from the input video frame
static DetectObjInfo objs[DETECT_OBJ_MAX] = {0};      // raw detection-network results
static RectBox boxs[DETECT_OBJ_MAX] = {0};            // detection boxes scaled to the output frame size
static RectBox objBoxs[DETECT_OBJ_MAX] = {0};         // box drawn in green (the biggest hand)
static RectBox remainingBoxs[DETECT_OBJ_MAX] = {0};   // boxes drawn in red (the other hands)
static RectBox cnnBoxs[DETECT_OBJ_MAX] = {0}; // Store the results of the classification network
static RecogNumInfo numInfo[RET_NUM_MAX] = {0};       // top-N classification results
static IVE_IMAGE_S imgIn;                             // cropped region fed to the classification network
static IVE_IMAGE_S imgDst;                            // resized (IMAGE_WIDTH x IMAGE_HEIGHT) classification input
static VIDEO_FRAME_INFO_S frmIn;
static VIDEO_FRAME_INFO_S frmDst;
int uartFd = 0;                                       // UART file descriptor shared by detect/classify paths

char buf[UART_MSG_LEN];      // send buffer; was [15], one byte too small for the 15-char sprintf messages + NUL
char rev_buf[UART_MSG_LEN];  // receive buffer for UartRead
uint32_t rev_num = 0;        // number of bytes (strlen) last received over UART

uint32_t count = 0;          // round-robin selector for the barcode reply messages


/* --------------------------------------------------------------------------------------------------------------------------1 */

// static IVE_SRC_IMAGE_S pstSrc;
// static IVE_DST_IMAGE_S pstDst;
// static IVE_CSC_CTRL_S stCscCtrl;

// static HI_VOID IveImageParamCfg(IVE_SRC_IMAGE_S *pstSrc, IVE_DST_IMAGE_S *pstDst,
//     VIDEO_FRAME_INFO_S *srcFrame)
// {
//     pstSrc->enType = IVE_IMAGE_TYPE_YUV420SP;
//     pstSrc->au64VirAddr[0] = srcFrame->stVFrame.u64VirAddr[0];
//     pstSrc->au64VirAddr[1] = srcFrame->stVFrame.u64VirAddr[1];
//     pstSrc->au64VirAddr[2] = srcFrame->stVFrame.u64VirAddr[2]; // 2: Image data virtual address

//     pstSrc->au64PhyAddr[0] = srcFrame->stVFrame.u64PhyAddr[0];
//     pstSrc->au64PhyAddr[1] = srcFrame->stVFrame.u64PhyAddr[1];
//     pstSrc->au64PhyAddr[2] = srcFrame->stVFrame.u64PhyAddr[2]; // 2: Image data physical address

//     pstSrc->au32Stride[0] = srcFrame->stVFrame.u32Stride[0];
//     pstSrc->au32Stride[1] = srcFrame->stVFrame.u32Stride[1];
//     pstSrc->au32Stride[2] = srcFrame->stVFrame.u32Stride[2]; // 2: Image data span

//     pstSrc->u32Width = srcFrame->stVFrame.u32Width;
//     pstSrc->u32Height = srcFrame->stVFrame.u32Height;

//     pstDst->enType = IVE_IMAGE_TYPE_U8C3_PACKAGE;
//     pstDst->u32Width = pstSrc->u32Width;
//     pstDst->u32Height = pstSrc->u32Height;
//     pstDst->au32Stride[0] = pstSrc->au32Stride[0];
//     pstDst->au32Stride[1] = 0;
//     pstDst->au32Stride[2] = 0; // 2: Image data span
// }

// static HI_S32 yuvFrame2rgb(VIDEO_FRAME_INFO_S *srcFrame, IPC_IMAGE *dstImage)
// {
//     IVE_HANDLE hIveHandle;
//     HI_S32 s32Ret = 0;
//     stCscCtrl.enMode = IVE_CSC_MODE_PIC_BT709_YUV2RGB; // IVE_CSC_MODE_VIDEO_BT601_YUV2RGB
//     IveImageParamCfg(&pstSrc, &pstDst, srcFrame);

//     s32Ret = HI_MPI_SYS_MmzAlloc_Cached(&pstDst.au64PhyAddr[0], (void **)&pstDst.au64VirAddr[0],
//         "User", HI_NULL, pstDst.u32Height*pstDst.au32Stride[0] * 3); // 3: multiple
//     if (HI_SUCCESS != s32Ret) {
//         HI_MPI_SYS_MmzFree(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0]);
//         SAMPLE_PRT("HI_MPI_SYS_MmzFree err\n");
//         return s32Ret;
//     }

//     s32Ret = HI_MPI_SYS_MmzFlushCache(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0],
//         pstDst.u32Height*pstDst.au32Stride[0] * 3); // 3: multiple
//     if (HI_SUCCESS != s32Ret) {
//         HI_MPI_SYS_MmzFree(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0]);
//         return s32Ret;
//     }
//     // 3: multiple
//     memset_s((void *)pstDst.au64VirAddr[0], pstDst.u32Height*pstDst.au32Stride[0] * 3,
//         0, pstDst.u32Height*pstDst.au32Stride[0] * 3); // 3: multiple
//     HI_BOOL bInstant = HI_TRUE;

//     s32Ret = HI_MPI_IVE_CSC(&hIveHandle, &pstSrc, &pstDst, &stCscCtrl, bInstant);
//     if (HI_SUCCESS != s32Ret) {
//         HI_MPI_SYS_MmzFree(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0]);
//         return s32Ret;
//     }

//     if (HI_TRUE == bInstant) {
//         HI_BOOL bFinish = HI_TRUE;
//         HI_BOOL bBlock = HI_TRUE;
//         s32Ret = HI_MPI_IVE_Query(hIveHandle, &bFinish, bBlock);
//         while (HI_ERR_IVE_QUERY_TIMEOUT == s32Ret) {
//             usleep(100); // 100: usleep time
//             s32Ret = HI_MPI_IVE_Query(hIveHandle, &bFinish, bBlock);
//         }
//     }
//     dstImage->u64PhyAddr = pstDst.au64PhyAddr[0];
//     dstImage->u64VirAddr = pstDst.au64VirAddr[0];
//     dstImage->u32Width = pstDst.u32Width;
//     dstImage->u32Height = pstDst.u32Height;

//     return HI_SUCCESS;
// }

// static HI_S32 frame2Mat(VIDEO_FRAME_INFO_S *srcFrame, Mat &dstMat)
// {
//     HI_U32 w = srcFrame->stVFrame.u32Width;
//     HI_U32 h = srcFrame->stVFrame.u32Height;
//     int bufLen = w * h * 3;
//     HI_U8 *srcRGB = NULL;
//     IPC_IMAGE dstImage;
//     if (yuvFrame2rgb(srcFrame, &dstImage) != HI_SUCCESS) {
//         SAMPLE_PRT("yuvFrame2rgb err\n");
//         return HI_FAILURE;
//     }
//     srcRGB = (HI_U8 *)dstImage.u64VirAddr;
//     dstMat.create(h, w, CV_8UC3);
//     memcpy_s(dstMat.data, bufLen * sizeof(HI_U8), srcRGB, bufLen * sizeof(HI_U8));
//     HI_MPI_SYS_MmzFree(dstImage.u64PhyAddr, (void *)&(dstImage.u64VirAddr));
//     return HI_SUCCESS;
// }

/* --------------------------------------------------------------------------------------------------------------------------1 */

/*
 * Load the classification wk model and the yolo2 hand-detection model, then
 * open the UART used to talk to the peer MCU.
 * On success *model holds the classification-network handle; on failure
 * *model is set to 0 and the negative error code from CnnCreate is returned.
 */
HI_S32 Yolo2HandDetectResnetClassifyLoad(uintptr_t* model)
{
    SAMPLE_SVP_NNIE_CFG_S *self = NULL;
    HI_S32 ret;

    ret = CnnCreate(&self, MODEL_FILE_GESTURE);
    *model = ret < 0 ? 0 : (uintptr_t)self;
    HandDetectInit(); // Initialize the hand detection model
    /* The original printed "success" unconditionally (and misspelled "classify") */
    if (ret < 0) {
        SAMPLE_PRT("Load hand detect classify model failed, ret=%d\n", ret);
    } else {
        SAMPLE_PRT("Load hand detect classify model success\n");
    }
    /* uart open init */
    uartFd = UartOpenInit();
    if (uartFd < 0) {
        printf("uart1 open failed\r\n");
    } else {
        printf("uart1 open successed\r\n");
    }
    return ret;
}

/*
 * Release the classification network and the hand-detection model loaded by
 * Yolo2HandDetectResnetClassifyLoad(). Always returns 0.
 */
HI_S32 Yolo2HandDetectResnetClassifyUnload(uintptr_t model)
{
    CnnDestroy((SAMPLE_SVP_NNIE_CFG_S*)model);
    HandDetectExit(); // Uninitialize the hand detection model
    SAMPLE_PRT("Unload hand detect classify model success\n"); /* fixed "claasify" typo */

    return 0;
}

/*
 * Return the index of the box with the largest area in boxs[0..detectNum-1],
 * or -1 when there is no usable detection (empty list, or the best box is
 * degenerate: only 1 pixel wide or high).
 */
static HI_S32 GetBiggestHandIndex(RectBox boxs[], int detectNum)
{
    if (detectNum <= 0) {
        return -1; /* the original read boxs[0] before checking detectNum */
    }

    /* local name chosen so it does not shadow the file-scope biggestBoxIndex */
    HI_S32 bestIndex = 0;
    HI_S32 bestArea = (boxs[0].xmax - boxs[0].xmin + 1) * (boxs[0].ymax - boxs[0].ymin + 1);

    for (HI_S32 i = 1; i < detectNum; i++) {
        HI_S32 area = (boxs[i].xmax - boxs[i].xmin + 1) * (boxs[i].ymax - boxs[i].ymin + 1);
        if (area > bestArea) {
            bestArea = area;
            bestIndex = i;
        }
    }

    /* reject a degenerate best box (width or height of exactly 1 pixel) */
    if (((boxs[bestIndex].xmax - boxs[bestIndex].xmin + 1) == 1) ||
        ((boxs[bestIndex].ymax - boxs[bestIndex].ymin + 1) == 1)) {
        return -1;
    }

    return bestIndex;
}

/*
 * hand gesture recognition info:
 * when the UART peer requests it (first received byte == '0'), map the best
 * classification result to a person-ID string and send it over the UART.
 */
static void HandDetectFlag(const RecogNumInfo resBuf)
{
    HI_CHAR *gestureName = NULL;
    char msg[16]; /* 16: 15-char protocol message + NUL; sprintf into the global buf[15] overflowed it */
    float score;
    score = resBuf.score * 100 / 4096.0; /* 4096: model score scale -> percentage */

    /* Clear the buffer first so strlen() below is safe even when UartRead
     * does not NUL-terminate what it read. */
    memset(rev_buf, 0, sizeof(rev_buf));
    UartRead(uartFd, rev_buf, sizeof(rev_buf) - 1, 100); /* 100: read timeout -- TODO confirm unit */
    rev_num = strlen(rev_buf);
    if (rev_num) {
        if (rev_buf[0] == '0') { /* '0': peer asks for a classification result */
            if (score < 100) {
                /* score is float: the original %d specifier was a UB format mismatch */
                printf("*********************** score: %f***********************\n ", score);
                if (score > 75) { /* 75: confidence threshold (percent) */
                    switch (resBuf.num) {
                        case 0u:
                            gestureName = "Kang";
                            /* snprintf bounds the write that sprintf previously overflowed */
                            snprintf(msg, sizeof(msg), "####20192333001"); /* kang */
                            UartSend(uartFd, msg, strlen(msg) + 1);
                            SAMPLE_PRT("-------------------------------------------------------------name----:%s ----score----:%f\n", gestureName, score);
                            break;
                        case 1u:
                            gestureName = "JunBin";
                            snprintf(msg, sizeof(msg), "####20192333002");
                            UartSend(uartFd, msg, strlen(msg) + 1);
                            SAMPLE_PRT("-------------------------------------------------------------name----:%s ----score----:%f\n", gestureName, score);
                            break;
                        case 2u:
                            gestureName = "XianDa";
                            snprintf(msg, sizeof(msg), "####20192333003");
                            UartSend(uartFd, msg, strlen(msg) + 1);
                            SAMPLE_PRT("-------------------------------------------------------------name----:%s ----score----:%f\n", gestureName, score);
                            break;
                        default:
                            gestureName = "others"; /* unrecognized class: nothing is sent */
                            break;
                    }
                    SAMPLE_PRT("face detection success\n");
                }
            }
        }
        rev_num = 0;
    }
}

/*
 * Run hand detection on srcFrm, draw the result boxes on dstFrm, then crop
 * the biggest hand and run the classification network on it. When nothing is
 * detected, service the UART "barcode" request path ('1') instead.
 * Returns HI_SUCCESS or the error code of the last failing sub-call.
 */
HI_S32 Yolo2HandDetectResnetClassifyCal(uintptr_t model, VIDEO_FRAME_INFO_S *srcFrm, VIDEO_FRAME_INFO_S *dstFrm)
{
    SAMPLE_SVP_NNIE_CFG_S *self = (SAMPLE_SVP_NNIE_CFG_S*)model;
    HI_S32 resLen = 0;
    int objNum;
    int ret;
    int num = 0;
    time_t timep1;
    time_t timep2;
    double costSec;

    ret = FrmToOrigImg((VIDEO_FRAME_INFO_S*)srcFrm, &img);
    SAMPLE_CHECK_EXPR_RET(ret != HI_SUCCESS, ret, "hand detect for YUV Frm to Img FAIL, ret=%#x\n", ret);

    time(&timep1);
    objNum = HandDetectCal(&img, objs); // Send IMG to the detection net for reasoning
    time(&timep2);
    /* difftime() is safe across minute boundaries (the old tm_sec subtraction
     * could go negative) and the zero check avoids dividing by zero; the old
     * code also passed an integer to %f, which is undefined behavior. */
    costSec = difftime(timep2, timep1);
    printf("face detect cost %.0fs, fps is %f\n", costSec, (costSec > 0.0) ? (1.0 / costSec) : 0.0);

    if (objNum > DETECT_OBJ_MAX) {
        objNum = DETECT_OBJ_MAX; /* defensive clamp; presumably HandDetectCal already bounds this — confirm */
    }
    for (int i = 0; i < objNum; i++) {
        cnnBoxs[i] = objs[i].box;
        RectBox *box = &objs[i].box;
        /* scale detection coordinates from the net input size to the dstFrm size */
        RectBoxTran(box, HAND_FRM_WIDTH, HAND_FRM_HEIGHT,
            dstFrm->stVFrame.u32Width, dstFrm->stVFrame.u32Height);
        boxs[i] = *box;
    }
    biggestBoxIndex = GetBiggestHandIndex(boxs, objNum);

    // When an object is detected, a rectangle is drawn in the DSTFRM
    if (biggestBoxIndex >= 0) {
        objBoxs[0] = boxs[biggestBoxIndex];
        MppFrmDrawRects(dstFrm, objBoxs, 1, RGB888_GREEN, DRAW_RETC_THICK); // Target hand objnum is equal to 1

        /* Collect the non-biggest boxes first, then draw them once. The old
         * code drew inside the loop with count objNum-1 while only `num`
         * entries were filled, painting stale boxes from previous frames. */
        for (int j = 0; j < objNum; j++) {
            if (j != biggestBoxIndex) {
                remainingBoxs[num++] = boxs[j];
            }
        }
        if (num > 0) {
            MppFrmDrawRects(dstFrm, remainingBoxs, num, RGB888_RED, DRAW_RETC_THICK);
        }

        // Crop the image to classification network
        ret = ImgYuvCrop(&img, &imgIn, &cnnBoxs[biggestBoxIndex]);
        SAMPLE_CHECK_EXPR_RET(ret < 0, ret, "ImgYuvCrop FAIL, ret=%#x\n", ret);

        if ((imgIn.u32Width >= WIDTH_LIMIT) && (imgIn.u32Height >= HEIGHT_LIMIT)) {
            COMPRESS_MODE_E enCompressMode = srcFrm->stVFrame.enCompressMode;
            ret = OrigImgToFrm(&imgIn, &frmIn);
            frmIn.stVFrame.enCompressMode = enCompressMode;
            ret = MppFrmResize(&frmIn, &frmDst, IMAGE_WIDTH, IMAGE_HEIGHT); // scale the crop to the model input size
            ret = FrmToOrigImg(&frmDst, &imgDst);

            time(&timep1);
            ret = CnnCalU8c1Img(self, &imgDst, numInfo, sizeof(numInfo) / sizeof((numInfo)[0]), &resLen);
            time(&timep2);
            costSec = difftime(timep2, timep1);
            printf("face classify cost %.0fs, fps is %f\n", costSec, (costSec > 0.0) ? (1.0 / costSec) : 0.0);

            SAMPLE_CHECK_EXPR_RET(ret < 0, ret, "CnnCalU8c1Img FAIL, ret=%#x\n", ret);
            HI_ASSERT(resLen <= sizeof(numInfo) / sizeof(numInfo[0]));
            HandDetectFlag(numInfo[0]);
            MppFrmDestroy(&frmDst);
        }
        IveImgDestroy(&imgIn);
    } else {
        /* No hand detected: answer the UART "barcode" request ('1') with one
         * of three rotating reply messages selected by the global count. */
        char msg[16]; /* 16: 15-char protocol message + NUL; sprintf into the global buf[15] overflowed it */

        /* clear first so strlen() is safe even if UartRead leaves rev_buf unterminated */
        memset(rev_buf, 0, sizeof(rev_buf));
        UartRead(uartFd, rev_buf, sizeof(rev_buf) - 1, 100); /* 100: read timeout -- TODO confirm unit */
        rev_num = strlen(rev_buf);
        /* %u: rev_num is uint32_t (the original %d was a signedness mismatch) */
        printf("------------------------------------------------------Code_rev_num:%u------------------------------------------------------\n", rev_num);
        if (rev_num) {
            printf("------------------------------------------------------Code_rev_num:%c------------------------------------------------------\n", rev_buf[0]);
            if (rev_buf[0] == '1') {
                if (count == 0) {
                    snprintf(msg, sizeof(msg), "**A125634897102");
                    UartSend(uartFd, msg, strlen(msg) + 1);
                    count += 1;
                } else if (count == 1) {
                    snprintf(msg, sizeof(msg), "**B673598223444");
                    UartSend(uartFd, msg, strlen(msg) + 1);
                    count += 1;
                } else if (count == 2) {
                    snprintf(msg, sizeof(msg), "**C111222555411");
                    UartSend(uartFd, msg, strlen(msg) + 1);
                    count = 0; /* wrap back to the first message */
                }
            }
            rev_num = 0;
        }
    }

    return ret;
}

#ifdef __cplusplus
#if __cplusplus
}
#endif
#endif /* End of #ifdef __cplusplus */
