#ifndef LANE_DETECT_H_
#define LANE_DETECT_H_

#include "DefaultDefine.h"
#include "OsdShowStr.h"
#include "vgs_img.h"

#include <vector>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/imgproc/types_c.h>

namespace hiych{

using namespace cv;
using namespace std;

// Converts a HiMPP video frame (YUV420SP) to an OpenCV BGR-packed cv::Mat
// using the hardware IVE color-space-conversion engine.
// All members are static: this class is a stateless namespace-like utility,
// with the scratch IVE descriptors shared across calls (NOT thread-safe).
class IVE_TO_MAT
{
public:
    // Lightweight descriptor for an RGB image living in MMZ (media memory
    // zone): both address views plus geometry. The buffer it points to is
    // allocated by yuvFrame2rgb() and must be released with
    // HI_MPI_SYS_MmzFree by the consumer.
    typedef struct tagIPC_IMAGE {
        HI_U64 u64PhyAddr;  // physical address of the pixel buffer
        HI_U64 u64VirAddr;  // CPU-visible virtual address of the same buffer
        HI_U32 u32Width;    // image width in pixels
        HI_U32 u32Height;   // image height in pixels
    } IPC_IMAGE;

    // Converts *srcFrame to a 3-channel 8-bit Mat in dstMat.
    // Returns HI_SUCCESS on success, HI_FAILURE otherwise.
    static HI_S32 frame2Mat(VIDEO_FRAME_INFO_S *srcFrame, Mat &dstMat);

    // Shared IVE source/destination descriptors and CSC control block,
    // reused on every conversion (one conversion at a time).
    static IVE_SRC_IMAGE_S pstSrc;
    static IVE_DST_IMAGE_S pstDst;
    static IVE_CSC_CTRL_S stCscCtrl;

private:
    // Fills pstSrc/pstDst from the frame's plane addresses and strides.
    static void IveImageParamCfg(IVE_SRC_IMAGE_S *pstSrc, IVE_DST_IMAGE_S *pstDst,
    VIDEO_FRAME_INFO_S *srcFrame);

    // Runs the IVE CSC job; on success dstImage owns a freshly allocated
    // MMZ buffer holding the packed RGB pixels.
    static int yuvFrame2rgb(VIDEO_FRAME_INFO_S *srcFrame, IPC_IMAGE *dstImage);
};

// NOTE(review): these are non-inline static member *definitions* at namespace
// scope in a header. If this header is ever included from more than one
// translation unit they violate the one-definition rule (multiple-definition
// link errors). Prefer moving them to a .cpp file, or marking them `inline`
// if the project builds with C++17 — confirm before changing.
IVE_SRC_IMAGE_S IVE_TO_MAT::pstSrc;
IVE_DST_IMAGE_S IVE_TO_MAT::pstDst;
IVE_CSC_CTRL_S IVE_TO_MAT::stCscCtrl;

// Populates the IVE source descriptor from the incoming YUV420SP frame and
// derives a packed-RGB destination descriptor of the same geometry.
void IVE_TO_MAT::IveImageParamCfg(IVE_SRC_IMAGE_S *pstSrc, IVE_DST_IMAGE_S *pstDst,
    VIDEO_FRAME_INFO_S *srcFrame)
{
    const int planeCount = 3; // Y, U, V plane slots in the frame descriptor

    // Source: mirror every plane's virtual/physical address and stride.
    pstSrc->enType = IVE_IMAGE_TYPE_YUV420SP;
    for (int plane = 0; plane < planeCount; ++plane) {
        pstSrc->au64VirAddr[plane] = srcFrame->stVFrame.u64VirAddr[plane];
        pstSrc->au64PhyAddr[plane] = srcFrame->stVFrame.u64PhyAddr[plane];
        pstSrc->au32Stride[plane] = srcFrame->stVFrame.u32Stride[plane];
    }
    pstSrc->u32Width = srcFrame->stVFrame.u32Width;
    pstSrc->u32Height = srcFrame->stVFrame.u32Height;

    // Destination: interleaved 8-bit 3-channel image, same size as the
    // source; only plane 0 carries data, so the remaining strides are zero.
    pstDst->enType = IVE_IMAGE_TYPE_U8C3_PACKAGE;
    pstDst->u32Width = pstSrc->u32Width;
    pstDst->u32Height = pstSrc->u32Height;
    pstDst->au32Stride[0] = pstSrc->au32Stride[0];
    pstDst->au32Stride[1] = 0;
    pstDst->au32Stride[2] = 0;
}

// Converts *srcFrame (YUV420SP) to packed RGB via the IVE CSC engine.
//
// On success, dstImage describes a newly allocated MMZ buffer holding the
// pixels; the CALLER owns it and must release it with HI_MPI_SYS_MmzFree.
// On failure, any buffer allocated here is freed and an SDK error code is
// returned.
int IVE_TO_MAT::yuvFrame2rgb(VIDEO_FRAME_INFO_S *srcFrame, IPC_IMAGE *dstImage)
{
    IVE_HANDLE hIveHandle;
    HI_S32 s32Ret = 0;
    stCscCtrl.enMode = IVE_CSC_MODE_PIC_BT709_YUV2RGB; // IVE_CSC_MODE_VIDEO_BT601_YUV2RGB
    IveImageParamCfg(&pstSrc, &pstDst, srcFrame);

    s32Ret = HI_MPI_SYS_MmzAlloc_Cached(&pstDst.au64PhyAddr[0], (void **)&pstDst.au64VirAddr[0],
        "User", HI_NULL, pstDst.u32Height*pstDst.au32Stride[0] * 3); // 3: bytes per RGB pixel
    if (HI_SUCCESS != s32Ret) {
        // Allocation failed — nothing was allocated, so there is nothing to
        // free here (the previous version freed an unallocated buffer).
        SAMPLE_PRT("HI_MPI_SYS_MmzAlloc_Cached err\n");
        return s32Ret;
    }

    s32Ret = HI_MPI_SYS_MmzFlushCache(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0],
        pstDst.u32Height*pstDst.au32Stride[0] * 3); // 3: bytes per RGB pixel
    if (HI_SUCCESS != s32Ret) {
        HI_MPI_SYS_MmzFree(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0]);
        return s32Ret;
    }
    memset_s((void *)pstDst.au64VirAddr[0], pstDst.u32Height*pstDst.au32Stride[0] * 3,
        0, pstDst.u32Height*pstDst.au32Stride[0] * 3); // 3: bytes per RGB pixel
    HI_BOOL bInstant = HI_TRUE;

    // Submit the color-space-conversion job to the IVE hardware.
    s32Ret = HI_MPI_IVE_CSC(&hIveHandle, &pstSrc, &pstDst, &stCscCtrl, bInstant);
    if (HI_SUCCESS != s32Ret) {
        HI_MPI_SYS_MmzFree(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0]);
        return s32Ret;
    }

    if (HI_TRUE == bInstant) {
        // Block until the hardware reports completion, re-polling on timeout.
        HI_BOOL bFinish = HI_TRUE;
        HI_BOOL bBlock = HI_TRUE;
        s32Ret = HI_MPI_IVE_Query(hIveHandle, &bFinish, bBlock);
        while (HI_ERR_IVE_QUERY_TIMEOUT == s32Ret) {
            usleep(100); // 100: poll interval in microseconds
            s32Ret = HI_MPI_IVE_Query(hIveHandle, &bFinish, bBlock);
        }
        if (HI_SUCCESS != s32Ret) {
            // Query failed with a real error (not a timeout): release the
            // buffer and propagate instead of silently returning success.
            HI_MPI_SYS_MmzFree(pstDst.au64PhyAddr[0], (void *)pstDst.au64VirAddr[0]);
            return s32Ret;
        }
    }
    // Hand the buffer (and its ownership) to the caller.
    dstImage->u64PhyAddr = pstDst.au64PhyAddr[0];
    dstImage->u64VirAddr = pstDst.au64VirAddr[0];
    dstImage->u32Width = pstDst.u32Width;
    dstImage->u32Height = pstDst.u32Height;

    return HI_SUCCESS;
}

// Converts a HiMPP frame to a 3-channel 8-bit cv::Mat.
//
// The intermediate MMZ RGB buffer produced by yuvFrame2rgb() is copied into
// dstMat and then released here, so the caller only owns dstMat.
// Returns HI_SUCCESS on success, HI_FAILURE otherwise.
int IVE_TO_MAT::frame2Mat(VIDEO_FRAME_INFO_S *srcFrame, Mat &dstMat)
{
    HI_U32 w = srcFrame->stVFrame.u32Width;
    HI_U32 h = srcFrame->stVFrame.u32Height;
    int bufLen = w * h * 3; // 3: bytes per RGB pixel
    HI_U8 *srcRGB = NULL;
    IPC_IMAGE dstImage;
    if (yuvFrame2rgb(srcFrame, &dstImage) != HI_SUCCESS) {
        SAMPLE_PRT("yuvFrame2rgb err\n");
        return HI_FAILURE;
    }
    srcRGB = (HI_U8 *)dstImage.u64VirAddr;
    dstMat.create(h, w, CV_8UC3);
    // NOTE(review): this bulk copy assumes the IVE stride equals the frame
    // width (stride*3 == w*3 bytes per row); if the pipeline ever produces a
    // padded stride, rows must be copied individually — confirm with the VPSS
    // configuration.
    memcpy_s(dstMat.data, bufLen * sizeof(HI_U8), srcRGB, bufLen * sizeof(HI_U8));
    // Bug fix: free the buffer at the stored virtual address. The previous
    // code passed &dstImage.u64VirAddr — the address of the stack field
    // itself — so the SDK was asked to unmap the wrong pointer.
    HI_MPI_SYS_MmzFree(dstImage.u64PhyAddr, (void *)dstImage.u64VirAddr);
    return HI_SUCCESS;
}

// Pedestrian guide-line (crosswalk lane) detection
// Detects the dominant bright lane/guide-line marking in a frame.
class LaneDetect
{
public:
    // Returns the detected marking's bounding box (scaled to 1920x1080
    // screen coordinates) paired with the horizontal offset of its center
    // from the screen's vertical midline; the int is -1 when no usable
    // image could be obtained from the frame.
    static pair<RectBox,int> getResult(VIDEO_FRAME_INFO_S& srcFrm);
};

// Finds the tallest bright horizontal marking in the frame.
//
// Pipeline: YUV frame -> BGR Mat -> gray -> blur -> binary threshold ->
// erode/dilate (suppress thin vertical noise) -> external contours ->
// tallest bounding rect. The rect is then scaled from the 640x384 detection
// resolution to 1920x1080 screen coordinates.
//
// Returns {box, offset_x}: the scaled bounding box and the signed distance
// of its center from the screen's vertical midline (negative = left).
// On failure to obtain an image, returns a zeroed box paired with -1.
pair<RectBox,int> LaneDetect::getResult(VIDEO_FRAME_INFO_S& srcFrm)
{
    Mat img;
    const float screenXMid = 1920 / 2.0; // output screen width midpoint
    int offset_x = 0;
    static OSD osd; // persistent on-screen-display writer for debug text

    RectBox boxs;
    memset_s(&boxs, sizeof(RectBox), 0, sizeof(RectBox));

    // Bug fix: the conversion result was previously ignored, and the null
    // check compared cv::MatSize against 0 (a pointer comparison that is
    // never true). Check the return code and use Mat::empty() instead.
    if (IVE_TO_MAT::frame2Mat(&srcFrm, img) != HI_SUCCESS || img.empty()) {
        SAMPLE_PRT("image is null\n");
        return make_pair(boxs, -1);
    }

    // Grayscale + blur + global threshold to isolate bright markings.
    // (cvtColor writes to a fresh Mat, so the clone of img was unnecessary.)
    Mat gray, blurred, binary, eroded, dilated;
    cvtColor(img, gray, COLOR_BGR2GRAY);
    GaussianBlur(gray, blurred, Size(5, 5), 10);
    threshold(blurred, binary, 190, 255, THRESH_BINARY);

    // Horizontal structuring elements: erode thin vertical streaks away,
    // then dilate to re-join the surviving horizontal strokes.
    erode(binary, eroded, getStructuringElement(MORPH_RECT, Size(3, 1)), Point(-1, -1), 3);
    dilate(eroded, dilated, getStructuringElement(MORPH_RECT, Size(5, 1)), Point(-1, -1), 1);

    vector<vector<Point>> contours;
    findContours(dilated, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);

    // Keep only rects large enough in both dimensions to be a marking.
    vector<Rect> rects;
    for (size_t i = 0; i < contours.size(); ++i) {
        Rect candidate = boundingRect(contours[i]);
        if (candidate.height > 50 && candidate.width > 50) {
            rects.push_back(candidate);
            rectangle(img, candidate, Scalar(0, 255, 0), 2); // debug overlay
        }
    }

    if (!rects.empty()) {
        // Tallest rect first — treat it as the primary marking.
        std::sort(rects.begin(), rects.end(), [](const cv::Rect& r1, const cv::Rect& r2) {
            return r1.height > r2.height;
        });
        // Coordinate transformation 640*384 -> 1920*1080.
        boxs.xmin = rects[0].x * 3; // 3: horizontal scale 1920/640
        boxs.ymin = (int)(rects[0].y * 2.25); // 2.25: vertical scale 1080/480... per original tuning
        boxs.xmax = boxs.xmin + rects[0].width * 3; // 3: horizontal scale
        // Cast the whole product for consistency with ymin (the original
        // cast only the int operand, a no-op before the float multiply).
        boxs.ymax = boxs.ymin + (int)(rects[0].height * 2.25); // 2.25: vertical scale

        int rect_mid_x = (boxs.xmax - boxs.xmin) / 2 + boxs.xmin;
        offset_x = rect_mid_x - screenXMid;

        // Debug message: top-left corner, size, and which way we deviate.
        std::string tmp = "LTop: " + to_string(rects[0].x) + " " + to_string(rects[0].y) +
            " Size:" + to_string(rects[0].width) + " " + to_string(rects[0].height);
        if (abs(offset_x) > 250) { // 250: deadband before reporting a side
            if (offset_x < 0) {
                tmp += " L_offset:" + to_string(offset_x);
            } else {
                tmp += " R_offset:" + to_string(offset_x);
            }
        } else {
            tmp += " N_offset:" + to_string(offset_x);
        }

        cout << tmp << endl;
        osd.showStr(tmp, 10, 90);
    }

    return make_pair(boxs, offset_x);
}

}

#endif