#include "cvideocaptrue.h"
#include <QCoreApplication>
#include <QDateTime>
#include "photowgt.h"

CVideocaptrue::CVideocaptrue(QObject* parent):QThread(parent)
{
    m_bStop = false;
    m_bRegister = false;
    m_bStartCheck = false;
    m_vMatFaceCheck.clear();

    // Resolve data files relative to the application directory so the
    // program is not tied to one machine's layout.  Qt accepts '/' as the
    // path separator on every platform, including Windows.
    const QString strAppPath = QCoreApplication::applicationDirPath();
    const QString strXmlPath = strAppPath + "/haarcascades/haarcascade_frontalface_alt.xml";

    if (!m_cascade.load(strXmlPath.toStdString().c_str()))
    {
        qDebug("load haarcascades failed! path= %s\n", strXmlPath.toStdString().c_str());
    }

    // QVector<Mat> must be registered before it can travel through queued
    // signal/slot connections.
    qRegisterMetaType<QVector<Mat>>("QVector<Mat>");
    connect(this, SIGNAL(sigRegister(QString,QString,QByteArray)),
            this, SLOT(slotRegister(QString,QString,QByteArray)));
    connect(this, SIGNAL(sigRequestFaceCheck(QVector<Mat>)),
            this, SLOT(slotRequestFaceCheck(QVector<Mat>)));

    // BUGFIX: the SeetaFace model was previously loaded from a hard-coded
    // absolute path ("F:\\partTimeJob\\...") that only exists on the
    // original developer's machine; load it from the application directory
    // (the original path pointed into the bin/ directory) instead.
    const QString strModelPath = strAppPath + "/seeta_fd_frontal_v1.0.bin";
    detector = new seeta::FaceDetection(strModelPath.toStdString().c_str());
    detector->SetMinFaceSize(40);
    detector->SetScoreThresh(2.f);
    detector->SetImagePyramidScaleFactor(0.8f);
    detector->SetWindowStep(4, 4);
}

CVideocaptrue::~CVideocaptrue()
{
    // Ask run() to leave its capture loop, then block until the thread has
    // actually finished before tearing the object down.
    m_bStop = true;
    quit();   // only matters if an event loop is running; harmless otherwise
    wait();

    // BUGFIX: `detector` is allocated with `new` in the constructor but was
    // never released, leaking the SeetaFace engine for every instance.
    delete detector;
    detector = nullptr;
}

void CVideocaptrue::UserReg(QString strNo, QString strName)
{
    // Record the student's identity and raise the registration flag so the
    // capture loop can pick the request up on its next iteration.
    qDebug("[Register-start] Student Number=%s, Name=%s\n",
           strNo.toStdString().c_str(), strName.toStdString().c_str());

    m_strNo = strNo;
    m_strName = strName;

    // Set the flag last, after the identity fields are in place.
    m_bRegister = true;
}

void CVideocaptrue::StartFaceCheck()
{
    // Arm the one-shot face-check flag; run() clears it again after
    // detecting and displaying a single frame.
    m_bStartCheck = true;
}

// Run SeetaFace detection on `newImgMat` and draw a blue rectangle around
// the first detected face (the image is annotated in place).
// Returns the number of faces found (0 if none).
int CVideocaptrue::OneFaceDetect(cv::Mat& newImgMat)
{
    // The detector needs a single-channel image; convert only if required.
    cv::Mat img_gray;
    if (newImgMat.channels() != 1)
        cv::cvtColor(newImgMat, img_gray, cv::COLOR_BGR2GRAY);
    else
        img_gray = newImgMat;

    seeta::ImageData img_data;
    img_data.data = img_gray.data;
    img_data.width = img_gray.cols;
    img_data.height = img_gray.rows;
    img_data.num_channels = 1;

    // NOTE: the original timed this call into an unused variable via
    // `long t0 = cv::getTickCount()`, truncating the int64 tick count on
    // LLP64 platforms; the dead timing code has been removed.
    std::vector<seeta::FaceInfo> faces = detector->Detect(img_data);

    // Annotate only the first face, as the original's loop-with-break did.
    if (!faces.empty())
    {
        cv::Rect face_rect(faces[0].bbox.x, faces[0].bbox.y,
                           faces[0].bbox.width, faces[0].bbox.height);
        cv::rectangle(newImgMat, face_rect, CV_RGB(0, 0, 255), 4, 8, 0);
    }

    // Return the detection count instead of a constant 0 so callers can
    // tell whether a face was present (existing callers ignore the return).
    return static_cast<int>(faces.size());
}

// Write `matImg` to `strFileName` as an image file (format chosen by
// extension, per cv::imwrite).
// NOTE(review): cv::imwrite() returns true on SUCCESS, so the `!` makes
// this function return true on FAILURE — inverted from what the name
// suggests.  Callers are not visible in this file and may rely on the
// inverted value, so it is documented here rather than changed; confirm
// against call sites before fixing.
bool CVideocaptrue::SaveJpg(Mat &matImg, QString strFileName)
{
    return !imwrite(strFileName.toStdString().c_str(), matImg);
}

// Worker-thread entry point: keeps camera 0 open, grabs frames, and hands
// them to the parent widget until m_bStop is raised (see the destructor).
void CVideocaptrue::run()
{   
    static int nFaceNum = 1;
    cv::VideoCapture camCapture;
    cv::Mat newFrame;
    while(!m_bStop)
    {
        if (camCapture.isOpened())
        {
            camCapture >> newFrame;
            if (!newFrame.empty())
            {
                // NOTE(review): C-style downcast assumes the QObject parent
                // passed to the constructor is always a PhotoWgt — confirm
                // at the construction site.
                PhotoWgt* pPhotoWidget = (PhotoWgt*)this->parent();
                if(m_bStartCheck)
                {
//                   if(nFaceNum > 4)
//                   {
//                       m_bStartCheck = false;
//                       nFaceNum = 1;
//                   }
//                   else
//                   {
//                       pPhotoWidget->ShowFaceImg(newFrame, nFaceNum);
//                       nFaceNum++;
//                   }
                   // One-shot: consume the flag, mark the first detected
                   // face, then push the annotated frame to the widget.
                   m_bStartCheck = false;
                   OneFaceDetect(newFrame);
                   pPhotoWidget->ShowFaceImg(newFrame, nFaceNum);
                }

                //warning: Consumption cpu                
                // NOTE(review): ShowVideoImg/ShowFaceImg are called directly
                // from this worker thread; if they touch QWidget state they
                // should be routed through queued signals instead — verify.
                pPhotoWidget->ShowVideoImg(newFrame);
            }
            else
            {
                // An empty frame usually means the device went away;
                // releasing lets the else-branch below retry open(0).
                camCapture.release();
            }
        }
        else
        {
            // Retry opening the default camera; failures are silently
            // retried on the next loop iteration.
            if (!camCapture.open(0))
            { 
            }
        }

        // NOTE(review): cv::waitKey() only delivers key events while a
        // HighGUI window exists; without one it typically returns -1
        // immediately — confirm this break path is ever taken.
        if (cv::waitKey(100) >= 0)
        {
            break;
        }
        QThread::msleep(1);
    }
    camCapture.release();
    qDebug("video captrue exit normal!");
}

// Handle the face-detection HTTP response: on success, walk the per-face
// result vector (parallel to m_vMatFaceCheck); on failure, log the error.
void CVideocaptrue::slotFaceDectReturn(QString strId, QVector<int> vResult, bool bResult)
{
    if(bResult)
    {
        for(int i = 0; i < vResult.size(); i++)
        {
            // Kept for the disabled ShowStrangerImg call below.
            int nExist = vResult.at(i);
            // BUGFIX: valid indices are 0..size()-1, so the guard must be
            // ">=" — the original ">" let i == size() through, which would
            // crash the .at(i) access once the code below is re-enabled.
            if(i >= m_vMatFaceCheck.size())
            {
                qDebug("[FACE DETECT] invalid result index=%d\n", i);
            }
            else
            {
//                MainFormWidget* p = (MainFormWidget*)this->parent();
//                p->ShowStrangerImg(m_vMatFaceCheck.at(i), nExist);
                qDebug("[FACE DETECT] show the stranger index=%d\n", i);
            }
        }
    }
    else
    {
        qDebug("[FACE DETECT] http response err!");
//        MainFormWidget* p = (MainFormWidget*)this->parent();
//        p->ShowDetectTip(tr("The server has no response or data prase error!"));
    }
}

// Stub: intended to issue the face-registration HTTP request carrying the
// student id/name and the encoded face images.  Not implemented yet.
void CVideocaptrue::slotRegister(QString strId, QString strName, QByteArray imgs)
{
    // send regist face http request

}

// Encode each captured face as a base64 JPEG (quality 90) and collect the
// strings for the detection request; also keeps the Mats in
// m_vMatFaceCheck so slotFaceDectReturn can index them later.
void CVideocaptrue::slotRequestFaceCheck(QVector<Mat> vMatFace)
{
    QStringList strList;
    vector<uchar> vecImg;            
    vector<int> vecCompression_params;
    vecCompression_params.push_back(CV_IMWRITE_JPEG_QUALITY);
    vecCompression_params.push_back(90);
    m_vMatFaceCheck = vMatFace;
    for(int i = 0; i < vMatFace.size(); i++)
    {
        const Mat& matImg = vMatFace.at(i);

        if(!imencode(".jpg", matImg, vecImg, vecCompression_params))
        {
            // BUGFIX: the original fell through on failure and appended the
            // previous iteration's (or an empty) buffer; skip this image.
            qDebug("Regsiter matfaceimg trans to jpg failed!\n");
            continue;
        }

        QByteArray imgRegisterFace = QByteArray(reinterpret_cast<const char*>(vecImg.data()),
                                                static_cast<int>(vecImg.size()));
        QByteArray imgbase64 = imgRegisterFace.toBase64();
        strList.append(imgbase64);
    }
    //m_pHttp->RequsetDect("1", strList);
}

