#include <time.h>
#include <stdio.h>
#include <QtTest/QTest>

#include "utils.h"
#include "mainwindow.h"
#include "ui_mainwindow.h"

using namespace std;
using namespace cv;
using namespace cv::gpu;

// Centre of a detected rectangle in 3-D space, tagged with an index.
// NOTE(review): not referenced anywhere in this file — presumably used by
// other translation units; confirm before removing.
struct rectCentre
{
    int num;   // detection index / identifier — TODO confirm
    float x;   // centre coordinates
    float y;
    float z;
};

// Global image-scale factor relative to 160x120 base resolution.
// NOTE(review): not referenced in this file — confirm external use.
float imgscale=1; // 1 for 160x120, 2 for 320x240, 4 for 640x480

// Construct the main window: build the Designer-generated UI and put the
// controls into their initial state. No files are selected yet, so the
// Process and Stop buttons start disabled.
MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent), ui(new Ui::MainWindow),
    mVidFileSelected(false), mCasFileSelected(false),
    mStopFlag(false)
{
    ui->setupUi(this); // must run before touching any widget below
    ui->LblVideoFileName->setText("Video file name");
    ui->BtnProc->setEnabled(false); // enabled once both input files are chosen
    ui->BtnStop->setEnabled(false); // enabled only while processing runs
}

// Release the Designer-generated UI object owned by this window.
MainWindow::~MainWindow()
{
    delete ui;
}

// Slot: let the user pick an .avi video file. On success, remember the
// path, show the bare file name in the label, and mark the video as
// selected so the Process button can be enabled.
void MainWindow::on_BtnVideoFileOpen_clicked()
{
    // Q File Dialog Method; returns an empty string if the user cancels.
    QString fileName = QFileDialog::getOpenFileName(this, tr("Open Video"), ".", tr("Video Files (*.avi)"));
    QFile file(fileName);
    if(file.exists()) //Check whether file is valid
    {
      mAviFileName = fileName;
      cout << "filename is " << mAviFileName.toUtf8().constData() << endl;//Convert Qstring to String
      QFileInfo info(file);
      ui->LblVideoFileName->setText(info.fileName());//Display video file name in Label
      // BUG FIX: this flag used to be set unconditionally, which enabled
      // the Process button even when the dialog was cancelled or the
      // file did not exist.
      mVidFileSelected = true;
    }
    setProcBtnEnable();
}

// Slot: let the user pick a Haar-cascade .xml file. On success, remember
// the path, show the bare file name in the label, and mark the cascade as
// selected so the Process button can be enabled.
void MainWindow::on_BtnCascadeOpen_clicked()
{
    // Returns an empty string if the user cancels the dialog.
    QString fileName2 = QFileDialog::getOpenFileName(this, tr("Open Cascade Files"), ".", tr("Cascade Files (*.xml)"));
    QFile file2(fileName2);
    if(file2.exists()) //Check whether file is valid
    {
      mCasFileName = fileName2;
      cout << "filename is " << mCasFileName.toUtf8().constData() << endl;//Convert Qstring to String
      QFileInfo info2(file2);
      ui->LblCascadeFileName->setText(info2.fileName());//Display cascade file name in Label
      // BUG FIX: this flag used to be set unconditionally, which enabled
      // the Process button even when the dialog was cancelled or the
      // file did not exist.
      mCasFileSelected = true;
    }
    setProcBtnEnable();
}

// Enable the Process button only when both a video file and a cascade
// file have been selected; disable it otherwise.
void MainWindow::setProcBtnEnable()
{
  // Pass the condition directly instead of the redundant
  // `== true` comparison and if/else branches.
  ui->BtnProc->setEnabled(mVidFileSelected && mCasFileSelected);
}

// Slot: process the selected video. For each frame, run face detection
// (CPU or GPU depending on the checkbox), overlay the running average
// processing time, and display the frame in the label at roughly the
// video's native frame rate. Stops at end of stream, on Stop button,
// or on a key press in the OpenCV window.
void MainWindow::on_BtnProc_clicked()
{
    // Lock the UI into "processing" state.
    ui->BtnProc->setEnabled(false);
    ui->BtnStop->setEnabled(true);
    mStopFlag = false; // FIX: was `= 0` on a bool member
    ui->CbGpuEnable->setEnabled(false);

    Mat frame, frame_gray;
    GpuMat frame_gpu;
    CascadeClassifier csdFontalFace;
    CascadeClassifier_GPU csdFontalFace_gpu;

    // The checkbox is disabled above, so its state cannot change mid-run;
    // read it once.
    bool useGpu = ui->CbGpuEnable->isChecked();

    // FIX: the load() result was previously ignored; report failure.
    bool cascadeOk;
    if(useGpu)
      cascadeOk = csdFontalFace_gpu.load(mCasFileName.toUtf8().constData());
    else
      cascadeOk = csdFontalFace.load(mCasFileName.toUtf8().constData());
    if(!cascadeOk)
      std::cout << "Error loading cascade file" << std::endl;

    VideoCapture capture(mAviFileName.toUtf8().constData());

    if (!capture.isOpened())
    {
        std::cout << "Error in Open Video File";
        // FIX: bail out instead of falling through to a division by a
        // bogus frame rate; restore the UI state first.
        setProcBtnEnable();
        ui->BtnStop->setEnabled(false);
        ui->CbGpuEnable->setEnabled(true);
        return;
    }

    double rate = capture.get(CV_CAP_PROP_FPS);
    bool stop(false);
    // Delay between each frame in ms, corresponding to the video frame
    // rate. FIX: guard against containers that report no FPS (rate == 0
    // made 1000/rate undefined); fall back to 25 fps (40 ms).
    int frame_delay_ms = (rate > 0) ? (int)(1000 / rate) : 40;

    // for all frames in video
    long long acc_proc_ms = 0; // accumulated processing time for the average
    int num_frames = 0;
    while (!stop && !mStopFlag)
    {
      struct timespec tm0, tm1;
      clock_gettime(CLOCK_MONOTONIC, &tm0);

      // read next frame if any
      if (!capture.read(frame))
          break;
      cvtColor(frame, frame_gray, CV_BGR2GRAY);
      frame_gpu.upload(frame_gray);

      if(useGpu)
        detectFaceEyes_GPU(csdFontalFace_gpu, frame_gpu, frame, true);
      else
        detectFaceEyes_CPU(csdFontalFace, frame_gray, frame, true);

      clock_gettime(CLOCK_MONOTONIC, &tm1);

      int proc_ms = timespecDiffMs(&tm1, &tm0);
      // Sleep only for the remainder of the frame period; at least 1 ms
      // so the GUI thread is never starved.
      int actual_delay_ms = frame_delay_ms - proc_ms > 0 ?
                  frame_delay_ms - proc_ms : 1;
      acc_proc_ms += proc_ms;
      num_frames++;
      // FIX: round the average correctly — the old code added 0.5 to the
      // accumulated total *before* dividing, not to the quotient.
      int avg_proc_ms = (int)(((double)acc_proc_ms / num_frames) + 0.5);

      char str_proc_ms[200];
      // FIX: snprintf instead of sprintf — cannot overrun the buffer.
      snprintf(str_proc_ms, sizeof(str_proc_ms), "avg time: %d ms", avg_proc_ms);

      putText(frame, string(str_proc_ms), Point(10, 20),
              FONT_HERSHEY_COMPLEX_SMALL, 0.7, cvScalar(0,0,250), 1, CV_AA);

      cv::cvtColor(frame, frame, CV_BGR2RGB);//Qimage use RGB on Default
      // FIX: pass the row stride (frame.step) — Mat rows may be padded,
      // and QImage otherwise assumes tightly packed rows.
      QImage img= QImage((const unsigned char*)(frame.data), frame.cols,
                         frame.rows, frame.step, QImage::Format_RGB888);

      // display on label
      ui->LblVideoDisplay->setPixmap(QPixmap::fromImage(img));
      // resize the label to fit the image
      ui->LblVideoDisplay->resize(ui->LblVideoDisplay->pixmap()->size());

      // introduce a delay
      // or press key to stop
      QTest::qSleep(actual_delay_ms);

      //Fixme: why this waitKey is needed for video to display corectly
      if (waitKey(1)>=0)
          stop= true;
    }
    // Close the video file.
    // Not required since called by destructor
    //capture.release();

    setProcBtnEnable();
    ui->BtnStop->setEnabled(false);
    ui->CbGpuEnable->setEnabled(true);
}

// Slot: request the processing loop in on_BtnProc_clicked() to exit.
void MainWindow::on_BtnStop_clicked()
{
    mStopFlag = true; // FIX: was `= 1` on a bool member
}

// Detect faces in the GPU grayscale image and, when bDraw is set, outline
// each detection on the colour frame.
//
// csdFontalFace_gpu  loaded GPU cascade classifier
// input_img_gpu      grayscale frame already uploaded to the device
// input_img          colour frame that rectangles are drawn onto
// bDraw              draw the detections when true
void MainWindow::detectFaceEyes_GPU(CascadeClassifier_GPU& csdFontalFace_gpu, GpuMat& input_img_gpu, Mat& input_img, bool bDraw)
{
    GpuMat detectionsDev;
    Mat detectionsHost;

    // Scale step 1.2, at least 4 neighbours per detection.
    int nDetected = csdFontalFace_gpu.detectMultiScale(input_img_gpu, detectionsDev, 1.2, 4);

    // The device buffer holds one Rect per detection; copy the valid
    // prefix back to the host.
    detectionsDev.colRange(0, nDetected).download(detectionsHost);

    if (!bDraw)
        return;

    const Rect* detected = detectionsHost.ptr<Rect>();
    for (int idx = 0; idx < nDetected; ++idx)
    {
        // Outline each face in red (BGR 0,0,255), 2 px thick.
        rectangle(input_img, detected[idx].tl(), detected[idx].br(), Scalar(0,0,255), 2);
    }
}

// Detect faces in the grayscale image and, when bDraw is set, outline
// each detection on the colour frame.
//
// csdFontalFace   loaded CPU cascade classifier
// input_gray_img  grayscale frame the detector runs on
// input_img       colour frame that rectangles are drawn onto
// bDraw           draw the detections when true
void MainWindow::detectFaceEyes_CPU(CascadeClassifier& csdFontalFace, Mat& input_gray_img, Mat& input_img, bool bDraw)
{
    // Scale step 1.2, at least 4 neighbours per detection.
    vector<Rect> detections;
    csdFontalFace.detectMultiScale(input_gray_img, detections, 1.2, 4);

    if (!bDraw)
        return;

    for (size_t k = 0; k < detections.size(); ++k)
    {
        // Outline each face in red (BGR 0,0,255), 2 px thick.
        rectangle(input_img, detections[k].tl(), detections[k].br(), Scalar(0,0,255), 2);
    }
}
