/**

Title   : VideoDisplayWidget.cpp
Author  : Manikantan Nambi
URL     :

Description : Function definitions for VideoWidget (declared in src/VideoWidget.h)
Created  : 1/1/11
Modified : 2/14/11

*/


#include <iostream>
#include <fstream>

#include <QtOpenGL/QtOpenGL>
#include <QTimer>
#include <QDebug>


#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <newmat11/newmat.h>

#include "src/VideoWidget.h"

using namespace std;
using namespace cv;

// Shared application state defined in another translation unit.
extern CvMat *position_points; // end-effector position history; allocated in the VideoWidget constructor
extern CvPoint probeTip;       // current end-effector tip location (pixels); updated in paintGL()
extern int Scount;             // total number of steps taken; reset to 0 in the constructor


float FRAME_RATES = 60.0; // display frame rate (Hz)

/** \brief Constructs the video widget: allocates the OpenCV image buffers,
 *  opens the camera, and starts the refresh timer that drives paintGL().
 *
 *  @param parent   parent widget for the QGLWidget
 *  @param joystick joystick object queried in paintGL() for the velocity vector (not owned)
 */
VideoWidget::VideoWidget(QWidget *parent, Joystick *joystick) : QGLWidget(parent) {

    calibration_done = 0;  // calibration flag: non-zero once calibration completes
    blob_detect = 0;       // blob detection off by default
    cvthreshold = 50;      // lower threshold for cvThreshold()
    cvthresholdMax = 255;  // max value for cvThreshold()
    blobthreshold = 0;
    videoPtr = NULL;       // no video writer until rec() is called (was left uninitialized,
                           // making the NULL check in close() undefined behavior)

    // workspace corner markers (pixels)
    c1.x = 50;  c1.y = 50;
    c2.x = 590; c2.y = 50;
    c3.x = 590; c3.y = 430;
    c4.x = 50;  c4.y = 430;

    // defining the mid-gray color used for all overlay drawing
    color.val[0] = 127;
    color.val[1] = 127;
    color.val[2] = 127;
    color.val[3] = 0.0;

    // target point
    target.x = 300;
    target.y = 200;

    cFrame = cvCreateImage(cvSize(640, 480), IPL_DEPTH_32F, 3);     // calibrated/warped frame (32 == IPL_DEPTH_32F)
    frame = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 1);       // capture frame
    gray_image = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 1);  // grayscale working frame
    blobImage = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 1);   // blob frame

    setFixedSize(640, 480); // set fixed size for the video window

    refresh_timer = new QTimer(this); // camera capture timer; parented so Qt deletes it with the widget

    camera = cvCreateCameraCapture(300); // initialize camera

    if(camera != NULL) {  // if camera is found
        qDebug() <<"\nCamera registered. Address = "<< camera<<endl;

        cvSetCaptureProperty(camera, CV_CAP_PROP_FPS, FRAME_RATES); // request display frame rate from the camera
        cvSetCaptureProperty(camera, CV_CAP_PROP_FORMAT, CV_8UC1);

        qDebug() << frame->nChannels;

        // cvQueryFrame() returns a pointer to the capture's internal buffer
        // (which must never be released by the caller); free the placeholder
        // image allocated above first so it does not leak.
        cvReleaseImage(&frame);
        frame = cvQueryFrame(camera); // obtain frame from camera

        connect(refresh_timer, SIGNAL(timeout(void)), this, SLOT(updateImage(void))); // redraw on every tick
    } else {
        qDebug()<< "Connect camera"<<endl; // if camera is not found
    }

    refresh_timer->start(1000.0/FRAME_RATES);  // set the refresh timer to fire at FRAME_RATES Hz

    tempJoystick = joystick;    // object to store joystick address (not owned)

    Scount = 0;     // total number of steps taken

    position_points = cvCreateMat(100000, 2, CV_32FC1);     // array to store end-effector location in mm

    time(&start);     // start the FPS clock
    count = 0;        // fps counter

    return;
}

/** \brief Destructor: stops frame processing, releases the camera, and quits
 *  the application.
 *
 *  The timer is stopped BEFORE the capture is released: in the original order
 *  a pending timeout could fire paintGL() against an already-freed camera.
 */
VideoWidget::~VideoWidget() {

    refresh_timer->stop(); // stop timer first so no further frames are grabbed

    cvReleaseCapture(&camera); // release camera

    qDebug()<< "nanoCalib::VideoWidget:: camera released";

    qApp->quit();

    return;
}

void VideoWidget::close()
{
    cvReleaseCapture(&camera); // release camera

    refresh_timer->stop(); // stop timer

    qDebug()<<"videoWidgetClose";

    // closing video writer
    if(videoPtr!=NULL)
        //cvReleaseVideoWriter(&videoPtr);

        return;
}

/** \brief One-time OpenGL state setup: enables depth testing and line
 *  smoothing, and sets a white clear color.
 */
void VideoWidget::initializeGL() {
    // enable the capabilities used by paintGL()'s overlay drawing
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_LINE_SMOOTH);

    // white background
    glClearColor(1.0, 1.0, 1.0, 1.0);
}

/** \brief handles all the video processing tasks right from capturing an image from the camera to displaying the blob image on the screen.
 *
 *
*/

void VideoWidget::paintGL() {

    // clear buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // capture frame from camera
    if(camera!=NULL) {
        frame = cvQueryFrame(camera); // get frame
        cvCvtColor(frame, gray_image, CV_BGR2GRAY);



        // check if origin of the image is the bottom left
//                if(gray_image->origin = IPL_ORIGIN_BL) {
//                    cvFlip(gray_image,gray_image,0);
//                } else {}



        if(calibration_done) { // wrap image if calirbation is done
            //cFrame = cvCloneImage(gray_image);
        }

        // cvCircle(gray_image, Point(100, 100), 5, Scalar(255, 255, 255), 5, 8, 0);
//        cvShowImage("Image", gray_image);


        // if blob detecting is on
        http://www.eng.auburn.edu/~troppel/internal/sparc/TourBot/TourBot%20References/cvdocuments/cvBlobsLib.html
        if(blob_detect) {
            cvThreshold(gray_image,blobImage, cvthreshold, cvthresholdMax, CV_THRESH_BINARY_INV); // threshold image

            blobs = CBlobResult(blobImage, NULL, 255);                                   // store blobs in blobImage
            blobs.Filter(blobs, B_EXCLUDE, CBlobGetArea(), B_LESS,100 ); // filter to remove small blobs
//            blobs.Filter(blobs, B_EXCLUDE, CBlobGetArea(), B_GREATER, 10000);


            noOfBlobs = blobs.GetNumBlobs();                                                  // get total number of blobs
            tipBlob = 0;                                                                                        // initializing to zero


            if(noOfBlobs>0) {                                                                               // if blobs are found
                for (int k=0; k<noOfBlobs;k++) {                                                    // finding aspect ratio of blobs
                    currentBlob = blobs.GetBlob(k);
                    double area = currentBlob->Area();
                    double aR = area/currentBlob->Perimeter();
                    if((aR>1) && (aR<50.0)) {                                                         // filtering criteria to find the end-effector blob
                        tipBlob = k;
                    }
                }
            }
            else {
                tipBlob = -1;
            }


            if(tipBlob!=-1 && tipBlob<10.0 ) {                                                      // if end-effector blob is found
                currentBlob = blobs.GetBlob(tipBlob);
                currentBlob->FillBlob( blobImage, CV_RGB(255,255,255));                              // fills the blob with color

                probeTip.x = currentBlob->MaxX();                                              // get max x value of probe tip



                for(int l = 20; l < (blobImage->height-20) ; l++) {                                 // finding tip of end-effector
                    CvScalar tip;
                    if(probeTip.x-1 < blobImage->width) {
                        tip = cvGet2D(blobImage,l,probeTip.x-1);                                  // getting color value of each point on the periphery of the end-effector
                        // http://www.cognotics.com/opencv/servo_2007_series/part_1/page_4.html
                        // finding y co ordinate of tip
                        if(tip.val[0]== 255) {
                            probeTip.y = l;
                        }
                    } else
                        probeTip.y = 0;


                }


                // draw circle on probeTip
                cvCircle(blobImage,probeTip,10,color, 1 , 8 ,0);
                //cvCircle(gray_image,probeTip,10,color, 1 , 8 ,0);

                // save to file --- redundant.
                //                if(running) {
                //                    // saving probe tip point to text file
                //                    fp_actualPosition = fopen("saved/actualPosition.txt","a");
                //                    fprintf(fp_actualPosition,"%d %d \n", probeTip.x,probeTip.y);
                //                    fclose(fp_actualPosition);
                //                }

                // drawing axis
                cvLine(blobImage,x1axis1,x2axis1,color,1,8,0);    // x-axis
                cvLine(blobImage,y1axis1,y2axis1,color,1,8,0);    // y-axis

                cvLine(blobImage,x1axis2,x2axis2,color,1,8,0);    // x-axis
                cvLine(blobImage,y1axis2,y2axis2,color,1,8,0);    // y-axis

                // drawing circles at boundaries
                cvCircle(blobImage, c1, 5, color, 2 , 8 ,0);
                cvCircle(blobImage, c2, 5, color, 2 , 8 ,0);
                cvCircle(blobImage, c3, 5, color, 2 , 8 ,0);
                cvCircle(blobImage, c4, 5, color, 2 , 8 ,0);

                // joystick velocity vector
                CvPoint pt1;
                pt1.x = probeTip.x;
                pt1.y = probeTip.y;

                CvPoint pt2;
                pt2.x = pt1.x + 2 * tempJoystick->velMag() * cos(tempJoystick->velDir());
                pt2.y = pt1.y + 2 * tempJoystick->velMag() * sin(tempJoystick->velDir());

                cvLine(blobImage, pt1, pt2, CV_RGB(50,50,50), 2, 8, 0);                 // drawing joystick input line

                cvCircle(blobImage, target, 5, color, 2 , 8 ,0); // drawing target point

                // drawing history of the end-effector tip
                int x,y;
                for(int i = 2; i<Scount;i++) {
                    x = int(CV_MAT_ELEM( *position_points, float, i-1, 0 ));
                    y = int(CV_MAT_ELEM( *position_points, float, i-1, 1 ));
                    cvCircle(blobImage, cvPoint(x, y), 1,color, 1 , 8 ,0);
                }
                //qDebug()<< "Scount"<<Scount<< x<<" " <<y;
            }

            gray_image =(blobImage);
        }

        // write to video
        //if(videoPtr!=NULL)
        //cvWriteFrame(videoPtr, gray_image);
        glDrawPixels(gray_image->width, gray_image->height, GL_LUMINANCE, GL_UNSIGNED_BYTE, gray_image->imageData);

    }



    ++count;

    /** Display FPS */
    //        if(count%100 == 0) {
    //            time(&end);
    //            sec = difftime(end,start);
    //            fps = count/sec;
    //            if(fps>25.0) qDebug() << __LINE__ << Q_FUNC_INFO << "FPS = "<<fps << " " << cvGetCaptureProperty( camera, CV_CAP_PROP_FPS);
    //        }

    return;
}

/** \brief Timer slot fired at FRAME_RATES Hz; schedules a repaint of the GL
 *  widget, which in turn runs paintGL().
 */
void VideoWidget::updateImage() {
    updateGL();
}

void VideoWidget::rec(int state) {

    if(state) {
        QFileDialog dialog(this);
        dialog.setAcceptMode(QFileDialog::AcceptSave);
        videoFileName = dialog.getSaveFileName(this, tr("Save video as"));

        if(!videoFileName.isEmpty())
            videoPtr = cvCreateVideoWriter(videoFileName.toStdString().c_str(), CV_FOURCC('X','V','I','D'), FRAME_RATES, cvSize(frame->width, frame->height), 0);
    }
}

/** \brief Slot: sets the lower threshold used by cvThreshold() in paintGL().
 *  @param value new threshold value
 */
void VideoWidget::cvThresholdF(int value) {
    cvthreshold = value;
    // debug message was misspelled ("vidoWidget():cvThresold")
    qDebug()<<"VideoWidget::cvThresholdF"<<cvthreshold;
}

/** \brief Slot: sets the max value used by cvThreshold() in paintGL().
 *  @param value new maximum threshold value
 */
void VideoWidget::cvThresholdMaxF(int value) {
    cvthresholdMax = value;
    // debug message was misspelled ("vidoWidget()")
    qDebug()<<"VideoWidget::cvThresholdMaxF"<<cvthresholdMax;
}

/** \brief Slot: sets the blob threshold value.
 *  @param value new blob threshold
 *
 *  NOTE(review): blobthreshold is stored but never read in this file —
 *  confirm it is consumed elsewhere or remove it.
 */
void VideoWidget::blobThresholdF(int value) {
    blobthreshold = value;
    // debug message was misspelled ("vidoWidget()")
    qDebug()<<"VideoWidget::blobThresholdF"<<blobthreshold;
}

void VideoWidget::initAxis(int xlim1, int xlim2, int ylim1, int ylim2 ) {
    // axis locations. setting values from x and y-axis lines
    x1axis1.x = 0;
    x1axis1.y = ylim1;
    x2axis1.x = 640;
    x2axis1.y = ylim1;

    y1axis1.x = xlim1;
    y1axis1.y = 0;
    y2axis1.x = xlim1;
    y2axis1.y = 480;

    x1axis2.x = 0;
    x1axis2.y = ylim2;
    x2axis2.x = 640;
    x2axis2.y = ylim2;

    y1axis2.x = xlim2;
    y1axis2.y = 0;
    y2axis2.x = xlim2;
    y2axis2.y = 480;
}



