/**
 * Camera-capture demo using the C++ API of OpenCV (cv::VideoCapture),
 * which avoids the timeout problems seen with the legacy C capture
 * interface (CvCapture / cvCaptureFromCAM).
 *
 * NOTE: the whole file is currently disabled by the "#if 0" below.
 */
#if 0
#include <iostream>
#include <iomanip>

#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/features2d/features2d.hpp"
#include <iostream>
#include <list>
#include <vector>

using namespace std;
using namespace cv;

#if (defined(_WIN32) || defined(__WIN32__) || defined(__TOS_WIN__) || defined(__WINDOWS__) || (defined(__APPLE__) & defined(__MACH__)))
#include <cv.h>
#include <highgui.h>
#else
#include <opencv/cv.h>
#include <opencv/highgui.h>
#endif

#include <cvblob.h>
using namespace cvb;


int main()
{
  cvNamedWindow("colourImage", CV_WINDOW_AUTOSIZE);
  cvNamedWindow("greyImage", CV_WINDOW_AUTOSIZE);
  //cvNamedWindow("movingAverage", CV_WINDOW_AUTOSIZE);
  cvNamedWindow("difference", CV_WINDOW_AUTOSIZE);
  //cvNamedWindow("motionHistory", CV_WINDOW_AUTOSIZE);

/*
  CvCapture *capture = cvCaptureFromCAM(0);
  cvGrabFrame(capture);
  IplImage *img = cvRetrieveFrame(capture);
  CvSize imgSize = cvGetSize(img);
  IplImage *frame = cvCreateImage(imgSize, img->depth, img->nChannels);
*/

   // CvCapture is from old interface - don't use anymore!
   //CvCapture *input = cvCaptureFromCAM(0);

   VideoCapture input;
   input.open(0);
   if (!input.isOpened())
   {
	   //help(av);
	   cout << "capture device failed to open!" << endl;
	   return 1;
   }
   input.set(CV_CAP_PROP_FRAME_WIDTH, 640);
   input.set(CV_CAP_PROP_FRAME_HEIGHT, 480);

   Mat frame;

/*
   cvGrabFrame(input);
   IplImage *img = cvRetrieveFrame(input);
   CvSize imgSize = cvGetSize(img);

   IplImage* greyImage = cvCreateImage( imgSize, IPL_DEPTH_8U, 1);
   IplImage* colourImage = 0;
   IplImage* movingAverage = cvCreateImage( imgSize, IPL_DEPTH_32F, 3);
   IplImage* difference = 0;
   IplImage* temp = 0;
   IplImage* motionHistory = cvCreateImage( imgSize, IPL_DEPTH_8U, 3);

   CvRect bndRect = cvRect(0,0,0,0);

   CvPoint pt1, pt2;

   CvFont font;

   int prevX = 0;
   int numPeople = 0;

   char wow[65];

   int avgX = 0;

   bool first = true;

   int closestToLeft = 0;
   int closestToRight = 320;
*/

  bool quit = false;
  //while (!quit&&cvGrabFrame(input))
  for(;;)
  {
	  input >> frame;
	  if (frame.empty())
		  break;

	  //cvShowImage("colourImage", frame);
	  imshow("colourImage", frame);

    //IplImage *img = cvRetrieveFrame(input);
	 //colourImage = cvRetrieveFrame(input);

    //cvConvertScale(img, frame, 1, 0);

    //cvShowImage("red_object_tracking", colourImage);
#if 0
     if( !colourImage )
     {
         break;
     }

     if(first)
     {
         difference = cvCloneImage(colourImage);
         temp = cvCloneImage(colourImage);
         cvConvertScale(colourImage, movingAverage, 1.0, 0.0);
         first = false;
     }
     else
     {
         cvRunningAvg(colourImage, movingAverage, 0.020, NULL);
     }


     cvConvertScale(movingAverage,temp, 1.0, 0.0);

     cvAbsDiff(colourImage,temp,difference);

     cvCvtColor(difference,greyImage,CV_RGB2GRAY);

     cvThreshold(greyImage, greyImage, 70, 255, CV_THRESH_BINARY);

     cvDilate(greyImage, greyImage, 0, 18);
     cvErode(greyImage, greyImage, 0, 10);

     CvMemStorage* storage = cvCreateMemStorage(0);
     CvSeq* contour = 0;

     cvFindContours( greyImage, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );

     // Iterate through contour
     for( ; contour != 0; contour = contour->h_next )
     {
         bndRect = cvBoundingRect(contour, 0);
         pt1.x = bndRect.x;
         pt1.y = bndRect.y;
         pt2.x = bndRect.x + bndRect.width;
         pt2.y = bndRect.y + bndRect.height;

         avgX = (pt1.x + pt2.x) / 2;
/*
     if(avgX > 90 && avgX < 250)
     {
         if(closestToLeft >= 88 && closestToLeft <= 90)
         {
             if(avgX > prevX)
             {
                 numPeople++;
                 closestToLeft = 0;
             }
         }
         else if(closestToRight >= 250 && closestToRight <= 252)
         {
             if(avgX < prevX)
             {
                 numPeople++;
                 closestToRight = 320;
             }
         }
*/
         cvRectangle(colourImage, pt1, pt2, CV_RGB(255,0,0), 1);
      //}
/*
     if(avgX > closestToLeft && avgX <= 90)
     {
         closestToLeft = avgX;
     }

     if(avgX < closestToRight && avgX >= 250)
     {
         closestToRight = avgX;
     }
*/
     prevX = avgX;
     prevX = avgX;

     }

     cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.8, 0.8, 0, 2);

     sprintf(wow,"%d",numPeople);
     cvPutText(colourImage, wow, cvPoint(60, 200), &font, cvScalar(0, 0, 300));

     cvShowImage("colourImage", colourImage);
     cvShowImage( "greyImage", greyImage );
     //cvShowImage("movingAverage", movingAverage);
     cvShowImage("difference", difference);
     //cvShowImage("motionHistory", motionHistory);
     //cvShowImage( "ForegroundCodeBook", ImaskCodeBook);
#endif
     cvWaitKey(30);
     //cvWriteFrame(outputMovie, colourImage);


    char k = cvWaitKey(10)&0xff;
    switch (k)
    {
      case 27:
      case 'q':
      case 'Q':
        quit = true;
        break;
      case 's':
      case 'S':
    	/*
        for (CvBlobs::const_iterator it=blobs.begin(); it!=blobs.end(); ++it)
        {
          std::stringstream filename;
          filename << "redobject_blob_" << std::setw(5) << std::setfill('0') << blobNumber << ".png";
          cvSaveImageBlob(filename.str().c_str(), img, it->second);
          blobNumber++;
          std::cout << filename.str() << " saved!" << std::endl;
        }
        */
        break;
    }


  }

  //cvReleaseImage(&frame);
  /*
  cvReleaseImage(&temp);
  cvReleaseImage(&difference);
  cvReleaseImage(&greyImage);
  cvReleaseImage(&movingAverage);
  cvDestroyWindow("colourImage");
  cvReleaseCapture(&input);
  */
  //frameNumber++;
  cvDestroyWindow("red_object_tracking");

  return 0;
}

#endif
