// NOTE: This entire file is commented out (disabled dead code); nothing below compiles or runs.
//#include <opencv2/video/tracking.hpp>
//#include <opencv2/imgproc/imgproc.hpp>
//#include <opencv2/highgui/highgui.hpp>
//
//#include <iostream>
//#include <ctype.h>
//
//#include <ctime>
//
//using namespace cv;
//using namespace std;
//
//LPSTR loadAudioBlock(const char* filename, DWORD* blockSize);
//
//void help()
//{
//	cout << "\nThis is a demo that shows mean-shift based tracking\n"
//		 <<   "You select a colored object such as your face and it tracks it.\n"
//		 <<   "This reads from the video camera (0 by default, or the camera number the user enters)\n"
//		 << "Call:\n"
//		 << "\n./camshiftdemo [camera number]"
//		 << "\n" << endl;
//
//	cout << "\n\nHot keys: \n"
//        "\tESC - quit the program\n"
//        "\tc - stop the tracking\n"
//        "\tb - switch to/from backprojection view\n"
//        "\th - show/hide object histogram\n"
//        "To initialize tracking, select the object with mouse\n" << endl;
//}
//
//Mat image;
//
//bool backprojMode = false;
//bool selectObject = false;
//int trackObject = 0;
//bool showHist = true;
//Point origin;
//Rect selection;
//int vmin = 10, vmax = 256, smin = 30;
//
//void onMouse( int event, int x, int y, int, void* )
//{
//    if( selectObject )
//    {
//        selection.x = MIN(x, origin.x);
//        selection.y = MIN(y, origin.y);
//        selection.width = std::abs(x - origin.x);
//        selection.height = std::abs(y - origin.y);
//
//        selection &= Rect(0, 0, image.cols, image.rows);
//    }
//
//    switch( event )
//    {
//    case CV_EVENT_LBUTTONDOWN:
//        origin = Point(x,y);
//        selection = Rect(x,y,0,0);
//        selectObject = true;
//        break;
//    case CV_EVENT_LBUTTONUP:
//        selectObject = false;
//        if( selection.width > 0 && selection.height > 0 )
//            trackObject = -1;
//        break;
//    }
//}
//
//
//
//int main_cam_shift_demo( int argc, char** argv )
//{
//    VideoCapture cap;
//    Rect trackWindow;
//    RotatedRect trackBox;
//    int hsize = 16;
//    float hranges[] = {0,180};
//    const float* phranges = hranges;
//
//    if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
//        cap.open(argc == 2 ? argv[1][0] - '0' : 0);
//    else if( argc == 2 )
//        cap.open(argv[1]);
//
//    if( !cap.isOpened() )
//    {
//    	help();
//        cout << "***Could not initialize capturing...***\n";
//        return 0;
//    }
//
//    help();
//
//    namedWindow( "Histogram", 1 );
//    namedWindow( "CamShift Demo", 1 );
//    setMouseCallback( "CamShift Demo", onMouse, 0 );
//    createTrackbar( "Vmin", "CamShift Demo", &vmin, 256, 0 );
//    createTrackbar( "Vmax", "CamShift Demo", &vmax, 256, 0 );
//    createTrackbar( "Smin", "CamShift Demo", &smin, 256, 0 );
//
//    Mat hsv, hue, mask, hist, histimg = Mat::zeros(200, 320, CV_8UC3), backproj;
//    
//    for(;;)
//    {
//        Mat frame;
//        cap >> frame;
//        if( frame.empty() )
//            break;
//
//        frame.copyTo(image);
//        cvtColor(image, hsv, CV_BGR2HSV);
//
//        if( trackObject )
//        {
//            int _vmin = vmin, _vmax = vmax;
//
//            inRange(hsv, Scalar(0, smin, MIN(_vmin,_vmax)),
//                    Scalar(180, 256, MAX(_vmin, _vmax)), mask);
//            int ch[] = {0, 0};
//            hue.create(hsv.size(), hsv.depth());
//            mixChannels(&hsv, 1, &hue, 1, ch, 1);
//
//            if( trackObject < 0 )
//            {
//                Mat roi(hue, selection), maskroi(mask, selection);
//                calcHist(&roi, 1, 0, maskroi, hist, 1, &hsize, &phranges);
//                normalize(hist, hist, 0, 255, CV_MINMAX);
//                
//                trackWindow = selection;
//                trackObject = 1;
//
//                histimg = Scalar::all(0);
//                int binW = histimg.cols / hsize;
//                Mat buf(1, hsize, CV_8UC3);
//                for( int i = 0; i < hsize; i++ )
//                    buf.at<Vec3b>(i) = Vec3b(saturate_cast<uchar>(i*180./hsize), 255, 255);
//                cvtColor(buf, buf, CV_HSV2BGR);
//                    
//                for( int i = 0; i < hsize; i++ )
//                {
//                    int val = saturate_cast<int>(hist.at<float>(i)*histimg.rows/255);
//                    rectangle( histimg, Point(i*binW,histimg.rows),
//                               Point((i+1)*binW,histimg.rows - val),
//                               Scalar(buf.at<Vec3b>(i)), -1, 8 );
//                }
//            }
//
//            calcBackProject(&hue, 1, 0, hist, backproj, &phranges);
//            backproj &= mask;
//            trackBox = CamShift(backproj, trackWindow,
//                                TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ));
//
//            /*cout<<"____" <<meanShift(backproj, trackWindow,
//                                TermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ));
//            trackBox.center = Point2f(trackWindow.x,trackWindow.y);
//            trackBox.size = trackWindow.size();*/
//
//            if( backprojMode )
//                cvtColor( backproj, image, CV_GRAY2BGR );
//            if(trackBox.size.height>=0 && trackBox.size.width>=0)
//              ellipse( image, trackBox, Scalar(0,0,255), 3, CV_AA );
//            else
//              cout<<"????\n";
//            cout<<"o objeto esta na posicao: "<<trackBox.center.x<<" / "<<trackBox.center.y<<endl;
//        }
//
//        if( selectObject && selection.width > 0 && selection.height > 0 )
//        {
//            Mat roi(image, selection);
//            bitwise_not(roi, roi);
//        }
//
//        if(image.size[0]>=0 && image.size[1]>=0)
//          imshow( "CamShift Demo", image );
//        else
//          cout<<"!!!!!\n";
//        imshow( "Histogram", histimg );
//
//        char c = (char)waitKey(10);
//        if( c == 27 )
//            break;
//        switch(c)
//        {
//        case 'b':
//            backprojMode = !backprojMode;
//            break;
//        case 'c':
//            trackObject = 0;
//            histimg = Scalar::all(0);
//            break;
//        case 'h':
//            showHist = !showHist;
//            if( !showHist )
//                destroyWindow( "Histogram" );
//            else
//                namedWindow( "Histogram", 1 );
//            break;
//        default:
//            ;
//        }
//    }
//
//    return 0;
//}
//
//
//
//int teste1()
//{  
//  char c;
//
//  // Open the file.
//  IplImage *img = cvLoadImage("teste4.jpg");
//  if (!img) {
//          printf("Error: Couldn't open the image file.\n");
//          cin>>c;
//          return 1;
//  }
//
//  // Display the image.
//  cvNamedWindow("Image:", CV_WINDOW_AUTOSIZE);
//  cvShowImage("Image:", img);
//
//  IplImage *templ = cvLoadImage("bola.jpg");
//  if (!templ) {
//          printf("Error: Couldn't open the image file.\n");
//          cin>>c;
//          return 1;
//  }
//
//  Mat result;
//
//  clock_t t1 = clock();
//    matchTemplate(img, templ, result, CV_TM_SQDIFF);
//    Point pos_max;
//    Point pos_min;
//    minMaxLoc(result,0,0,&pos_min,&pos_max);
//  clock_t t2 = clock();
//
//  cout<< "tempo da operacao:  "<< (double(t2-t1))/CLOCKS_PER_SEC <<endl;
//
//  // Display the result.
//  cvNamedWindow("result:", CV_WINDOW_AUTOSIZE);
//  cvShowImage("result:", &result.operator IplImage());
//
//  cout<<"min:  "<<pos_min<<endl;
//  cout<<"max:  "<<pos_max<<endl;
//  cout<<"size:  "<<result.size[0]<<" / "<<result.size[1]<<endl;
//
//
//  VideoCapture cap(0); // open the default camera
//  if(!cap.isOpened())  // check if we succeeded
//  {          
//    printf("Error: Couldn't open the camera.\n");
//    cin>>c;
//    return -1;
//  }
//
//
//  Mat frame;
//  cap >> frame; // get a new frame from camera
//  cout<<frame.size[0]<<"__"<<frame.size[1]<<endl;
//  cvNamedWindow("camera:", CV_WINDOW_AUTOSIZE);
//  cvShowImage("camera:", &frame.operator IplImage());
//  Sleep(500);
//
//
//
//  // Wait for the user to press a key in the GUI window.
//  cvWaitKey(0);
//
//  // Free the resources.
//  cvDestroyWindow("Image:");
//  cvReleaseImage(&img);
//        
//  return 0;
//}
//
//int teste2()
//{
//   CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
//   if ( !capture ) {
//     fprintf( stderr, "ERROR: capture is NULL \n" );
//     getchar();
//     return -1;
//   }
//   // Create a window in which the captured images will be presented
//   cvNamedWindow( "mywindow", CV_WINDOW_AUTOSIZE );
//   // Show the image captured from the camera in the window and repeat
//   while ( 1 ) {
//     // Get one frame
//     IplImage* frame = cvQueryFrame( capture );
//     if ( !frame ) {
//       fprintf( stderr, "ERROR: frame is null...\n" );
//       getchar();
//       break;
//     }
//     cvShowImage( "mywindow", frame );
//     // Do not release the frame!
//     //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version),
//     //remove higher bits using AND operator
//     if ( (cvWaitKey(10) & 255) == 27 ) break;
//   }
//   // Release the capture device housekeeping
//   cvReleaseCapture( &capture );
//   cvDestroyWindow( "mywindow" );
//
//   return 0;
//}
//
//void writeAudioBlock(HWAVEOUT hWaveOut, LPSTR block, DWORD size)
//{
//  WAVEHDR header;
//  /*
//   * initialise the block header with the size
//   * and pointer.
//   */
//  ZeroMemory(&header, sizeof(WAVEHDR));
//  header.dwBufferLength = size;
//  header.lpData = block;
//  /*
//   * prepare the block for playback
//   */
//  waveOutPrepareHeader(hWaveOut, &header, sizeof(WAVEHDR));
//  /*
//   * write the block to the device. waveOutWrite returns immediately
//   * unless a synchronous driver is used (not often).
//   */
//  waveOutWrite(hWaveOut, &header, sizeof(WAVEHDR));
//  /*
//   * wait a while for the block to play then start trying
//   * to unprepare the header. this will fail until the block has
//   * played.
//   */
//  Sleep(500);
//  while(waveOutUnprepareHeader(
//      hWaveOut, 
//      &header, 
//      sizeof(WAVEHDR) ) == WAVERR_STILLPLAYING)
//    Sleep(100);
//}
//
//
//LPSTR loadAudioBlock(const char* filename, DWORD* blockSize)
//{
//  HANDLE hFile= INVALID_HANDLE_VALUE;
//  DWORD size = 0;
//  DWORD readBytes = 0;
//  void* block = NULL;
//  /*
//   * open the file
//   */
//  if((hFile = CreateFile(
//    filename,
//    GENERIC_READ,
//    FILE_SHARE_READ,
//    NULL,
//    OPEN_EXISTING,
//    0,
//    NULL
//  )) == INVALID_HANDLE_VALUE)
//  return NULL;
//  /*
//   * get its size, allocate memory and read the file
//   * into memory. don't use this on large files!
//   */
//  do {
//  if((size = GetFileSize(hFile, NULL)) == 0) 
//  break;
//  if((block = HeapAlloc(GetProcessHeap(), 0, size)) == NULL)
//  break;
//  ReadFile(hFile, block, size, &readBytes, NULL);
//  } while(0);
//  CloseHandle(hFile);
//  *blockSize = size;
//  return (LPSTR)block;
//}
//
//
//int teste3()
//{
//  HWAVEOUT hWaveOut; /* device handle */
//  WAVEFORMATEX wfx; /* look this up in your documentation */
//  MMRESULT result;/* for waveOut return values */
//  /*
//   * first we need to set up the WAVEFORMATEX structure. 
//   * the structure describes the format of the audio.
//   */
//  wfx.nSamplesPerSec = 44100; /* sample rate */
//  wfx.wBitsPerSample = 16; /* sample size */
//  wfx.nChannels = 2; /* channels*/
//  /*
//   * WAVEFORMATEX also has other fields which need filling.
//   * as long as the three fields above are filled this should
//   * work for any PCM (pulse code modulation) format.
//   */
//  wfx.cbSize = 0; /* size of _extra_ info */
//  wfx.wFormatTag = WAVE_FORMAT_PCM;
//  wfx.nBlockAlign = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
//  wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
//  /*
//   * try to open the default wave device. WAVE_MAPPER is
//   * a constant defined in mmsystem.h, it always points to the
//   * default wave device on the system (some people have 2 or
//   * more sound cards).
//   */
//  if(waveOutOpen(
//    &hWaveOut, 
//    WAVE_MAPPER, 
//    &wfx, 
//    0, 
//    0, 
//  CALLBACK_NULL
//  ) != MMSYSERR_NOERROR) {
//    fprintf(stderr, "unable to open WAVE_MAPPER device\n");
//    return 1;
//  }
//  /*
//   * device is now open so print the success message
//   * and then close the device again.
//   */
//  printf("The Wave Mapper device was opened successfully!\n");
//
//  const size_t samples_size = 1000000;
//  char samples[samples_size];
//
//  //double amplitude = 1;
//  //double scale = .2;
//
//  //for(int i=0; i<samples_size; ++i)
//  //  samples[i]= 32767 * amplitude * sin(i*scale) ;
//
//  //writeAudioBlock(hWaveOut,samples,samples_size);
//
//  LPSTR block;/* pointer to the block */
//  DWORD blockSize;/* holds the size of the block */
//  if((block = loadAudioBlock("teste.raw", &blockSize)) == NULL)
//    fprintf(stderr, "Unable to load file\n");
//
//  writeAudioBlock(hWaveOut, block, blockSize); 
//
//  waveOutClose(hWaveOut);
//  return 0;
//}
//
//int main2( int argc, char** argv )
//{
//  try
//  {
//    //if(PlaySound(NULL, NULL, SND_ALIAS) == TRUE)
//    //  cout<<"bum!\n";
//    //return main_cam_shift_demo(argc,argv);
//    return teste3();
//  }
//  catch(exception& e)
//  {
//    cout<<"error: "<<e.what()<<endl;
//    char answer;
//    cin>>answer;
//    return 666;
//  }
//}