#include "mytest.h"

#include <stdlib.h>

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/contrib/contrib.hpp>
#include <opencv2/gpu/gpu.hpp>
#include <opencv2/nonfree/gpu.hpp>
/*int main( int argc, char** argv )
{ 
     cv::Mat image1, image2;  
     if(argc>1)
	image1 = cv::imread(argv[1]);
     if(argc>2)
	image2 = cv::imread(argv[2]);
     if((argc>1 && !image1.data) || (argc>2 && !image2.data))
     {
	std::cout << "Unable to load files!\n";
	return 1;
     }
	
     // my_templateMatchShow(image1, image2, 3); 
      //my_videotemplateMatchSIFT(image2, 3);
      
      cv::Mat img1 = cv::imread("im1.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img2 = cv::imread("im2.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img3 = cv::imread("im3.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img4 = cv::imread("im4.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img5 = cv::imread("im5.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img6 = cv::imread("im6.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img7 = cv::imread("im7.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img8 = cv::imread("im8.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat img9 = cv::imread("im9.png", CV_LOAD_IMAGE_GRAYSCALE);
	
      
      cv::cvtColor(image1, image1, CV_BGR2GRAY);
      cv::cvtColor(image2, image2, CV_BGR2GRAY);
      
      //my_SIFTTest(image1, image2, 3, false);
       //my_SURFTest(image1, image2);
      
      
//       my_SURFTest(img1, image2);
//       my_SURFTest(img2, image2);
//       my_SURFTest(img3, image2);
//       my_SURFTest(img4, image2);
//       my_SURFTest(img5, image2);
//       my_SURFTest(img6, image2);
//       my_SURFTest(img7, image2);
//       my_SURFTest(img8, image2);
//       my_SURFTest(img9, image2);
      
      cv::Mat imH1 = cv::imread("imH1.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat imH2 = cv::imread("imH2.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat imH3 = cv::imread("imH3.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat imH4 = cv::imread("imH4.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat imH5 = cv::imread("imH5.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat imH6 = cv::imread("imH6.png", CV_LOAD_IMAGE_GRAYSCALE);
      cv::Mat imH7 = cv::imread("imH7.png", CV_LOAD_IMAGE_GRAYSCALE);
      
      my_SURFTest(imH1, image2);
      my_SURFTest(imH2, image2);
      my_SURFTest(imH3, image2);
      my_SURFTest(imH4, image2);
      my_SURFTest(imH5, image2);
      my_SURFTest(imH6, image2);
      my_SURFTest(imH7, image2);
      
//        cv::Mat imX1 = cv::imread("imX1.png", CV_LOAD_IMAGE_GRAYSCALE);
//       cv::Mat imX2 = cv::imread("imX2.png", CV_LOAD_IMAGE_GRAYSCALE);
//       cv::Mat imX3 = cv::imread("imX3.png", CV_LOAD_IMAGE_GRAYSCALE);
//       cv::Mat imX4 = cv::imread("imX4.png", CV_LOAD_IMAGE_GRAYSCALE);
//       cv::Mat imX5 = cv::imread("imX5.png", CV_LOAD_IMAGE_GRAYSCALE);
//       cv::Mat imX6 = cv::imread("imX6.png", CV_LOAD_IMAGE_GRAYSCALE);
//       cv::Mat imX7 = cv::imread("imX7.png", CV_LOAD_IMAGE_GRAYSCALE);
//       
//       my_SURFTest(imX1, image2);
//       my_SURFTest(imX2, image2);
//       my_SURFTest(imX3, image2);
//       my_SURFTest(imX4, image2); //       my_SURFTest(imX5, image2);
//       my_SURFTest(imX6, image2);
//       my_SURFTest(imX7, image2);
      
      
      
      my_SURFTest(img1, image2);
      my_SURFTest(img2, image2);
      my_SURFTest(img3, image2);
      my_SURFTest(img4, image2);
      my_SURFTest(img5, image2);
      my_SURFTest(img6, image2);
      my_SURFTest(img7, image2);
      my_SURFTest(img8, image2);
      my_SURFTest(img9, image2);
      
      
//       my_SIFTTest(img1, image2, 3, false);
//       my_SIFTTest(img2, image2, 3, false);
//       my_SIFTTest(img3, image2, 3, false);
//       my_SIFTTest(img4, image2, 3, false);
//       my_SIFTTest(img5, image2, 3, false);
//       my_SIFTTest(img6, image2, 3, false);
//       my_SIFTTest(img7, image2, 3, false);
//       my_SIFTTest(img8, image2, 3, false);
//       my_SIFTTest(img9, image2, 3, false);
     
	
      return 0;
} */


     /*cv::Mat image1, image2;  
     cv::Mat edges, dst;
     int threshold=30, threshold2=60, maxThreshold=100;
     int HoughPoints=100;*/
     
/*void CannyThreshold(int thresh, void* image)
{
  cv::Mat dst;
  cv::blur(cv::Mat(image), dst, cv::Size(3,3));
  cv::Canny(cv::Mat(image), dst, thresh, thresh*3);
  
  cv::imshow( WINDOW, dst );
}*/
     
     /*
void HoughTesting(int, void*)
{
  cv::Mat dst;
  dst = cv::Scalar::all(0);
  image1.copyTo(dst);
     if(0)
     {
	std::vector<cv::Vec4i> lines;
	cv::HoughLinesP(edges, lines, 1, CV_PI/180, 50, 50, 10 );
	for( size_t i = 0; i < lines.size(); i++ )
	{
	  cv::Vec4i l = lines[i];
	  cv::line( image1, cv::Point(l[0], l[1]), cv::Point(l[2], l[3]), cv::Scalar(0,0,255), 3, CV_AA);
	}
     }else{
     std::vector<cv::Vec2f> lines;
  cv::HoughLines(edges, lines, 1, CV_PI/120, HoughPoints, 0, 0 );

  for( size_t i = 0; i < lines.size(); i++ )
  {
     float rho = lines[i][0], theta = lines[i][1];
     cv::Point pt1, pt2;
     double a = cos(theta), b = sin(theta);
     double x0 = a*rho, y0 = b*rho;
     pt1.x = cvRound(x0 + 1000*(-b));
     pt1.y = cvRound(y0 + 1000*(a));
     pt2.x = cvRound(x0 - 1000*(-b));
     pt2.y = cvRound(y0 - 1000*(a));
     cv::line( dst, pt1, pt2, cv::Scalar(0,0,0), 1, CV_AA);
  }  
     }
   cv::imshow( "window", dst );
}*/
     
/*int main( int argc, char** argv )
{ 
     if(argc>1)
	image1 = cv::imread(argv[1]);
     if(argc>2)
	image2 = cv::imread(argv[2]);
     if((argc>1 && !image1.data) || (argc>2 && !image2.data))
     {
	std::cout << "Unable to load files!\n";
	return 1; }
     
     int mode=3;
      cv::namedWindow("window", CV_WINDOW_NORMAL);
     
//      cv::cvtColor(image1, image1, CV_RGB2GRAY);
//      
//      cv::namedWindow("window", CV_WINDOW_NORMAL);
//      
//      // cv::createTrackbar( "Min Threshold:", "window", &threshold, maxThreshold, CannyThreshold );
//       cv::createTrackbar( "Hough Points:", "window", &HoughPoints, 300, HoughTesting);
//       
//      cv::blur(image1, edges, cv::Size(3, 3));
//      
//      cv::Canny(edges, edges, threshold, threshold*2);
//      cv::imshow("window", edges);
//      cv::waitKey(0);
     
     //HoughTesting(0, 0);
     
    // cv::Ptr<cv::gpu::GeneralizedHough_GPU> hg = cv::gpu::
     //gh.create(cv::GHT_POSITION);
     
     //cv::imshow("window", edges);
//      cv::Mat dst;
//      dst = cv::Scalar::all(0);

//     image1.copyTo( dst, edges);
      
     
     
     //GHT 
     if(mode == 2)
     {
     cv::Mat origin = cv::imread("temp.jpeg");
     cv::Mat temp = cv::imread("countour2.bmp");
     cv::Mat image = cv::imread("imX6.png");
     
    
      //cv::cvtColor(temp, temp, CV_RGB2GRAY);
//      cv::blur(temp, temp, cv::Size(3, 3));
//      cv::Canny(temp, temp, 20, 40);
     
     
     //cv::imshow("window", temp);
     //cv::imwrite("mytemp.jpeg", temp);
     
//      cv::Mat tt = cv::imread("contour.bmp");
//      cv::cvtColor(tt, tt, CV_RGB2GRAY);
//      tt.at<uint>(50, 19) = 127;
//      cv::imwrite("countour2.bmp", tt);
      
     
     
	GHTDetect(origin, temp, image);
	
	
    }else if(mode ==3)
    {
	cv::Mat edges;
	cv::blur(image1, image1, cv::Size(3, 3));
	//cv::Canny(image1, edges, 8, 10); //20 40 is good
	cv::cvtColor(image1, image1, CV_RGB2GRAY);
	cv::adaptiveThreshold(image1, edges, 255, CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY, 71, 2);
	
	//template contours
	cv::cvtColor(image2, image2, CV_RGB2GRAY);
	cv::blur(image2, image2, cv::Size(3, 3));
	cv::Canny(image2, image2, 40, 120);
	//cv::imshow("window", imagenn2);
	std::vector<std::vector<cv::Point> > temp_contours;
	cv::findContours(image2, temp_contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE);
	
	std::vector<std::vector<cv::Point> > contours;
	std::vector<cv::Vec4i> hierarchy;
	cv::findContours(edges, contours, hierarchy, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE, cv::Point(0, 0));
	
	cv::Mat output = cv::Mat::zeros( image1.size(), CV_8UC3 );

	for(int i=0; i<contours.size(); ++i)
	{
	    cv::approxPolyDP(cv::Mat(contours[i]), contours[i], 5, true);
	}
	
	for(int i=0; i<contours.size(); ++i)
	{
	  cv::Scalar color = cv::Scalar( rand()%255, rand()%255, rand()%255);
	  //cv::drawContours(output, contours, i, color);
	  
	  if(contours[i].size() >= 10)
	  {
	    std::cout << "sup: ";
// 	    for(int j=0; j<contours[i].size(); ++j)
// 	    {
// 	      std::cout << contours[i][j].x << "    " << contours[i][j].y << "   "; 
// 	    }
	   //for(int j=0; j<temp_contours.size(); ++j)
	   //{
	    
	   
	     double similarity = cv::matchShapes(contours[i], temp_contours[0], CV_CONTOURS_MATCH_I3, 1);
	    std::cout << similarity << std::endl;
	    
	    if(similarity<0.4) 
	      cv::drawContours(output, contours, i, color, 2, 8, hierarchy, 0, cv::Point() );
	   //}
	    std::cout << std::endl;
	  }
	}
      
      cv::imshow("window", output);
      
      
    }
      
     cv::waitKey(0);
}	*/
// Forward declaration: video-based detection loop (defined below, after main).
int main_video(cv::Mat image1, cv::Mat image2, bool bUseArgs, int mode, int use_temp);
     
int main( int argc, char** argv )
{
      //load arguments if valid
     cv::Mat image1, image2;
     if(argc>1)
	image1 = cv::imread(argv[1], CV_LOAD_IMAGE_GRAYSCALE);
     if(argc>2)
	image2 = cv::imread(argv[2], CV_LOAD_IMAGE_GRAYSCALE);
     if((argc>1 && !image1.data) || (argc>2 && !image2.data))
     {
	std::cout << "Unable to load files!\n";
	return 1; 
    }
     //Parameters//////////////////////////////////////////////////////////////////
     bool bUseArgs=false; //use images in arguments or not
     int mode=5; //0-template matching, 1-SIFT/SURF, 2-GHT, 3-FindContours
     int use_temp = 0; //0-cross, 1-H
     //Video///////////////////////////////////////////////////////////////////////// 
     bool bUseVideo=true;
     if(bUseVideo)
     {
	main_video(image1, image2, bUseArgs, mode, use_temp);
	return 0;
     }
     //Preparations//////////////////////////////////////////////////////////////////
     //create window
      std::string windowName = "AR Drone Detecting Tool";
      cv::namedWindow(windowName, CV_WINDOW_NORMAL);
      //choose template
      cv::Mat temp;
      if(bUseArgs)
	  temp = image2;
      else if(use_temp==0)
	  temp = cv::imread("temp5.png", CV_LOAD_IMAGE_GRAYSCALE);
      else if(use_temp==1)
	  temp = cv::imread("tempH1.png", CV_LOAD_IMAGE_GRAYSCALE);
      
      //load test images
      std::vector<cv::Mat> images;
      if(bUseArgs)
	images.push_back(image1);
      else if(use_temp==0)
      { //load cross test images
	 images.push_back(cv::imread("imX1.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imX2.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imX3.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imX4.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imX6.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imX7.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im1.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im2.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im3.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im4.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im5.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im6.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im7.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im8.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im9.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	
      }else if(use_temp==1)
      { //load 'H' test images
	 images.push_back(cv::imread("im1.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im2.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im3.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im4.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im5.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im6.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im7.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im8.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("im9.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH1.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH2.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH3.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH4.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH5.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH6.png", CV_LOAD_IMAGE_GRAYSCALE )); 
	 images.push_back(cv::imread("imH7.png", CV_LOAD_IMAGE_GRAYSCALE )); 
      }
      
      for(std::vector<cv::Mat>::iterator i=images.begin(); i!=images.end(); ++i)
      {
	  cv::Mat image = *i;
	  if(mode==0) //template matching
	  {
	    cv::destroyWindow(windowName);
	    my_templateMatchShow(image, temp, 3); 
	    //multiscale 
// 	    cv::Mat result;
// 	    my_templateMatchMultiScale(image, temp, result, 3, 0.92 );
// 	    
// 	    double minVal, maxVal; 
// 	    cv::Point minLoc, maxLoc, matchPoint;
// 	    cv::minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, cv::Mat());
// 	    matchPoint = minLoc;
// 	    cv::rectangle(image, matchPoint, cv::Point(matchPoint.x+temp.rows, matchPoint.y+temp.cols), cv::Scalar::all(100), 2, 8, 0);
// 	    cv::imshow(windowName, image);
	    
	  }else if(mode==1) //SIFT/SURF
	  {
	    cv::destroyWindow(windowName);
	    //my_SIFTTest(image, temp, 3, false); 
	    //SURF
	    my_SURFTest(image, temp);
	    
	  }else if(mode==2) //GHT (only for cross right now)
	  { 
	    cv::destroyWindow(windowName);
	    cv::Mat origin = cv::imread("temp.jpeg");
	    if(use_temp==0) temp = cv::imread("contour2.bmp");
	    
	    GHTDetect(origin, temp, image);
	    
	  }else if(mode==3) //contours
	  {//use with template temp5.png
	      cv::Mat edges;
	      cv::blur(image, image, cv::Size(3, 3));
	      //cv::Canny(image, edges, 8, 10); //20 40 is good
	      cv::adaptiveThreshold(image, edges, 255, CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY, 91, 2);
	  
	      //template contours
	      //best template is temp5.png
	      cv::blur(temp, temp, cv::Size(3, 3));
	      cv::Canny(temp, temp, 40, 120);
	      //cv::imshow(windowName, temp);
	      std::vector<std::vector<cv::Point> > temp_contours;
	      cv::findContours(temp, temp_contours, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE);
	  
	      std::vector<std::vector<cv::Point> > contours;
	      std::vector<cv::Vec4i> hierarchy;
	      cv::findContours(edges, contours, hierarchy, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE, cv::Point(0, 0));
	  
	      cv::Mat output = cv::Mat::zeros( image.size(), CV_8UC3 );
	      //approximate contours
	      for(int i=0; i<contours.size(); ++i)
	      {
		  cv::approxPolyDP(cv::Mat(contours[i]), contours[i], 1, true);
	      }
	  
	      for(int i=0; i<contours.size(); ++i)
	      {
		//cv::Scalar color = cv::Scalar( rand()%255, rand()%255, rand()%255);
		cv::Scalar color = cv::Scalar(255, 255, 255);
		//cv::drawContours(output, contours, i, color);
	    
		if(contours[i].size() >= 10)
		{
		  std::cout << "sup: ";
		//for(int j=0; j<temp_contours.size(); ++j)
		//{
		  double similarity = cv::matchShapes(contours[i], temp_contours[0], CV_CONTOURS_MATCH_I1, 1);
		  std::cout << similarity << std::endl;
		  
		  //if(similarity<10) 
		    cv::drawContours(image, contours, i, color, 2, 8, hierarchy, 0, cv::Point() );
		//}
		}
	      }
	    cv::imshow(windowName, image);
	  }else if(mode==4)//track bars
	  {
	    int thresh=20;
	    cv::createTrackbar("Canny Edge Threshold", windowName, &thresh, 100);
	    
	    cv::blur(image, image, cv::Size(3, 3));
	    cv::Mat dst;
	    while(true)
	    {
		 cv::Canny(image, dst, thresh, thresh*3);
		 //cv::morphologyEx(dst, dst, CV_MOP_CLOSE, cv::getStructuringElement(CV_SHAPE_RECT, cv::Size(3, 3)));
	         //if(thresh%2==0) thresh+=1;
		 //cv::adaptiveThreshold(image, dst, 255, CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY, thresh, 2);
		 cv::imshow(windowName, dst);
		 
		 if(cv::waitKey(50)==27)
		   break;
	      
	    }
	  }else if(mode==5)
	  {
	    cv::CascadeClassifier classifier;
	    if(use_temp==0)
	      classifier.load("cascade3.xml");
	    else if(use_temp==1)
	      classifier.load("cascadeH.xml");
	    std::vector<cv::Rect> obj;
	    cv::Size temp_size;
	    if(use_temp==0)
	      temp_size=cv::Size(40, 40); //training was made in 26 23
	    else if(use_temp==1)
	      temp_size=cv::Size(68, 44);
	   classifier.detectMultiScale(image, obj, 1.1, 5, 0 | CV_HAAR_SCALE_IMAGE, temp_size); //40 40 for cross
	    for(size_t i=0; i < obj.size(); i++)
	    {
	      cv::Point center( obj[i].x + obj[i].width*0.5, obj[i].y + obj[i].height*0.5 );
	      cv::ellipse( image, center, cv::Size( obj[i].width*0.5, obj[i].height*0.5), 0, 0, 360, cv::Scalar( 255, 0, 255 ), 4, 8, 0 );
	    }
	    cv::imshow(windowName, image);
	    
	  }else if(mode==-1)//for testing purposes
	  {
// 	    cv::Mat t = cv::imread("temp5.png", CV_LOAD_IMAGE_GRAYSCALE); 
// 	    cv::threshold(t, t, 127, 255, CV_THRESH_OTSU);
// 	    cv::imwrite("bin_temp.png", t);
	    
// 	    cv::Mat Neg = cv::imread("tempH1.png", CV_LOAD_IMAGE_GRAYSCALE);
// 	    cv::resize(Neg, Neg, cv::Size(26, 23));
// 	    cv::imwrite("neg500.png", Neg);
	    
	    std::ofstream file;
	    file.open("neg.txt");
	    std::string text;
	    for(int i=0; i<=500; ++i)
	    {
	       text = "negs/neg";
		 std::ostringstream ss;
		  ss << i;
	       text.append(ss.str());
	       text.append(".png");
	       file << text << std::endl;
	      
	    }
	    file.close();
	   /* 
	    int x, y; x=0; y=0;
	    cv::RNG rng;
	    std::string filename;
	    for(int i=400; i<500; ++i)
	    {
		x=rng.uniform(1, Neg.cols-140);
		y=rng.uniform(1, Neg.rows-140);
		cv::Mat patch=Neg(cv::Rect(x, y, 114, 102));
		//patch.resize(cv::Size(26, 23));
		cv::resize(patch, patch, cv::Size(26, 23));
		
		 std::ostringstream ss;
		  ss << i;
		filename = "neg";
		filename.append(ss.str());
		filename.append(".png");
// 		filename = strcat("neg", std::to_string(i)); filename = strcat(filename, ".png");
		
		cv::imwrite(filename, patch);
	    }
	   */ 
	  }
	  
	  
	  cv::waitKey(0);
      }
  
  
  
      cv::waitKey(0);
}


// Video-based detection loop.
//
// Opens the hard-coded video file "testHD.mp4" and runs the selected
// detection mode frame by frame. Full detection is performed only every
// `skipframes` frames; skipped frames just redraw the most recent
// detections to keep playback smooth.
//
// image1/image2: optional command-line images (image2 is the template when
//                bUseArgs is true; image1 is currently unused here).
// bUseArgs:      take the template from image2 instead of a file on disk.
// mode:          detection mode (only 0 and 5 are handled in video; see main).
// use_temp:      template/cascade selection — 0 = cross, 1 = 'H'.
//
// Returns 0 on normal end of stream, -1 if the video cannot be opened.
int main_video(cv::Mat image1, cv::Mat image2, bool bUseArgs, int mode, int use_temp)
{
    //Windows and parameters///////////////////////////////////////////
    std::string windowName = "AR Drone Video Detecting";
    cv::namedWindow(windowName, CV_WINDOW_NORMAL);
    bool bResize = true;
    int skipframes = 3, recommended_width = 640, recommended_heigth = 360;

    //Template/////////////////////////////////////////////////////////
    // NOTE(review): `temp` is loaded but not used by modes 0/5 below;
    // kept for parity with main() and future modes.
    cv::Mat temp;
    if(bUseArgs)
        temp = image2;
    else if(use_temp == 0)
        temp = cv::imread("temp5.png", CV_LOAD_IMAGE_GRAYSCALE);
    else if(use_temp == 1)
        temp = cv::imread("tempH1.png", CV_LOAD_IMAGE_GRAYSCALE);

    //Video////////////////////////////////////////////////////////////
    std::string video = "testHD.mp4";
    cv::VideoCapture capture(video);
    if(!capture.isOpened())
    {
        std::cerr << "Failed to open video file" << std::endl;
        return -1;
    }

    //Preparations/////////////////////////////////////////////////////
    cv::Mat frame;
    double fps = capture.get(CV_CAP_PROP_FPS);
    // BUG FIX: CV_CAP_PROP_FPS can return 0 (or a negative value) for some
    // containers; the original code then computed waitKey(1000/fps), i.e. a
    // division by zero. Fall back to a sane default and clamp the delay so
    // waitKey never gets 0 (which would block forever).
    if(fps <= 0.0)
        fps = 25.0;
    int frame_delay_ms = (int)(1000.0 / fps);
    if(frame_delay_ms < 1)
        frame_delay_ms = 1;
    int count = skipframes; //start saturated so the very first frame is processed

    //modes preparation/////////////////////////////////////////////////
    cv::CascadeClassifier classifier;
    bool cascadeLoaded = false;
    if(use_temp == 0)
        cascadeLoaded = classifier.load("cascade3.xml");
    else if(use_temp == 1)
        cascadeLoaded = classifier.load("cascadeH.xml");
    // BUG FIX: the load() result was ignored; warn instead of crashing later
    if(mode == 5 && !cascadeLoaded)
    {
        std::cerr << "Failed to load cascade classifier" << std::endl;
        return -1;
    }
    cv::Size temp_size;
    if(use_temp == 0)
        temp_size = cv::Size(30, 30); //training was made in 26x23
    else if(use_temp == 1)
        temp_size = cv::Size(68, 44);
    std::vector<cv::Rect> obj; //detections from the last fully processed frame

    //Capture//////////////////////////////////////////////////////////
    while(true)
    {
        capture >> frame;
        if(!frame.data) break; //end of stream
        if(bResize) cv::resize(frame, frame, cv::Size(recommended_width, recommended_heigth));
        my_convertToGray8Bit(frame);

        if(++count >= skipframes)
        {
            //full detection on this frame
            if(mode == 0)
            {
                //template matching over video: not implemented yet
            }
            else if(mode == 5) //Cascade Classifier
            {
                classifier.detectMultiScale(frame, obj, 1.1, 5, 0 | CV_HAAR_SCALE_IMAGE, temp_size);
                for(size_t i = 0; i < obj.size(); i++)
                {
                    cv::Point center( obj[i].x + obj[i].width*0.5, obj[i].y + obj[i].height*0.5 );
                    cv::ellipse( frame, center, cv::Size( obj[i].width*0.5, obj[i].height*0.5), 0, 0, 360, cv::Scalar( 255, 0, 255 ), 4, 8, 0 );
                }
                cv::imshow(windowName, frame);
            }
            count = 0;
        }
        else
        {
            //skipped frame: just redraw the detections from the last
            //fully-processed frame so the overlay does not flicker
            if(mode == 0)
            {
            }
            else if(mode == 5)
            {
                for(size_t i = 0; i < obj.size(); i++)
                {
                    cv::Point center( obj[i].x + obj[i].width*0.5, obj[i].y + obj[i].height*0.5 );
                    cv::ellipse( frame, center, cv::Size( obj[i].width*0.5, obj[i].height*0.5), 0, 0, 360, cv::Scalar( 255, 0, 255 ), 4, 8, 0 );
                }
                cv::imshow(windowName, frame);
            }
        }

        cv::waitKey(frame_delay_ms);
    }

    // BUG FIX: the function is declared int but previously fell off the end
    // without a return statement (undefined behavior).
    return 0;
}