#include "kmeans_def.hpp"
#include "kmeans_impl.hpp"




//Features as per Andres' WACV paper

// Constructor: stores dataset paths and descriptor geometry, and loads the
// list of action class names.
//  in_path        - root directory of the action dataset
//  in_actionNames - file listing the action class names (loaded into `actions`)
//  in_feat_path   - directory where features / reference matrices are saved
//  in_col, in_row - frame width / height in pixels
//  in_n_cent      - number of k-means centroids (reference covariances) per action
// n_samples_tr / n_samples_te are fixed to 17 / 8 training / testing samples.
inline
opt_feat::opt_feat(const std::string in_path, 
		   const std::string in_actionNames,  
		   const std::string in_feat_path,
		   const int in_col, 
		   const int in_row,
		   const int in_n_cent
		  )
:path(in_path), actionNames(in_actionNames), feat_path(in_feat_path), col(in_col), row(in_row), n_samples_tr(17), n_samples_te(8), N_cent(in_n_cent)
{
  // Eigenvalue floor used to regularise near-singular covariance matrices.
  THRESH = 0.000001;
  // Minimum number of moving-pixel feature vectors for a frame to be kept
  // (integer arithmetic: 1% of the pixel count, truncated).
  THRESH_2 = col*row*1/100;
   
  
  actions.load( actionNames );  //actions.print("All actions");
}


//*****************************************************************************************
///***************************************TRAINING***************************************** 
//*****************************************************************************************
// Driver for the training stage.  The feature-extraction step
// (features_training) is currently commented out, so this only fits the SVM
// on the previously saved training features.
inline
void
opt_feat::training_svm(std::string model_name, std::string kernel, float gamma)
{
  //features_training( );
  svm_model(model_name, kernel, gamma);
}

///inline void features_training
// Builds the training feature set.  For every action and every training
// video, one covariance descriptor per video segment is computed
// (cov_matrices_svm fills `covs`), and each descriptor is turned into a
// feature vector of divergences to all N_cent reference covariance matrices
// of every action.  The resulting matrix (one column per segment) and the
// label vector are saved under feat_path/train/ for svm_model() to load.
inline
void
opt_feat::features_training()
{
  
  isTesting = false; // cov_matrices_svm() skips per-frame label bookkeeping
  
  int num_samples;   // NOTE(review): never used
  
  
  std::vector <vec> vec_training_set;       // one divergence vector per segment
  std::vector <float> vec_lab_training_set; // matching action label per segment
  
  
  for (uword act = 0 ; act < actions.n_rows; ++act) {//actions.n_rows
    
    // List of training videos for this action.
    std::stringstream tmp_ss2;
    tmp_ss2 << path << actions(act)<<"/train/train_list.txt";    
    videos.load(tmp_ss2.str());
    //videos.print("All videos");
    vec dist_stein;
    
    for (uword vi = 0; vi <videos.n_rows; ++vi ){ 
      
      covs.clear();
      std::stringstream tmp_ss3;
      tmp_ss3 << path << actions(act)<<"/train/"<<  videos(vi);
      
      cout << "Calculating for video " << videos(vi) << endl;
      cov_matrices_svm(tmp_ss3.str()); //covs is calculated here 
      cout << " - Total # " << covs.size() << endl;
      
      
      // Each segment covariance becomes one training vector.
      for (uword i = 0; i < covs.size(); ++i) // Each covariance matrix per video testing_vi is classified
      {
	mat cov_i_te = covs.at(i);
	
	// Distances to every reference covariance of every action.
	dist_stein.zeros(N_cent*actions.n_rows);
	uword posi = 0;
	
	
	for (uword act_tr = 0 ; act_tr < actions.n_rows; ++act_tr) {
	  
	  // Reference covariances produced by clustering_per_action().
	  std::stringstream tmp_ss4;
	  tmp_ss4 << feat_path << "ref_matrices" << "/fullCov_" << actions(act_tr); 
	  field <mat> full_covs_vi;
	  full_covs_vi.load(tmp_ss4.str()); // N_cent covariance matrices per action
	  
	  for (uword co_tr = 0 ; co_tr< full_covs_vi.n_rows; ++co_tr){
	    
	    // Symmetrise and floor small eigenvalues so the reference matrix
	    // is safely positive definite before computing the divergence.
	    mat cov_i_tr = full_covs_vi(co_tr) ;
	    
	    cov_i_tr = 0.5*(cov_i_tr + cov_i_tr.t());
	    vec D;    mat V;    eig_sym(D, V, cov_i_tr);   uvec q1 = find(D < THRESH);
	    if (q1.n_elem>0)
	    {
	      for (uword pos = 0; pos < q1.n_elem; ++pos)
	      {
		D( q1(pos) ) = THRESH;
		
	      }
	      cov_i_tr = V*diagmat(D)*V.t();  // rebuild with floored spectrum
	    }
	    
	    // Stein-style divergence restricted to the matrix diagonals:
	    // sqrt( log det(diag((A+B)/2)) - 0.5*log det(diag(A .* B)) ).
	    double det_op1 = det( diagmat( (cov_i_tr + cov_i_te)/2 ) );
	    double det_op2 = det( diagmat( ( cov_i_tr%cov_i_te ) ) );
	    double dist = sqrt ( log( det_op1 ) -0.5*log( det_op2 ) );
	    dist_stein(posi) = dist;
	    posi++;

	  }
	  
	  
	}
	vec_training_set.push_back(dist_stein);
	vec_lab_training_set.push_back(act); // label = action index
	
	
      }
      
    }
  }
  
  
  // Pack the per-segment vectors into an Armadillo matrix (one column per
  // segment) plus a float label vector, and save them for svm_model().
  int dim = N_cent*actions.n_rows;
  int num_vec = vec_training_set.size();
  mat mat_training_set(dim, num_vec);
  fvec lab_training_set(num_vec);
  
  cout << "num_vec " << num_vec << endl;
  for (uword j = 0; j < num_vec; ++j)
  {
    mat_training_set.col(j) = vec_training_set.at(j);
    lab_training_set(j) = vec_lab_training_set.at(j);
    
  }
  
  cout << "Saving Training features " << endl;
  std::stringstream tmp1;
  tmp1 << feat_path << "train/" << "mat_training_set.dat";
  
  std::stringstream tmp2;
  tmp2 << feat_path << "train/" << "lab_training_set.dat";
  
  mat_training_set.save( tmp1.str() ); // do not rename: loaded by svm_model()
  lab_training_set.save( tmp2.str() ); // do not rename: loaded by svm_model()
  
}


///inline void svm_model

// Trains an OpenCV SVM (C_SVC) on the saved training features and stores the
// model under ./svm_model/<model_name>.
//  kernel - "RBF" (uses `gamma`) or "LINEAR"; any other value aborts.
// Side effects: saves the per-dimension scaling (mean/std) field that
// svm_predict()/testing_training() re-apply at test time, and overwrites the
// SVM model file.
inline
void
opt_feat::svm_model(std::string model_name, std::string kernel, float gamma)
{
  mat mat_training_set;
  fvec lab_training_set; // labels must be float for OpenCV
  
  // Files written by features_training(): feature matrix has one column per
  // training sample (dim x num_vec).
  std::stringstream tmp1;
  tmp1 << feat_path << "train/" << "mat_training_set.dat";
  
  std::stringstream tmp2;
  tmp2 << feat_path << "train/" << "lab_training_set.dat";
  
  mat_training_set.load( tmp1.str() ); // do not rename: written by features_training()
  lab_training_set.load( tmp2.str() ); // do not rename: written by features_training()

  /// Feature scaling: zero mean / unit variance per dimension.
  cout << "Scaling Features" << endl;
  rowvec mean_dim = mean( mat_training_set.t() );
  rowvec std_dim = stddev( mat_training_set.t() );
  
  // Persist the scaling so the test-time code applies the same transform.
  field<rowvec> scaling(2);
  scaling(0) = mean_dim;
  scaling(1) = std_dim;

  std::stringstream tmp_scal;
  tmp_scal << feat_path << "train/" << "scaling_field" << N_cent << "_L" << L_segm << ".dat";
  scaling.save(tmp_scal.str());
 
  mat_training_set.each_col() -= mean_dim.t();
  mat_training_set.each_col() /= std_dim.t();
  /// end feature scaling

  const int dim = N_cent*actions.n_rows;
  const int num_vec = lab_training_set.n_elem;
  
  /// SVM training: copy Armadillo data (column-major, one sample per column)
  /// into an OpenCV matrix with one sample per row.
  cv::Mat cvMatTraining(num_vec, dim, CV_32FC1);
  // std::vector instead of a variable-length array (`float fl_labels[num_vec]`),
  // which is a non-standard C++ extension.
  std::vector<float> fl_labels(num_vec);
  
  for (int m = 0; m < num_vec; ++m)
  {
    for (int d = 0; d < dim; ++d)
    {
      cvMatTraining.at<float>(m,d) = (float)mat_training_set(d,m);
    }
    fl_labels[m] = lab_training_set(m);
  }
  
  // cvMatLabels aliases fl_labels' storage; fl_labels must outlive training.
  cv::Mat cvMatLabels(num_vec, 1, CV_32FC1, fl_labels.data());
  
  // Train the SVM.
  cout << "Training SVM" << endl;
  CvSVMParams params;
  CvSVM SVM;
  if (kernel == "RBF")
  {
    params.svm_type    = CvSVM::C_SVC;
    params.kernel_type = CvSVM::RBF;
    params.gamma = gamma;
    cout << "Kernel type is: "<<params.kernel_type << endl;
    cout << "Gamma " << setprecision(4) << fixed << params.gamma << endl;
    params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER,  (int)1e7, 1e-6);
  }
  else if (kernel == "LINEAR")
  {
    params.svm_type    = CvSVM::C_SVC;
    params.kernel_type = CvSVM::LINEAR;
    params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER,  (int)1e7, 1e-6);
    cout << "Kernel type is: "<<params.kernel_type << endl;
  }
  else
  {
    cout << "Please select one kernel" << endl;
    return;
  }
  
  SVM.train( cvMatTraining , cvMatLabels, cv::Mat(), cv::Mat(), params);
  
  std::stringstream tmp;
  tmp << "./svm_model/"<< model_name.c_str();
  cout << "Saving SVM: " << tmp.str().c_str() <<endl;
  SVM.save( tmp.str().c_str() );
}




  
  
  
//*****************************************************************************************
///***************************************TESTING***************************************** 
//*****************************************************************************************


// Driver for the testing stage.  Feature extraction for the multi-action
// videos (features_testing) is currently commented out, so this only runs
// SVM prediction on the previously saved testing features.
inline
void
opt_feat::testing_svm(std::string path_multi, std::string model_name)
{
  //features_testing( path_multi );
  svm_predict(model_name);
}

///inline void features_testing

// Builds the testing feature set for multi-action videos.  For each video in
// path_multi/multi_list.txt it computes per-segment covariance descriptors
// (together with per-frame ground-truth labels), converts each descriptor
// into a vector of divergences to the reference covariances, and saves the
// feature matrix and labels per person under feat_path/test/.
inline
void
opt_feat::features_testing(std::string path_multi)
{
  isTesting = true; // cov_matrices_svm() will also record per-frame labels
  
  
  actions.load( actionNames );
  //actions.print("All actions");
  
  // multi_list.txt: the test videos; list_labels.txt: one label file per video.
  std::stringstream tmp_ss2;
  tmp_ss2 << path_multi << "/multi_list.txt";
  
  std::stringstream tmp_ss3;
  tmp_ss3 << path_multi << "/list_labels.txt";
  
  
  videos.load(tmp_ss2.str()); 
  field<std::string> labels;
  labels.load(tmp_ss3.str()); 
  
  
  // NOTE(review): output files are named after person(vi), which assumes
  // person_list.txt lines up one-to-one with multi_list.txt — verify.
  field<std::string> person;
  person.load("person_list.txt");
  
  
  vec dist_stein;
  int dim = N_cent*actions.n_rows; // NOTE(review): unused in this method
  for (uword vi = 0; vi <videos.n_rows; ++vi ){ 
    
    // Frame-level ground-truth labels for this video.
    std::stringstream tmp_lbs;
    tmp_lbs << path_multi << labels(vi);
    arma_multi_labels.load(tmp_lbs.str()); //labels are in a frame basis.
    
    
    covs.clear();
    label_multivideo.clear(); // one label vector per segment is stored here
    
    
    std::stringstream tmp_ss3;
    tmp_ss3 << path_multi << videos(vi);
    
    cout << "Calculating for video " << videos(vi);
    cov_matrices_svm(tmp_ss3.str()); //covs is calculated here and label_multivideo
    cout << " - Total # " << covs.size() << endl;
    
    // Per-segment frame labels, converted to an Armadillo field for saving.
    field<vec> arma_lab_covs( covs.size() );
    
    for (uword ci = 0; ci < covs.size(); ++ci) 
    {
      arma_lab_covs(ci) = label_multivideo.at(ci);
      
    }
    
    mat testing_vectors_video_i( N_cent*actions.n_rows, covs.size() );
    std::vector <vec> vec_testing_set;
    
    
    // Same divergence computation as in features_training(): each segment
    // covariance is compared against every reference covariance.
    for (uword i = 0; i < covs.size(); ++i) // Each covariance matrix per video testing_vi is classified
      {
	
	mat cov_i_te = covs.at(i);
	
	dist_stein.zeros(N_cent*actions.n_rows);
	uword posi = 0;
	
	
	for (uword act_tr = 0 ; act_tr < actions.n_rows; ++act_tr) {
	  
	  // Reference covariances produced by clustering_per_action().
	  std::stringstream tmp_ss4;
	  tmp_ss4 << feat_path << "ref_matrices" << "/fullCov_" << actions(act_tr); 
	  field <mat> full_covs_vi;
	  full_covs_vi.load(tmp_ss4.str()); // N_cent covariance matrices per action group
	  
	  for (uword co_tr = 0 ; co_tr< full_covs_vi.n_rows; ++co_tr){
	    
	    // Symmetrise and floor small eigenvalues so the reference matrix
	    // is safely positive definite before computing the divergence.
	    mat cov_i_tr = full_covs_vi(co_tr) ;
	    
	    cov_i_tr = 0.5*(cov_i_tr + cov_i_tr.t());
	    vec D;    mat V;    eig_sym(D, V, cov_i_tr);   uvec q1 = find(D < THRESH);
	    if (q1.n_elem>0)
	    {
	      for (uword pos = 0; pos < q1.n_elem; ++pos)
	      {
		D( q1(pos) ) = THRESH;
		
	      }
	      cov_i_tr = V*diagmat(D)*V.t();  // rebuild with floored spectrum
	    }
	    
	    // Stein-style divergence restricted to the matrix diagonals.
	    double det_op1 = det( diagmat( (cov_i_tr + cov_i_te)/2 ) );
	    double det_op2 = det( diagmat( ( cov_i_tr%cov_i_te ) ) );
	    double dist = sqrt ( log( det_op1 ) -0.5*log( det_op2 ) );
	    dist_stein(posi) = dist;
	    posi++;
	  }
	}
	vec_testing_set.push_back(dist_stein);	
      }
      
      // Pack the per-segment vectors into the per-video feature matrix.
      int num_vec = vec_testing_set.size();
      
      for (uword j = 0; j < num_vec; ++j)
      {
	testing_vectors_video_i.col(j) = vec_testing_set.at(j);
	
      }
    
      // Save features and per-segment labels for this person.
      std::stringstream test_fv;
      test_fv << feat_path << "test/person_" << person(vi) << ".dat";
      cout << "Saving at " << test_fv.str() << endl;
      testing_vectors_video_i.save(test_fv.str());
      
      std::stringstream test_lb;
      test_lb << feat_path << "test/person_lb_" << person(vi) << ".dat";
      cout << "Saving at " << test_lb.str() << endl;
      arma_lab_covs.save(test_lb.str());
      
  }
  
}



///inline svm_predict
// Loads the SVM model and classifies every test feature vector (one per video
// segment) for each person listed in person_list.txt.  Accuracy is reported
// on a per-frame basis: a segment's predicted label is compared against the
// ground-truth label of every frame inside that segment.
inline
void
opt_feat::svm_predict(std::string model_name)
{
  cout << "loading SVM model for multi-action: ";
  
  std::stringstream tmp;
  tmp << "./svm_model/"<< model_name;
  cout << tmp.str() << endl;
  
  CvSVM SVM;
  SVM.load( tmp.str().c_str() );
  
  field<std::string> person;
  person.load("person_list.txt");
  
  float average = 0; // running sum of per-person accuracies
  for (uword pi = 0; pi <person.n_rows; ++pi )
  {
    double acc = 0;          // correctly classified frames
    double total_frames = 0; // total frames evaluated
    vec real_label;
    
    // Feature vectors for this person: one column per segment (saved by
    // features_testing()).
    std::stringstream test_fv;
    test_fv << feat_path << "test/person_" << person(pi) << ".dat";
    mat testing_vectors_video;
    testing_vectors_video.load(test_fv.str());
    
    const int num_vec = testing_vectors_video.n_cols;
    const int dim = testing_vectors_video.n_rows;

    /// Scaling: apply the same mean/std normalisation saved at training time.
    // (A stray line-continuation backslash at the end of this statement was
    // removed.)
    std::stringstream tmp_scal;
    tmp_scal<< feat_path << "train/" << "scaling_field" << N_cent << "_L" << L_segm  << ".dat";
    field<rowvec> scaling;
    scaling.load( tmp_scal.str() );
    
    rowvec mean_dim = scaling(0);
    rowvec std_dim  = scaling(1);
    
    testing_vectors_video.each_col() -= mean_dim.t();
    testing_vectors_video.each_col() /= std_dim.t();
    /// end scaling
    
    // Per-segment ground truth: arma_lab_covs(i) holds one label per frame of
    // segment i.
    field<vec> arma_lab_covs;
    std::stringstream test_lb;
    test_lb << feat_path << "test/person_lb_" <<  person(pi) << ".dat";
    arma_lab_covs.load(test_lb.str());
    
    // Convert to OpenCV layout: one sample per row.
    cv::Mat cvMatTesting(num_vec, dim, CV_32FC1);
    for (int m = 0; m < num_vec; ++m)
    {
      for (int d = 0; d < dim; ++d)
      {
	cvMatTesting.at<float>(m,d) = (float)testing_vectors_video(d,m);
      }
    }
    
    for (int i = 0; i < num_vec; ++i)
    {
      cv::Mat sampleMat = cvMatTesting.row(i);
      float response = SVM.predict(sampleMat);
      
      // Count, frame by frame, how often the segment-level prediction matches
      // the per-frame ground truth.
      real_label = arma_lab_covs(i);
      total_frames = total_frames + real_label.n_elem;
      vec vec_est_label(real_label.n_elem);
      vec_est_label.fill(response);
      uvec good = (vec_est_label == real_label);
      
      acc = acc + sum(good);
    }
    cout << endl;
    cout << "Doing for: person: " << person(pi) ;
    cout << " Performance in a frame basis " << setprecision(2) << fixed <<acc/total_frames*100 << " %" << endl;
    
    average = average + acc/total_frames;
  }
  
  cout << "Average for this SVM model " << setprecision(2) << fixed <<average/person.n_rows*100 << " %" << endl;
}





//*****************************************************************************************
///*************************CALCULATING COV for Training and Testing***********************
//*****************************************************************************************
// Computes one 15x15 covariance descriptor per fixed-length segment of the
// given video and appends them to `covs`.  Per-pixel features (position,
// time within segment, image gradients, optical flow and its derivatives)
// are accumulated over the segment with a running-statistics object; only
// "moving" pixels (gradient magnitude > 40) contribute.  Segments with too
// few moving pixels are discarded.  When isTesting is true, the per-frame
// ground-truth labels of each kept segment are stored in label_multivideo.
inline 
void
opt_feat::cov_matrices_svm(std::string one_video)//covariance features per video
{
  cv::VideoCapture capVideo(one_video);
  
  int num_frames = capVideo.get(CV_CAP_PROP_FRAME_COUNT);
  int L = 25; // L: segment length in frames.
              // NOTE(review): hard-coded here, but output file names elsewhere
              // embed the member L_segm — confirm the two agree.
  
  if( !capVideo.isOpened() )
  {
    cout << "Video couldn't be opened" << endl;
    return;
  }
  
  // Running mean/covariance accumulator over the per-pixel feature vectors.
  running_stat_vec<vec> stats_video(true);
  int t = 0;         // absolute frame index in the video
  int N_vectors = 0; // moving-pixel vectors accumulated in the current segment
  int n_segm = 0;    // frames (with valid flow) processed in the current segment
  
  vec label_segment;
  
  // Slide over the video in steps of L frames; two extra frames per segment
  // are read because the optical flow and its time derivative need them.
  for (int i = 0; i< num_frames - L-1 ; i=i+L)
  {
    label_segment.zeros(L+2); // I need two more frames to calculate optical flow
    capVideo.set(CV_CAP_PROP_POS_FRAMES,i);
    cv::Mat prevgray, gray, flow, cflow, frame, prevflow;
    cv::Mat ixMat, iyMat, ixxMat, iyyMat;
    
    stats_video.reset();
    
    N_vectors = 0;
    n_segm = 0;
    
    for( int j = 0; j<L+2; j++)
    {
      t = capVideo.get(CV_CAP_PROP_POS_FRAMES);
      
      if (isTesting)
      {
	// Ground-truth label of this frame (frame-level annotation).
	int lb = arma_multi_labels(t);
	label_segment(j) = lb;
      }
      
      bool bSuccess = capVideo.read(frame); // read a new frame from video
      
      if (!bSuccess) //if not success, break loop
      	{
	  cout << "Cannot read the frame from video file" << endl;
	  break;
	}
	
	cv::cvtColor(frame, gray, CV_BGR2GRAY);
      
      if( prevgray.data )
      {
	// Dense optical flow between consecutive grayscale frames.
	cv::calcOpticalFlowFarneback(prevgray, 
				     gray, 
			      flow, 
			      0.5, //pyr_scale
			      3,   //levels
			      9,   //winsize
			      1,   //iterations
			      5,   //poly_n
			      1.1, //poly_sigma
			      0);  //flags
	
	// First and second spatial derivatives of the intensity image.
	cv::Sobel(gray, ixMat, CV_32F, 1, 0, 1);
	cv::Sobel(gray, iyMat, CV_32F, 0, 1, 1);
	cv::Sobel(gray, ixxMat, CV_32F, 2, 0, 1);
	cv::Sobel(gray, iyyMat, CV_32F, 0, 2, 1);
	
	float  ux = 0, uy = 0, vx = 0,  vy = 0;
	float u, v;
	
	if( prevflow.data ) // need two flow fields for du/dt, dv/dt
	{
	  // A full segment has been accumulated: turn the running statistics
	  // into a covariance descriptor (if enough moving pixels were seen).
	  if (n_segm == L - 1)
	  {
	    mat cov = stats_video.cov();
	    
	    if (N_vectors > col*row/20)
	    {
	      // Symmetrise and floor small eigenvalues so the matrix is
	      // safely positive definite.
	      cov = 0.5*(cov + cov.t());
	      vec D;
	      mat V;
	      eig_sym(D, V, cov);
	      uvec q1 = find(D < THRESH);
	      if (q1.n_elem>0)
	      {
		for (uword pos = 0; pos < q1.n_elem; ++pos)
		{
		  D( q1(pos) ) = THRESH;
		}
		cov = V*diagmat(D)*V.t();  // rebuild with floored spectrum
	      }  
	      covs.push_back(cov);
	      
	      if (isTesting)
	      {
		label_multivideo.push_back(label_segment);
	      }
	      
	    }
	    else{
	      cout << ". Covariance discarded.";
	    }
	  }
	  
	  for (uword x = 0 ; x < col ; ++x ){
	    for (uword y = 0 ; y < row ; ++y ) {
	      
	      vec features_one_pixel(15);
	      u = flow.at<cv::Vec2f>(y, x)[0];
	      v = flow.at<cv::Vec2f>(y, x)[1];
	      
	      // First-order spatial gradients.
	      float ix = ixMat.at<float>(y, x);
	      float iy = iyMat.at<float>(y, x);
	      
	      // Gradient direction & gradient magnitude.
	      float gd = std::atan2(std::abs(iy), std::abs(ix));
	      float gm = std::sqrt(ix * ix + iy * iy);
	      
	      // Second-order spatial gradients.
	      float ixx = ixxMat.at<float>(y, x);
	      float iyy = iyyMat.at<float>(y, x);
	      // Temporal derivatives of the flow: du/dt and dv/dt.
	      float ut = u - prevflow.at<cv::Vec2f>(y, x)[0];
	      float vt = v - prevflow.at<cv::Vec2f>(y, x)[1];
	      // Spatial flow derivatives, for divergence & vorticity.
	      if (x>0 && y>0 )
	      {
		ux = u - flow.at<cv::Vec2f>(y, x - 1)[0];
		uy = u - flow.at<cv::Vec2f>(y - 1, x)[0];
		vx = v - flow.at<cv::Vec2f>(y, x - 1)[1];
		vy = v - flow.at<cv::Vec2f>(y - 1, x)[1];
	      }
	      
	      // 15-D per-pixel feature vector.
	      // BUG FIX: the 13th entry previously repeated abs(ut); vt was
	      // computed but never used.  It now stores abs(vt) (dv/dt).
	      // NOTE(review): (ux - vy) is used where divergence is
	      // conventionally ux + vy — confirm against the paper.
	      features_one_pixel  << x << y << j << abs(ix) << abs(iy) << abs(ixx) 
	      << abs(iyy) << gm << gd <<  u << v << abs(ut) 
	      << abs(vt) << (ux - vy)  << (vx - uy);
	      
	      if (!is_finite( features_one_pixel ) ){
		cout << "It's not FINITE... continue???" << endl;
		getchar(); 
	      }
	      
	      // Only "moving" pixels (gradient magnitude empirically > 40)
	      // contribute to the segment statistics; they are also painted
	      // red in `frame` for visualisation.
	      if (gm>40) {
		frame.at<cv::Vec3b>(y,x)[0] = 0;
		frame.at<cv::Vec3b>(y,x)[1] = 0;
		frame.at<cv::Vec3b>(y,x)[2] = 255;
		stats_video(features_one_pixel);
		N_vectors++;}
	    }
	  }
	  n_segm++;
	}
      }
      
      if(cv::waitKey(30)>=0)
	break;
	std::swap(prevgray, gray);
	std::swap(prevflow, flow); // flow only becomes valid after two frames
    }
    
  }
}  






//*****************************************************************************************
///*************************CALCULATING REFERENCE COVARIANCE MATRICES**********************
//*****************************************************************************************
//In clustering_per_action(), All the reference covariance matrices are calculated.
inline
void
opt_feat::clustering_per_action()
{
  cout << "# clusters: " << N_cent << endl;
  
  
  
  //actions.load( actionNames );
  //actions.print("All actions");
  
  int n_train = n_samples_tr*actions.n_rows;
  std::vector < vec > video_i;
  
  ///OJO!!!!!!!!!!!!!!!!!!!!!!!! act = 0
  //cout << "OJO EMPIEZA EN 2, porque los otros dos ya los calcule"<< endl;
  for (uword act = 0 ; act < actions.n_rows; ++act) {
    
    
    //All feature vectors per action are stored in the following std:vector<vec>: feat_all_videos_action_i
    //
    feat_all_videos_action_i.clear(); 
    
    std::stringstream tmp_ss2;
    tmp_ss2 << path << actions(act)<<"/train/train_list.txt";
    //cout << tmp_ss2.str()<< endl;
    //getchar();
    videos.load(tmp_ss2.str());
    //videos.print("All videos");
    
    
    for (uword vi = 0; vi <videos.n_rows; ++vi ){ //videos.n_rows
      std::stringstream tmp_ss3;
      tmp_ss3 << path << actions(act)<<"/train/"<<  videos(vi);
      //cout << tmp_ss3.str()<< endl;
      
      cout << "Calculating features vector per video ..." << videos(vi) << endl;;
      feature_video(tmp_ss3.str()); //feat_all_video_action_i is calculated inside this method
      
    }
    
    cout << "Converting to Arma:" << endl;
    cout << feat_all_videos_action_i.size() << endl;
    mat mat_features(15,feat_all_videos_action_i.size());
    //cout << "Llega aqui? " << endl;
    for (uword i = 0; i < feat_all_videos_action_i.size(); ++i)
    {
      //cout << i << endl;
      mat_features.col(i) = feat_all_videos_action_i.at(i);
      
    }
    cout << "Kmeans ... " << endl;
    kmeans km(mat_features, N_cent);
    km.run(10);
    
    field <mat> full_covs_vi;
    full_covs_vi = km.get_covs();
    
    std::stringstream tmp_ss4;
    tmp_ss4 << feat_path << "ref_matrices" << "/fullCov_" << actions(act); 
    cout << endl;
    cout << "Saving in" << tmp_ss4.str()<< endl;
    full_covs_vi.save(tmp_ss4.str());
    
    
    
  }
}


inline 
void
opt_feat::feature_video(std::string one_video)
{
  
  std::vector < vec > feat_frame;
  
  
  cv::VideoCapture capVideo(one_video);
  //cout << one_video << endl;
  //double fps = capVideo.get(CV_CAP_PROP_FPS); //get the frames per seconds of the video
  //cout << "Frame per seconds : " << fps << endl;
  
  //cv::namedWindow("MyVideo",CV_WINDOW_AUTOSIZE); //create a window called "MyVideo"
  
  //double frmcount = capVideo.get(CV_CAP_PROP_FRAME_COUNT);
  //cout << "# of frames is: " << frmcount << endl;
  
  if( !capVideo.isOpened() )
  {
    cout << "Video couldn't be opened" << endl;
    return;
  }
  
  cv::Mat prevgray, gray, flow, cflow, frame, prevflow;
  cv::Mat ixMat, iyMat, ixxMat, iyyMat;
  //cv::namedWindow("My Video", 1);
  //running_stat_vec<vec> stats_video(true);
  //cout << "Frame: ";
  int t = 0;
  
  
  for(;;){
    
    //cout << t << " " ;
    
    bool bSuccess = capVideo.read(frame); // read a new frame from video
    
    if (!bSuccess) //if not success, break loop
      	{
	  //cout << "Cannot read the frame from video file" << endl;
	  break;
	}
	t++;
    cv::cvtColor(frame, gray, CV_BGR2GRAY);
    //cv::cvtColor(frame,gray, cv::COLOR_BGR2GRAY);//For Opencv 3.0 (installed at home)
    
    if( prevgray.data )
    {
      //cout << "Cuando entra aca?? en t= " << t << endl;
      cv::calcOpticalFlowFarneback(prevgray, 
				   gray, 
				   flow, 
				   0.5, //pyr_scale
				   3,   //levels
				   9,   //winsize
				   1,   //iterations
				   5,   //poly_n
				   1.1, //poly_sigma
				   0);  //flags
      
      
      cv::Sobel(gray, ixMat, CV_32F, 1, 0, 1);
      cv::Sobel(gray, iyMat, CV_32F, 0, 1, 1);
      cv::Sobel(gray, ixxMat, CV_32F, 2, 0, 1);
      cv::Sobel(gray, iyyMat, CV_32F, 0, 2, 1);
      
      float  ux = 0, uy = 0, vx = 0,  vy = 0;
      float u, v;
      
      if( prevflow.data )
      {
	
	for (uword x = 0 ; x < col ; ++x ){
	  for (uword y = 0 ; y < row ; ++y ) {
	    
	    vec features_one_pixel(15);
	    u = flow.at<cv::Vec2f>(y, x)[0];
	    v = flow.at<cv::Vec2f>(y, x)[1];
	    
	    //cout << "x= " << x << " - y= " << y << endl;
	    // x grad
	    //cout << " x y grad" << endl;
	    float ix = ixMat.at<float>(y, x);
	    //cout << " y grad" << endl;
	    float iy = iyMat.at<float>(y, x);
	    
	    // grad direction &  grad magnitude
	    //cout << "grad direction &  grad magnitude" << endl;
	    float gd = std::atan2(std::abs(iy), std::abs(ix));
	    float gm = std::sqrt(ix * ix + iy * iy);
	    
	    // x second grad
	    //cout << "x y  second grad " << endl;
	    float ixx = ixxMat.at<float>(y, x);
	    // y second grad
	    float iyy = iyyMat.at<float>(y, x);
	    
	    //du/dt
	    float ut = u - prevflow.at<cv::Vec2f>(y, x)[0];
	    // dv/dt
	    float vt = v - prevflow.at<cv::Vec2f>(y, x)[1];
	    
	    //// divergence &  vorticity
	    //cout << "divergence &  vorticity" << endl;
	    if (x>0 && y>0 )
	    {
	      ux = u - flow.at<cv::Vec2f>(y, x - 1)[0];
	      uy = u - flow.at<cv::Vec2f>(y - 1, x)[0];
	      vx = v - flow.at<cv::Vec2f>(y, x - 1)[1];
	      vy = v - flow.at<cv::Vec2f>(y - 1, x)[1];
	    }
	    //int x_submat = x + rec.x;
	    //int y_submat = y + rec.y;
	    //cout << x_submat << "&" << y_submat << endl;
	    
	    
	    
	    features_one_pixel  << x << y << t << abs(ix) << abs(iy) << abs(ixx) 
	    << abs(iyy) << gm << gd <<  u << v << abs(ut) 
	    << abs(ut) << (ux - vy)  << (vx - uy);
	    //features_one_pixel.t().print("Features Current Pixel: ");
	    //getchar();
	    
	    
	    if (!is_finite( features_one_pixel ) )
	    {
	      cout << "It's not FINITE... continue???" << endl;
	      getchar(); 
	    }
	    
	    // Plotting Moving pixels
	    //cout << " " << gm;
	    if (gm>40) // Empirically set to 40
			    {
			      frame.at<cv::Vec3b>(y,x)[0] = 0;
			      frame.at<cv::Vec3b>(y,x)[1] = 0;
			      frame.at<cv::Vec3b>(y,x)[2] = 255;
			      feat_frame.push_back(features_one_pixel);
			      
			    }
			    //cout << stats_video.cov() << endl;
	    //cout << stats_video.mean() << endl;
	  }
	}
	
	
	
      }
    }
    if(cv::waitKey(30)>=0)
      break;
    //cout << " t: " <<t;
      std::swap(prevgray, gray);
      std::swap(prevflow, flow);//aca esta el problema.... cuando hay flow????
      {
	//cout << "Number of feature vectors in current frame:  " << feat_frame.size() << endl;
	
	
	//****To print in frame Num of feature vectors ****
	//std::stringstream num_vec;
	//num_vec << feat_frame.size();
	//string text = num_vec.str();
	//int fontFace = cv::FONT_HERSHEY_SCRIPT_SIMPLEX;
	
	//double fontScale = 1;
	//int thickness = 1;  
	//cv::Point textOrg(10, 100);
	//cv::putText(frame, text, textOrg, fontFace, fontScale, cv::Scalar::all(255), thickness,8);
	//****end ****
	
	
	//       if (feat_frame.size() < THRESH_2) 
    //       {
      //       cv::Point textOrg(20, 20);
      //       cv::putText(frame, "Discarded", textOrg, fontFace, fontScale, cv::Scalar::all(255), thickness,6);
      //       }
      }
      if (feat_frame.size() > THRESH_2) 
      {
	//cout << "Used " << endl;
	//feat_video_i.insert(feat_video_i.end(), feat_frame.begin(), feat_frame.end()  );
	//cout << feat_all_videos_action_i.size() << "  +  "  << feat_frame.size();
	feat_all_videos_action_i.insert(feat_all_videos_action_i.end(), feat_frame.begin(), feat_frame.end()  );
	//cout << " = " << feat_all_videos_action_i.size() << endl;
      }
      //cout << "otro tam: " <<feat_video_i.size() << endl;
      
      
      
      //cv::imshow("color", frame);
      //cv::waitKey();
      //
      
      
      feat_frame.clear();
  }
  
  
}

//*****************************************************************************************
///*************************TESTING=TRAINING**********************
//*****************************************************************************************

//Loading the model and testing with same data that it was training
// Sanity check: reloads the trained SVM model and evaluates it on the very
// data it was trained with (after re-applying the saved feature scaling).
// Reports the training-set accuracy on stdout.
inline
void
opt_feat::testing_training (std::string model_name)
{
  cout << "***************** test = train ************************" << endl;
  cout << "loading SVM model for multi-action: ";
  
  std::stringstream tmp;
  tmp << "./svm_model/"<< model_name;
  cout << tmp.str() << endl;
  
  CvSVM SVM;
  SVM.load( tmp.str().c_str() );
  CvSVMParams params = SVM.get_params();
  
  cout << "Kernel type is: "<<params.kernel_type << endl;
  cout << "C value: "<<params.C << endl;
  
  mat training_data;
  fvec labels; // float labels, matching what svm_model() saved
  
  std::stringstream tmp1;
  tmp1 << feat_path << "train/" << "mat_training_set.dat";
  
  std::stringstream tmp2;
  tmp2 << feat_path << "train/" << "lab_training_set.dat";
  
  training_data.load( tmp1.str() ); // do not rename: written by features_training()
  labels.load( tmp2.str() );        // do not rename: written by features_training()
  
  /// Scaling: apply the same mean/std normalisation saved by svm_model().
  std::stringstream tmp_scal;
  tmp_scal<< feat_path << "train/" << "scaling_field" << N_cent << "_L" << L_segm  << ".dat";
  field<rowvec> scaling;
  scaling.load(tmp_scal.str());
  
  rowvec mean_dim = scaling(0);
  rowvec std_dim  = scaling(1);
  
  training_data.each_col() -= mean_dim.t();
  training_data.each_col() /= std_dim.t();
  /// end scaling
  
  const int num_vec = training_data.n_cols;
  const int dim = training_data.n_rows;
  
  // Convert to OpenCV layout: one sample per row.
  cv::Mat cvMatTraining(num_vec, dim, CV_32FC1);
  
  // std::vector instead of a variable-length array (`float fl_labels[num_vec]`),
  // which is a non-standard C++ extension.
  std::vector<float> fl_labels(num_vec);
  
  for (int m = 0; m < num_vec; ++m)
  {
    for (int d = 0; d < dim; ++d)
    {
      cvMatTraining.at<float>(m,d) = (float)training_data(d,m);
    }
    fl_labels[m] = labels(m);
  }
  
  // cvMatLabels aliases fl_labels' storage; kept alive for the checks below.
  cv::Mat cvMatLabels(num_vec, 1, CV_32FC1, fl_labels.data());
  
  float acc = 0; // correctly classified samples
  for (uword i = 0; i < labels.n_elem; ++i)
  {
    cv::Mat sampleMat = cvMatTraining.row(i);
    float response = SVM.predict(sampleMat);
    if (labels(i)  ==  response )
    {
      acc++;
    }
  }

  cout << "Performance in Training set: " << setprecision (2) << fixed << acc*100/num_vec << " %"<< endl;
  cout << "*******************************************" << endl;
}

