
//Features as per Andres' WACV paper

inline
opt_feat::opt_feat(const std::string in_path, 
		   const std::string in_actionNames,  
		   const std::string in_feat_path,
		   const int in_col, 
		   const int in_row
)
:path(in_path), actionNames(in_actionNames), feat_path(in_feat_path), col(in_col), row(in_row), n_samples_tr(17), n_samples_te(8)
{
  // Load the list of action names used by every training/testing loop.
  actions.load( actionNames );

  // Numerical floor, and the per-frame moving-pixel threshold:
  // a frame must yield more than 1% of its pixels as "moving"
  // before its feature vectors are kept (see feature_video()).
  THRESH = 0.000001;
  THRESH_2 = col*row*1/100;

  // Single-action mode unless feature_multi_action() switches it on.
  ismultiAction = false;
}



inline
void
opt_feat::features_per_action_training()
{
  
  for (uword act = 0 ; act < actions.n_rows; ++act) {
    
    
    //All feature vectors per action are stored in the following std:vector<vec>: feat_all_videos_action_i
    //
    feat_all_videos_action_i.clear(); 
    
    std::stringstream tmp_ss2;
    tmp_ss2 << path << actions(act)<<"/train/train_list.txt";
    //cout << tmp_ss2.str()<< endl;
    //getchar();
    videos.load(tmp_ss2.str());
    //videos.print("All videos");
    
    
    
    for (uword vi = 0; vi <videos.n_rows; ++vi ){ //videos.n_rows
      std::stringstream tmp_ss3;
      tmp_ss3 << path << actions(act)<<"/train/"<<  videos(vi);
      //cout << tmp_ss3.str()<< endl;
      
      cout << "Calculating features vector per video ..." << videos(vi) << endl;;
      feature_video(tmp_ss3.str()); //feat_all_video_action_i is calculated inside this method
      
    }
    
    cout << "Converting to Arma:" << endl;
    cout << "# of Vectors: " << feat_all_videos_action_i.size() << endl;
    mat mat_features(14,feat_all_videos_action_i.size());
    //cout << "Llega aqui? " << endl;
    
    
    for (uword i = 0; i < feat_all_videos_action_i.size(); ++i)
    {
      //cout << i << endl;
      mat_features.col(i) = feat_all_videos_action_i.at(i);
      
    }
    
    std::stringstream tmp_ss4;
    tmp_ss4 << feat_path << "train/feature_vectors_" << actions(act); 
    cout << endl;
    mat_features.save( tmp_ss4.str() );
  }
}



inline
void
opt_feat::create_gmm_action(int in_Ncent)
{
  // Remember the requested number of Gaussians, then train one GMM per
  // action from the saved training feature matrices.
  N_cent = in_Ncent;
  cout << "# clusters: " << N_cent << endl;

  gmm_per_action();
}


inline
void
opt_feat::gmm_per_action()
{
  //Load the following: 
  ///OJO: actions.n_rows
  ///reojo!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
  
  for (uword act = 0 ; act < actions.n_rows;  ++act) {
    
    std::stringstream tmp_ss4;
    tmp_ss4 << feat_path << "train/feature_vectors_" << actions(act); 
    cout << "Calculating GMM for action " << actions(act) << endl;
    mat mat_features;
    mat_features.load( tmp_ss4.str() );
    
     ///Feature Scaling
    /*
  cout << "Scaling Features" << endl;
  rowvec mean_dim = mean( mat_features.t() );
  rowvec std_dim = stddev( mat_features.t() );
  
  field<rowvec> scaling(2);
  scaling(0) = mean_dim;
  scaling(1) = std_dim;
  
  
  std::stringstream tmp_scal;
  tmp_scal<< feat_path << "train/scaling_field.dat";
  scaling.save(tmp_scal.str());
  
  mat_features.each_col()-=mean_dim.t() ;
  mat_features.each_col()/=std_dim.t() ;
  
  //to be sure that training set has mean 0 and stddev 1
//   rowvec mean_dim2 = mean( mat_features.t() ); 
//   cout << mean_dim2 << endl;
//   getchar();
//   rowvec std_dim2 = stddev( mat_features.t() );
//   cout << std_dim2 << endl;
//   getchar();*/

  ///end Feature Scaling
  
  
  
  
   
    gmm_diag gmm_model;
    gmm_model.learn(mat_features, N_cent, eucl_dist, static_subset, 20, 5, 1e-10, true);   
    
    std::stringstream tmp_ss5;
    tmp_ss5 << "./gmm_models/Ng" << N_cent << "_" << actions(act); 
    cout << "Saving GMM in " << tmp_ss5.str() << endl;
    gmm_model.save( tmp_ss5.str() );
  }
  
}




//******************TESTING SINGLE ACTIONS***********************************
//***************************************************************************


inline 
void
opt_feat::feature_testing()
{
  for (uword act = 0 ; act < actions.n_rows; ++act) {
    
    
    std::stringstream tmp_ss2;
    tmp_ss2 << path << actions(act)<<"/test/test_list.txt";
    //cout << tmp_ss2.str()<< endl;
    //getchar();
    videos.load(tmp_ss2.str());
    //videos.print("All videos");
    
    
    
    for (uword vi = 0; vi <videos.n_rows; ++vi ){ //videos.n_rows
      
      feat_all_videos_action_i.clear(); 
      
      std::stringstream tmp_ss3;
      tmp_ss3 << path << actions(act)<<"/test/"<<  videos(vi);
      //cout << tmp_ss3.str()<< endl;
      
      cout << "Calculating features vector per video ..." << videos(vi) << endl;;
      feature_video(tmp_ss3.str()); //feat_all_video_action_i is calculated inside this method
      
      ///guardar por video
      cout << "Converting to Arma:" << endl;
      cout << "# of Vectors: " << feat_all_videos_action_i.size() << endl;
      mat mat_features(14,feat_all_videos_action_i.size());
      
      
      for (uword i = 0; i < feat_all_videos_action_i.size(); ++i)
      {
	mat_features.col(i) = feat_all_videos_action_i.at(i);
	
      }
      
      
      std::stringstream tmp_ss4;
      tmp_ss4 << feat_path << "test/"<< actions(act)<< "_person"<< vi + 18;  // 17 for training from 18 onwards for testing
      cout << tmp_ss4.str() << endl;
      //getchar();
      mat_features.save( tmp_ss4.str() );
      
      
    }
  }
}



inline
void
opt_feat::gmm_testing(int in_Ncent)
{
  // Classify each single-action test video: score its feature matrix under
  // every action's trained GMM and predict the action with the highest
  // average log-likelihood.  Reports overall accuracy at the end.
  //
  // in_Ncent: number of Gaussians of the models to load (sets N_cent).
  N_cent = in_Ncent;

  cout << "Testing for GMM with " << N_cent << " centroids" << endl;
  mat mat_features;

  double total_videos = n_samples_te*actions.n_rows;
  double acc = 0;  // count of correctly classified videos

  for (uword act = 0 ; act < actions.n_rows; ++act) {

    std::stringstream tmp_ss2;
    tmp_ss2 << path << actions(act)<<"/test/test_list.txt";
    videos.load(tmp_ss2.str());

    for (uword vi = 0; vi <videos.n_rows; ++vi ){

      // Load the per-video feature matrix written by feature_testing().
      std::stringstream tmp_ss4;
      tmp_ss4 << feat_path << "test/"<< actions(act)<< "_person"<< vi + 18;  // persons 18+ are the test split
      mat_features.load( tmp_ss4.str() );

      // NOTE(review): feature scaling is disabled here, matching the
      // disabled scaling at training time in gmm_per_action().

      // Score the video against every action model.  Delegating to the
      // shared helper removes the loop that was duplicated verbatim from
      // get_loglikelihoods() and keeps both test paths consistent.
      vec likelihood_actions = get_loglikelihoods(mat_features);

      // Predicted class = arg max of the per-action log-likelihoods.
      uword est_class;
      likelihood_actions.max(est_class);

      if (est_class == act)
      {
        acc++;
      }
    }
  }
  cout << "Number of videos well classified " << acc << endl;
  cout << "Accuracy: " << setprecision(2) << fixed  << 100*acc/total_videos<< " %" << endl;
  cout << "******************************************" << endl;
}



//***************************Feature per video: Training and Testing ************

inline 
void
opt_feat::feature_video(std::string one_video)
{
  // Computes a 14-dimensional feature vector for every "moving" pixel of
  // every frame of one video and appends them to feat_all_videos_action_i.
  //
  // Features per pixel: x, y, |Ix|, |Iy|, |Ixx|, |Iyy|, gradient magnitude,
  // gradient direction, optical flow (u, v), |du/dt|, |dv/dt|, flow
  // divergence and vorticity.
  //
  // In multi-action mode (ismultiAction) the per-frame feature matrices and
  // their frame labels are additionally stored in featuresframe_video_i and
  // lab_feature_vectors (indexed by frame number t).

  std::vector < vec > feat_frame;   // feature vectors of the current frame

  cv::VideoCapture capVideo(one_video);

  double frmcount = capVideo.get(CV_CAP_PROP_FRAME_COUNT);
  cout << "# of frames is: " << frmcount << endl;

  if( !capVideo.isOpened() )
  {
    cout << "Video couldn't be opened" << endl;
    return;
  }

  cv::Mat prevgray, gray, flow, cflow, frame, prevflow;
  cv::Mat ixMat, iyMat, ixxMat, iyyMat;

  int t = -1;  // frame index; first successfully read frame becomes 0
  featuresframe_video_i.set_size( frmcount );
  lab_feature_vectors.set_size( frmcount );

  for(;;){

    bool bSuccess = capVideo.read(frame); // read a new frame from video
    if (!bSuccess)  // end of video (or read error): stop
    {
      break;
    }
    t++;
    cv::cvtColor(frame, gray, CV_BGR2GRAY);
    //cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY); // for OpenCV 3.x

    // Flow needs a previous frame, and temporal flow derivatives need a
    // previous flow, so pixel features only appear from the third frame on.
    if( prevgray.data )
    {
      cv::calcOpticalFlowFarneback(prevgray, 
				   gray, 
				   flow, 
				   0.5, //pyr_scale
				   3,   //levels
				   9,   //winsize
				   1,   //iterations
				   5,   //poly_n
				   1.1, //poly_sigma
				   0);  //flags

      // First- and second-order spatial image derivatives.
      cv::Sobel(gray, ixMat, CV_32F, 1, 0, 1);
      cv::Sobel(gray, iyMat, CV_32F, 0, 1, 1);
      cv::Sobel(gray, ixxMat, CV_32F, 2, 0, 1);
      cv::Sobel(gray, iyyMat, CV_32F, 0, 2, 1);

      float  ux = 0, uy = 0, vx = 0,  vy = 0;
      float u, v;

      if( prevflow.data )
      {
	for (uword x = 0 ; x < col ; ++x ){
	  for (uword y = 0 ; y < row ; ++y ) {

	    vec features_one_pixel(14);
	    u = flow.at<cv::Vec2f>(y, x)[0];
	    v = flow.at<cv::Vec2f>(y, x)[1];

	    // First-order spatial gradients.
	    float ix = ixMat.at<float>(y, x);
	    float iy = iyMat.at<float>(y, x);

	    // Gradient direction & magnitude.
	    float gd = std::atan2(std::abs(iy), std::abs(ix));
	    float gm = std::sqrt(ix * ix + iy * iy);

	    // Second-order spatial gradients.
	    float ixx = ixxMat.at<float>(y, x);
	    float iyy = iyyMat.at<float>(y, x);

	    // Temporal flow derivatives: du/dt and dv/dt.
	    float ut = u - prevflow.at<cv::Vec2f>(y, x)[0];
	    float vt = v - prevflow.at<cv::Vec2f>(y, x)[1];

	    // Backward spatial differences of the flow for divergence and
	    // vorticity; pixels on the left/top border keep the previous
	    // (initially zero) values.
	    if (x>0 && y>0 )
	    {
	      ux = u - flow.at<cv::Vec2f>(y, x - 1)[0];
	      uy = u - flow.at<cv::Vec2f>(y - 1, x)[0];
	      vx = v - flow.at<cv::Vec2f>(y, x - 1)[1];
	      vy = v - flow.at<cv::Vec2f>(y - 1, x)[1];
	    }

	    // BUGFIX: the 12th entry used to repeat abs(ut) while vt was
	    // computed but never used; it now stores |dv/dt| as intended.
	    // NOTE(review): divergence is conventionally ux + vy; the
	    // original (ux - vy) is kept pending confirmation against the
	    // reference paper.
	    features_one_pixel  << x << y << abs(ix) << abs(iy) << abs(ixx) 
	    << abs(iyy) << gm << gd <<  u << v << abs(ut) 
	    << abs(vt) << (ux - vy)  << (vx - uy);

	    if (!is_finite( features_one_pixel ) )
	    {
	      cout << "It's not FINITE... continue???" << endl;
	      getchar(); 
	    }

	    // Keep only "moving" pixels (strong gradient magnitude); they
	    // are also painted red in the frame for visual inspection.
	    if (gm>40) // Empirically set to 40
	    {
	      frame.at<cv::Vec3b>(y,x)[0] = 0;
	      frame.at<cv::Vec3b>(y,x)[1] = 0;
	      frame.at<cv::Vec3b>(y,x)[2] = 255;
	      feat_frame.push_back(features_one_pixel);
	    }
	  }
	}
      }
    }

    if(cv::waitKey(30)>=0)
      break;

    std::swap(prevgray, gray);
    std::swap(prevflow, flow);

    // A frame contributes to the pooled features only if it has enough
    // moving pixels (more than ~1% of the frame; see THRESH_2).
    string text;
    if (feat_frame.size() > THRESH_2) 
    {
      feat_all_videos_action_i.insert(feat_all_videos_action_i.end(), feat_frame.begin(), feat_frame.end()  );
      text = "Frame used";
    }
    else
    {
      text = "Frame discarded";
      if (ismultiAction)
      {
	text = "Frame wasn't discarded. Multivideo";
      }
    }

    // Multi-action mode: store this frame's feature matrix and its
    // ground-truth frame label (loaded by feature_multi_action()).
    if (ismultiAction)
    {
      mat feat_frame_i( 14,feat_frame.size() );
      for (uword l = 0; l<feat_frame.size(); ++l )
      {
	feat_frame_i.col(l) = feat_frame.at(l);
      }
      featuresframe_video_i(t) = feat_frame_i;
      lab_feature_vectors(t) = arma_multi_labels(t);
    }

    // Overlay the used/discarded text on the frame (display is disabled).
    int fontFace = cv::FONT_HERSHEY_PLAIN;
    double fontScale = 0.8;
    int thickness = 1;  
    cv::Point textOrg(10, 100);
    cv::putText(frame, text, textOrg, fontFace, fontScale, cv::Scalar::all(255), thickness,8);

    feat_frame.clear();
  }
}

//******************TESTING MULTI ACTIONS***********************************
//***************************************************************************


inline 
void 
opt_feat::feature_multi_action(std::string path_multi)
{
  ismultiAction = true; // per default is false

   
  std::stringstream tmp_ss2;
  tmp_ss2 << path_multi << "/multi_list.txt";
  
  std::stringstream tmp_ss3;
  tmp_ss3 << path_multi << "/list_labels.txt";
  
  
  videos.load(tmp_ss2.str()); 
  
  field<std::string> labels;
  labels.load(tmp_ss3.str()); 
  labels.print();
  
  
  field<std::string> person;
  person.load("person_list.txt");
    
    for (uword vi = 0; vi <videos.n_rows; ++vi ){ //videos.n_rows
      
      feat_all_videos_action_i.clear(); 
      

      
      std::stringstream tmp_ss3;
      tmp_ss3 << path_multi << videos(vi);
      //cout << tmp_ss3.str()<< endl;
      
       std::stringstream tmp_lbs;
       tmp_lbs << path_multi << labels(vi);
       arma_multi_labels.load(tmp_lbs.str()); //labels are in a frame basis.
    
    
      
      cout << "Calculating features vector per video ..." << videos(vi) << endl;;
      feature_video(tmp_ss3.str()); //feat_all_video_action_i is calculated inside this method
      
           
      
      cout << "Total feature vectors: " << featuresframe_video_i.n_elem << endl;
      cout << "Total labels: " << lab_feature_vectors.n_elem << endl;
      
     
      
      std::stringstream tmp_ss4;
      tmp_ss4 << feat_path << "multi_test/person"<< person(vi);  // 17 for training from 18 onwards for testing
      
      std::stringstream tmp_vec_lab;
      tmp_vec_lab << feat_path << "multi_test/lab_person"<< person(vi);  // 17 for training from 18 onwards for testing
      
      
      //cout << tmp_ss4.str() << endl;
      //getchar();
      featuresframe_video_i.save( tmp_ss4.str() );
      lab_feature_vectors.save( tmp_vec_lab.str(), raw_ascii );
      
      
    }
  
}

inline 
void 
opt_feat::gmm_multi_action( int in_Ncent )
{
    // Frame-wise classification of multi-action test videos.
    // Every sliding window of L consecutive frames is scored against each
    // action's GMM; a frame's final score is the accumulated score of all
    // windows covering it, and its label is the arg-max action.
    int L =25;        // sliding-window length (frames)
    N_cent = in_Ncent;
    field<mat> featuresframe_video_i; // Features for frame i are in a row of this field
                                      // NOTE(review): this local shadows the class member
                                      // of the same name — confirm that is intentional
    uvec real_labels;                 // per-frame ground-truth labels
    
    cout << "Testing for GMM with " << N_cent << " centroids" << endl;
    
     field<std::string> person;
     person.load("person_list.txt");
     
     
     // CAUTION: person_list.txt must match the files written by feature_multi_action()
     for (uword vi = 0; vi <person.n_rows; ++vi ){ 
       
        std::stringstream tmp_ss4;
	tmp_ss4 << feat_path << "multi_test/person"<< person(vi);  // per-frame feature matrices saved by feature_multi_action()
	std::stringstream tmp_vec_lab;
	tmp_vec_lab << feat_path << "multi_test/lab_person"<< person(vi);  // matching per-frame labels
	
	
      featuresframe_video_i.load( tmp_ss4.str() );
      real_labels.load( tmp_vec_lab.str(), raw_ascii );
      int num_frames = featuresframe_video_i.n_rows;
      
      
       mat log_prob;
       log_prob.zeros(num_frames-2, actions.n_rows);  //( -2 ) because frame 0 and 1 are empty
       
       cout << "Doing for person " << person(vi) << endl;
       
       // Slide an L-frame window over the video.  Frames 0 and 1 carry no
       // features, hence the j+2 offset below.
       // NOTE(review): if num_frames <= L+1, the signed (num_frames - L - 1)
       // converts to a huge unsigned value when compared with uword fr —
       // confirm all videos are longer than L+1 frames.
       for ( uword fr = 0; fr < num_frames - L - 1 ; ++fr)//( -1 ) because frame 0 and 1 are empty
       {
	 mat mat_features;
	 
	 // Concatenate the feature vectors of the L frames in this window.
	 for (uword j = fr; j< fr+L; j++)
	 {
	   mat_features	 = join_rows( mat_features, featuresframe_video_i(j+2) );
	      
	   
	   
	}
	
      ///Scaling (disabled; kept in sync with disabled scaling at training time)
      /*
      field<rowvec> scaling;
      std::stringstream tmp_scal;
      tmp_scal<< feat_path << "train/scaling_field.dat";
      scaling.load(tmp_scal.str());
      rowvec mean_dim = scaling(0);
      rowvec std_dim  = scaling(1);
      
      mat_features.each_col()-=mean_dim.t() ;
      mat_features.each_col()/=std_dim.t() ;
      */
      ///end scaling
      
	// Add this window's per-action log-likelihoods to every frame row
	// it covers.
	vec likelihood_actions = get_loglikelihoods(mat_features);
	log_prob.rows(fr, fr + L-1).each_row() += likelihood_actions.t();
	

       
     }
     cout << endl;
     uvec est_labels(num_frames - 2);
     
      
     // Per-frame decision: the action with the highest accumulated score.
     for ( uword i = 0; i < num_frames - 2 ; ++i)//( -2 ) because frame 0 and 1 are empty
       {
	 uword index;
	 double max_frame = log_prob.row(i).max(index);
	 est_labels(i) = index;
	 

       }
       
       std::stringstream tmp_save_estlab;
      tmp_save_estlab<< "./est_label_person" << person(vi);  // estimated frame labels, saved per person
      est_labels.save(tmp_save_estlab.str(), raw_ascii);
     
     // Accuracy = fraction of frames whose estimated label matches the
     // ground truth (skipping the two feature-less leading frames).
     uvec comparing = find( est_labels == real_labels.subvec(2,num_frames-1) );
     double acc = comparing.n_elem;
     cout <<"performance for person "<<person(vi)<<" is "<<setprecision(2)<<fixed<<100*acc/est_labels.n_elem << " %"<<endl;
     
    }
  
}


inline
vec
opt_feat::get_loglikelihoods(mat mat_features)
{
  // Returns, for each action, the average log-likelihood of the given
  // feature matrix under that action's saved GMM (N_cent centroids).
  vec likelihood_actions(actions.n_rows);

  for (uword a = 0; a < actions.n_rows; ++a)
  {
    // Load the model trained by gmm_per_action().
    std::stringstream model_name;
    model_name << "./gmm_models/Ng" << N_cent << "_" << actions(a);

    gmm_diag gmm_model;
    gmm_model.load( model_name.str() );

    likelihood_actions(a) = gmm_model.avg_log_p(mat_features);
  }

  return likelihood_actions;
}



