#include "aux_functions_def.hpp"
#include "aux_functions_impl.hpp"

#include "kmeans_regions_def.hpp"
#include "kmeans_regions_impl.hpp"


inline	
clustering_means::clustering_means(  std::string in_video_path,  field<std::string> in_frames_list, uword in_ro, uword in_co, std::string in_tosavein )
:frames_list(in_frames_list), video_path( in_video_path ) , ro(in_ro) , co(in_co), tosavein (in_tosavein)
{
  // Feature-extraction constructor: stores the video path, frame list,
  // frame dimensions (ro rows x co cols) and the output file path, then
  // immediately computes the per-segment region features and saves them
  // to 'tosavein' (see precalculate_features).
  precalculate_features();
  
}


inline  
clustering_means::clustering_means(std::string load_field_means,std::string save_part, std::string in_GT, int summ_percentage, std::string in_video_path, field<std::string> in_frames_list )
:frames_list(in_frames_list), video_path( in_video_path ), GT (in_GT)
{
  // Summarization/visualization constructor: loads precomputed region means
  // into a kmeans_regions object and then displays the stored partitions.
  // NOTE(review): parameters 'save_part' and 'summ_percentage' are currently
  // unused -- presumably they belong to the commented-out experiment below;
  // confirm before removing.
  
  ni = 5; // # of consecutive images to be used to calculate the point in the manifold
  
  // NOTE(review): 226 looks like a dataset-specific cluster/segment count --
  // TODO confirm and lift into a named constant.
  kmeans_regions KmReg(load_field_means, 226);
  
  /*
  KmReg.run(100);
  uvec summary_small = KmReg.small_clusters(226);
  summary_small.t().print("summary =)");
  
  uvec summary_near = KmReg.near_points();
  
  cout << "Saving summary Small" << endl;
  summary_small.save("summarySmall_Dec13.dat", raw_ascii);
  
  cout << "Saving summary Near" << endl;
  summary_near.save("summaryNear_Dec13.dat", raw_ascii);
  
  */
  
  //uvec summary_small;
  //summary_small.load("summarySmall_Dec13.dat");
  
  //uvec summary_near;
  //summary_near.load("summaryNear_Dec13.dat");
  
  
  //showing_summary(summary_near);
  
  
  
  show_partitions();
}





inline	
void
clustering_means::precalculate_features()
{
  nf = frames_list.n_rows;
  ni = 5; // # of consecutive images to be used to calculate the point in the manifold
  cout << "# images to calculate a covariance matrix (Riemmannian Manifold): " << ni << endl;
  cout << "# of frames " << nf << endl;
  double nDx = floor(nf/ni); // # of Descriptors ( Covariance matrices or vectors of means)
  //cout << "# of covariance matrices is " << n_covMx << endl;
  
  field<mat> Cov_Features;
  field<mat> Mean_segment;
  // 4 regions
  int num_reg = 4;
  isBlock = true;
  double wx = co/num_reg;
  double hy = ro/num_reg;
  double n_regions = num_reg*num_reg;
  //Cov_Features.set_size(nDx-1,num_reg*num_reg); // Covariance matrices for all the segments. 1 segment = 1 second = 10 frames in UCSD dataset
  Mean_segment.set_size(nDx-1); // As the first and last frame are not used
  mat mean_subframe;
  uword pos = 0;
  uword prevIdx,currIdx,nextIdx;
  //idx_frames.zeros(ni+1);
  
  
  //Features:
  double rnf = nDx*ni; //
  t = 1;
  int segmAcc = 0;
  int segm = 0;
  mean_subframe.zeros( 15 , n_regions);
  
  //cv::Rect rec;
  
  for (uword f = 1; f <  rnf - 4; ++f)//  Should be nf, but I want this to be multiple of ni.
  {
    int reg = 0;
    
    prevIdx = f - 1;
    currIdx = f;
    nextIdx = f + 1;
    
    load_frames(prevIdx,currIdx,nextIdx);
    //cout << "Frame= " << f << endl;
    
    for (uword y = 0; y<num_reg; y++){
      int ini_y = hy*y;
      for (uword x = 0; x<num_reg; x++){
	int ini_x = wx*x;
	//cout << ini_y << " " << ini_x << endl;
	//cout << ini_x << " " << ini_y << " " << wx << " "<< hy <<  endl;
	mean_subframe(span::all,reg) += creating_RiemmannPoint_temporal(cv::Rect(ini_x, ini_y, wx, hy)); //Using spatio-temporal features
	vec borr = creating_RiemmannPoint_temporal(cv::Rect(ini_x, ini_y, wx, hy));
	//cout << "mean_frame: " <<  borr.t() << endl;
	reg++;
      }
    }
    
    segmAcc++;
    if (segmAcc == ni){
      cout << "segm: " << segm<< endl;
      mean_subframe = mean_subframe/ni;
      Mean_segment(segm) = mean_subframe;
      //mean_subframe.print("Mean for this segment");
      mean_subframe.zeros( 15 , n_regions);
      segm++;
      segmAcc = 0;
      //getchar();
    }
    t++;
  }
  Mean_segment.save(tosavein);
}


inline
void
clustering_means::load_frames(uword prevIdx, uword  currIdx, uword  nextIdx)
{
  // Loads the previous, current and next frames (as grayscale images) into
  // the corresponding member images, resolving each file name relative to
  // 'video_path'.
  const std::string prev_path = video_path + frames_list(prevIdx);
  const std::string curr_path = video_path + frames_list(currIdx);
  const std::string next_path = video_path + frames_list(nextIdx);
  
  prevImg    = cv::imread(prev_path, CV_LOAD_IMAGE_GRAYSCALE);
  currentImg = cv::imread(curr_path, CV_LOAD_IMAGE_GRAYSCALE);
  nextImg    = cv::imread(next_path, CV_LOAD_IMAGE_GRAYSCALE);
}

//feat_segment.zeros(15,l*(n_frames)); //see Andres's paper (WACV)
inline	
vec
clustering_means::creating_RiemmannPoint_temporal(cv::Rect rec)
{
  // Returns the mean 15-D spatio-temporal feature vector over all pixels of
  // region 'rec' of the current frame (or of the whole frame when isBlock is
  // false).  Per-pixel features: position (x, y), time t, gradient magnitudes
  // |ix| |iy| |ixx| |iyy|, gradient magnitude/direction (gm, gd), optical
  // flow (u, v), temporal flow derivatives |ut| |vt|, divergence-like
  // (ux - vy) and vorticity-like (vx - uy) terms.
  cv::Mat bl_prevImg, bl_currentImg, bl_nextImg;
  
  if (isBlock)
  {
    bl_prevImg    = prevImg(rec);
    bl_currentImg = currentImg(rec);
    bl_nextImg    = nextImg(rec);
  }
  else
  {
    bl_prevImg    = prevImg;
    bl_currentImg = currentImg;
    bl_nextImg    = nextImg;
  }
  
  vec mean_current_frame;
  mean_current_frame.zeros(15);
  
  cv::Mat flow, prevFlow;
  float u, v;
  double num_vec = 0; // number of pixels accumulated
  
  // Dense optical flow for current->next and prev->current.
  cv::calcOpticalFlowFarneback(bl_currentImg, bl_nextImg, flow, 0.5,  3, 5, 3, 5, 1.2, 0); 
  cv::calcOpticalFlowFarneback(bl_prevImg, bl_currentImg, prevFlow, 0.5,  3, 5, 3, 5, 1.2, 0); 
  
  // FIX: gradients are computed on the block itself.  The original ran Sobel
  // on the full frame but indexed the result with block-local (y, x), reading
  // gradients from the top-left corner of the frame whenever isBlock is true.
  // (When isBlock is false, bl_currentImg aliases currentImg, so this is
  // identical to the original behavior.)
  cv::Mat ixMat, iyMat, ixxMat, iyyMat;
  cv::Sobel(bl_currentImg, ixMat, CV_32F, 1, 0, 1);
  cv::Sobel(bl_currentImg, iyMat, CV_32F, 0, 1, 1);
  cv::Sobel(bl_currentImg, ixxMat, CV_32F, 2, 0, 1);
  cv::Sobel(bl_currentImg, iyyMat, CV_32F, 0, 2, 1);
  
  uword col = bl_currentImg.cols;
  uword row = bl_currentImg.rows;
  
  // Backward-difference flow derivatives; 0 on the first row/column.
  float ux = 0;
  float uy = 0;
  float vx = 0;
  float vy = 0;
  
  for (uword x = 0 ; x < col ; ++x ){
    for (uword y = 0 ; y < row ; ++y ) {
      
      vec features_one_pixel(15); 
      u = flow.at<cv::Vec2f>(y, x)[0];
      v = flow.at<cv::Vec2f>(y, x)[1];
      
      // first-order spatial gradients
      float ix = ixMat.at<float>(y, x);
      float iy = iyMat.at<float>(y, x);
      
      // gradient direction & gradient magnitude
      float gd = std::atan2(std::abs(iy), std::abs(ix));
      float gm = std::sqrt(ix * ix + iy * iy);
      
      // second-order spatial gradients
      float ixx = ixxMat.at<float>(y, x);
      float iyy = iyyMat.at<float>(y, x);
      
      // du/dt and dv/dt (difference against the previous flow field)
      float ut = u - prevFlow.at<cv::Vec2f>(y, x)[0];
      float vt = v - prevFlow.at<cv::Vec2f>(y, x)[1];
      
      // divergence & vorticity terms
      if (x>0 && y>0 )
      {
	ux = u - flow.at<cv::Vec2f>(y, x - 1)[0];
	uy = u - flow.at<cv::Vec2f>(y - 1, x)[0];
	vx = v - flow.at<cv::Vec2f>(y, x - 1)[1];
	vy = v - flow.at<cv::Vec2f>(y - 1, x)[1];
      }
      
      // pixel coordinates expressed in full-frame space
      int x_submat = x + rec.x;
      int y_submat = y + rec.y;
      
      // FIX: the original pushed abs(ut) twice and never used vt; the 13th
      // entry is now |vt| as the (previously dead) computation of vt implies.
      // std::abs is used throughout so the float overload is selected --
      // unqualified abs could bind to ::abs(int) and truncate.
      features_one_pixel  << x_submat << y_submat << t
      << std::abs(ix) << std::abs(iy) << std::abs(ixx) << std::abs(iyy)
      << gm << gd <<  u << v
      << std::abs(ut) << std::abs(vt)
      << (ux - vy)  << (vx - uy);
      
      // Diagnostic dump: should not trigger with finite flow fields.
      if ( !is_finite(features_one_pixel) )
      {
	cout << features_one_pixel << endl;
	cout << ux << " | " <<  uy << " | " << vx << " | " <<  vy << endl;
	
	cout << "ux = " << u << " - " << flow.at<cv::Vec2f>(y, x - 1)[0] <<endl;
	cout << "uy = " << u << " - " << flow.at<cv::Vec2f>(y - 1, x)[0] <<endl;
	cout << "vx = " << v << " - " << flow.at<cv::Vec2f>(y, x - 1)[1] <<endl;
	cout << "vy = " << v << " - " << flow.at<cv::Vec2f>(y - 1, x)[1] <<endl;
	
	
	cout << t << endl;
	
	aux_functions auxfux;
	mat flow_arma = auxfux.convert2Arma(flow);
	cout <<"flow is finite: " << flow_arma.is_finite() << endl;
	getchar();
	
      }
      
      mean_current_frame += features_one_pixel;
      num_vec++;
    }
  }
  
  return mean_current_frame/num_vec;
}







inline	
void
clustering_means::showing_summary(uvec vec_summ)
{
  // Plays back, in a single OpenCV window, the ni consecutive frames of every
  // segment listed in vec_summ (segment indices are zero-based).
  cv::namedWindow("Image", CV_WINDOW_AUTOSIZE );
  cv::Mat show_frame;
  cout << "ni: " << ni << endl;
  
  for (uword i = 0; i < vec_summ.n_elem; ++i )
  {
    // Map the zero-based segment index to its first frame number.  The
    // original arithmetic (v - 1 + 1)*ni + 1 + 1 collapses to v*ni + 2.
    double ini_frame = vec_summ(i)*ni + 2;
    cout << "ini_frame : " << ini_frame << endl;
    cout << "vec_summ(i) " << vec_summ(i) + 1 << endl;
    
    for (int j = 0; j < ni; ++j )
    {
      std::stringstream tmp_name;
      tmp_name << video_path << "/"<<frames_list(ini_frame + j - 1, 0);
      
      show_frame = cv::imread(tmp_name.str(), CV_LOAD_IMAGE_GRAYSCALE);
      resize(show_frame, show_frame, cv::Size(240,160 ) );
      
      cv::imshow("Image", show_frame);
      cv::moveWindow("Image", 10, 50);
      cv::waitKey(80); // ~12.5 fps playback
    }
  }
}


inline
void
clustering_means::show_partitions()
{
  // Loads precomputed partitions (one per column) together with their valid
  // entry counts, prints each partition, and plays it back after a keypress.
  umat part_today;
  part_today.load("part_Dec17.dat");
  
  uvec counts_today;
  counts_today.load("counts_Dec17.dat");
  
  for (uword i = 0; i < part_today.n_cols; ++i)
  {
    // Truncate the column to its valid length -- columns are presumably
    // zero-padded to a common height; confirm against the writer of
    // part_Dec17.dat.
    uvec tmp_part = part_today.col(i);
    tmp_part.resize(counts_today(i));
    
    cout << "Partition " << i << endl;
    tmp_part.t().print("tmp_part");
    getchar(); // wait for a keypress before playing this partition
    showing_summary(tmp_part);
  }
}

