#include <stdio.h>
#include <opencv2/opencv.hpp>
#include <fstream>
#include <iostream>
#include <armadillo>
#include <iomanip>
#include <vector>

using namespace std;
using namespace arma;

#include "optflow_feat_def.hpp"
#include "optflow_feat_impl.hpp"



// Frame dimensions handed to opt_feat below -- presumably the optical-flow
// working resolution in pixels (rows x cols); confirm against optflow_feat_impl.hpp.
uword ro = 120;  // rows (height)
uword co = 160;  // cols (width)


///Run this once, then load



/// Generate a random permutation of the person/video indices 1..25 for the
/// given experiment run, persist it to
///   ./run<run>/rand_selection_run<run>.dat   (raw ASCII),
/// and return it.
///
/// NOTE(review): the ./run<run> directory must already exist; Armadillo's
/// save() does not create it.
///
/// @param run  experiment run number used to build the output path
/// @return     shuffled uvec containing the indices 1..25
uvec
rand_split(int run)
{
  // Seed from a random source so each invocation yields a fresh permutation.
  arma_rng::set_seed_random();

  uvec v1 = linspace<uvec>(1, 25, 25);
  uvec v2 = shuffle(v1);

  //v2.t().print("v2:");
  std::stringstream run_pos;
  run_pos << "./run" << run << "/rand_selection_run" << run << ".dat";

  // Fix: save() returns false on failure (e.g. missing ./run<run> directory);
  // the original discarded the result, silently losing the split on disk.
  if (!v2.save(run_pos.str(), raw_ascii))
  {
    std::cerr << "rand_split(): failed to save " << run_pos.str() << std::endl;
  }

  return v2;
}





// Dataset locations. Exactly one pair of (single_path, multi_path) is active
// at a time; the commented pairs below are alternates for other machines.
// single_path: KTH single-action videos; multi_path: stitched multi-action set.

//home
const std::string single_path = "/media/sdb1/codes/multi-action/kth_single_action/"; 
const std::string multi_path = "/media/sdb1/codes/multi-action/kth_multi_allVideos/stitched_dataset_run1/";


//NICTA
//const std::string single_path = "/home/johanna/codes/multi-actions/kth_single_action/"; 
//const std::string multi_path = "/home/johanna/codes/multi-actions/kth_multi_allVideos/stitched_dataset_run1/"; 

//UQ @ NICTA
//const std::string single_path = "/home/johanna-uq/codes-svn/multi-actions/kth_single_action/"; 
//const std::string multi_path = "/home/johanna-uq/codes-svn/multi-actions/kth_multi_allVideos/stitched_dataset_run1/";


// List of action class names, resolved relative to the paths above by opt_feat.
const std::string  actionNames = "actionNames.txt";



/// Driver: loads the train/test person split for a fixed run, builds the
/// opt_feat pipeline object, and computes optical-flow features for the
/// single video given on the command line. The commented-out sections below
/// are alternate stages of the same pipeline (GMM training, multi-action
/// testing) that are enabled by hand.
///
/// usage: <prog> <video path>
int
main(int argc, char** argv)
{
  // Fix: validate the command line FIRST. The original performed the
  // split load and constructed opt_feat before checking argc, doing
  // needless work on a bad invocation.
  if(argc < 2)
  {
    cout << "usage: " << argv[0] << " video path" << endl;
    return -1;
  }

  int run = 3;        // experiment run; selects which saved split to load
  int N_cent = 1024;  // number of GMM centroids (used by the disabled stages below)

  //To generate the data split (training and testing), run once:
  //uvec rand_videos = rand_split(run);

  //If already generated, load the file:
  uvec rand_videos;
  std::stringstream run_pos;
  run_pos << "./run" << run << "/rand_selection_run"<< run << ".dat";

  // Fix: load() returns false on failure; the original ignored it, and a
  // missing file would leave rand_videos empty, aborting in subvec() below.
  if( !rand_videos.load( run_pos.str() ) || rand_videos.n_elem < 25 )
  {
    cout << "could not load a valid split from " << run_pos.str()
         << "; generate it once with rand_split(run)" << endl;
    return -1;
  }

  //rand_videos.t().print("rand_pos");
  //getchar();

  // 16 people for training, 9 for testing (25 total).
  uvec peo_train = rand_videos.subvec (0,15);
  uvec peo_test  = rand_videos.subvec (16,24);

  opt_feat kth_optflow(single_path, multi_path, actionNames, co, ro, peo_train, peo_test, run);

  std::string video_name = argv[1];
  cout << "Doing for video " << video_name << endl;
  kth_optflow.feature_video(video_name);

  //run just once
  //kth_optflow.create_data_list();

  /*
   if(argc < 2)
  {
    cout << "usage: " << argv[0] << " scenario: 1,2,3 or 4 ??" << endl;
    return -1;
  }
  
  int sc = atoi(argv[1]);
  cout << "Doing for scenario " << sc << endl;
  //getchar();
  
  
  kth_optflow.features_per_action_training(sc);
  kth_optflow.create_gmm_action(N_cent, sc);
  
  
  //for (uword i=1; i<=4; ++i)
 // {
 //  kth_optflow.create_gmm_action(N_cent, i);
 // }
  
  */

  ///Testing Multi-Action

  //Calculating features for multi-videos:
  //kth_optflow.feature_multi_action( );

  /*
    if(argc < 3)
  {
    cout << "usage: " << argv[0] << " person_list.txt" << " starting scene"<< endl;
    return -1;
  }
  
  std::string person_list = argv[1];
  cout << "person_list: " << person_list << endl;

  int sc = atoi(argv[2]);;
  kth_optflow.gmm_multi_action( N_cent, person_list, sc );
*/

  return 0;
}



  /*To Calculate and Visualise features for ONE video
  if(argc < 2)
  {
    cout << "usage: " << argv[0] << " video path" << endl;
    return -1;
  }
  
  
  std::string video_name = argv[1];
  cout << "Doing for video " << video_name << endl;
  kth_optflow.feature_video(video_name);
  
  //getchar()
  */


