//      feature_main.cc
//      
//      Copyright 2012 tqlong <tqlong@espada>
//      
//      This program is free software; you can redistribute it and/or modify
//      it under the terms of the GNU General Public License as published by
//      the Free Software Foundation; either version 2 of the License, or
//      (at your option) any later version.
//      
//      This program is distributed in the hope that it will be useful,
//      but WITHOUT ANY WARRANTY; without even the implied warranty of
//      MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//      GNU General Public License for more details.
//      
//      You should have received a copy of the GNU General Public License
//      along with this program; if not, write to the Free Software
//      Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
//      MA 02110-1301, USA.

#include <iostream>
#include <boost/program_options.hpp>
#include <boost/date_time/posix_time/posix_time.hpp> 
#include <boost/accumulators/accumulators.hpp>
#include <boost/accumulators/statistics/stats.hpp>
#include <boost/accumulators/statistics/mean.hpp>
#include <boost/foreach.hpp> 
#include <map>

#include "stl-helper.h"
#include "data.h"

#include "mp.h"
#include "blf.h"
#include "svm.h"

// Command-line handling state shared by every job routine below.
namespace po = boost::program_options;
namespace boost_acc = boost::accumulators;
po::options_description desc("Allowed options");  // filled in process_options()
po::variables_map vm;                             // parsed option values (global, read by all jobs)

// Option parsing and help/exit helper.
void process_options(int argc, char** argv);
void usage(int exitcode = 0);

// Job routines; main() dispatches to exactly one of these based on which
// job-selector option (--read_data, --blf, ...) is present.
void read_data_test();
void prepare_test();
void mp_sqloss_test();
void mp_logloss_test();
void mp_sqhingeloss_test();
void mp_smoothhingeloss_test();
void scale_test();
void blf_test();

int main(int argc, char** argv)
{
  //MSG("Matching Pursuit for General Loss ... ");

  process_options(argc, argv);

  boost::posix_time::ptime time_start(boost::posix_time::microsec_clock::local_time());

  if (vm.count("help"))
    usage();
  else if (vm.count("read_data"))
    read_data_test();
  else if (vm.count("prepare_test"))
    prepare_test();
  else if (vm.count("mp_sqloss"))
    mp_sqloss_test();
  else if (vm.count("mp_logloss"))
    mp_logloss_test();
  else if (vm.count("mp_sqhingeloss"))
    mp_sqhingeloss_test();
  else if (vm.count("mp_smoothhingeloss"))
    mp_smoothhingeloss_test();
  else if (vm.count("scale"))
    scale_test();
  else if (vm.count("blf"))
    blf_test();
  else
    usage();

  boost::posix_time::ptime time_end(boost::posix_time::microsec_clock::local_time());
  boost::posix_time::time_duration duration(time_end - time_start);
  MSG("Duration = " << duration);
}

// Declare every command-line option and parse argv into the global `vm`.
//
// Option groups: generic I/O options, optimization parameters, model
// parameters, and mutually-exclusive job selectors (each job's help string
// documents the other options it requires). Run with --help to list them.
void process_options(int argc, char** argv)
{
  desc.add_options()
    ("help", "produce this usage")
    ("input", po::value<std::string>(), "input file")
    ("test", po::value<std::string>(), "test file")
    ("output", po::value<std::string>(), "output file")
    ("debug", po::value<std::string>(), "debug file")
    ("seed", "random seed (REQUIRED)")
    ("fold", po::value<int>(), "number of folds")
    // Fixed: closing paren was missing, and the listed names now match the
    // values blf_test actually accepts.
    ("loss", po::value<std::string>(), "loss function (sq, log, sqhinge, hinge, l1svm)")
    ("verbose", "print debug information")

    // OPTIMIZATION param
    ("optim_iter", po::value<double>(), "maximum number of optimization iterations")
    ("optim_tol", po::value<double>(), "optimization tolerance")
    ("optim_mem", po::value<int>(), "L-BFGS memory")
    // Fixed: description was a copy-paste duplicate of kernel_cache's.
    // NOTE(review): exact semantics live in the Kernel class — verify wording.
    ("kernel", po::value<std::string>(), "kernel specification")
    ("kernel_cache", po::value<int>(), "Kernel cache maximum number of entries")

    // PARAMETERS
    ("mu", po::value<double>(), "the approximation parameter of smooth hinge loss")
    ("C", po::value<double>(), "loss penalty of SVM")
    ("last_label", "label at end of line")

    //JOB TYPE
    ("read_data", "read data file\n--input=<DATA FILE> --output=<OUTPUT FILE> --fold=<# folds>")
    ("prepare_test", "prepare test indices\n--input=<DATA FILE> --output=<OUTPUT FILE> --fold=<# folds>")
    ("mp_sqloss", "matching pursuit with squared loss\n--input=<DATA FILE> --output=<OUTPUT FILE>")
    ("mp_logloss", "matching pursuit with logistic loss\n--input=<DATA FILE> --output=<OUTPUT FILE>")
    ("mp_sqhingeloss", "matching pursuit with squared hinge loss\n--input=<DATA FILE> --output=<OUTPUT FILE>")
    ("mp_smoothhingeloss", "matching pursuit with smoothed hinge loss\n--input=<DATA FILE> --output=<OUTPUT FILE> --mu=<APPROX PARAM>")
    ("scale", "normalized data before matching pursuit\n--input=<DATA FILE> --output=<OUTPUT FILE> --mu=<APPROX PARAM>")
    ("blf", "boosted latent features\n--loss=<LOSS FUNCTION> --input=<DATA FILE> --test=<TEST IDX FILE> --output=<OUTPUT FILE> --debug=<DEBUG FILE> --C=<LOSS PENALTY>")
    ;
  po::store(po::parse_command_line(argc, argv, desc), vm);
  po::notify(vm);
}

// Print the full option description to stdout and terminate the process.
//
// exitcode: process exit status (defaults to 0 via the declaration above).
void usage(int exitcode)
{
  std::cout << desc << "\n" << std::flush;
  exit(exitcode);
}

void read_data_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();

  MatrixData m;
  TextReader txt;

  /*
    txt.label_map_["L-1"] = -1;
    txt.label_map_["L1"] = 1;
    txt.label_map_["L1"] = 1;
    txt.delimiter_ = ":,";  
    txt.read(inputFile, m);
  */

  txt.label_map_["0"] = -1;
  txt.label_map_["1"] = 1;
  txt.delimiter_ = ":, \t\r\n";
  txt.read_last_label(inputFile, m);

  m.save(outputFile);
}

void mp_sqloss_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();

  MatrixData m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m);

  MP_SqLoss mp(m);
  if (vm.count("debug"))
    mp.debugFile_ = vm["debug"].as<std::string>();
  mp.solve();
  mp.print_result(outputFile);
}

void mp_logloss_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();

  MatrixData m, norm_m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m);

  arma::rowvec scale, offset;
  m.get_norm_scale(scale, offset);
  //MSG("scale=" << scale << " offset=" << offset);
  
  m.apply_norm_scale(scale, offset, norm_m);
  norm_m.get_norm_scale(scale, offset);  
  //MSG("new scale=" << scale << " offset=" << offset);

  
  MP_LogLoss mp(norm_m, vm);
  if (vm.count("debug"))
    mp.debugFile_ = vm["debug"].as<std::string>();
  mp.solve();
  mp.print_result(outputFile);

}

void mp_sqhingeloss_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();

  MatrixData m, norm_m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m);

  arma::rowvec scale, offset;
  m.get_norm_scale(scale, offset);
  //MSG("scale=" << scale << " offset=" << offset);
  
  m.apply_norm_scale(scale, offset, norm_m);
  
  MP_SqHingeLoss mp(norm_m, vm);
  if (vm.count("debug"))
    mp.debugFile_ = vm["debug"].as<std::string>();
  mp.solve();
  mp.print_result(outputFile);

}

void mp_smoothhingeloss_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();

  MatrixData m, norm_m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m);

  arma::rowvec scale, offset;
  m.get_norm_scale(scale, offset);
  //MSG("scale=" << scale << " offset=" << offset);
  
  m.apply_norm_scale(scale, offset, norm_m);
  
  MP_SmoothHingeLoss mp(norm_m, vm);
  if (vm.count("debug"))
    mp.debugFile_ = vm["debug"].as<std::string>();
  mp.solve();
  mp.print_result(outputFile);
}

// Cross-validated sweep over the smooth-hinge smoothing parameter mu.
// For each mu in {0.1, 1.1, ..., 10.1}, runs 10-fold CV comparing the greedy
// MP solver (solve) against the all-feature solver (solve_all_feature), and
// appends one result line per mu to --output:
//   mu  mean(#selected features)  accuracy  sensitivity  specificity  accuracy_all
void scale_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();

  MatrixData m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m);

  // One random row permutation shared across all mu values, so every mu is
  // evaluated on the identical fold split.
  vi_t rand = RAND::randperm(sz(m));
  int n_fold = 10;
  int n = sz(m);

  // mu grid: 0.1, then +1 steps, giving n_mu+1 = 11 values up to 10.1.
  int n_mu = 10;
  arma::vec mu = arma::zeros<arma::vec>(n_mu+1);
  mu(0) = 0.1;
  for (int i = 1; i <= n_mu; i++) mu(i) = mu(i-1) + 1;
  MSG("mu=" << mu);
  // Truncate the output file once up front; per-mu results are appended
  // below so partial results survive an interrupted sweep.
  FileAccessWrite(out, outputFile);
  out.close();
  for (int i = 0; i < sz(mu); i++) {
    MSG("-----------**** mu =  " << mu(i) << " ****------------------------------------");
    ClassPerf cp, cp_all;  // selected-feature model perf vs all-feature model perf

    // Accumulates the number of selected features per fold; mean reported.
    boost_acc::accumulator_set<double, boost_acc::stats<boost_acc::tag::mean> > acc;
    for (int f = 0; f < n_fold; f++) {
      MSG("----------- fold " << f << " ------------------------------------");
      // NOTE(review): integer division — the trailing n % n_fold rows are
      // never placed in any test fold (same convention as prepare_test).
      vi_t test_idx(rand.begin()+f*(n/n_fold), rand.begin()+(f+1)*(n/n_fold));
      MatrixData train, test;
      m.split_train_test(test_idx, train, test, true);
      
      // Greedy matching-pursuit model with the current mu.
      MP_SmoothHingeLoss mp(train, vm);
      mp.mu_ = mu(i);
      mp.solve();
      arma::vec class_out = mp.classify(test);
      cp.add(test.label(), class_out);
      //MSG("out=" << class_out << " label = " << test.label());
      acc(sz(mp.selected_feature()));
			
      // Baseline: same loss trained on all features (no feature selection).
      MP_SmoothHingeLoss mp_all(train, vm);
      mp_all.mu_ = mu(i);
      mp_all.solve_all_feature();
      class_out = mp_all.classify(test);
      cp_all.add(test.label(), class_out);
    }
    // Re-open in append mode and emit one summary line for this mu.
    FileAccessAppend(out, outputFile);
    out << mu(i) << " " << boost_acc::mean(acc) << " " 
	<< cp.accuracy() << " " << cp.sensitivity() 
	<< " " << cp.specificity() << " " << cp_all.accuracy() << ENDL;
    out.close();
  }
}

void prepare_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();
  int n_fold = vm["fold"].as<int>();

  MatrixData m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m);

  // seeding
  //if (vm.count("seed"));
  //	srand(rand());
  srand(time(NULL));
	
  vi_t rand = RAND::randperm(sz(m));
  int n = sz(m);
	
  for (int f = 0; f < n_fold; f++) {
    //MSG("----------- fold " << f << " ------------------------------------");
    vi_t test_idx(rand.begin()+f*(n/n_fold), rand.begin()+(f+1)*(n/n_fold));
    std::stringstream f_s;
    f_s << f;
    MatrixData::save_idx(outputFile+"."+f_s.str(), test_idx);
  }
}

void blf_test()
{
  std::string inputFile = vm["input"].as<std::string>();
  std::string outputFile = vm["output"].as<std::string>();
  std::string testFile = vm["test"].as<std::string>();
  std::string lossFunc = vm["loss"].as<std::string>();

  MatrixData m;
  TextReader txt;

  txt.default_label_map();
  txt.read(inputFile, m, true);  // read and add bias term

  vi_t test_idx;
  MatrixData::load_idx(testFile, test_idx);
  //MSG("test file=" << testFile << " test_size=" << sz(test_idx));
	
  MatrixData train, test;
  m.split_train_test(test_idx, train, test, true); // split and normalize
	
  ClassPerf cp;
  arma::vec out, class_out;
  if (lossFunc == "sq")
    train_test<LinearRegression>(vm, train, test, out, class_out);
  else if (lossFunc == "log")
    train_test<LogisticRegression>(vm, train, test, out, class_out);
  else if (lossFunc == "sqhinge")
    train_test<L2SVM>(vm, train, test, out, class_out);
  else if (lossFunc == "hinge")
    train_test<L1SVM>(vm, train, test, out, class_out);
  else if (lossFunc == "l1svm") {
    Kernel K(train, vm);
    KernelL1SVM m(train, vm, K);
    m.solve();
    class_out = out = m.output(test);
    for (int i = 0; i < sz(out); i++) class_out(i) = (out(i) < m.threshold()) ? -1 : 1;    
  }
  else
    FATAL_ERROR("check loss function lossFunc=" << lossFunc);
		
  cp.add(test.label(), class_out);
  //MSG(vm, "out="<<out << "class_out="<<class_out<<"truth="<<test.label());
	
  FileAccessWrite(output, outputFile);
  output << cp.accuracy() << " " << cp.sensitivity() << " " 
	 << cp.specificity() << " " << cp.mse() << " ";
  output.close();
}
