//      blf.h
//      
//      Copyright 2012 tqlong <tqlong@espada>
//      
//      This program is free software; you can redistribute it and/or modify
//      it under the terms of the GNU General Public License as published by
//      the Free Software Foundation; either version 2 of the License, or
//      (at your option) any later version.
//      
//      This program is distributed in the hope that it will be useful,
//      but WITHOUT ANY WARRANTY; without even the implied warranty of
//      MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//      GNU General Public License for more details.
//      
//      You should have received a copy of the GNU General Public License
//      along with this program; if not, write to the Free Software
//      Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
//      MA 02110-1301, USA.

#ifndef __BOOSTED_LATENT_FEATURE_H
#define __BOOSTED_LATENT_FEATURE_H

#include <set>
#include <boost/program_options.hpp>
#include "stl-helper.h"
#include "data.h"

/// Linear regression model over a MatrixData design matrix.
/// solve() fits the weight vector w_; output() yields raw real-valued
/// predictions. fval/gradient/inverse_hessian expose the objective to the
/// generic optimizers (gradient_descent, bfgs) declared below.
class LinearRegression
{
 public:
  typedef MatrixData Data;   // training/test data container
  typedef arma::vec Param;   // parameter (weight) vector
  typedef arma::vec Output;  // per-example prediction vector
 protected:
  Data& data_;  // training data; held by reference, must outlive the model
  boost::program_options::variables_map& vm_;  // command-line options
  Param w_;     // learned weights
 public:
  LinearRegression(MatrixData& data, boost::program_options::variables_map& vm) ;

  // Fit w_ on data_; use_bias presumably adds an intercept term — confirm in the .cpp.
  void solve(bool use_bias = true) ;
  // Raw predictions for every example in `test`.
  Output output(Data& test) const;
  // Decision threshold applied to output() when binarizing predictions.
  double threshold() const { return 0.0; }

  Param get_param() const ;
  void set_param(const Param& w);

  // for optimization: objective value, gradient, and inverse-Hessian
  // approximation at w (h is a vector — used elementwise by the L-BFGS
  // two-loop recursion below, i.e. treated as a diagonal).
  double fval(const Param& w);
  void gradient(const Param& w, Param& g);
  void inverse_hessian(const Param& w, Param& h);

  // Write the fitted model to outputFile.
  void print(const std::string& outputFile) const;

  static void print_stats(std::ostream& output, const ClassPerf& cp);
};

/// Logistic regression classifier over a MatrixData design matrix.
/// Mirrors LinearRegression's interface so the template helpers below can
/// be instantiated with either model.
class LogisticRegression
{
 public:
  typedef MatrixData Data;   // training/test data container
  typedef arma::vec Param;   // parameter (weight) vector
  typedef arma::vec Output;  // per-example prediction vector
 protected:
  Data& data_;  // training data; held by reference, must outlive the model
  boost::program_options::variables_map& vm_;  // command-line options
  Param w_;        // learned weights
  bool use_bias_;  // whether solve() was asked to include a bias term
  double C_;       // presumably the regularization strength — confirm in the .cpp
 public:
  LogisticRegression(MatrixData& data, boost::program_options::variables_map& vm) ;

  // Fit w_ on data_; use_bias presumably adds an intercept term — confirm in the .cpp.
  void solve(bool use_bias = true) ;
  // Raw predictions for every example in `test`.
  Output output(Data& test) const;
  // Decision threshold applied to output() when binarizing (0.5 suggests
  // output() returns probabilities).
  double threshold() const { return 0.5; }

  Param get_param() const ;
  void set_param(const Param& w);

  // for optimization: objective value, gradient, and inverse-Hessian
  // approximation at w — consumed by gradient_descent()/bfgs() below.
  double fval(const Param& w);
  void gradient(const Param& w, Param& g);
  void inverse_hessian(const Param& w, Param& h);

  // Write the fitted model to outputFile.
  void print(const std::string& outputFile) const;

  static void print_stats(std::ostream& output, const ClassPerf& cp);
};

/// L2-loss SVM classifier over a MatrixData design matrix.
/// Same interface shape as LinearRegression/LogisticRegression so the
/// template helpers below work on any of the four models.
class L2SVM
{
 public:
  typedef MatrixData Data;   // training/test data container
  typedef arma::vec Param;   // parameter (weight) vector
  typedef arma::vec Output;  // per-example prediction vector
 protected:
  Data& data_;  // training data; held by reference, must outlive the model
  boost::program_options::variables_map& vm_;  // command-line options
  Param w_;        // learned weights
  bool use_bias_;  // whether solve() was asked to include a bias term
  double C_;       // presumably the SVM cost parameter — confirm in the .cpp
 public:
  L2SVM(MatrixData& data, boost::program_options::variables_map& vm) ;

  // Fit w_ on data_; use_bias presumably adds an intercept term — confirm in the .cpp.
  void solve(bool use_bias = true) ;
  // Raw decision values for every example in `test`.
  Output output(Data& test) const;
  // Decision threshold applied to output() when binarizing predictions.
  double threshold() const { return 0.0; }

  Param get_param() const ;
  void set_param(const Param& w);

  // for optimization: objective value, gradient, and inverse-Hessian
  // approximation at w — consumed by gradient_descent()/bfgs() below.
  double fval(const Param& w);
  void gradient(const Param& w, Param& g);
  void inverse_hessian(const Param& w, Param& h);

  // Write the fitted model to outputFile.
  void print(const std::string& outputFile) const;

  static void print_stats(std::ostream& output, const ClassPerf& cp);
};

/// L1-loss SVM classifier over a MatrixData design matrix.
/// Same interface shape as the other models above; adds mu_, presumably a
/// smoothing parameter for the non-differentiable hinge loss — confirm in
/// the .cpp (fval/gradient must be differentiable for bfgs() below).
class L1SVM
{
 public:
  typedef MatrixData Data;   // training/test data container
  typedef arma::vec Param;   // parameter (weight) vector
  typedef arma::vec Output;  // per-example prediction vector
 protected:
  Data& data_;  // training data; held by reference, must outlive the model
  boost::program_options::variables_map& vm_;  // command-line options
  Param w_;        // learned weights
  bool use_bias_;  // whether solve() was asked to include a bias term
  double C_;       // presumably the SVM cost parameter — confirm in the .cpp
  double mu_;      // presumably a loss-smoothing parameter — confirm in the .cpp
 public:
  L1SVM(MatrixData& data, boost::program_options::variables_map& vm) ;

  // Fit w_ on data_; use_bias presumably adds an intercept term — confirm in the .cpp.
  void solve(bool use_bias = true) ;
  // Raw decision values for every example in `test`.
  Output output(Data& test) const;
  // Decision threshold applied to output() when binarizing predictions.
  double threshold() const { return 0.0; }

  Param get_param() const ;
  void set_param(const Param& w);

  // for optimization: objective value, gradient, and inverse-Hessian
  // approximation at w — consumed by gradient_descent()/bfgs() below.
  double fval(const Param& w);
  void gradient(const Param& w, Param& g);
  void inverse_hessian(const Param& w, Param& h);

  // Write the fitted model to outputFile.
  void print(const std::string& outputFile) const;

  static void print_stats(std::ostream& output, const ClassPerf& cp);
};

// Fit a Model on `train`, score `test`, and return both the raw scores
// (`out`) and the thresholded +/-1 class labels (`class_out`).
template<typename Model>
void train_test(boost::program_options::variables_map& vm, 
		typename Model::Data& train, typename Model::Data& test, 
		typename Model::Output& out, typename Model::Output& class_out)
{
  Model model(train, vm);
  model.solve();

  out = model.output(test);
  class_out = out;

  const double cut = model.threshold();
  const int n = sz(out);
  for (int idx = 0; idx < n; ++idx) {
    if (out(idx) < cut)
      class_out(idx) = -1;
    else
      class_out(idx) = 1;
  }
}

// Fit a Model on `train` and persist the fitted model to `outputFile`.
template<typename Model>
void train_save(boost::program_options::variables_map& vm, 
		typename Model::Data& train, const std::string& outputFile)
{
  Model fitted(train, vm);
  fitted.solve();
  fitted.print(outputFile);
}

// Gradient descent with a halving backtracking line search.
// Options read from vm: "optim_iter" (max iterations, default 10000) and
// "optim_tol" (step/gradient tolerance, default 1e-16).  Terminates when
// either the line search fails (step below tol) or the gradient norm is
// below tol, then stores the best parameters back into the model.
template<typename Model>
void gradient_descent(boost::program_options::variables_map& vm, Model& m)
{
  typedef typename Model::Param Param;

  Param point = m.get_param();
  double best_f = m.fval(point);

  const int max_iter = vm.count("optim_iter") ? vm["optim_iter"].as<int>() : 10000;
  const double tol = vm.count("optim_tol") ? vm["optim_tol"].as<double>() : 1e-16;

  for (int iter = 0; iter < max_iter; iter++) {
    Param grad;
    m.gradient(point, grad);

    // Backtracking: halve the step until the objective strictly decreases.
    double step = 1;
    while (step >= tol) {
      Param candidate = point - step * grad;
      double cand_f = m.fval(candidate);
      if (cand_f < best_f) {
	best_f = cand_f;
	point = candidate;
	break;
      }
      step *= 0.5;
    }

    MSG_VERBOSE(vm, "check " << iter << " " << step << " " << arma::norm(grad, 2) << " f=" << best_f);
    if (step < tol || arma::norm(grad, 2) < tol) break;
  }
  m.set_param(point);
}

// L-BFGS two-loop recursion: computes the search direction d from the
// gradient g and the stored memory of parameter steps s[i], gradient
// differences y[i], and curvatures sigma[i] (callers store
// sigma[i] = dot(y[i], s[i])).  The vectors are circular buffers;
// mem_idx indexes the most recent pair.  h0 is the initial inverse-Hessian
// approximation, applied elementwise (%) — i.e. treated as a diagonal.
template<typename Param>
void two_loop_recurse(const Param& g, int mem_idx, 
		      const std::vector<Param>& y, 
		      const std::vector<Param>& s, 
		      const std::vector<double>& sigma, 
		      const Param& h0, 
		      Param& d)
{
  Param q = g;
  std::vector<double> alpha = sigma;  // sized copy; values overwritten below
  int mem = sz(sigma), current = mem_idx;
  // First loop: walk backward from the newest pair to the oldest.
  for (int i = 0; i < mem; i++) {
    alpha[current] = arma::dot(s[current], q) / sigma[current];
    q = q - alpha[current] * y[current];
    current = (current > 0) ? current - 1 : mem-1;  // circular decrement
  }

  // Scale by the initial diagonal inverse-Hessian.
  d = h0 % q;

  // Step forward once so `current` points at the oldest pair, then walk
  // forward from oldest to newest.
  current = (current < mem-1) ? current + 1 : 0;
  for (int i = 0; i < mem; i++) {
    double beta = arma::dot(y[current], d) / sigma[current];
    d = d + (alpha[current] - beta) * s[current];
    current = (current < mem-1) ? current + 1 : 0;  // circular increment
  }
}

// Limited-memory BFGS with a halving backtracking line search.
// Options read from vm: "optim_iter" (max iterations, default 10000),
// "optim_tol" (tolerance, default 1e-16), "optim_mem" (memory size,
// default 10).  The first `mem` iterations run plain gradient descent to
// fill the (s, y, sigma) memory; afterwards the search direction comes
// from the L-BFGS two-loop recursion.  Best parameters are stored back
// into the model via set_param().
//
// Fix vs. the original: the main loop stored the curvature of the freshly
// overwritten memory slot as arma::dot(y.back(), s.back()), which reads
// the *last* buffer element rather than slot mem_idx — correct only when
// mem_idx happens to equal mem-1 (one iteration in every mem).  The stale
// curvature then mis-scales two_loop_recurse().  We now use the slot that
// was actually updated.
template<typename Model>
void bfgs(boost::program_options::variables_map& vm, Model& m)
{
  typedef typename Model::Param Param;
  Param x = m.get_param(), g, prev_x, prev_g;
  double f = m.fval(x);
  m.gradient(x, g);
  prev_x = x;
  prev_g = g;

  int max_iter = vm.count("optim_iter") ? vm["optim_iter"].as<int>() : 10000;
  double tol = vm.count("optim_tol") ? vm["optim_tol"].as<double>() : 1e-16;
  int mem = vm.count("optim_mem") ? vm["optim_mem"].as<int>() : 10;

  std::vector<Param> y, s;    // circular buffers: gradient diffs / parameter steps
  std::vector<double> sigma;  // curvatures sigma[i] = dot(y[i], s[i])

  // Gradient descent for the first `mem` iterations to fill the memory.
  bool stop = false;
  int iter = 0;
  for (iter = 0; iter < mem; iter++) {
    // Backtracking line search along -g.
    double alpha = 1;
    for (; alpha >= tol; alpha *= 0.5) {
      Param xa = x - alpha * g;
      double fa = m.fval(xa);
      if (fa < f) {
	f = fa;
	x = xa;
	break;
      }
    }

    m.gradient(x, g);
    s.pb(x - prev_x);
    y.pb(g - prev_g);
    sigma.pb(arma::dot(y.back(), s.back()));  // .back() is the pair just pushed
    prev_x = x;
    prev_g = g;

    MSG_VERBOSE(vm, "check iter=" << iter << " " << alpha << " " << arma::norm(g, 2) << " " << fabs(sigma.back()) << " f=" << f);
    if (alpha < tol || arma::norm(g, 2) < tol || sqrt(fabs(sigma.back())) < tol) stop = true;
  }

  if (stop) {
    m.set_param(x);
    //MSG("break before BFGS iter = " << iter);
    return;
  }

  // BFGS main loop: each iteration overwrites the oldest memory slot.
  int mem_idx = mem - 1;  // most recent pair; first increment wraps to 0 (oldest)
  for (iter = mem; iter < max_iter && !stop; iter++) {
    Param d, h0;  // was arma::vec — use the template's own Param type
    m.inverse_hessian(x, h0);
    two_loop_recurse<Param>(g, mem_idx, y, s, sigma, h0, d);

    //if (arma::dot(g, d) < 0) d = -d;
    // Backtracking line search along -d.
    double alpha = 1;
    for (; alpha >= tol; alpha *= 0.5) {
      Param xa = x - alpha * d;
      double fa = m.fval(xa);
      if (fa < f) {
	f = fa;
	x = xa;
	break;
      }
    }

    // step too small, try gradient descent
    if (alpha < tol) {
      alpha = 1;
      for (; alpha >= tol; alpha *= 0.5) {
	Param xa = x - alpha * g;
	double fa = m.fval(xa);
	if (fa < f) {
	  f = fa;
	  x = xa;
	  break;
	}
      }
    }

    m.gradient(x, g);
    mem_idx = (mem_idx+1)%mem;
    s[mem_idx] = x - prev_x;
    y[mem_idx] = g - prev_g;
    // BUG FIX: curvature of the slot just overwritten, not the last element.
    sigma[mem_idx] = arma::dot(y[mem_idx], s[mem_idx]);
    prev_x = x;
    prev_g = g;
    MSG_VERBOSE(vm, "check iter=" << iter << " " << alpha << " " << arma::norm(g, 2) << " " << fabs(sigma[mem_idx]) << " f=" << f);
    if (alpha < tol || arma::norm(g, 2) < tol || sqrt(fabs(sigma[mem_idx])) < tol) stop = true;
  }

  m.set_param(x);
}

#endif
