//      blf.cc
//      
//      Copyright 2012 tqlong <tqlong@espada>
//      
//      This program is free software; you can redistribute it and/or modify
//      it under the terms of the GNU General Public License as published by
//      the Free Software Foundation; either version 2 of the License, or
//      (at your option) any later version.
//      
//      This program is distributed in the hope that it will be useful,
//      but WITHOUT ANY WARRANTY; without even the implied warranty of
//      MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//      GNU General Public License for more details.
//      
//      You should have received a copy of the GNU General Public License
//      along with this program; if not, write to the Free Software
//      Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
//      MA 02110-1301, USA.

#include "blf.h"

#include <cmath>

// ================  LinearRegression  ====================
// Least-squares linear model over `data`; weights stay zero until solve().
LinearRegression::LinearRegression(MatrixData& data, boost::program_options::variables_map& vm) 
  : data_(data), vm_(vm) 
{
  w_.zeros(data_.dim());
}

// Fits the weights with BFGS. `use_bias` is accepted for interface parity
// with the other models but is not used here. Earlier experiments with
// arma::solve and gradient_descent were removed.
void LinearRegression::solve(bool use_bias) 
{
  w_.zeros(data_.dim());
  bfgs<LinearRegression>(vm_, *this);

  // Optionally dump the learned weights for debugging.
  if (vm_.count("debug"))
    print(vm_["debug"].as<std::string>());
}

// Returns a copy of the current weight vector.
LinearRegression::Param LinearRegression::get_param() const 
{
  return w_;
}

// Replaces the weight vector (used by the optimizer between iterations).
void LinearRegression::set_param(const Param& w)
{
  w_ = w;
}

// Predicted responses for the test set: X * w.
LinearRegression::Output LinearRegression::output(LinearRegression::Data& test) const
{
  return test.data()*w_;
}

// Writes the weight vector to `outputFile` as one space-separated line.
void LinearRegression::print(const std::string& outputFile) const
{
  FileAccessWrite(out, outputFile);
  int idx = 0;
  while (idx < sz(w_)) {
    out << w_(idx) << " ";
    ++idx;
  }
  out << ENDL;
  out.close();
}

// Emits accuracy, sensitivity, specificity and MSE, each followed by a space.
void LinearRegression::print_stats(std::ostream& output, const ClassPerf& cp)
{
  output << cp.accuracy() << " ";
  output << cp.sensitivity() << " ";
  output << cp.specificity() << " ";
  output << cp.mse() << " ";
}

// Mean-squared-error objective: 0.5/n * ||y - X w||^2.
double LinearRegression::fval(const Param& w)
{
  const double resid_norm = arma::norm(data_.label() - data_.data()*w, 2);
  return 0.5 / sz(data_) * sqr(resid_norm);
}

void LinearRegression::gradient(const Param& w, Param& g)
{
  g = -arma::trans(data_.data()) * (data_.label() - data_.data()*w) / sz(data_);
}

// Diagonal inverse-Hessian approximation for BFGS: h_j = n / <f_j, f_j>,
// where f_j is the j-th feature column. `w` is unused.
void LinearRegression::inverse_hessian(const Param& w, Param& h)
{
  h = arma::zeros<arma::vec>(data_.dim());
  const double n = (double)data_.size();
  for (int j = 0; j < sz(h); ++j)
    h(j) = n / arma::dot(data_.f(j), data_.f(j));
}

// ================  LogisticRegression  ====================
// L2-regularized logistic model over `data`; weights stay zero until solve().
LogisticRegression::LogisticRegression(MatrixData& data, boost::program_options::variables_map& vm) 
  : data_(data), vm_(vm) 
{
  w_.zeros(data_.dim());
}

// Fits the weights with BFGS. `use_bias` marks the last weight component as
// an unregularized bias; C (default 100) trades data fit against the L2
// penalty. Earlier solver experiments were removed.
void LogisticRegression::solve(bool use_bias) 
{
  w_.zeros(data_.dim());
  use_bias_ = use_bias;
  C_ = vm_.count("C") ? vm_["C"].as<double>() : 100;

  bfgs<LogisticRegression>(vm_, *this);

  // Optionally dump the learned weights for debugging.
  if (vm_.count("debug"))
    print(vm_["debug"].as<std::string>());
}

// Returns a copy of the current weight vector.
LogisticRegression::Param LogisticRegression::get_param() const 
{
  return w_;
}

// Replaces the weight vector (used by the optimizer between iterations).
void LogisticRegression::set_param(const Param& w)
{
  w_ = w;
}

// Per-row probabilities sigmoid(x_i . w) = 1 / (1 + exp(-x_i . w)).
// (The parameter type LinearRegression::Data matches the declaration in blf.h.)
LogisticRegression::Output LogisticRegression::output(LinearRegression::Data& test) const
{
  Output score = -test.data()*w_;
  for (int i = 0; i < sz(score); ++i) {
    score(i) = 1.0 / (1 + exp(score(i)));
  }
  return score;
}

// Writes the weight vector to `outputFile` as one space-separated line.
void LogisticRegression::print(const std::string& outputFile) const
{
  FileAccessWrite(out, outputFile);
  int idx = 0;
  while (idx < sz(w_)) {
    out << w_(idx) << " ";
    ++idx;
  }
  out << ENDL;
  out.close();
}

// Emits accuracy, sensitivity, specificity and MSE, each followed by a space.
void LogisticRegression::print_stats(std::ostream& output, const ClassPerf& cp)
{
  output << cp.accuracy() << " ";
  output << cp.sensitivity() << " ";
  output << cp.specificity() << " ";
  output << cp.mse() << " ";
}

// Regularized logistic loss:
//   C/n * sum_i log(1 + exp(-y_i x_i.w)) + 0.5 * ||w||^2,
// with the bias (last component, when use_bias_ is set) left unpenalized.
double LogisticRegression::fval(const Param& w)
{
  arma::vec p = -data_.label() % (data_.data() * w);
  double f = 0;
  for (int i = 0; i < sz(p); i++) {
    // Numerically stable log(1+exp(p)): the naive form overflows to inf for
    // large p. Use log(1+exp(p)) = p + log1p(exp(-p)) when p > 0.
    f += (p(i) > 0) ? p(i) + log1p(exp(-p(i))) : log1p(exp(p(i)));
  }
  // L2 penalty over the regularized dimensions only.
  double n_w = 0;
  for (int i = 0; i < sz(w)-(use_bias_?1:0); i++)
    n_w += sqr(w(i));
  return C_ / sz(data_) * f + 0.5 * n_w;
}

// Gradient of fval: C/n * X' * (-y % sigmoid(-y % Xw)) plus the penalty term.
void LogisticRegression::gradient(const Param& w, Param& g)
{
  arma::vec p = -data_.label() % (data_.data() * w);
  for (int i = 0; i < sz(p); i++) {
    // -y_i * exp(m)/(1+exp(m)) rewritten as -y_i / (1+exp(-m)): the old form
    // produced inf/inf = NaN once exp(m) overflowed for large margins.
    p(i) = -data_.y(i) / (1.0 + exp(-p(i)));
  }
  g = C_ / sz(data_) * arma::trans(data_.data()) * p;

  // d(0.5*w_i^2)/dw_i = w_i, not 1: the previous "g(i) += 1.0" did not match
  // fval's 0.5*||w||^2 penalty. Bias (last dim) stays unregularized.
  for (int i = 0; i < sz(w)-(use_bias_?1:0); i++)
    g(i) += w(i);
}

void LogisticRegression::inverse_hessian(const Param& w, Param& h)
{
  h = arma::ones<arma::vec>(data_.dim());
	
  //h = arma::zeros<arma::vec>(data_.dim());
  //for (int i = 0; i < sz(h); i++) {
  //	h(i) = (double)data_.size() / arma::dot(data_.f(i), data_.f(i));
  //}
}

// ================  L2SVM  ====================
// L2-regularized squared-hinge SVM over `data`; weights start at zero.
L2SVM::L2SVM(MatrixData& data, boost::program_options::variables_map& vm) 
  : data_(data), vm_(vm) 
{
  w_.zeros(data_.dim());
}

// Fits the weights with BFGS. `use_bias` marks the last weight component as
// an unregularized bias; C (default 100) trades data fit against the L2
// penalty. Earlier solver experiments were removed.
void L2SVM::solve(bool use_bias) 
{
  w_.zeros(data_.dim());
  use_bias_ = use_bias;
  C_ = vm_.count("C") ? vm_["C"].as<double>() : 100;

  bfgs<L2SVM>(vm_, *this);

  // Optionally dump the learned weights for debugging.
  if (vm_.count("debug"))
    print(vm_["debug"].as<std::string>());
}

// Returns a copy of the current weight vector.
L2SVM::Param L2SVM::get_param() const 
{
  return w_;
}

// Replaces the weight vector (used by the optimizer between iterations).
void L2SVM::set_param(const Param& w)
{
  w_ = w;
}

// Decision values for the test set: X * w (sign gives the predicted class).
// The parameter type LinearRegression::Data matches the declaration in blf.h.
L2SVM::Output L2SVM::output(LinearRegression::Data& test) const
{
  return test.data()*w_;
}

// Writes the weight vector to `outputFile` as one space-separated line.
void L2SVM::print(const std::string& outputFile) const
{
  FileAccessWrite(out, outputFile);
  int idx = 0;
  while (idx < sz(w_)) {
    out << w_(idx) << " ";
    ++idx;
  }
  out << ENDL;
  out.close();
}

// Emits accuracy, sensitivity, specificity and MSE, each followed by a space.
void L2SVM::print_stats(std::ostream& output, const ClassPerf& cp)
{
  output << cp.accuracy() << " ";
  output << cp.sensitivity() << " ";
  output << cp.specificity() << " ";
  output << cp.mse() << " ";
}

double L2SVM::fval(const Param& w)
{
  arma::vec p = 1 - (data_.label() % (data_.data() * w));	
  for (int i = 0; i < sz(p); i++)
    if (p(i) < 0) p(i) = 0;
  double f = arma::dot(p, p);
	
  double n_w = 0;
  for (int i = 0; i < sz(w)-(use_bias_?1:0); i++)
    n_w += sqr(w(i));

  return C_ / sz(data_) * f + 0.5 * n_w;
}

// Gradient of fval, where p_i = max(0, 1 - y_i x_i.w):
//   d/dw [C/n * sum p_i^2 + 0.5*||w||^2] = 2C/n * X'(-y % p) + w (reg. dims).
void L2SVM::gradient(const Param& w, Param& g)
{
  arma::vec p = 1 - (data_.label() % (data_.data() * w));
  for (int i = 0; i < sz(p); i++)
    if (p(i) < 0) p(i) = 0;

  // Factor 2 comes from differentiating the squared hinge; it was missing,
  // making this gradient inconsistent with fval.
  g = 2.0 * C_ / sz(data_) * arma::trans(data_.data()) * (-data_.label() % p);

  // d(0.5*w_i^2)/dw_i = w_i, not 1: the previous "g(i) += 1.0" did not match
  // fval's penalty. Bias (last dim) stays unregularized.
  for (int i = 0; i < sz(w)-(use_bias_?1:0); i++)
    g(i) += w(i);
}

void L2SVM::inverse_hessian(const Param& w, Param& h)
{
  h = arma::ones<arma::vec>(data_.dim());
	
  //h = arma::zeros<arma::vec>(data_.dim());
  //for (int i = 0; i < sz(h); i++) {
  //	h(i) = (double)data_.size() / arma::dot(data_.f(i), data_.f(i));
  //}	
}

// ================  L1SVM  ====================
// Smoothed-hinge (L1) SVM over `data`; weights start at zero.
L1SVM::L1SVM(MatrixData& data, boost::program_options::variables_map& vm) 
  : data_(data), vm_(vm) 
{
  w_.zeros(data_.dim());
}

// Fits the weights with BFGS. `use_bias` marks the last weight component as
// an unregularized bias; C (default 100) trades data fit against the L2
// penalty and mu (default 0.1) is the hinge smoothing width. Earlier solver
// experiments were removed.
void L1SVM::solve(bool use_bias) 
{
  w_.zeros(data_.dim());
  use_bias_ = use_bias;
  C_ = vm_.count("C") ? vm_["C"].as<double>() : 100;
  mu_ = vm_.count("mu") ? vm_["mu"].as<double>() : 0.1;

  bfgs<L1SVM>(vm_, *this);

  // Optionally dump the learned weights for debugging.
  if (vm_.count("debug"))
    print(vm_["debug"].as<std::string>());
}

// Returns a copy of the current weight vector.
L1SVM::Param L1SVM::get_param() const 
{
  return w_;
}

// Replaces the weight vector (used by the optimizer between iterations).
void L1SVM::set_param(const Param& w)
{
  w_ = w;
}

// Decision values for the test set: X * w (sign gives the predicted class).
L1SVM::Output L1SVM::output(Data& test) const
{
  return test.data()*w_;
}

// Writes the weight vector to `outputFile` as one space-separated line.
void L1SVM::print(const std::string& outputFile) const
{
  FileAccessWrite(out, outputFile);
  int idx = 0;
  while (idx < sz(w_)) {
    out << w_(idx) << " ";
    ++idx;
  }
  out << ENDL;
  out.close();
}

// Emits accuracy, sensitivity, specificity and MSE, each followed by a space.
void L1SVM::print_stats(std::ostream& output, const ClassPerf& cp)
{
  output << cp.accuracy() << " ";
  output << cp.sensitivity() << " ";
  output << cp.specificity() << " ";
  output << cp.mse() << " ";
}

// Huber-smoothed hinge objective with smoothing width mu_. With z_i =
// 1 - y_i x_i.w and p_i = z_i/mu, each sample contributes
//   z <= 0      : 0
//   0 < z <= mu : z^2 / (2 mu)
//   z > mu      : z - mu/2   (linear tail)
// plus 0.5*||w||^2 (bias, when use_bias_ is set, excluded).
double L1SVM::fval(const Param& w)
{
  arma::vec p = (1 - (data_.label() % (data_.data() * w))) / mu_;	
  double f = 0;
  for (int i = 0; i < sz(p); i++)
    if (p(i) < 0) {
      p(i) = 0;			
    }
    else if (p(i) > 1) {
      // BUG FIX: accumulate z - mu/2 = p*mu - mu/2 using the ORIGINAL p(i);
      // the old code clamped p(i)=1 first, collapsing the linear tail to a
      // constant mu/2 regardless of how badly the margin was violated.
      f += p(i)*mu_ - 0.5*mu_;
      p(i) = 1;
    }
    else {
      f += 0.5*sqr(p(i))*mu_;
    }
	
  // L2 penalty over the regularized dimensions only.
  double n_w = 0;
  for (int i = 0; i < sz(w)-(use_bias_?1:0); i++)
    n_w += sqr(w(i));

  return C_ / sz(data_) * f + 0.5 * n_w;
}

// Gradient of the smoothed hinge in fval: each sample contributes
// -y_i x_i scaled by clamp(z_i/mu, 0, 1), plus the penalty term.
void L1SVM::gradient(const Param& w, Param& g)
{
  arma::vec p = (1 - (data_.label() % (data_.data() * w))) / mu_;	
  for (int i = 0; i < sz(p); i++)
    if (p(i) < 0) p(i) = 0;
    else if (p(i) > 1) p(i) = 1;

  g = C_ / sz(data_) * arma::trans(data_.data()) * (-data_.label() % p);
	
  // d(0.5*w_i^2)/dw_i = w_i, not 1: the previous "g(i) += 1.0" did not match
  // fval's 0.5*||w||^2 penalty. Bias (last dim) stays unregularized.
  for (int i = 0; i < sz(w)-(use_bias_?1:0); i++)
    g(i) += w(i);
}

void L1SVM::inverse_hessian(const Param& w, Param& h)
{
  h = arma::ones<arma::vec>(data_.dim());
	
  //h = arma::zeros<arma::vec>(data_.dim());
  //for (int i = 0; i < sz(h); i++) {
  //	h(i) = (double)data_.size() / arma::dot(data_.f(i), data_.f(i));
  //}	
}
