#include "helper/helper.h"
#include "helper/mpi_helper.h"

#include "pegasos.h"

#include <sstream>
#include <unordered_set>

// Read a libsvm-style data file ("<label> <index>:<value> ...") in two passes:
// pass 1 counts samples and feature nodes so storage can be reserved exactly,
// pass 2 fills y / xStart / xStore and computes the feature dimension n.
void Problem::readInput(const string& fname)
{
  // Pass 1: count samples (count) and sparse feature nodes (n_element).
  ifstream f1(fname.c_str());
  string line;
  int index;
  double label, value;
  char semiColon;
  int count = 0, n_element = 0;
  while (getline(f1, line)) {
    stringstream ss(line);
    if (ss >> label) { 
      count++;
      if (count % 100000 == 0) MSG("count=" << count);
      while (ss >> index) {
	semiColon = ' ';
	ss >> semiColon;
	if (semiColon == ':' && ss >> value)
	  n_element++;
      }
      n_element++; // one extra slot per sample for the index==-1 terminator
    }
  }
  f1.close();
  MSG("count=" << count << " n_ele=" << n_element);

  // Pass 2: re-read the file and fill the storage.
  // clear() is required before re-open: pre-C++11, open() does not reset the
  // eof/fail bits left by the first pass, so every subsequent read would fail.
  f1.clear();
  f1.open(fname.c_str());
  xStore.reserve(n_element); // allocate once; clear() below keeps capacity
  xStart.reserve(count);
  y.reserve(count);
  xStore.clear();
  xStart.clear();
  y.clear();

  count = 0;
  n_element = 0;
  this->n = 0; // feature dimension = max feature index + 1

  while (getline(f1, line)) {
    stringstream ss(line);
    if (ss >> label) { 
      y.push_back(label);
      count++;
      xStart.push_back(n_element); // offset of this sample's first node in xStore
      if (count % 100000 == 0) MSG("count=" << count << " n=" << this->n);
      while (ss >> index) {
	semiColon = ' ';
	ss >> semiColon;
	if (semiColon == ':' && ss >> value) {
	  xStore.push_back(FeatureNode(index, value));
	  n_element++;	  
	  this->n = max(this->n, index+1);
	}
      }
      xStore.push_back(FeatureNode(-1,0)); // sentinel: end of feature vector
      n_element++;
    }
  }
  MSG("count=" << count << " n_ele=" << n_element << " n=" << this->n);
}

double LinearModel::output(const Problem& problem, int i) const
{
  int p = problem.xStart[i], n = SIZE(w);
  double s = bias;
  while (problem.xStore[p].index != -1 && problem.xStore[p].index < n) {
    s += w[problem.xStore[p].index]*problem.xStore[p].value;
    p++;
  }
  return s;
}

void LinearModel::save(const string& fname) const
{
  ofstream f(fname.c_str());
  f << SIZE(w) << endl << bias << endl;
  FORE(i, 0, SIZE(w)) f << w[i] << endl;
}

// Load a model written by save(): dimension, bias, then one weight per line.
// On a missing or corrupt file the model falls back to empty (n=0, bias=0)
// instead of reading an indeterminate n — the original left `int n;`
// uninitialized, so a failed extraction made vector<double>(n, 0) UB.
void LinearModel::load(const string& fname) 
{
  ifstream f(fname.c_str());
  int n = 0;
  if (!(f >> n >> bias) || n < 0) {
    n = 0;
    bias = 0.0;
  }
  w = vector<double>(n, 0);
  FORE(i, 0, n) f >> w[i];
}

// Train a model on the file given by --input and save it to --model.
void Pegasos::doTrain() 
{
  REQUIRE_PARAM("input");
  problem.readInput(vm["input"].as<string>());
  train();
  // NOTE(review): "model" is read without REQUIRE_PARAM — presumably the
  // option has a default value in the parser; confirm at the option setup.
  model.save(vm["model"].as<string>());
}

void Pegasos::train()
{
  lambda = vm["lambda"].as<double>();
  k = vm["sample"].as<int>();
  max_iter = int(vm["epoch"].as<double>()*problem.size());
  verbose = vm["verbose"].as<int>();
  int n_report = vm["report"].as<int>();

  model.w = vector<double>(problem.n, 0.0);
  model.bias = 0.0;

  vector<double> avgW(problem.n,0.0);
  double avgBias = 0.0;

  int problem_size = SIZE(problem);
  int n = SIZE(model.w), avgSize = max_iter / 2;
  FORE(iter, 0, max_iter) {
    double eta_t = 1.0 / lambda / (iter+1);
    
    vector<int> sample;
    sample.reserve(k);
    FORE(s, 0, k) { // pick s sample
      int i = rand() % problem_size;
      double output = problem.y[i]*model.output(problem, i);
      if (output < 1) sample.push_back(i);
    }

    double eta_t_lambda = 1.0 - eta_t * lambda;
    FORE(i,0,n) model.w[i] *= eta_t_lambda;

    double eta_t_k = eta_t / k;
    FORE(s, 0, SIZE(sample)) {
      int i = sample[s];
      int p = problem.xStart[i];
      double label = problem.y[i];
      while (problem.xStore[p].index != -1 && problem.xStore[p].index < n) {
	model.w[problem.xStore[p].index] += eta_t_k * label*problem.xStore[p].value;
	p++;
      }
      model.bias += eta_t_k * label;
    }
    
    if (iter + avgSize >= max_iter) {
      FORE(i,0,n) avgW[i] += model.w[i] / avgSize;
      avgBias += model.bias / avgSize;
    }

    if (verbose && iter % (max_iter/n_report) == 0 || iter == max_iter-1) {
      if (verbose == 1) {
	MSG("iter=" << iter/* << " nnz=" << SIZE(nz)*/);
      } else if (verbose == 2) {
	double f = computeObjective();
	MSG("iter=" << iter/* << " nnz=" << SIZE(nz)*/ << " f=" << f);
      }
    }
  }
  model.w = avgW;
  model.bias = avgBias;
  //double f = computeObjective();
  //MSG("f=" << f);
}

double Pegasos::computeObjective() const
{
  int problem_size = SIZE(problem);
  int n = SIZE(model.w);
  double lambda = vm["lambda"].as<double>();
  double reg = 0, loss = 0;

  FORE(i, 0, n) reg += model.w[i]*model.w[i];
  reg *= lambda;

  FORE(sample, 0, problem_size) {
    double output = problem.y[sample]*model.output(problem, sample);
    loss += (output < 1 ? 1-output : 0);
  }
  loss /= problem_size;

  return loss+reg;
}

// Classify the --input file with the model loaded from --model, write one
// "+1"/"-1" prediction per line to --output, and report overall accuracy.
void Pegasos::doTest() 
{
  REQUIRE_PARAM("input");
  problem.readInput(vm["input"].as<string>());
  model.load(vm["model"].as<string>());
  ofstream out(vm["output"].as<string>().c_str());

  int problem_size = SIZE(problem);
  double n_correct = 0;

  FORE(sample, 0, problem_size) {
    double output = model.output(problem, sample);
    // output == 0 counts as correct for either label and is emitted as "+1"
    n_correct += (problem.y[sample]*output >= 0 ? 1 : 0);
    out << (output < 0 ? "-1" : "+1") << endl; 
  }

  // Fixed "\%" -> "%": '\%' is not a valid C++ escape sequence; '%' needs
  // no escaping in a string literal. (Accuracy is NaN on an empty input set.)
  MSG("Accuracy = " << n_correct / problem_size * 100 << "%");
}

// Cross-validation entry point — not implemented yet.
// TODO: split the problem into folds, train on k-1 folds, evaluate on the rest.
void Pegasos::doCrossValidation() 
{
}

