#include "helper/helper.h"
#include "helper/mpi_helper.h"

#include "parPegasos.h"

#include <sstream>
#include <set>
#include <boost/filesystem.hpp>
#include <cstdint>
#include <cassert>
#include <cstring>
#include <functional>
#include <algorithm>
namespace fs = boost::filesystem;

void Problem::readInput(const string& fname)
{
  // Each MPI rank reads a contiguous byte slice of the input file; slice
  // borders are snapped forward to the next newline so no sample line is
  // split between ranks.
  int64_t fileSize = fs::file_size(fname), localSize = fileSize / MPI_SIZE;
  int64_t startPos = localSize * MPI_RANK;
  int64_t endPos = MPI_RANK < MPI_SIZE-1 ? startPos + localSize : fileSize;
  string skip_buf;
  //MSG(MPI_RANK << " 1 s=" << startPos << " e=" << endPos);

  if (MPI_RANK > 0) {
    // Skip the (possibly partial) line at the slice start; rank-1 owns it.
    ifstream f(fname.c_str());
    f.seekg(startPos, ios::beg);
    getline(f, skip_buf);
    startPos = f.tellg();
  }

  if (MPI_RANK < MPI_SIZE-1) {
    // Extend the slice end to the end of the line it falls inside.
    ifstream f(fname.c_str());
    f.seekg(endPos, ios::beg);
    getline(f, skip_buf);
    endPos = f.tellg();
    f.close();
  }
  //MSG(MPI_RANK << " 2 s=" << startPos << " e=" << endPos);

  // int64_t (not int): a slice of a file larger than ~2GB per rank would
  // silently overflow a 32-bit count.
  int64_t nRead = endPos - startPos;
  // resize, NOT reserve: reading into reserved-but-unsized storage and then
  // writing buf[nRead] past size() is undefined behavior.
  vector<char> buf(nRead + 1);
  ifstream f(fname.c_str());
  f.seekg(startPos, ios::beg);
  f.read(buf.data(), nRead);
  //MSG(MPI_RANK << " 3 read=" << f.gcount() << " n_read=" << nRead);
  assert(f.gcount() == nRead);
  buf[nRead] = '\0'; // make it a standard string

  // NOTE(review): pubsetbuf on a stringstream's rdbuf is
  // implementation-defined; this relies on the library installing buf as
  // the get area (works with libstdc++) — verify when porting.
  stringstream ss;
  ss.rdbuf()->pubsetbuf(buf.data(), nRead);
  firstPass(ss);

  stringstream ss1;
  ss1.rdbuf()->pubsetbuf(buf.data(), nRead);
  secondPass(ss1);
}

#define MSG_RANK(m) cout << MPI_RANK << " " << m << endl

void Problem::firstPass(stringstream& s) {
  // First pass over the local data slice: count samples, feature entries
  // and class balance so secondPass() can reserve exact storage.
  // Expected line format: <label> (<index>:<value>)*
  string line;
  double label, value;
  int index;
  char semiColon;

  n_sample = 0;
  n_ele = 0;
  n_neg = 0; n_pos = 0;
  if (MPI_RANK == 0) cout << "first pass ... ";
  // getline in the loop condition replaces the `while (!s.eof())`
  // anti-pattern (the old form iterated once more on a failed read; the
  // label-extraction guard masked it, but this is the correct idiom).
  while (getline(s, line)) {
    stringstream ss(line);
    if (ss >> label) {
      n_sample++;
      if (label > 0) n_pos += 1; else n_neg += 1;
      if (n_sample % 100000 == 0 && MPI_RANK == 0) {
        cout << n_sample << " "; cout.flush(); // progress on rank 0 only
      }
      while (ss >> index) {
        semiColon = ' ';
        ss >> semiColon;
        // value is parsed only to validate the pair and advance the stream
        if (semiColon == ':' && ss >> value)
          n_ele++;
      }
      n_ele++; // account for the -1 end-of-vector sentinel
    }
  }
  int sum, sum1;
  // std::plus<int> matches the int operands; the old std::plus<double>
  // compiled but forced int<->double conversions inside the reduce op.
  mpi::reduce(world, n_sample, sum, std::plus<int>(), 0);
  mpi::reduce(world, n_ele, sum1, std::plus<int>(), 0);
  if (MPI_RANK == 0) MSG_RANK("done first pass n_sample=" << sum << " n_ele=" << sum1);
}

void Problem::secondPass(stringstream& s) {
  // Second pass: actually store the samples, using the counts collected in
  // firstPass() to reserve exact capacity up front (avoids reallocations).
  string line;
  double label, value;
  int index;
  char semiColon;

  xStore.reserve(n_ele); // capacities come from firstPass()
  xStart.reserve(n_sample);
  y.reserve(n_sample);
  negIdx.reserve(n_neg);
  posIdx.reserve(n_pos);
  xStore.clear();
  xStart.clear();
  y.clear();
  negIdx.clear();
  posIdx.clear();

  n_sample = 0;
  n_ele = 0;
  this->local_n = 0;

  if (MPI_RANK == 0) cout << "second pass ... ";
  n_neg = 0; n_pos = 0;
  // getline in the condition replaces the `while (!s.eof())` anti-pattern.
  while (getline(s, line)) {
    stringstream ss(line);
    if (ss >> label) {
      y.push_back(label > 0 ? 1 : -1); // labels normalized to +/-1
      xStart.push_back(n_ele);         // sample's feature run starts here
      if (label > 0) {
        n_pos += 1;
        posIdx.push_back(n_sample);
      }
      else {
        n_neg += 1;
        negIdx.push_back(n_sample);
      }
      n_sample++;
      if (n_sample % 100000 == 0 && MPI_RANK == 0) {
        cout << n_sample << " "; cout.flush();
      }
      while (ss >> index) {
        semiColon = ' ';
        ss >> semiColon;
        if (semiColon == ':' && ss >> value) {
          xStore.push_back(FeatureNode(index, value));
          local_n = max(local_n, index+1); // track feature dimensionality
          n_ele++;
        }
      }
      xStore.push_back(FeatureNode(-1, 0.0)); // -1 sentinel terminates the sample
      n_ele++;
    }
  }

  // Global totals across all ranks. plus<int>/maximum<int> match the int
  // operands (the old plus<double>/maximum<double> compiled but converted
  // back and forth needlessly inside the user-defined MPI op).
  mpi::all_reduce(world, n_sample, total_sample, std::plus<int>());
  mpi::all_reduce(world, n_pos, total_pos, std::plus<int>());
  mpi::all_reduce(world, n_neg, total_neg, std::plus<int>());
  mpi::all_reduce(world, n_ele, total_ele, std::plus<int>());
  mpi::all_reduce(world, local_n, n, mpi::maximum<int>());
  if (MPI_RANK == 0) MSG_RANK("done second pass n_sample=" << total_sample << " n_ele=" << total_ele << " n=" << n);
}

double LinearModel::output(const Problem& problem, int i) const
{
  int p = problem.xStart[i], n = SIZE(w);
  double s = bias;
  while (problem.xStore[p].index != -1 && problem.xStore[p].index < n) {
    s += w[problem.xStore[p].index]*problem.xStore[p].value;
    p++;
  }
  return s;
}

void LinearModel::save(const string& fname) const
{
  // Persist the model as text: dimension and bias on the first two lines,
  // then one "index<TAB>weight" line per weight with magnitude > 1e-10
  // (near-zero weights are dropped to keep the file sparse).
  ofstream f(fname.c_str());
  const int dim = SIZE(w);
  f << dim << endl << bias << endl;
  for (int i = 0; i < dim; ++i) {
    const double wi = w[i];
    if (fabs(wi) > 1e-10) f << i << "\t" << wi << endl;
  }
}

void LinearModel::load(const string& fname) 
{
  // Load a model written by save(): "n" and "bias" header lines followed
  // by sparse "index value" pairs.
  ifstream f(fname.c_str());
  int n, idx;
  double val;
  f >> n >> bias;
  w = vector<double>(n, 0);
  while (f >> idx >> val) {
    // Bounds check: an out-of-range index in a corrupt/truncated model
    // file previously wrote past the end of w (undefined behavior).
    if (idx >= 0 && idx < n)
      w[idx] = val;
  }
}

void ParPegasos::doTrain() 
{
  // Full training pipeline: read the distributed input, count per-feature
  // occurrences (used by train() for regularization scaling), run the
  // parallel Pegasos iterations, and save the averaged model on rank 0.
  REQUIRE_PARAM("input");
  problem.readInput(vm["input"].as<string>());
  countFeature();
  train();
  // NOTE(review): assumes --model was supplied; vm["model"].as<string>()
  // throws if absent — consider REQUIRE_PARAM("model") as well.
  if (MPI_RANK == 0) avgModel.save(vm["model"].as<string>());
}

void ParPegasos::countFeature() {
  // fCount[j] = number of local feature entries with index j; train() uses
  // it to scale the per-feature regularization shrink factor.
  fCount = vector<int>(problem.n, 0);
  for (int e = 0; e < problem.n_ele; ++e) {
    const int j = problem.xStore[e].index;
    if (j != -1) fCount[j]++; // -1 is the end-of-sample sentinel, not a feature
  }
}

vector<int> ParPegasos::randomTuple(int k, int n) { // TODO: stratified sampling
  // Draw k sample indices (caller guarantees k >= 2): one guaranteed
  // positive and one guaranteed negative first, then fill the rest keeping
  // the running pos/neg ratio below negativeWeight. The parameter n is
  // currently unused (kept for the interface).
  vector<int> tuple;
  tuple.reserve(k);
  tuple.push_back( problem.posIdx[rand()%SIZE(problem.posIdx)] );
  tuple.push_back( problem.negIdx[rand()%SIZE(problem.negIdx)] );
  double nNeg = 1, nPos = 1;
  for (int i = 2; i < k; ++i) {
    if (nPos / nNeg < negativeWeight) {
      tuple.push_back( problem.posIdx[rand()%SIZE(problem.posIdx)] );
      nPos += 1;
    }
    else {
      tuple.push_back( problem.negIdx[rand()%SIZE(problem.negIdx)] );
      nNeg += 1;
    }
  }
  assert( nPos >= 1 && nNeg >= 1 );
  return tuple;
}

vector<int> ParPegasos::getIndex(const vector<int>& sample) const
{
  // Union of the feature indices appearing in the sampled examples,
  // returned sorted and deduplicated (std::set keeps it ordered).
  set<int> uniq;
  for (int s = 0; s < SIZE(sample); ++s) {
    int p = problem.xStart[sample[s]];
    while (problem.xStore[p].index != -1) { // -1 terminates each sample
      uniq.insert(problem.xStore[p].index);
      ++p;
    }
  }
  return vector<int>(uniq.begin(), uniq.end());
}

#define MPI_TAG_READ_W 0
#define MPI_TAG_RESULT_W 1
void ParPegasos::readWfromMaster()
{
  // Fetch from the master (rank 0) only the weights for the feature
  // indices in `idx` (the features touched by this iteration's sample).
  // All ranks gather their index lists on rank 0; rank 0 then replies to
  // every rank — including itself — with the matching weights, the bias
  // appended as the trailing element.
  mpi::gather(world, idx, recv_idx, 0);
   
  vector<vector<double> > send_val(MPI_SIZE);
  vector<double> val;
  vector<mpi::request> reqs2;
  if (MPI_RANK == 0) {
    FORE(p, 0, MPI_SIZE) {
      //MSG(p << " " << SIZE(recv_idx[p]));
      send_val[p].reserve(SIZE(recv_idx[p])+1);
      FORE(i, 0, SIZE(recv_idx[p])) {
	send_val[p].push_back(masterModel.w[recv_idx[p][i]]);
      }
      // bias travels as the extra last element of the payload
      send_val[p].push_back(masterModel.bias);
      reqs2.push_back(world.isend(p, MPI_TAG_RESULT_W, send_val[p]));
    }
  }
  // every rank (rank 0 included) posts a receive for its own slice
  reqs2.push_back(world.irecv(0, MPI_TAG_RESULT_W, val));
  mpi::wait_all(ALL(reqs2));

  // install the received weights into the local model ...
  FORE(i, 0, SIZE(idx)) model.w[idx[i]] = val[i];
  model.bias = val.back(); 
  // ... and keep a copy so sendWtoMaster() can ship back only the delta
  saveModel.w = val;
}

void ParPegasos::sendWtoMaster()
{
  // Ship this iteration's updates back to rank 0 as deltas (new - old);
  // saveModel.w holds the values received in readWfromMaster(), with the
  // bias as the trailing element.
  FORE(i, 0, SIZE(idx)) saveModel.w[i] = model.w[idx[i]]-saveModel.w[i];
  saveModel.w.back() = model.bias - saveModel.w.back();

  vector<vector<double> > val;
  mpi::gather(world, saveModel.w, val, 0);
  if (MPI_RANK == 0) {
    // Rank 0 applies every rank's delta to the master model, and during
    // the averaging phase folds the running master values into avgModel.
    FORE(p, 0, MPI_SIZE) {
      FORE(i, 0, SIZE(recv_idx[p])) {
	masterModel.w[recv_idx[p][i]] += val[p][i];
	// NOTE(review): this accumulates once per sending rank that
	// touched the feature (inside the p loop) — confirm that is the
	// intended averaging scheme rather than once per iteration.
	if (isAvg) avgModel.w[recv_idx[p][i]] += masterModel.w[recv_idx[p][i]] / avgSize;
      }
      masterModel.bias += val[p].back();
      if (isAvg) avgModel.bias += masterModel.bias / avgSize;
    }
  }
}

void ParPegasos::train()
{
  // Parallel Pegasos main loop. Each iteration: draw a stratified sample,
  // pull only the needed weights from the master, apply the Pegasos
  // shrink + subgradient step locally, and push the deltas back.
  lambda = vm["lambda"].as<double>();
  k = vm["sample"].as<int>();
  // iteration budget: explicit --iter, or --epoch fraction of total samples
  max_iter = vm.count("iter") ? vm["iter"].as<int>() : int(vm["epoch"].as<double>()*problem.total_sample);
  verbose = vm["verbose"].as<int>();
  int n_report = vm["report"].as<int>();

  model.w = vector<double>(problem.n, 0.0);
  if (MPI_RANK == 0) avgModel = masterModel = model;

  int problem_size = SIZE(problem);
  int n = problem.n;
  // average the model over the second half of the iterations
  avgSize = max_iter / 2;
  negativeWeight = double(problem.total_pos) / problem.total_neg;
  // ensure k is large enough to respect the class ratio (and >= 2)
  double sampleSize = max(2.0, max(ceil(1/negativeWeight),ceil(negativeWeight))+1);
  k = max(k, int(sampleSize));
  if (MPI_RANK == 0) {
    MSG("negative weight=" << negativeWeight << " k=" << k);
  }
  FORE(iter, 0, max_iter) {
    // start averaging once we are within avgSize iterations of the end
    isAvg = iter + avgSize >= max_iter;

    // standard Pegasos step size: 1 / (lambda * t)
    double eta_t = 1.0 / lambda / (iter+1);
    double eta_t_k = eta_t / k;
        
    vector<int> sample = randomTuple(k, problem_size);
    idx = getIndex(sample);
    readWfromMaster();  // read only the needed features

    // collect the samples that violate the margin (hinge loss active)
    vector<int> error_sample;
    error_sample.reserve(k);
    FORE(s, 0, k) { // pick s sample
      int i = sample[s];
      double output = problem.y[i]*model.output(problem, i);
      if (output < 1) error_sample.push_back(i);
    }

    // regularization shrink, scaled per feature by how often it occurs
    // (fCount) and by the number of ranks updating it concurrently
    FORE(i,0,SIZE(idx)) {
      assert(fCount[idx[i]] != 0);
      model.w[idx[i]] *= (1-eta_t*lambda/MPI_SIZE/fCount[idx[i]]);
    }

    // subgradient step on the margin violators; negatives are up-weighted
    // by negativeWeight to compensate for class imbalance
    FORE(s, 0, SIZE(error_sample)) {
      int i = error_sample[s];
      int p = problem.xStart[i];
      double label = problem.y[i];
      while (problem.xStore[p].index != -1 && problem.xStore[p].index < n) {
	model.w[problem.xStore[p].index] += eta_t_k * (label > 0 ? 1 : negativeWeight) * label*problem.xStore[p].value;
	p++;
      }
      model.bias += eta_t_k * label;
    }
    sendWtoMaster();
    
    // progress reporting: && binds tighter than ||, so the last iteration
    // enters this branch even when verbose is 0 (the inner checks still
    // suppress output in that case)
    if (verbose && iter % max(1,max_iter/n_report) == 0 || iter == max_iter-1) {
      if (verbose == 1) {
	if (MPI_RANK == 0) MSG("iter=" << iter/* << " nnz=" << SIZE(nz)*/);
      } else if (verbose == 2) {
	//double f = computeObjective();
	if (MPI_RANK == 0) {
	  MSG("iter=" << iter/* << " nnz=" << SIZE(nz) << " f=" << f*/);
	  FORE(i, 0, SIZE(masterModel.w)) cout << masterModel.w[i] << " ";
	  cout << endl;
	}
      }
    }
  } // FOR iter
}

double ParPegasos::computeObjective() const
{
  // Regularized hinge-loss objective on the local shard:
  //   lambda * ||w||^2 + mean_i max(0, 1 - y_i * f(x_i))
  const int nSamples = SIZE(problem);
  const int nWeights = SIZE(model.w);
  const double lam = vm["lambda"].as<double>();

  double reg = 0;
  for (int j = 0; j < nWeights; ++j) reg += model.w[j] * model.w[j];
  reg *= lam;

  double loss = 0;
  for (int i = 0; i < nSamples; ++i) {
    const double margin = problem.y[i] * model.output(problem, i);
    if (margin < 1) loss += 1 - margin;
  }
  loss /= nSamples;

  return loss + reg;
}

void ParPegasos::doTest() 
{
  // Evaluate the saved model on the (distributed) input and report, on
  // rank 0: accuracy (AC), sensitivity (SN = TP/POS) and specificity
  // (SP = TN/NEG).
  REQUIRE_PARAM("input");
  problem.readInput(vm["input"].as<string>());
  model.load(vm["model"].as<string>());
  //ofstream out(vm["output"].as<string>().c_str());

  int problem_size = SIZE(problem);
  double n_correct = 0, total_correct;
  double n_TP = 0, total_TP;
  double n_TN = 0, total_TN;
  double n_NEG = 0, n_POS = 0, total_NEG, total_POS;

  FORE(sample, 0, problem_size) {
    double output = model.output(problem, sample);
    // correct when the prediction sign agrees with the label
    bool isCorrect = problem.y[sample]*output >= 0;
    if (isCorrect) {
      n_correct += 1;
      if (problem.y[sample] > 0) n_TP += 1;
      else n_TN += 1;
    }
    if (problem.y[sample] > 0) n_POS += 1;
    else n_NEG += 1;
    //out << (output < 0 ? "-1" : "+1") << endl; 
  }

  // aggregate per-rank counts on rank 0
  mpi::reduce(world, n_correct, total_correct, std::plus<double>(), 0);
  mpi::reduce(world, n_TP, total_TP, std::plus<double>(), 0);
  mpi::reduce(world, n_TN, total_TN, std::plus<double>(), 0);
  mpi::reduce(world, n_POS, total_POS, std::plus<double>(), 0);
  mpi::reduce(world, n_NEG, total_NEG, std::plus<double>(), 0);
  // "%" replaces "\%": \% is not a valid C++ escape sequence (the old form
  // printed '%' only by compiler leniency and is ill-formed per standard).
  if (MPI_RANK == 0) 
    MSG("n=" << problem.total_sample << " n_POS=" << total_POS << " n_NEG=" << total_NEG << endl <<
	"AC = " << total_correct / problem.total_sample * 100 << "%" << endl <<
	"SN = " << total_TP / total_POS * 100 << "%" << endl <<
	"SP = " << total_TN / total_NEG * 100 << "%");
}

void ParPegasos::doCrossValidation() 
{
  // TODO: cross-validation is not implemented yet; intentionally a no-op.
}

