#ifndef MF_ABSTRACT_CC
#define MF_ABSTRACT_CC
#include<string>
#include<fstream>
#include<sstream>
#include<iostream>
#include<iomanip>
#include<algorithm>
#include<iterator>
#include<vector>
#include<queue>
#include<map>
#include<cstdlib>
#include<ctime>
#include<cmath>
#include<unordered_map>
#include<tuple>
#include<algorithm>
#include "def.hpp"
#include "model.hpp"
#include "utils.hpp"
#include "log.hpp"

using namespace std;

namespace Puza{
	// Default constructor: the base model has no state of its own to set up,
	// so let the compiler generate it (C++11 `= default` over an empty body).
	PuzaLFM::PuzaLFM() = default;

	// Destructor: nothing to release here; defaulted for clarity.
	// (Identical behavior to the former empty body, including member teardown.)
	PuzaLFM::~PuzaLFM() = default;

	// Base-class stub: scores every row as 0.0. Presumably concrete latent-factor
	// models declared in model.hpp override this with a real prediction -- TODO confirm.
	double PuzaLFM::Hypothesis(PuzaRow r){
		(void)r;  // input is intentionally unused in the base implementation
		const double default_score = 0.0;
		return default_score;
	}

	// Base-class stub: contributes zero loss for any (prediction, target) pair.
	// Derived models are expected to supply the actual pointwise loss -- TODO confirm.
	double PuzaLFM::PointwiseLossFunction(double y_bar, double y){
		(void)y_bar;  // unused in the base implementation
		(void)y;
		const double zero_loss = 0.0;
		return zero_loss;
	}

	// Base-class stub: zero loss for any (positive score, negative score) pair.
	// Derived models are expected to supply the actual pairwise ranking loss -- TODO confirm.
	double PuzaLFM::RankwiseLossFunction(double y1, double y2){
		(void)y1;  // unused in the base implementation
		(void)y2;
		const double zero_loss = 0.0;
		return zero_loss;
	}

	// Stochastic-gradient update for one row given its prediction y_bar.
	// Base-class stub: performs no parameter update; presumably overridden by
	// concrete models declared in model.hpp -- TODO confirm.
	// Called once per training row from PointwiseTrain().
	void PuzaLFM::PointwiseSGD(double y_bar, PuzaRow r){

	}

	// Stochastic-gradient update for one session given a prediction y_bar.
	// Base-class stub: performs no parameter update.
	// NOTE(review): nothing in this file calls RankwiseSGD -- RankwiseTrain()
	// accumulates loss but never invokes it; confirm whether derived code or a
	// later revision is expected to wire this in.
	void PuzaLFM::RankwiseSGD(double y_bar, PuzaSession s){

	}

	void PuzaLFM::PointwiseTrain(int iter){
		unsigned int training_size = floor(PuzaDEF::Instance()->INPUT_ROW_TABLE.size() * PuzaDEF::Instance()->TRAIN_THRESHOLD);
		double training_errors = 0.0;
		for(unsigned int i= 0; i < training_size; i++){
			// compute gradient descent for each data point (row)
			PuzaRow current_row = PuzaDEF::Instance()->INPUT_ROW_TABLE[i];
			double y_bar = this->Hypothesis(current_row);
			double loss =  this->PointwiseLossFunction(y_bar, current_row.response);
			this->PointwiseSGD(y_bar, current_row); // perform gradient descent
			training_errors += loss;
		}
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Finished.");
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Training Errors:" + to_string(training_errors));
	}

	void PuzaLFM::RankwiseTrain(int iter){
		unsigned int training_size = floor(PuzaDEF::Instance()->INPUT_SESSION_TABLE.size() * PuzaDEF::Instance()->TRAIN_THRESHOLD);
		double training_errors = 0.0;
		for(unsigned int i= 0; i < training_size; i++){
			// compute gradient descent for each data point (row)
			// !!!! Important
			// ignore all sessions without positive rows
			PuzaSession current_session = PuzaDEF::Instance()->INPUT_SESSION_TABLE[i];
			if((current_session.positive_rows.size() > 0) && (current_session.negative_rows.size() > 0)){
				for(unsigned int n=0; n < current_session.positive_rows.size(); n++){
					PuzaRow positive_row = current_session.positive_rows[n];
					for(unsigned int m=0; m < current_session.negative_rows.size(); m++){
						PuzaRow negative_row = current_session.negative_rows[m];
						double y1_bar = this->Hypothesis(positive_row);
						double y2_bar = this->Hypothesis(negative_row);
						double loss = this->RankwiseLossFunction(y1_bar, y2_bar);
						training_errors += loss;
					}
				}
			}
		}
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Finished.");
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Training Errors:" + to_string(training_errors));
	}

	void PuzaLFM::PointwiseValidation(int iter){
		int N = 0;
		unsigned int training_size = floor(PuzaDEF::Instance()->INPUT_ROW_TABLE.size() * PuzaDEF::Instance()->TRAIN_THRESHOLD);
		double validation_errors = 0.0;
		for(unsigned int i= training_size; i < PuzaDEF::Instance()->INPUT_ROW_TABLE.size(); i++){
			PuzaRow current_row = PuzaDEF::Instance()->INPUT_ROW_TABLE[i];
			double y_bar = this->Hypothesis(current_row);
			double loss =  this->RMSE(y_bar, current_row.response);
			validation_errors += loss;
			N = N + 1;
		}
		validation_errors = sqrt(validation_errors / N);
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Validation Error:" + to_string(validation_errors));
	}

	// Validation pass for the rankwise model over the held-out sessions.
	//
	// NOTE(review): this is not implemented yet -- the session loop below has an
	// empty body, so the reported error is always 0.0. The scaffold is kept so a
	// pairwise scoring step (mirroring RankwiseTrain) can be filled in.
	//
	// @param iter  current iteration index (used only in log messages)
	void PuzaLFM::RankwiseValidation(int iter){
		auto def = PuzaDEF::Instance();
		unsigned int training_size = floor(def->INPUT_SESSION_TABLE.size() * def->TRAIN_THRESHOLD);
		double validation_errors = 0.0;
		for(unsigned int i = training_size; i < def->INPUT_SESSION_TABLE.size(); i++){
			// TODO: score each held-out session's (positive, negative) pairs and
			// accumulate a ranking error, analogous to RankwiseTrain().
		}
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Finished.");
		// BUG FIX: the log line previously said "Training Errors" even though this
		// is the validation pass; label corrected to match PointwiseValidation.
		PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Validation Errors:" + to_string(validation_errors));
	}


	// Squared error for a single prediction.
	// NOTE(review): despite the name, this returns (y_bar - y)^2, not a
	// root-mean-square -- PointwiseValidation() divides by N and takes the
	// square root itself. The name is kept to preserve the interface.
	double PuzaLFM::RMSE(double y_bar, double y){
		double diff = y_bar - y;
		// diff * diff is exact and far cheaper than the general pow(diff, 2).
		return diff * diff;
	}

	// Run TOTAL_ITER epochs of stochastic gradient descent: one training pass
	// followed by one validation pass per iteration, dispatched on the
	// configured TRAIN_TYPE / TEST_TYPE.
	void PuzaLFM::TrainValidation(){
		// multiple passes over the training part of the dataset;
		// the train/validation split fraction is TRAIN_THRESHOLD
		auto def = PuzaDEF::Instance();

		PuzaLogger::Instance()->PutString("Start Stochastic Gradient Descent.");
		if(def->TRAIN_TYPE == PROCESS_POINT){
			unsigned int training_size = floor(def->INPUT_ROW_TABLE.size() * def->TRAIN_THRESHOLD);
			PuzaLogger::Instance()->PutString("Training rows:" + to_string(training_size));
		}
		else if(def->TRAIN_TYPE == PROCESS_RANK){
			unsigned int training_size = floor(def->INPUT_SESSION_TABLE.size() * def->TRAIN_THRESHOLD);
			PuzaLogger::Instance()->PutString("Training sessions:" + to_string(training_size));
		}

		for(int iter = 0; iter < def->TOTAL_ITER; iter ++){
			PuzaLogger::Instance()->PutString("(SGD): Iteration " + to_string(iter) + " Started.");
			/* ---------------- training phase ----------------- */
			if(def->TRAIN_TYPE == PROCESS_POINT){
				this->PointwiseTrain(iter);
			}
			else if(def->TRAIN_TYPE == PROCESS_RANK){
				// BUG FIX: PROCESS_RANK was logged above but never dispatched here,
				// so rankwise runs silently did nothing each iteration.
				this->RankwiseTrain(iter);
			}

			/* ---------------- validation phase ----------------- */
			if(def->TEST_TYPE == PROCESS_POINT){
				this->PointwiseValidation(iter);
			}
			else if(def->TEST_TYPE == PROCESS_RANK){
				// BUG FIX: same omission for the rankwise validation pass.
				this->RankwiseValidation(iter);
			}
		}
		PuzaLogger::Instance()->PutString("End Stochastic Gradient Descent.");
	}
};

#endif
