package informed.sgd;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;

import informed.matrix.Utils;
import informed.samples.LinearRegressionSample;
import informed.samples.Sample;
import cern.colt.matrix.DoubleMatrix1D;
import cern.colt.matrix.DoubleMatrix2D;
import cern.colt.matrix.impl.DenseDoubleMatrix1D;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.linalg.Algebra;

/**
 * Linear regression trained by stochastic gradient descent with an
 * L2 (ridge) penalty. Each per-sample step is preconditioned by the
 * inverse of a diagonal Hessian approximation (a Newton-like SGD step).
 *
 * <p>Not thread-safe: training mutates {@code w}, {@code iter} and
 * {@code error} without synchronization.
 */
public class LinearRegression{
	
	private double lambda; 		// Regularization (ridge) parameter
	private int iter;			// Total number of per-sample updates performed so far
	private DoubleMatrix1D w;	// Model parameters (weight vector)
	private double learningRate;	// Base learning rate (eta_0)
	private Algebra algebra;	// Colt linear-algebra helper (matrix inversion)
	
	private int nSamples;		// Number of training samples, set in init()
	private double error;		// Accumulated squared error for the current epoch
	
	/**
	 * Creates an untrained model.
	 *
	 * @param l the L2 regularization strength lambda
	 */
	public LinearRegression(double l) {
		lambda = l;
		iter = 1;
		w = null;
		learningRate = 0.0;
		algebra = new Algebra();
		nSamples = 0;
		error = 0;
	}
	
	/**
	 * Initializes the weight vector and learning rate from the first sample's
	 * dimensionality. Only component 0 of {@code w} is set to a random value
	 * in [0, 1); all other components start at 0.
	 *
	 * @param samples non-empty training set of {@link LinearRegressionSample}s
	 * @throws IllegalArgumentException if {@code samples} is null or empty
	 */
	public void init(ArrayList<Sample> samples) {
		if (samples == null || samples.isEmpty()) {
			throw new IllegalArgumentException("Training set must contain at least one sample");
		}
		LinearRegressionSample s = (LinearRegressionSample)samples.get(0);
		Random r = new Random(System.currentTimeMillis());
		w = new DenseDoubleMatrix1D(s.x.size());
		w.setQuick(0, 1);
		// Scale so that w = (rand, 0, ..., 0); the other components stay zero.
		w = Utils.multByScalar(w, r.nextDouble());
		learningRate = 0.1;
		nSamples = samples.size();
		System.out.println("Starting w = " + w.getQuick(0) + " learningRate=" + learningRate);
	}
	
	/**
	 * Trains the model for the given number of epochs. Each epoch shuffles
	 * the samples and performs one preconditioned SGD step per sample, then
	 * prints the mean squared error accumulated over that epoch.
	 *
	 * @param samples training set (shuffled in place each epoch)
	 * @param epochs  number of full passes over the data
	 */
	public void run(ArrayList<Sample> samples, int epochs){
		init(samples);
		// One RNG for all epochs: re-seeding with currentTimeMillis() inside
		// the loop would give identical shuffles for epochs that start within
		// the same millisecond.
		Random rand = new Random(System.currentTimeMillis());
		for(int i = 0; i < epochs; i++){
			// Reset the accumulator so each epoch reports only its own error;
			// gradient() adds each sample's squared residual to it.
			error = 0;
			Collections.shuffle(samples, rand);
			for(int j = 0; j < samples.size(); j++){
				move(samples.get(j));
				iter++;
			}
			error *= 1.0/nSamples;
			System.out.println("Iteration " + iter + " error=" + error + " w=" + w.getQuick(0));
		}
	}
	
	/**
	 * Performs a single update step for one sample: computes the gradient,
	 * preconditions it with the inverse Hessian approximation, and applies it.
	 *
	 * @param x the training sample to step on
	 * @return always {@code true}
	 */
	private boolean move(Sample x){
		// Compute descent direction: step = H^{-1} * g (Newton-like step)
		DoubleMatrix1D g = gradient(x);
		DoubleMatrix2D h = hessian(x);
		DoubleMatrix2D h_inv = algebra.inverse(h);
		DoubleMatrix1D step = h_inv.zMult(g, null);
		// Update model
		updateModel(step);
		return true;
	}
	
	/**
	 * Applies a descent step with a decaying learning rate.
	 *
	 * @param step the (preconditioned) gradient direction; the model moves
	 *             against it, scaled by the current rate
	 */
	private void updateModel(DoubleMatrix1D step){
		// The following learning rate was set according to:
		// http://www.chokkan.org/publication/survey/logistic_regression_sgd.html
		// http://machine-learning-quirks.blogspot.com/2010/01/stochastic-gradient-descent.html
		// and it works pretty well.
		// NOTE(review): iter / nSamples is integer division, so the rate
		// decays in per-epoch steps (constant within each epoch) rather than
		// smoothly per sample — confirm this matches the intended schedule.
		double gamma = learningRate/(1 + iter / nSamples);
		w = Utils.addVectors(w, Utils.multByScalar(step, -gamma));
	}

	/**
	 * Computes the regularized gradient for one sample:
	 * {@code -(y - w.x) * x + lambda * w}.
	 *
	 * <p>Side effect: adds this sample's squared residual to the epoch
	 * {@code error} accumulator used by {@link #run}.
	 *
	 * @param x a {@link LinearRegressionSample}
	 * @return the gradient vector
	 */
	public DoubleMatrix1D gradient(Sample x) {
		LinearRegressionSample s = (LinearRegressionSample)x;
		double e = s.y - w.zDotProduct(s.x);
		error += e*e;
		DoubleMatrix1D ex = Utils.multByScalar(s.x, -e);
		DoubleMatrix1D lw = Utils.multByScalar(w, lambda);
		return Utils.addVectors(ex, lw);
	}

	/**
	 * Builds a diagonal Hessian approximation for one sample:
	 * {@code diag(x.x + lambda)} repeated on every diagonal entry.
	 *
	 * @param x a {@link LinearRegressionSample}
	 * @return a dense diagonal matrix of size d x d
	 */
	public DoubleMatrix2D hessian(Sample x) {
		LinearRegressionSample s = (LinearRegressionSample)x;
		int d = s.x.size();
		DenseDoubleMatrix2D h = new DenseDoubleMatrix2D(d, d);
		// x.x is loop-invariant: compute it once instead of once per entry.
		double diag = s.x.zDotProduct(s.x) + lambda;
		for(int i = 0; i < d; i++){
			h.setQuick(i, i, diag);
		}
		return h;
	}

	/**
	 * Returns a printable representation of the learned weight vector,
	 * or fails with an NPE if called before training.
	 *
	 * @return the Colt string form of {@code w}
	 */
	public String getSolution() {
		return w.toString();
	}
}
