package informed.sgd;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;

import informed.matrix.Utils;
import informed.samples.MatrixCellValue;
import informed.samples.Sample;
import cern.colt.matrix.DoubleMatrix1D;
import cern.colt.matrix.DoubleMatrix2D;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.linalg.Algebra;

/**
 * Plain stochastic-gradient-descent matrix factorization: approximates an
 * n x m matrix of observed cells by P (n x f) times Q (f x m), minimizing
 * squared reconstruction error with L2 regularization on the factor vectors.
 */
public class SimpleMatrixFactorization {
	private DoubleMatrix2D P;		// Model parameter: n x f row-factor matrix
	private DoubleMatrix2D Q;		// Model parameter: f x m column-factor matrix
	private int factors;			// Number of latent factors f
	private double lambda;			// L2 regularization weight
	private double learningRate;	// SGD step size (set in init())
	private int iter;				// Cumulative number of SGD updates performed
	
	private Algebra algebra;
	private int nSamples;
	private double error;			// Squared error accumulated over the current epoch
	
	/**
	 * @param n number of rows of the matrix being factorized
	 * @param m number of columns of the matrix being factorized
	 * @param f number of latent factors
	 */
	public SimpleMatrixFactorization(int n, int m, int f){
		algebra = new Algebra();
		factors = f;
		error = 0;
		iter = 0;
		learningRate = 0.0;
		nSamples = 0;
		P = new DenseDoubleMatrix2D(n, f);
		Q = new DenseDoubleMatrix2D(f, m);
		lambda = 0.1;
	}
	
	/** Fills P and Q with small uniform random values and sets the learning rate. */
	private void init(ArrayList<Sample> samples){
		nSamples = samples.size();
		Random r = new Random(System.currentTimeMillis());
		double scale = 0.2;		// entries drawn uniformly from [0, scale)
		for(int f = 0; f < factors; f++){
			for(int n = 0; n < P.rows(); n++){
				P.setQuick(n, f, scale * r.nextDouble());
			}
			for(int m = 0; m < Q.columns(); m++){
				Q.setQuick(f, m, scale * r.nextDouble());
			}
		}
		learningRate = 0.1;
		System.out.println("Initializing P and Q. Learning rate = " + learningRate);
	}
	
	/**
	 * Trains the model: for each epoch, shuffles the samples and performs one
	 * SGD update per observed cell, then prints the epoch's mean squared error.
	 *
	 * @param samples observed matrix cells (each must be a MatrixCellValue)
	 * @param epochs  number of full passes over the samples
	 */
	public void run(ArrayList<Sample> samples, int epochs){
		init(samples);
		// One RNG for all epochs: re-seeding with currentTimeMillis() inside the
		// loop could reuse the same seed (and shuffle) when epochs run quickly.
		Random rand = new Random(System.currentTimeMillis());
		for(int i = 0; i < epochs; i++){
			Collections.shuffle(samples, rand);
			for(int j = 0; j < samples.size(); j++){
				move(samples.get(j));
				iter++;
			}
			error /= nSamples;	// mean squared error for this epoch
			System.out.println("Epoch " + (i + 1) + " (updates=" + iter + ") error=" + error);
			error = 0;
		}
	}
	
	/**
	 * Performs one SGD step for a single observed cell: computes the residual,
	 * accumulates its squared error once, and updates row i of P and column j
	 * of Q using gradients evaluated at the current (pre-update) parameters.
	 */
	private void move(Sample x){
		MatrixCellValue s = (MatrixCellValue)x;
		double gamma = learningRate;	// constant step size (decay intentionally disabled)
		
		// Residual of the current prediction; computed once and shared by both
		// gradients, and counted exactly once toward the epoch error.
		double e = s.x - algebra.mult(P.viewRow(s.i), Q.viewColumn(s.j));
		error += e * e;
		
		// Both gradients are evaluated before either matrix is modified, so the
		// update order below does not affect the result.
		DoubleMatrix1D gP = gradientP(s, e);
		DoubleMatrix1D gQ = gradientQ(s, e);
		
		update(P, Utils.multByScalar(gP, -gamma), -1, s.i);
		update(Q, Utils.multByScalar(gQ, -gamma), s.j, -1);
	}
	
	/**
	 * Adds vector v in place to one line of M: to column {@code col} when
	 * {@code row == -1}, or to row {@code row} when {@code col == -1}.
	 * Does nothing if both or neither index is -1.
	 */
	private void update(DoubleMatrix2D M, DoubleMatrix1D v, int col, int row){
		// Fixed column: add v down column 'col'
		if(col != -1 && row == -1){
			for(int k = 0; k < M.rows(); k++){
				M.setQuick(k, col, M.getQuick(k, col) + v.getQuick(k));
			}
		}
		// Fixed row: add v across row 'row'
		if(row != -1 && col == -1){
			for(int k = 0; k < M.columns(); k++){
				M.setQuick(row, k, M.getQuick(row, k) + v.getQuick(k));
			}
		}
	}
	
	/**
	 * Gradient of the regularized squared loss w.r.t. row i of P:
	 * -e * Q[:,j] + lambda * P[i,:], where e is the precomputed residual.
	 */
	private DoubleMatrix1D gradientP(MatrixCellValue s, double e){
		DoubleMatrix1D eq = Utils.multByScalar(Q.viewColumn(s.j), -e);
		DoubleMatrix1D lp = Utils.multByScalar(P.viewRow(s.i), lambda);
		return Utils.addVectors(eq, lp);
	}
	
	/**
	 * Gradient of the regularized squared loss w.r.t. column j of Q:
	 * -e * P[i,:] + lambda * Q[:,j], where e is the precomputed residual.
	 */
	private DoubleMatrix1D gradientQ(MatrixCellValue s, double e){
		DoubleMatrix1D ep = Utils.multByScalar(P.viewRow(s.i), -e);
		DoubleMatrix1D lq = Utils.multByScalar(Q.viewColumn(s.j), lambda);
		return Utils.addVectors(ep, lq);
	}
	
	//TODO: Hessian!!
	
	/** Returns a printable dump of P, Q, and the reconstruction P*Q. */
	public String printSolution(){
		StringBuilder s = new StringBuilder();
		s.append("\nMatrix P: ").append(P.toString());
		s.append("\nMatrix Q: ").append(Q.toString());
		s.append("\nReconstruction: ").append(algebra.mult(P, Q).toString());
		return s.toString();
	}
}
