package org.robotfish.learn;

import java.util.ArrayList;

import org.ejml.alg.dense.linsol.LinearSolverFactory;
import org.ejml.data.DenseMatrix64F;
import org.ejml.ops.CommonOps;
import org.ejml.simple.SimpleMatrix;

public class LinearModel {
	/** Number of raw input dimensions each data point must have. */
	int inputDims;
	/** Feature generators; each maps a raw input vector to one or more feature values. */
	ArrayList<Features> features;
	/** General regularization level, used for calculating component regularizations */
	double regularizationLevel;
	/** Specific regularization for each component, used in regression.
	 *  Set to Var(x_i) * regularizationLevel */
	double[] regComponents;
	
	/** One row per data point: the raw input as given. */
	SimpleMatrix inputRaw;
	/** One row per data point: the computed feature values ({@code totalDim} columns). */
	SimpleMatrix inputFeatures;
	/** Column vector with one output value per data point. */
	SimpleMatrix outputs;
	/** Sum of {@code dimensions()} over all registered features. */
	int totalDim;
	
	/** Most recently fitted regression; {@code null} until a regression has been run. */
	LinearRegression regression;
	
	/**
	 * @param inputDims      number of raw input dimensions per data point
	 * @param regularization general regularization level (see {@link #updateRegularization()})
	 */
	public LinearModel(int inputDims, double regularization) {
		this.inputDims = inputDims;
		this.regularizationLevel = regularization;
		features = new ArrayList<Features>();
		outputs = new SimpleMatrix(0, 1);
		inputRaw = new SimpleMatrix(0, inputDims);
		inputFeatures = new SimpleMatrix(0, 0);
		totalDim = 0;
	}
	
	/**
	 * Registers a feature generator. NOTE(review): this reshapes the stored
	 * feature matrix to zero rows, so it appears intended to be called only
	 * before any data points are added — confirm against callers.
	 * @param f feature to be added to the model
	 */
	public void addFeature(Features f) {
		features.add(f);
		totalDim += f.dimensions();
		inputFeatures.reshape( 0, totalDim );
	}
	
	/**
	 * Adds a (input, output) datapoint, also calculates input features.<br>
	 * Does not learn a new model.
	 * @param input  raw input vector (Nx1 or 1xN matrix with N == inputDims)
	 * @param output observed output value
	 * @throws IllegalArgumentException if input does not have inputDims elements
	 */
	public void addDataPoint(SimpleMatrix input, double output) {
		int n = input.numCols()*input.numRows();
		if (n != inputDims) {
			throw new IllegalArgumentException( String.format(
				"Input dimension should be %d, but was %d.", inputDims, n
			));
		}
		addRow(inputRaw, input);
		addRow(inputFeatures, getFeatures( input ) );
		addRow(outputs, new double[]{output} );
	}
	
	/**
	 * Adds all points in the matrixes
	 * @param input   the matrix where each row is an input with raw features
	 * @param output  (row or column) vector where each element is an output
	 */
	public void addDataPoints(SimpleMatrix input, SimpleMatrix output) {
		for (int i=0; i<input.numRows(); i++) {
			addDataPoint(input.extractVector(true, i), output.get(i));
		}
	}
	
	/**
	 * @param input  array with raw inputs
	 * @return row vector of calculated features
	 */
	public SimpleMatrix getFeatures(double[] input) {
		return getFeatures(new SimpleMatrix(new double[][]{input}));
	}
	
	/**
	 * @param input  array with raw inputs 
	 * @return output value, based on current regression coefficients
	 */
	public double getPrediction(double[] input) {
		return getFeatures(input).dot(this.regression.alpha);
	}

	/**
	 * @param input  vector of raw inputs (Nx1 or 1xN matrix) 
	 * @return output value, based on current regression coefficients
	 */
	public double getPrediction(SimpleMatrix input) {
		return getFeatures(input).dot(this.regression.alpha);
	}
	
	/**
	 * Concatenates the values of all registered features into one row vector.
	 * @param input  vector of raw inputs (Nx1 or 1xN matrix)
	 * @return Row vector of calculated features (1 x totalDim).
	 */
	public SimpleMatrix getFeatures(SimpleMatrix input) {
		SimpleMatrix m = new SimpleMatrix(1, totalDim);
		int offset = 0;
		for (Features f : features) {
			m.setRow( 0, offset, f.feat(input) );
			offset += f.dimensions();
		}
		return m;
	}
	
	/**
	 * @return variances for current input features (unbiased estimates).
	 */
	public double[] getFeatureVariances() {
		return getColumnVariances(inputFeatures);
	}
	
	/**
	 * Updates regularization for the linear model based on feature variances
	 * and regularization level:<br>
	 * componentRegularization[i] = regularizationLevel * getFeatureVariances()[i].
	 * <p>
	 * Zero-variance features are special-cased: the first one (presumably a
	 * constant/bias feature) keeps regularization ~0, while any further
	 * zero-variance feature gets regularization regularizationLevel (the
	 * duplicate constant is effectively suppressed).
	 */
	public void updateRegularization() {
		regComponents = getFeatureVariances();
		boolean var0 = false;
		for (int i=0; i<regComponents.length; i++) {
			if (regComponents[i]<1e-9) {
				if (var0) { 
					// already one feature with var == 0, removing next one:
					regComponents[i] = 1.0;
				} else {
					// first one with var == 0, no change:
					var0 = true;
				}
			}
			regComponents[i] *= regularizationLevel;
		}
	}
	
	/**
	 * @param regComponents regularization value per feature component
	 */
	public void setRegComponents(double[] regComponents) {
		this.regComponents = regComponents;
	}
	
	/**
	 * @return regularization values for each feature
	 */
	public double[] getRegComponents() {
		return this.regComponents;
	}
	
	/**
	 * Carries out regression. Regularization should be set before.
	 */
	public void doFullRegression() {
		this.regression = linearRegression(inputFeatures, outputs, regComponents);
	}
	
	/**
	 * Updates regularization and does full regression,
	 * i.e., calls updateRegularization() and doFullRegression().
	 */
	public void doRegFullRegression() {
		updateRegularization();
		doFullRegression();
	}
	
	/**
	 * @return the most recently fitted regression (null if none yet)
	 */
	public LinearRegression getRegression() {
		return this.regression;
	}

	/**
	 * Updates regression model using incremental update. <br>
	 * Regression object must already have been initialized. 
	 * @param inputRaw  vector of raw input (Nx1 or 1xN matrix)
	 * @param output    output value
	 */
	public void addPointAndUpdateRegression(SimpleMatrix inputRaw, double output) {
		addDataPoint(inputRaw, output);
		updateRegression();
	}

	/**
	 * Updates regression model using incremental update with the last added point. <br>
	 * Regression object must already have been initialized.
	 * <p>
	 * This is a Sherman-Morrison rank-one update: regression.A holds
	 * inv(X'X + diag(lambda)) (linearRegression() inverts before storing), so
	 * adding a row x updates the inverse via A += G where
	 * G = -(A*x)(A*x)' / (1 + x'*A*x). The statement order matters:
	 * alpha_new = (A+G)(b+x*y) = alpha + A*x*y + G*(b+x*y), which is why b is
	 * updated before the G*b term is added.
	 */
	public void updateRegression() {
		double output = outputs.get(outputs.numRows()-1);
		SimpleMatrix x = inputFeatures.extractVector(true, inputFeatures.numRows()-1).transpose();
		
		// v = A * x
		SimpleMatrix v = regression.A.mult(x);
		
		// G = -1/(1 + x'*v) * v*v'
		SimpleMatrix G = v.mult(v.transpose()).scale( -1.0/(1.0 + x.dot(v)) );
		
		// b += x * y  ### y is output
		CommonOps.addEquals(regression.b.getMatrix(), x.scale(output).getMatrix());
		
		// alpha += A * x * y
		CommonOps.addEquals(regression.alpha.getMatrix(), regression.A.mult(x).scale(output).getMatrix());
		
		// alpha += G * b  (b already includes x*y, see Javadoc)
		CommonOps.addEquals(regression.alpha.getMatrix(), G.mult(regression.b).getMatrix());
		
		// A += G
		CommonOps.addEquals(regression.A.getMatrix(), G.getMatrix());
	}
	
	/**
	 * Computes the per-column sample variance of M.
	 * @param M  matrix
	 * @return variances for each column (uses unbiased estimator); all zeros
	 *         when M has fewer than two rows, since the unbiased estimator is
	 *         undefined there (the old rows/(rows-1) correction produced NaN).
	 */
	public static double[] getColumnVariances(SimpleMatrix M) {
		int rows = M.numRows();
		int cols = M.numCols();
		double[] vars = new double[cols];
		if (rows < 2) {
			// avoid division by zero in the (rows-1) correction below
			return vars;
		}
		for (int f=0; f<cols; f++) {
			DenseMatrix64F m  = CommonOps.extract(M.getMatrix(), 0, rows, f, f+1);
			DenseMatrix64F m2 = new DenseMatrix64F( m.numRows, m.numCols );
			CommonOps.elementMult(m, m, m2);
			// biased estimate: E[x^2] - E[x]^2
			vars[f] = CommonOps.elementSum(m2)/rows - Math.pow(CommonOps.elementSum(m)/rows,2);
			// unbiased estimator:
			vars[f] = rows*vars[f]/(rows-1);
		}
		return vars;
	}
	
	/**
	 * @param Xm     input features (each row is an input)
	 * @param ym     output values (column vector)
	 * @param regul  array with regularization values
	 * @return solution to linear regression:<br>
	 * 	       alpha = inv(X'X + diag(regul)) * X' * y.
	 *         Note: the returned LinearRegression.A is the INVERTED matrix
	 *         inv(X'X + diag(regul)), as required by updateRegression().
	 * @throws IllegalArgumentException on dimension mismatch
	 * @throws RuntimeException if (X'X + diag(regul)) is singular
	 */
	public static LinearRegression linearRegression(SimpleMatrix Xm, SimpleMatrix ym, double[] regul) {
		DenseMatrix64F X = Xm.getMatrix();
		DenseMatrix64F y = ym.getMatrix();
		if (X.numRows!=y.numRows) {
			throw new IllegalArgumentException(String.format("X and y should have the same number of rows. X has %d and y %d rows.", X.numRows, y.numRows));
		}
		if (X.numCols!=regul.length) {
			throw new IllegalArgumentException(String.format("Number of columns in X and length of regul should be the same. X has %d cols and regul has size %d.", X.numCols, regul.length));
		}
		
		DenseMatrix64F A = CommonOps.diag(regul); // diag(lambda)
		DenseMatrix64F b = new DenseMatrix64F(regul.length, 1);
		DenseMatrix64F alpha = new DenseMatrix64F(regul.length, 1);
		
		CommonOps.multAddTransA(X, X, A); // A = X'*X + diag(lambda)
		CommonOps.multTransA(X, y, b); // b = X'*y
		
		// alpha = inv(A) * b; A is inverted in place so the inverse can be
		// stored in the result for later incremental updates
		if (!CommonOps.invert(A)) {
			throw new RuntimeException("(X'*X + diag(lambda)) is singular, couldn't solve.");
		}
		CommonOps.mult(A, b, alpha);
		return new LinearRegression(new SimpleMatrix(A), new SimpleMatrix(b), new SimpleMatrix(alpha) );
	}
	
	
	/**
	 * Adds row <b>r</b> to the matrix <b>m</b>, grows the size of the matrix if necessary.<br>
	 * Number of rows is increased by 1. Uses amortized doubling: when the
	 * current row count is a power of two the backing array is grown to twice
	 * the rows first (reshape with saveValues=true reallocates only when
	 * capacity is exceeded), then the logical size is set to nrow+1.
	 * @param m  matrix to be increased
	 * @param r  row to add (either row or column vector with m.numCols elements)
	 */
	public static void addRow(SimpleMatrix m, SimpleMatrix r) {
		DenseMatrix64F matrix = m.getMatrix();
		DenseMatrix64F row = r.getMatrix();
		int nrow = matrix.numRows;
		// growing matrix 2x, if full:
		if (isPowerOf2(nrow)) { 
			matrix.reshape(nrow*2, matrix.numCols, true);
		} 
		matrix.reshape(nrow+1, matrix.numCols, true);
		for (int i=0; i<matrix.numCols; i++) {
			matrix.set(nrow, i, row.get(i));
		}
	}

	/**
	 * Adds row to the matrix, grows the size of the matrix if necessary.
	 * Number of rows is increased by 1 (same amortized-doubling strategy as
	 * {@link #addRow(SimpleMatrix, SimpleMatrix)}).
	 * @param m    matrix to be increased
	 * @param row  array with m.numCols values to append as the new last row
	 */
	public static void addRow(SimpleMatrix m, double[] row) {
		DenseMatrix64F matrix = m.getMatrix();
		int nrow = matrix.numRows;
		// growing matrix 2x, if full:
		if (isPowerOf2(nrow)) { 
			matrix.reshape(nrow*2, matrix.numCols, true);
		} 
		matrix.reshape(nrow+1, matrix.numCols, true);
		for (int i=0; i<matrix.numCols; i++) {
			matrix.set(nrow, i, row[i]);
		}
	}

	/**
	 * @param x value to test
	 * @return true if x is power of 2, i.e., there is an integer n &gt;= 0 such that 2^n=x
	 */
	public static boolean isPowerOf2(int x) {
		// zero and negative numbers are not powers of two (the old code
		// returned true for x <= 0, contradicting this contract)
		if (x <= 0) return false;
		if (x <= 2) return true;
		return (1 << binlog(x)) == x;
	}
	
	/**
	 * Branch-free integer base-2 logarithm.
	 * @param bits value (treated as unsigned via logical shifts)
	 * @return floor(log2(bits))
	 */
	public static int binlog( int bits )
	{
	    int log = 0;
	    if( ( bits & 0xffff0000 ) != 0 ) { bits >>>= 16; log = 16; }
	    if( bits >= 256 ) { bits >>>= 8; log += 8; }
	    if( bits >= 16  ) { bits >>>= 4; log += 4; }
	    if( bits >= 4   ) { bits >>>= 2; log += 2; }
	    return log + ( bits >>> 1 );
	}

}
