package standalone.classification.linear;

import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import allreduce.AllReducer;

import cern.colt.matrix.DoubleFactory1D;
import cern.colt.matrix.DoubleMatrix1D;
import cern.colt.matrix.DoubleMatrix2D;
import cern.jet.math.Mult;

import reducible.FloatScalarReducible;
import reducible.IntScalarReducible;
import reducible.VectorReducible;

import utils.DataInput;

/**
 * Logistic Regression
 * @author Erheng Zhong (purlin.zhong@gmail.com)
 *
 */
public class LogisticRegression {
	/**
	 * Number of features per instance.
	 */
	private int numFeatures;
	/**
	 * Number of target classes.
	 */
	private int numClasses;
	/**
	 * Model: one coefficient vector per class.
	 */
	public VectorReducible[] w;
	/**
	 * Allreduce object used for all cross-client communication.
	 */
	protected AllReducer reduceObj;

	/**
	 * Initializes the weights randomly on every client, then averages them
	 * across all clients via allreduce so that training starts from a common
	 * model.
	 * @param master IP of the spanning-tree master
	 * @param masterPort port of the spanning-tree master
	 * @throws IOException if allreduce communication fails
	 */
	public void initialize(String master, int masterPort) throws IOException {
		// Per-client random initialization; averaged across clients below.
		w = new VectorReducible[numClasses];
		for (int i = 0; i < numClasses; i++) {
			w[i] = new VectorReducible(DoubleFactory1D.dense.random(numFeatures));
		}
		// Each client contributes 1; the allreduced sum is the client count.
		IntScalarReducible numClients = new IntScalarReducible(1);
		reduceObj = new AllReducer(master, masterPort);
		reduceObj.init();
		numClients = (IntScalarReducible) reduceObj.run(numClients);
		// Sum each weight vector across clients, then divide by the client count.
		for (int i = 0; i < numClasses; i++) {
			w[i] = (VectorReducible) reduceObj.run(w[i]);
			w[i].vectors.assign(Mult.div(numClients.val));
		}
	}

	/**
	 * Computes the global (allreduced) gradient of the negative log-likelihood
	 * with respect to each class's coefficient vector.
	 * @param instances local training data, one instance per row
	 * @param labels local target class index for each instance
	 * @return one gradient vector per class, summed over all clients
	 * @throws IOException if allreduce communication fails
	 */
	protected DoubleMatrix1D[] getGradients(DoubleMatrix2D instances, List<Integer> labels) throws IOException {
		DoubleMatrix1D[] localGradient = new DoubleMatrix1D[numClasses];
		for (int i = 0; i < numClasses; i++) localGradient[i] = DoubleFactory1D.dense.make(numFeatures);
		// Local gradient: sum over instances of (p(class|x) - indicator) * x.
		for (int i = 0; i < labels.size(); i++) {
			double[] prob = predictProb(instances.viewRow(i));
			for (int j = 0; j < numClasses; j++) {
				int t = (j == labels.get(i)) ? 1 : 0;
				for (int k = 0; k < numFeatures; k++) {
					localGradient[j].setQuick(k, localGradient[j].getQuick(k) + (prob[j] - t) * instances.getQuick(i, k));
				}
			}
		}
		// Allreduce to obtain the global gradients.
		for (int i = 0; i < numClasses; i++) {
			VectorReducible gw = new VectorReducible(localGradient[i]);
			gw = (VectorReducible) reduceObj.run(gw);
			localGradient[i].assign(gw.vectors);
		}
		return localGradient;
	}

	/**
	 * Computes the log-likelihood of the current model, summed across the data
	 * held by all clients via allreduce.
	 * @param instances local training data, one instance per row
	 * @param labels local target class index for each instance
	 * @return global log-likelihood
	 * @throws IOException if allreduce communication fails
	 */
	protected float getLikelihood(DoubleMatrix2D instances, List<Integer> labels) throws IOException {
		float ll = 0.0f;
		for (int i = 0; i < labels.size(); i++) ll += Math.log(predictProb(instances.viewRow(i))[labels.get(i)]);
		FloatScalarReducible likelihood = new FloatScalarReducible(ll);
		likelihood = (FloatScalarReducible) reduceObj.run(likelihood);
		return likelihood.val;
	}

	/**
	 * Predicts the class distribution of one instance: a softmax over the
	 * per-class linear scores w_i . x.
	 * @param instance feature vector
	 * @return class probability distribution (length numClasses, sums to 1)
	 */
	protected double[] predictProb(DoubleMatrix1D instance) {
		double[] probs = new double[numClasses];
		// Compute the linear scores, tracking the maximum for a numerically
		// stable softmax.
		double maxScore = Double.NEGATIVE_INFINITY;
		for (int i = 0; i < numClasses; i++) {
			double score = 0.0;
			for (int j = 0; j < numFeatures; j++) score += instance.getQuick(j) * w[i].vectors.getQuick(j);
			probs[i] = score;
			if (score > maxScore) maxScore = score;
		}
		// exp(score - max) avoids overflow to Infinity for large scores; the
		// constant shift cancels out in the normalization below.
		double sumProb = 0.0;
		for (int i = 0; i < numClasses; i++) {
			probs[i] = Math.exp(probs[i] - maxScore);
			sumProb += probs[i];
		}
		for (int i = 0; i < numClasses; i++) probs[i] /= sumProb;
		return probs;
	}

	/**
	 * Performs distributed gradient descent to fit w, then releases the
	 * allreduce resources.
	 * @param instances local training data, one instance per row
	 * @param labels local target class index for each instance
	 * @param d trade-off (weight-decay) parameter
	 * @param numIt maximum number of iterations
	 * @param learningRate learning rate
	 * @param eps termination threshold on the change in likelihood
	 * @return final global log-likelihood
	 * @throws IOException if allreduce communication fails
	 */
	public float gradientDescent(DoubleMatrix2D instances, List<Integer> labels, float d, int numIt, float learningRate, float eps) throws IOException {
		// NEGATIVE_INFINITY (not Float.MIN_VALUE, the smallest positive float)
		// guarantees the convergence test cannot fire on the first iteration.
		float likelihood = 0.0f, ll = Float.NEGATIVE_INFINITY;
		// Total number of instances across all clients. labels.size() is the
		// local instance count; Colt's DoubleMatrix2D.size() would wrongly
		// return rows*columns.
		IntScalarReducible nc = new IntScalarReducible(labels.size());
		nc = (IntScalarReducible) reduceObj.run(nc);
		for (int it = 0; it < numIt; it++) {
			// Gradient step with weight decay d on the current coefficients.
			DoubleMatrix1D[] gw = getGradients(instances, labels);
			for (int i = 0; i < numClasses; i++) {
				for (int j = 0; j < numFeatures; j++) {
					double currentW = this.w[i].vectors.getQuick(j);
					this.w[i].vectors.setQuick(j, currentW - (learningRate * gw[i].getQuick(j) / nc.val + d * currentW));
				}
			}
			// Stop once the likelihood change falls below eps.
			likelihood = getLikelihood(instances, labels);
			if (Math.abs(likelihood - ll) < eps) break;
			ll = likelihood;
			System.out.println(it + " Likelihood: " + ll / (float) nc.val);
		}
		reduceObj.clean();
		return likelihood;
	}

	/**
	 * Writes the model, one coefficient vector per line, to the given stream.
	 * The stream is not closed; that is the caller's responsibility.
	 * @param out output stream
	 * @throws IOException if writing fails
	 */
	public void saveModel(OutputStream out) throws IOException {
		// Explicit UTF-8 rather than the platform default charset.
		for (int i = 0; i < numClasses; i++) out.write((this.w[i].toString() + "\n").getBytes(StandardCharsets.UTF_8));
	}

	/**
	 * The rank of this client within the allreduce spanning tree.
	 * @return the client rank; 0 denotes the root client
	 */
	public int getRank() {
		return this.reduceObj.rank;
	}

	/**
	 * Constructor.
	 * @param numFeatures number of features
	 * @param numClasses number of classes
	 */
	public LogisticRegression(int numFeatures, int numClasses) {
		this.numClasses = numClasses;
		this.numFeatures = numFeatures;
	}

	/**
	 * Builds a model from the given training data and, on the rank-0 client
	 * only, saves it to outputPath/modelName.
	 * @param inputPath training data path
	 * @param outputPath output directory
	 * @param dataType data representation type: "libsvm" or "dense"
	 * @param modelName file name for the saved model
	 * @param d trade-off parameter
	 * @param numFeatures number of features
	 * @param numClasses number of classes
	 * @param numIt number of iterations
	 * @param learningRate learning rate
	 * @param eps threshold for termination
	 * @param master IP of spanning tree server
	 * @param masterPort port of spanning tree server
	 * @throws IllegalArgumentException if dataType is neither "libsvm" nor "dense"
	 * @throws IOException if reading, communication, or saving fails
	 */
	public static void buildModel(String inputPath, String outputPath, String dataType, String modelName, float d, int numFeatures, int numClasses, int numIt, float learningRate, float eps, String master, int masterPort) throws NumberFormatException, IOException {
		// Read data. Use equals — the old endsWith check also accepted mere
		// suffixes such as "svm" — and fail fast on an unknown type instead of
		// letting a null matrix cause an NPE during training.
		List<Integer> labels = new ArrayList<Integer>();
		DoubleMatrix2D instances;
		if ("libsvm".equals(dataType)) instances = DataInput.readLibSVM(inputPath, numFeatures, labels);
		else if ("dense".equals(dataType)) instances = DataInput.readDenseMatrix(inputPath, numFeatures, labels);
		else throw new IllegalArgumentException("Unknown dataType: " + dataType + " (expected \"libsvm\" or \"dense\")");
		// Model building.
		LogisticRegression lrObj = new LogisticRegression(numFeatures, numClasses);
		lrObj.initialize(master, masterPort);
		lrObj.gradientDescent(instances, labels, d, numIt, learningRate, eps);
		// Only the root client persists the model; close the stream even if
		// saving throws.
		if (lrObj.getRank() == 0) {
			OutputStream out = new DataOutputStream(new FileOutputStream(new File(outputPath + "/" + modelName)));
			try {
				lrObj.saveModel(out);
			} finally {
				out.close();
			}
		}
	}
}
