package hadoop.classification.linear;


import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import cern.colt.matrix.DoubleMatrix1D;

import standalone.classification.linear.LogisticRegression;
import utils.DataInput;
import utils.StructureTransform;

/**
 * Mapper for LR
 * @author Erheng Zhong (purlin.zhong@gmail.com)
 *
 */
public class LRMapper extends MapReduceBase implements Mapper<LongWritable, Text, NullWritable, NullWritable> {
	/**
	 * Training data: one feature vector per instance, accumulated across map() calls.
	 */
	private List<DoubleMatrix1D> instances;
	/**
	 * Training labels, kept index-aligned with {@link #instances}.
	 */
	private List<Integer> labels;
	/**
	 * Number of features per instance
	 */
	private int numFeatures;
	/**
	 * Number of classes
	 */
	private int numClasses;
	/**
	 * Number of iterations
	 */
	private int numIt;
	/**
	 * Regularization parameter
	 */
	private float d;
	/**
	 * Learning rate
	 */
	private float learningRate;
	/**
	 * Threshold of termination
	 */
	private float eps;
	/**
	 * IP of spanning tree server
	 */
	private String masterIP;
	/**
	 * Model name
	 */
	private String modelName;
	/**
	 * Port of spanning tree server
	 */
	private int masterPort;
	/**
	 * Model path
	 */
	private Path outputPath = null;
	/**
	 * Configuration of this job
	 */
	private JobConf thisJob = null;
	/**
	 * File type: "libsvm" or "dense"
	 */
	private String fileType = null;
	
	/**
	 * Reads all model/data/network parameters from the job configuration and
	 * initializes the in-memory training buffers.
	 */
	@Override
	public void configure(JobConf job) {
		//Parameters
		numFeatures = job.getInt("data.numfeatures", 0);
		numClasses = job.getInt("data.numclasses", 2);
		numIt = job.getInt("model.numit", 100);
		learningRate = job.getFloat("model.learningrate", 0.05f);
		d = job.getFloat("model.reg", 0.1f);
		eps = job.getFloat("model.eps", 1e-3f);
		masterIP = job.get("net.master", "localhost");
		masterPort = job.getInt("net.masterport", 10090);
		modelName = job.get("model.name","model.lr");
		outputPath = new Path(job.get("model.path")+"/"+modelName);
		fileType = job.get("data.type","libsvm");
		thisJob = job;
		//Initialization
		instances = new ArrayList<DoubleMatrix1D>();
		labels = new ArrayList<Integer>();
	}
	
	/**
	 * Buffers one training record ("label feature-vector") in memory; the actual
	 * optimization happens in {@link #close()} once all records are seen.
	 * Malformed lines (no feature part, unparseable label) and unrecognized
	 * file types are skipped so that instances and labels stay index-aligned.
	 */
	@Override
	public void map(LongWritable key, Text value, OutputCollector<NullWritable, NullWritable> collector, Reporter reporter)
			throws IOException {
		String[] featureVec = value.toString().split(" ",2);
		//Skip blank or malformed lines lacking a feature part
		if(featureVec.length < 2 || featureVec[1].isEmpty()){
			reporter.incrCounter("LRMapper", "SKIPPED_MALFORMED_LINES", 1);
			return;
		}
		int label;
		try{
			label = Integer.parseInt(featureVec[0]);
		}catch(NumberFormatException e){
			reporter.incrCounter("LRMapper", "SKIPPED_BAD_LABELS", 1);
			return;
		}
		//Only record the label once the instance is successfully parsed,
		//otherwise the two lists would drift out of alignment
		if("libsvm".equalsIgnoreCase(fileType)){
			instances.add(DataInput.parseLibSVM(featureVec[1], numFeatures));
			labels.add(label);
		}else if("dense".equalsIgnoreCase(fileType)){
			instances.add(DataInput.parseDense(featureVec[1], numFeatures));
			labels.add(label);
		}else{
			reporter.incrCounter("LRMapper", "SKIPPED_UNKNOWN_TYPE", 1);
		}
	}
	
	/**
	 * Trains the logistic regression model over the buffered data via gradient
	 * descent (coordinated through the spanning-tree master) and, on rank 0,
	 * persists the model to {@link #outputPath} on the job's file system.
	 */
	@Override
	public void close() throws IOException{
		//Build model
		LogisticRegression lrObj = new LogisticRegression(numFeatures,numClasses);
		lrObj.initialize(masterIP, masterPort);
		lrObj.gradientDescent(StructureTransform.vecList2Matrix(instances), labels, d, numIt, learningRate, eps);	//Gradient descent
		//Save model
		if(lrObj.getRank()==0){
			FileSystem fs = FileSystem.get(thisJob); 
			FSDataOutputStream out = fs.create(outputPath);
			try{
				lrObj.saveModel(out);
			}finally{
				//Always release the HDFS stream, even if saveModel fails
				out.close();
			}
		}
	}

}
