package regression.MR;

import java.io.IOException;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import regression.model.AbstractRegression;
import utils.HadoopUtil;

/**
 * Mapper that computes the per-feature gradient contribution of each
 * training instance.
 *
 * <p>Input records are comma-separated lines: the first {@code numSkip}
 * columns are ignored, followed by {@code numFeatures} feature values and
 * then {@code numClasses} target values. For each feature index {@code i}
 * the mapper emits the pair {@code (i, "g_i0,g_i1,...,")} where
 * {@code g_ij} is the gradient w.r.t. feature {@code i} for class
 * {@code j}, as produced by {@link AbstractRegression#getGradient}.
 *
 * @author tigerzhong
 *
 */
public class GradientMapper extends MapReduceBase implements
	Mapper<LongWritable, Text, Text, Text> {
    /**
     * Number of features in using
     */
    private int numFeatures;
    /**
     * Number of classes
     */
    private int numClasses;
    /**
     * Number of leading CSV columns to skip before the feature values
     */
    private int numSkip;
    /**
     * Regression model, loaded from the distributed cache in {@link #configure}
     */
    AbstractRegression regression;
    /**
     * Reusable output key (feature index rendered as text)
     */
    private Text key = new Text();
    /**
     * Reusable output value (comma-separated gradient components)
     */
    private Text value = new Text();

    /**
     * Parses one CSV record and emits one (featureIndex, gradients) pair
     * per feature.
     *
     * @param inKey byte offset of the line (unused)
     * @param inValue the CSV record
     * @param output collector receiving (featureIndex, "g0,g1,...,") pairs
     * @param reporter progress reporter (unused)
     * @throws IOException if the collector fails
     */
    @Override
    public void map(LongWritable inKey, Text inValue,
	    OutputCollector<Text, Text> output, Reporter reporter)
	    throws IOException {
	/*Split the record: skipped columns, then features, then targets*/
	String[] items = inValue.toString().split(",");
	double[] instance = new double[numFeatures];
	double[] target = new double[numClasses];
	for (int i = 0; i < numFeatures; i++)
	    instance[i] = Double.parseDouble(items[numSkip + i]);
	for (int i = 0; i < numClasses; i++)
	    target[i] = Double.parseDouble(items[numSkip + numFeatures + i]);
	/*Get the updated information: per-feature, per-class gradients*/
	double[][] resDev = regression.getGradient(instance, target);
	for (int i = 0; i < numFeatures; i++) {
	    // StringBuilder instead of String += in the hot loop; the
	    // trailing comma is kept for compatibility with the reducer.
	    StringBuilder outValues = new StringBuilder();
	    for (int j = 0; j < numClasses; j++)
		outValues.append(resDev[i][j]).append(',');
	    key.set(Integer.toString(i));
	    value.set(outValues.toString());
	    output.collect(key, value);
	}
    }

    /**
     * Reads job parameters and loads the regression model from the first
     * file in the distributed cache.
     *
     * @param job the job configuration
     * @throws IllegalStateException if the model cannot be loaded; failing
     *         fast here beats the NullPointerException every subsequent
     *         {@link #map} call would otherwise hit on {@code regression}
     */
    @Override
    public void configure(JobConf job) {
	numClasses = job.getInt("Data.NumClasses", 1);
	numFeatures = job.getInt("Data.NumFeatures", 8);
	numSkip = job.getInt("Data.NumSkip", 3);
	try {
	    Path[] paths = DistributedCache.getLocalCacheFiles(job);
	    if (paths == null || paths.length == 0)
		throw new IllegalStateException(
			"No model file found in the distributed cache");
	    regression = HadoopUtil.HDFSLoadModel(job, paths[0].toString());
	} catch (IOException e) {
	    throw new IllegalStateException(
		    "Failed to load regression model from distributed cache", e);
	}
    }

}
