package completedRDT;

import java.io.IOException;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import util.ModelOperator;

/**
 * Mapper for CRDT prediction
 * @author tigerzhong
 *
 */
public class CRDTTestMapper extends MapReduceBase implements
	Mapper<Object, Text, Text, Text> {
    /**
     * CRDT model, loaded from the distributed cache in {@link #configure(JobConf)}.
     */
    private RDTModel rdt = null;
    /**
     * Reusable output key: the leading "skip" columns re-joined with commas.
     */
    private Text keyText = new Text();
    /**
     * Reusable output value: true class values &lt;Tab&gt; predicted class distribution.
     */
    private Text valueText = new Text();
    /**
     * Number of feature columns used for classification.
     */
    private int numFeatures;
    /**
     * Number of leading columns to skip (identifier/key columns).
     */
    private int numSkip;

    /**
     * Classifies one CSV input record with the loaded CRDT model.
     * <p>
     * Expected input layout per line (comma-separated):
     * {@code skip[0..numSkip) , features[0..numFeatures) , trueClass[0..k)}
     * where {@code k} is the length of the distribution returned by
     * {@code rdt.classifyInstance}. Empty true-class cells are treated as 0.0.
     * <p>
     * Output format: key = skip columns joined by commas;
     * value = comma-joined true classes, a tab, then comma-joined predictions.
     *
     * @param key      ignored input key
     * @param value    one CSV line of the test data
     * @param context  collector receiving (key, trueClasses\tpredictions)
     * @param reporter unused
     * @throws IOException if the collector fails
     */
    @Override
    public void map(Object key, Text value,
	    OutputCollector<Text, Text> context, Reporter reporter)
	    throws IOException {
	/* Split; -1 keeps trailing empty fields so missing labels stay addressable */
	String features[] = value.toString().split(",", -1);
	String f[] = new String[numFeatures];
	for (int i = 0; i < numFeatures; i++)
	    f[i] = features[i + numSkip];
	/* Classification: per-class distribution over the label columns */
	double preVal[] = rdt.classifyInstance(f);
	StringBuilder resPreVal = new StringBuilder();
	StringBuilder resTrueVal = new StringBuilder();
	for (int i = 0; i < preVal.length; i++) {
	    resPreVal.append(preVal[i]);
	    /* True class value used in the output; empty cell means 0.0 */
	    double trueval = 0.0;
	    if (!features[numFeatures + numSkip + i].equals(""))
		trueval = Double.parseDouble(features[numFeatures + numSkip + i]);
	    resTrueVal.append(trueval);
	    if (i != preVal.length - 1){
		resPreVal.append(',');
		resTrueVal.append(',');
	    }
	}
	/* Rebuild the record key from the leading skip columns */
	StringBuilder resKey = new StringBuilder();
	for(int i=0;i<numSkip;i++){
	    resKey.append(features[i]);
	    if(i!=numSkip-1) resKey.append(",");
	}
	/* Output format:
		key <Tab> true class <Tab> prediction class
	*/
	keyText.set(resKey.toString());
	valueText.set(resTrueVal.toString()+'\t'+resPreVal.toString());
	context.collect(keyText, valueText);
    }

    /**
     * Reads job parameters and loads the serialized CRDT model from the
     * distributed cache.
     * <p>
     * Fails fast with {@link IllegalStateException} if the model cannot be
     * loaded: previously the {@code IOException} was silently swallowed,
     * which left {@code rdt} null and produced an opaque NPE on the first
     * call to {@link #map}.
     *
     * @param job the job configuration ("Data.NumSkip" defaults to 1,
     *            "Data.NumFeatures" defaults to 10)
     */
    @Override
    public void configure(JobConf job) {
	/* Plain config reads cannot throw IOException; keep them out of the try */
	numSkip = job.getInt("Data.NumSkip", 1);
	numFeatures = job.getInt("Data.NumFeatures", 10);
	try {
	    Path[] paths = DistributedCache.getLocalCacheFiles(job);
	    /* getLocalCacheFiles may return null when nothing was cached */
	    if (paths == null || paths.length == 0)
		throw new IllegalStateException(
			"No model file found in the distributed cache");
	    rdt = ModelOperator.loadModel(paths[0].toString());	//Load model from hard disk
	} catch (IOException e) {
	    /* Preserve the cause instead of swallowing it */
	    throw new IllegalStateException(
		    "Failed to load RDT model from distributed cache", e);
	}
    }
}
