package completedRDT;

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

import util.ModelOperator;

/**
 * Mapper for CRDT training
 * @author tigerzhong
 *
 */
public class CRDTTrainMapper extends MapReduceBase implements
	Mapper<Object, Text, Text, Text> {
    /**
     * CRDT model loaded from the distributed cache; routes each instance
     * to the tree nodes it reaches.
     */
    private RDTModel rdt = null;
    /**
     * Reusable output key (tree/node id) — reused across map() calls to
     * avoid per-record allocation, which is safe because the framework
     * serializes the pair before the next call.
     */
    private Text keyText = new Text();
    /**
     * Reusable output value (comma-joined class labels).
     */
    private Text valueText = new Text();
    /**
     * Number of feature columns to read from each input row.
     */
    private int numFeatures;
    /**
     * Number of class-label columns following the features.
     */
    private int numClasses;
    /**
     * Number of leading columns to skip (e.g. an id column) before the features.
     */
    private int numSkip;

    /**
     * Maps one CSV input row to (nodeId, labels) pairs.
     * <p>
     * Expected row layout: {@code numSkip} skipped columns, then
     * {@code numFeatures} feature columns, then {@code numClasses} label
     * columns. Empty rows are ignored. For each tree node the instance
     * reaches, emits the node id as key and the comma-joined labels as value.
     *
     * @param key      input key (unused)
     * @param value    one CSV line of the training data
     * @param context  collector receiving (nodeId, labels) pairs
     * @param reporter progress reporter (unused)
     * @throws IOException if emitting a pair fails, or the row has fewer
     *                     columns than the configuration requires
     */
    @Override
    public void map(Object key, Text value,
	    OutputCollector<Text, Text> context, Reporter reporter)
	    throws IOException {
	String rowData = value.toString();
	if (rowData.length() != 0) {
	    /* Split; -1 keeps trailing empty fields so column counts stay exact */
	    String features[] = rowData.split(",", -1);
	    /* Fail with context instead of an opaque ArrayIndexOutOfBoundsException */
	    if (features.length < numSkip + numFeatures + numClasses) {
		throw new IOException("Malformed row: expected at least "
			+ (numSkip + numFeatures + numClasses)
			+ " columns but got " + features.length + ": " + rowData);
	    }
	    String f[] = new String[numFeatures];
	    for (int i = 0; i < numFeatures; i++)
		f[i] = features[i + numSkip];
	    /* Classes information for building trees */
	    StringBuilder label = new StringBuilder();
	    for (int i = 0; i < numClasses; i++) {
		if (i != 0) label.append(",");
		label.append(features[numFeatures + numSkip + i]);
	    }
	    /* Get the tree and node where the instance reaches */
	    List<String> idList = rdt.goThroughTree(f);
	    for (String id : idList) {
		keyText.set(id);
		valueText.set(label.toString());
		context.collect(keyText, valueText);
	    }
	}
    }

    /**
     * Loads the CRDT model from the first distributed-cache file and reads
     * the data-layout parameters from the job configuration.
     * <p>
     * Fails fast by rethrowing load errors: previously the exception was
     * swallowed, leaving {@code rdt == null} and causing a confusing
     * NullPointerException on the first {@code map()} call instead.
     *
     * @param job the job configuration
     */
    @Override
    public void configure(JobConf job) {
	try {
	    Path[] paths = DistributedCache.getLocalCacheFiles(job);
	    rdt = ModelOperator.loadModel(paths[0].toString());		//Load model from hard disk
	    numSkip = job.getInt("Data.NumSkip", 1);
	    numFeatures = job.getInt("Data.NumFeatures", 10);
	    // NOTE(review): key casing is inconsistent with the two above
	    // ("numClasses" vs "NumSkip"/"NumFeatures"); kept as-is because
	    // changing it would break existing job configurations.
	    numClasses = job.getInt("Data.numClasses", 1);
	} catch (IOException e) {
	    // Propagate with cause so the task fails here, not later in map()
	    throw new RuntimeException("Failed to load CRDT model from distributed cache", e);
	}
    }
}
