package driver;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.GenericOptionsParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import partialRDT.PRDTTestMapper;
import partialRDT.PRDTTrainMapper;
import util.ModelOperator;

import completedRDT.CRDTTestMapper;
import completedRDT.CRDTTrainMapper;
import completedRDT.CRDTTrainReducer;
import completedRDT.RDTModel;

/**
 * Driver for the Random Decision Tree (RDT) MapReduce jobs.
 * <p>
 * Supports two tasks ("Train" / "Test") in two modes ("Completed" /
 * "Partial"). All job parameters are read from a Hadoop configuration
 * file supplied on the command line.
 *
 * @author tigerzhong
 */
public class RDTDriver {
    /**
     * Log Object
     */
    private static final Logger log = LoggerFactory.getLogger(RDTDriver.class);
    /**
     * Job Configuration
     */
    private JobConf jConf = null;

    /**
     * Constructor: loads the job configuration and applies the common
     * settings shared by the training and prediction jobs.
     *
     * @param confPath path of the Hadoop configuration file
     */
    public RDTDriver(String confPath) {
	jConf = new JobConf(confPath);
	jConf.setNumMapTasks(Integer.parseInt(jConf.get("Hadoop.NumOfMap")));
	jConf.setNumReduceTasks(Integer.parseInt(jConf
		.get("Hadoop.NumOfReduce")));
	jConf.setOutputKeyClass(Text.class);
	jConf.setOutputValueClass(Text.class);
	jConf.setJar(jConf.get("Hadoop.Jar"));
	// BUG FIX: the Hadoop property is "mapred.child.java.opts" (plural).
	// The previous key "mapred.child.java.opt" was silently ignored, so
	// the intended -Xmx1024m child-JVM heap setting never took effect.
	jConf.set("mapred.child.java.opts", "-Xmx1024m");
    }

    /**
     * Main Function
     *
     * @param args &lt;Train|Test&gt; &lt;Partial|Completed&gt; &lt;confFile&gt;
     * @throws Exception if the selected job fails
     */
    public static void main(String[] args) throws Exception {
	String[] otherArgs = new GenericOptionsParser(new Configuration(), args)
		.getRemainingArgs();
	if (otherArgs.length != 3) {
	    // Tell the user what went wrong instead of exiting silently
	    System.err.println("Usage: RDTDriver <Train|Test> <Partial|Completed> <confFile>");
	    System.exit(2);
	}
	String task = otherArgs[0];	//Task, train or test
	String type = otherArgs[1];	//Type, partial or completed
	String confFile = otherArgs[2];	//Configuration file
	RDTDriver rdtDriver = new RDTDriver(confFile);
	// The two tasks are mutually exclusive, so chain with else-if
	if (task.equalsIgnoreCase("Train")) {
	    rdtDriver.runTrainingJob(type);
	} else if (task.equalsIgnoreCase("Test")) {
	    rdtDriver.runTestJob(type);
	}
    }

    /**
     * Perform Training of RDT.
     * <p>
     * "Completed" mode builds the tree skeleton locally, ships it to the
     * slaves via the distributed cache, then runs a map/reduce pass to fill
     * in per-node statistics. "Partial" mode trains map-side only and merges
     * the per-mapper model parts into a single model file on HDFS.
     *
     * @param type "Completed" or "Partial" (case-insensitive)
     * @throws Exception if model building or the M/R job fails
     */
    public void runTrainingJob(String type) throws Exception {
	jConf.setJobName("RDTTraining");
	FileInputFormat.setInputPaths(jConf, new Path(jConf
		.get("Data.TrainInputPath")));
	FileOutputFormat.setOutputPath(jConf, new Path(jConf
		.get("Data.OutputPath")));
	String outPath = jConf.get("Data.OutputPath");
	String fullModelPath = jConf.get("Model.FullHDFSPath");
	String modelPath = jConf.get("Model.HDFSPath");
	if ("Completed".equalsIgnoreCase(type)) {
	    /* Build RDT on local file system */
	    RDTModel rdt = new RDTModel();
	    rdt.setDepth(jConf.getInt("RDT.Depth", 1));
	    rdt.setMinNum(jConf.getInt("RDT.MinNum", 1));
	    rdt.setNumTrees(jConf.getInt("RDT.Number", 1));
	    rdt.setNumClass(jConf.getInt("Data.NumClasses", 1));
	    rdt.setNumFeatures(jConf.getInt("Data.NumFeatures", 1));
	    rdt.setRatio(jConf.getFloat("RDT.Ratio", 1));
	    rdt.setLbFeature(jConf.get("Data.LowBound"));
	    rdt.setUbFeature(jConf.get("Data.UpBound"));
	    rdt.buildClassifier();
	    // Serialize the model into hard-disk (local copy + HDFS copy)
	    ModelOperator.saveModel(rdt, jConf.get("Model.LocalPath") + "Temp.RDT.model", fullModelPath + "Temp.RDT.model");
	    log.info("Set Finished!");
	    // M/R: get the node index of each instance; calculate the decision
	    // probability of each leaf node.
	    jConf.setMapperClass(CRDTTrainMapper.class);
	    jConf.setReducerClass(CRDTTrainReducer.class);
	    // Distribute the model to each slave node
	    DistributedCache.addCacheFile(new Path(fullModelPath + "Temp.RDT.model").toUri(), jConf);
	    JobClient.runJob(jConf);
	    log.info("Build Finished!");
	    /* Pure, update the information of each node */
	    RDTModel.fillModel(jConf.get("Model.LocalPath"), outPath, jConf.get("Model.LocalPath") + "Temp.RDT.model", jConf.get("Model.LocalPath") + "Completed.RDT.model", fullModelPath + "Completed.RDT.model");
	    log.info("CompletedRDT Finished!");
	} else if ("Partial".equalsIgnoreCase(type)) {
	    jConf.setMapperClass(PRDTTrainMapper.class);
	    DistributedCache.addCacheFile(new Path(jConf.get("Data.HeadPath"))
		.toUri(), jConf);
	    JobClient.runJob(jConf);
	    // Merge per-mapper model parts into one file the test job can load
	    FileSystem fs = FileSystem.get(jConf);
	    FileUtil.copyMerge(fs, new Path(outPath + modelPath), fs, new Path(
		    outPath + modelPath + "Partial.rdt.model"), false, jConf, "");
	    log.info("PartialRDT Finished!");
	}
    }

    /**
     * Backward-compatible alias for the historical misspelled method name.
     *
     * @param type "Completed" or "Partial" (case-insensitive)
     * @throws Exception if the job fails
     * @deprecated use {@link #runTrainingJob(String)} instead
     */
    @Deprecated
    public void runTrainningJob(String type) throws Exception {
	runTrainingJob(type);
    }

    /**
     * Perform Prediction of RDT.
     * <p>
     * Distributes the previously trained model (and, in "Partial" mode, the
     * data header) via the distributed cache, then runs a map-only job over
     * the test input.
     *
     * @param type "Completed" or "Partial" (case-insensitive)
     * @throws Exception if the M/R job fails
     */
    public void runTestJob(String type) throws Exception {
	jConf.setJobName("RDTPrediction");
	FileInputFormat.setInputPaths(jConf, new Path(jConf
		.get("Data.TestInputPath")));
	// NOTE(review): output goes to "Data.TestPath" but outPath below reads
	// "Data.OutputPath" — asymmetric with the training job. Looks like one
	// of the two keys may be wrong; confirm against the config file.
	FileOutputFormat.setOutputPath(jConf, new Path(jConf
		.get("Data.TestPath")));
	String outPath = jConf.get("Data.OutputPath");
	String fullModelPath = jConf.get("Model.FullHDFSPath");
	String modelPath = jConf.get("Model.HDFSPath");
	/* Prediction */
	if ("Completed".equalsIgnoreCase(type)) {
	    jConf.setMapperClass(CRDTTestMapper.class);
	    // Distribute the model to each slave node
	    DistributedCache.addCacheFile(new Path(fullModelPath + "Completed.RDT.model").toUri(), jConf);
	} else if ("Partial".equalsIgnoreCase(type)) {
	    jConf.setMapperClass(PRDTTestMapper.class);
	    DistributedCache.addCacheFile(new Path(jConf.get("Data.HeadPath"))
		.toUri(), jConf);
	    // Distribute the merged model produced by the partial training job
	    DistributedCache.addCacheFile(new Path(
		    outPath + modelPath + "Partial.rdt.model").toUri(), jConf);
	}
	JobClient.runJob(jConf);
	log.info("Prediction Finished!");
    }
}
