package driver;

import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextOutputFormat;

import regression.model.AbstractRegression;
import utils.HadoopUtil;

public class PoissonRunner extends RegressionRunner {

    /**
     * Performs iterative model fitting via repeated gradient MapReduce jobs.
     * <p>
     * Iteration {@code it} reads {@code model.res.it} from the distributed cache,
     * writes raw job output to {@code <outPath><it+1>}, which is then merged into
     * {@code parameter.res.<it+1>} and saved as {@code model.res.<it+1>} for the
     * next round.
     *
     * @param confPath path to the Hadoop/job configuration resource; must define
     *                 {@code Model.Name}, {@code Data.OutputPath},
     *                 {@code Data.TrainInputPath}, {@code Hadoop.NumOfMap},
     *                 {@code Hadoop.NumOfReduce} and {@code Hadoop.Jar}
     * @throws Exception on reflection failure, HDFS errors, or job failure
     */
    public void performFitting(String confPath) throws Exception {
	JobConf jConf = new JobConf(confPath);
	int iteration = jConf.getInt("Model.Iteration", 100);
	String outPath = jConf.get("Data.OutputPath");

	// Instantiate the regression model named in the config via reflection.
	AbstractRegression regression =
		(AbstractRegression) Class.forName(jConf.get("Model.Name")).newInstance();
	regression.init(confPath);	// Initialize the parameters

	// Merge the initial parameter files and seed the iteration-0 model.
	HadoopUtil.HDFSMove(false, outPath + "0", outPath + "parameter.res.0");
	regression.setParameters(HadoopUtil.HDFSReadArrayData(outPath + "parameter.res.0"));
	HadoopUtil.HDFSSaveModel(jConf, regression, outPath + "model.res.0");

	/* Iterative gradient updates */
	for (int it = 0; it < iteration; it++) {
	    System.out.println("regression " + it);
	    jConf = new JobConf(confPath);
	    jConf.setNumMapTasks(Integer.parseInt(jConf.get("Hadoop.NumOfMap")));
	    jConf.setNumReduceTasks(Integer.parseInt(jConf.get("Hadoop.NumOfReduce")));
	    jConf.setOutputKeyClass(Text.class);
	    jConf.setOutputValueClass(Text.class);
	    jConf.setMapperClass(regression.MR.GradientMapper.class);
	    jConf.setReducerClass(regression.MR.GradientReducer.class);
	    jConf.setOutputFormat(TextOutputFormat.class);
	    jConf.setJar(jConf.get("Hadoop.Jar"));
	    // Fixed property name: "mapred.child.java.opt" (no trailing 's') is not
	    // a real Hadoop key, so the heap setting was silently ignored.
	    jConf.set("mapred.child.java.opts", "-Xmx1024m");
	    jConf.setJobName("regression " + it);
	    // Ship the current model to the tasks via the distributed cache.
	    DistributedCache.addCacheFile(new Path(outPath + "model.res." + it).toUri(), jConf);
	    FileInputFormat.setInputPaths(jConf, new Path(jConf.get("Data.TrainInputPath")));
	    FileOutputFormat.setOutputPath(jConf, new Path(outPath + (it + 1)));
	    // runJob blocks until completion and throws IOException on failure;
	    // the former busy-wait loop (while (!rJob.isComplete());) was redundant
	    // and spammed the JobTracker with status RPCs.
	    JobClient.runJob(jConf);

	    // Fold the job output back into the model for the next iteration.
	    HadoopUtil.HDFSMove(false, outPath + (it + 1), outPath + "parameter.res." + (it + 1));
	    regression.setParameters(HadoopUtil.HDFSReadArrayData(outPath + "parameter.res." + (it + 1)));
	    HadoopUtil.HDFSSaveModel(jConf, regression, outPath + "model.res." + (it + 1));
	}
    }

    /**
     * Performs prediction with the final fitted model using a map-only job.
     *
     * @param confPath path to the Hadoop/job configuration resource; must define
     *                 {@code Data.TestPath}, {@code Data.TestInputPath},
     *                 {@code Hadoop.NumOfMap} and {@code Hadoop.Jar}
     * @throws Exception on HDFS errors or job failure
     */
    public void performPrediction(String confPath) throws Exception {
	JobConf jConf = new JobConf(confPath);
	// NOTE(review): fitting saves models under Data.OutputPath, but prediction
	// loads them from Data.TestPath — confirm these are intended to be the
	// same directory, otherwise the cached model file will not exist.
	String outPath = jConf.get("Data.TestPath");
	jConf.setNumMapTasks(Integer.parseInt(jConf.get("Hadoop.NumOfMap")));
	jConf.setNumReduceTasks(0);	// map-only: mapper output is the prediction
	jConf.setOutputKeyClass(Text.class);
	jConf.setOutputValueClass(Text.class);
	jConf.setMapperClass(regression.MR.PredictMapper.class);
	jConf.setOutputFormat(TextOutputFormat.class);
	jConf.setJar(jConf.get("Hadoop.Jar"));
	// Fixed property name (see performFitting): must end in "opts".
	jConf.set("mapred.child.java.opts", "-Xmx1024m");
	jConf.setJobName("Regression Prediction");
	// Use the same default (100) as performFitting so both methods agree on
	// the final model index when Model.Iteration is absent; the old code used
	// the raw string and would have looked for "model.res.null".
	int iteration = jConf.getInt("Model.Iteration", 100);
	DistributedCache.addCacheFile(new Path(outPath + "model.res." + iteration).toUri(), jConf);
	FileInputFormat.setInputPaths(jConf, new Path(jConf.get("Data.TestInputPath")));
	FileOutputFormat.setOutputPath(jConf, new Path(outPath));
	JobClient.runJob(jConf);	// blocks until completion; throws on failure
    }

}
