/**
 * Driver for the evaluation (RMSE) MapReduce job.
 */
package eval;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import dataStructures.ErrorElementWritable;
import dataStructures.FilesAndPaths;

/**
 * Configures and runs the evaluation MapReduce job, which computes
 * per-key error aggregates (via {@link EvalMapper}, {@link EvalCombiner}
 * and {@link EvalReducer}) from the validation user profiles.
 *
 * @author zvikadori
 */
public class EvalDriver {

	/**
	 * Builds and submits the evaluation job, blocking until it finishes.
	 *
	 * <p>Input:  sequence files under {@code Profilers/userProfileValidation}.
	 * <p>Output: text files under
	 * {@code foo<iteration>/<RMSE_DIR_NAME>}, where {@code iteration} is read
	 * from the configuration (default 0).
	 *
	 * @param conf the Hadoop configuration; must carry the {@code iteration}
	 *             counter used to build the per-iteration output path
	 * @return {@code true} if the job completed successfully
	 * @throws Exception if job submission or execution fails
	 */
	public static boolean runEval(Configuration conf) throws Exception {

        // Job.getInstance(...) is the supported factory; the Job(Configuration)
        // constructor is deprecated and shares the caller's conf instead of
        // taking a defensive snapshot of it.
        Job job = Job.getInstance(conf);

        Path in = new Path("Profilers/userProfileValidation");
        // NOTE(review): "foo" looks like a placeholder output-root name —
        // consider promoting it to a constant in FilesAndPaths alongside
        // RMSE_DIR_NAME.
        Path out = new Path("foo" + conf.getInt("iteration", 0) + "/" + FilesAndPaths.RMSE_DIR_NAME);

        // Job identity and I/O formats: binary sequence files in, text out.
        job.setJobName("EvalMapRed");
        SequenceFileInputFormat.addInputPath(job, in);
        job.setInputFormatClass(SequenceFileInputFormat.class);
        TextOutputFormat.setOutputPath(job, out);
        job.setOutputFormatClass(TextOutputFormat.class);

        // Map phase emits (IntWritable key, ErrorElementWritable partial error).
        job.setMapperClass(EvalMapper.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(ErrorElementWritable.class);

        // Combiner pre-aggregates map output to cut shuffle volume.
        job.setCombinerClass(EvalCombiner.class);

        // Reduce phase folds partial errors into a final double per key.
        job.setReducerClass(EvalReducer.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(DoubleWritable.class);

        // Submit the job and poll for progress until it completes.
        return job.waitForCompletion(true);
      }


}
