/*
**    Copyright (C) 2003-2011 Institute for Systems Biology
**                            Seattle, Washington, USA.
**
**    This library is free software; you can redistribute it and/or
**    modify it under the terms of the GNU Lesser General Public
**    License as published by the Free Software Foundation; either
**    version 3 of the License, or (at your option) any later version.
**
**    This library is distributed in the hope that it will be useful,
**    but WITHOUT ANY WARRANTY; without even the implied warranty of
**    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
**    Lesser General Public License for more details.
**
**    You should have received a copy of the GNU Lesser General Public
**    License along with this library; if not, write to the Free Software
**    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307  USA
*/

package org.systemsbiology.rface.hadoop;

import java.io.File;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.util.logging.Logger;
import org.systemsbiology.hadoop.ioformats.FileAwareTextInputFormat;
import org.systemsbiology.hadoop.ioformats.LexicalKey;
import org.systemsbiology.rface.hadoop.combiners.CombineTargetRecords;
import org.systemsbiology.rface.hadoop.data.TargetRecord;
import org.systemsbiology.rface.hadoop.mappers.LineToInterestingTargetRecord;
import org.systemsbiology.rface.hadoop.mappers.LineToTargetRecord;
import org.systemsbiology.rface.hadoop.reducers.AnalyzeTargetConsistency;

/**
 * Entry point class for Hadoop target-focused approach for RF-ACE job analysis.
 *
 * <p>Usage: {@code INFILE-LIST OUTFILE [flags]}, where INFILE-LIST is a
 * {@link File#pathSeparator}-delimited list of input paths, OUTFILE is the
 * job output directory, and flags are either long ("--flag") or bundled
 * single-character ("-ab") options. The only flag currently recognized is
 * "--interesting", which selects the filtering mapper.
 *
 * @author anorberg
 */
public class TargetAnalysisMain extends Configured implements Tool{

	/**
	 * Launches the tool via {@link ToolRunner}, logs the result, and exits
	 * with the job's status code so calling scripts can detect failure
	 * (previously the JVM always exited 0 regardless of job outcome).
	 *
	 * @param args INFILE-LIST, OUTFILE, then optional flags
	 * @throws Exception if job configuration or execution fails
	 */
	public static void main(String[] args) throws Exception{
		int result = ToolRunner.run(new Configuration(), new TargetAnalysisMain(), args);
		Logger.getLogger(TargetAnalysisMain.class.getName()).info("RF-ACE Aggregator result: " + result);
		System.exit(result);
	}

	/**
	 * Collects command-line flags (arguments after the two positional ones)
	 * into a normalized lower-case set. "--foo" contributes "foo"; "-ab"
	 * contributes "a" and "b". Non-flag arguments are ignored.
	 *
	 * @param args the full command-line argument array
	 * @return set of normalized flag names (never null)
	 */
	private static Set<String> parseFlags(String[] args) {
		Set<String> cmdOptions = new HashSet<String>();
		for(int k = 2; k < args.length; ++k){
			String arg = args[k];
			if(arg.startsWith("--")){
				cmdOptions.add(arg.substring(2).toLowerCase());
			} else if(arg.startsWith("-")){
				// Lower-case single-character flags too, for consistency with
				// the long-flag handling above.
				for(int z = 1; z < arg.length(); ++z){
					cmdOptions.add(arg.substring(z, z+1).toLowerCase());
				}
			}
		}
		return cmdOptions;
	}

	/**
	 * Configures and runs the MapReduce job: line-oriented input is mapped to
	 * {@link TargetRecord}s (optionally only "interesting" ones), combined,
	 * and reduced into consistency results keyed by {@link LexicalKey}.
	 *
	 * @param args INFILE-LIST (path-separator-delimited), OUTFILE, then optional flags
	 * @return 0 if the job succeeds, 1 if it fails
	 * @throws IllegalArgumentException if fewer than two positional arguments are given
	 * @throws Exception if job setup or execution fails
	 */
	public int run(String[] args) throws Exception {
		if(args.length < 2){
			throw new IllegalArgumentException("Wrong number of parameters: must be INFILE-LIST, OUTFILE [flags]");
		}
		
		Set<String> cmdOptions = parseFlags(args);
		
		Configuration conf = new Configuration(getConf());
		
		Job job = new Job(conf, getClass().getName());
		job.setJarByClass(getClass());
		// "--interesting" swaps in the mapper that filters for interesting records.
		if(cmdOptions.contains("interesting")){
			job.setMapperClass(LineToInterestingTargetRecord.class);
		} else {
			job.setMapperClass(LineToTargetRecord.class);
		}
		job.setReducerClass(AnalyzeTargetConsistency.class);
		job.setCombinerClass(CombineTargetRecords.class);
		job.setInputFormatClass(FileAwareTextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		job.setOutputKeyClass(LexicalKey.class);
		job.setOutputValueClass(TargetRecord.InconsistencyResult.class);
		job.setMapOutputKeyClass(LexicalKey.class);
		job.setMapOutputValueClass(TargetRecord.class);
		
		// The input list is joined with the platform path separator; Pattern.quote
		// guards against the separator being a regex metacharacter.
		String[] inFiles = args[0].split(Pattern.quote(File.pathSeparator));
		for(String path: inFiles){
			FileInputFormat.addInputPath(job, new Path(path));
		}
		
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		
		return job.waitForCompletion(true) ? 0 : 1;
	}
}
