package sg.edu.nus.iss.stockex;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

/**
 * Driver for the Relative Strength Index (RSI) batch MapReduce job.
 *
 * <p>Wires a secondary-sort pipeline: {@link StockCompositeMapper} emits
 * {@code CodeDateKey -> TimestampPriceDataPoint} pairs, which are partitioned
 * by stock code ({@link CodeKeyPartitioner}), sorted by the composite
 * code+date key ({@link CodeDateKeyComparator}), and grouped per code
 * ({@link CodeKeyGroupingComparator}) so that
 * {@link RelativeStrengthIndexReducer} receives each stock's price points in
 * key order within a single reduce call.
 */
public class RelativeStrengthIndexBatchJob {

	/**
	 * Configures and submits the RSI job, blocking until it completes.
	 *
	 * @param input  input path containing plain-text stock data
	 * @param output output directory; deleted up front if it already exists
	 * @return {@code true} if the job completed successfully, {@code false} otherwise
	 * @throws Exception if job configuration, HDFS access, or execution fails
	 */
	public static boolean startJob(String input, String output) throws Exception {
		Configuration conf = new Configuration();

		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(conf, "StockEx");
		// Point at the driver class so Hadoop ships the jar containing this job setup.
		job.setJarByClass(RelativeStrengthIndexBatchJob.class);

		job.setMapOutputKeyClass(CodeDateKey.class);
		job.setMapOutputValueClass(TimestampPriceDataPoint.class);

		job.setMapperClass(StockCompositeMapper.class);
		job.setReducerClass(RelativeStrengthIndexReducer.class);

		// Secondary sort: partition and group by stock code only, but order
		// records within each group by the full code+date key.
		job.setPartitionerClass(CodeKeyPartitioner.class);
		job.setSortComparatorClass(CodeDateKeyComparator.class);
		job.setGroupingComparatorClass(CodeKeyGroupingComparator.class);

		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);

		// FileOutputFormat aborts if the output directory already exists,
		// so remove any leftover from a previous run before submitting.
		FileSystem.get(conf).delete(new Path(output), true);
		FileInputFormat.setInputPaths(job, new Path(input));
		FileOutputFormat.setOutputPath(job, new Path(output));

		// Propagate success/failure instead of silently discarding it.
		return job.waitForCompletion(true);
	}

	/**
	 * Entry point: {@code hadoop jar ... <input> <output>}.
	 * Exits non-zero on bad arguments or job failure so callers can detect errors.
	 */
	public static void main(String[] args) throws Exception {
		if (args.length < 2) {
			System.err.println("Usage: RelativeStrengthIndexBatchJob <input> <output>");
			System.exit(2);
		}
		System.exit(startJob(args[0], args[1]) ? 0 : 1);
	}
}
