package mapreduce;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

import mapreduce.phase1.Map1;
import mapreduce.phase1.Reduce1;
import mapreduce.phase2.Map2;
import mapreduce.phase2.Reduce2;
import mapreduce.phase3.Map3;
import mapreduce.phase3.Reduce3;
import mapreduce.phase4.Map4;
import mapreduce.phase4.Reduce4;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

@SuppressWarnings("deprecation")
public class ConnectedComponents {

	/**
	 * Configures and submits one MapReduce phase of the connected-components
	 * pipeline, blocking until the job completes.
	 *
	 * @param args  command-line arguments; {@code args[0]} is the input path for
	 *              phase 1 and {@code args[1]} is the base output name — the
	 *              intermediate directories are derived as "p1"/"p2"/"p3" +
	 *              {@code args[1]}, and phase 4 writes its final output to
	 *              {@code args[1]} itself
	 * @param phase which pipeline phase to run (1-4)
	 * @param which selects the stages to configure: 1 = mapper only,
	 *              2 = reducer only, 3 = both mapper and reducer
	 * @throws IOException if the job fails, or if phase 3 cannot move the
	 *                     phase-1 output into the phase-2 output directory
	 * @throws IllegalArgumentException if {@code phase} is not 1-4
	 */
	private static void runJob(String[] args, int phase, int which) throws IOException
	{
		JobConf confPart = new JobConf(ConnectedComponents.class);
		confPart.setJobName("connectedcomponents");
		// Phases 1-3 emit (IntWritable, Text); phase 4 overrides this below.
		confPart.setOutputKeyClass(IntWritable.class);
		confPart.setOutputValueClass(Text.class);
		confPart.setInputFormat(TextInputFormat.class);
		confPart.setOutputFormat(TextOutputFormat.class);

		// which==3 means "configure both stages".
		boolean useMapper = (which == 1 || which == 3);
		boolean useReducer = (which == 2 || which == 3);

		switch (phase) {
		case 1:
			if (useMapper) {
				confPart.setMapperClass(Map1.class);
			}
			if (useReducer) {
				confPart.setReducerClass(Reduce1.class);
			}
			FileInputFormat.setInputPaths(confPart, new Path(args[0]));
			FileOutputFormat.setOutputPath(confPart, new Path("p1" + args[1]));
			break;

		case 2:
			if (useMapper) {
				confPart.setMapperClass(Map2.class);
			}
			if (useReducer) {
				confPart.setReducerClass(Reduce2.class);
			}
			FileInputFormat.setInputPaths(confPart, new Path("p1" + args[1]));
			FileOutputFormat.setOutputPath(confPart, new Path("p2" + args[1]));
			break;

		case 3:
			// Phase 3 reads the whole "p2" directory, so the phase-1 part file
			// is moved in alongside the phase-2 output before the job runs.
			FileSystem fs = FileSystem.get(confPart);
			Path phase1Part = new Path("p1" + args[1] + "/" + "part-00000");
			Path mergedPart = new Path("p2" + args[1] + "/" + "part-1");
			// FileSystem.rename reports failure via its return value; the
			// original ignored it and could silently run on incomplete input.
			if (!fs.rename(phase1Part, mergedPart)) {
				throw new IOException("Failed to rename " + phase1Part + " to " + mergedPart);
			}
			if (useMapper) {
				confPart.setMapperClass(Map3.class);
			}
			if (useReducer) {
				confPart.setReducerClass(Reduce3.class);
			}
			FileInputFormat.setInputPaths(confPart, new Path("p2" + args[1]));
			FileOutputFormat.setOutputPath(confPart, new Path("p3" + args[1]));
			break;

		case 4:
			// Phase 4 inverts the key/value types relative to phases 1-3.
			confPart.setOutputKeyClass(Text.class);
			confPart.setOutputValueClass(IntWritable.class);
			if (useMapper) {
				confPart.setMapperClass(Map4.class);
			}
			if (useReducer) {
				// Reduce4 doubles as a combiner for the final aggregation.
				confPart.setCombinerClass(Reduce4.class);
				confPart.setReducerClass(Reduce4.class);
			}
			FileInputFormat.setInputPaths(confPart, new Path("p3" + args[1]));
			FileOutputFormat.setOutputPath(confPart, new Path(args[1]));
			break;

		default:
			// Previously an unknown phase submitted a job with no paths set,
			// failing later with a confusing error; fail fast instead.
			throw new IllegalArgumentException("Unknown phase: " + phase + " (expected 1-4)");
		}

		confPart.setBoolean("mapred.output.compress", false);
		confPart.set("mapred.job.tracker", "local");
		confPart.set("fs.default.name", "local");

		JobClient.runJob(confPart);
	}

	/**
	 * Driver entry point. Expected arguments:
	 * <pre>
	 *   args[0] - input path for phase 1
	 *   args[1] - base output name (also used to derive intermediate paths)
	 *   args[2] - properties file with keys: wmax, wmin, cases, lmax
	 *   args[3] - phase to run (1-4), or 0 to run all four in sequence
	 *   args[4] - stage selector passed to runJob (1=map, 2=reduce, 3=both)
	 * </pre>
	 *
	 * @throws Exception on I/O or job failure
	 */
	public static void main(String[] args) throws Exception 
	{
		Properties properties = new Properties();
		// try-with-resources: the original leaked this FileInputStream.
		try (FileInputStream in = new FileInputStream(new File(args[2]))) {
			properties.load(in);
		}
		DataToPass.wmax = Double.parseDouble(properties.getProperty("wmax"));
		DataToPass.wmin = Double.parseDouble(properties.getProperty("wmin"));
		DataToPass.cases = Integer.parseInt(properties.getProperty("cases"));
		DataToPass.lmax = Double.parseDouble(properties.getProperty("lmax"));

		int phase = Integer.parseInt(args[3]);
		int which = Integer.parseInt(args[4]);

		if (phase == 0)
		{
			// Phase 0 is a convenience mode: run the full 4-phase pipeline.
			for (int i = 1; i < 5; i++)
				runJob(args, i, which);
		}
		else
			runJob(args, phase, which);
	}

}
