import java.io.IOException;
import java.io.InputStream;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.ec2.model.InstanceType;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.PlacementType;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;

public class TestMain {

	/**
	 * Launches a four-step Elastic MapReduce job flow that computes the
	 * top-K distributional-similarity word pairs from a Hebrew corpus.
	 * Each step's jar lives in S3 and consumes the previous step's output
	 * folder.
	 *
	 * @param args
	 * 0- Name of output folder (created under s3n://miri.moshe.bucket/)
	 * 1- Num of K similarity word pairs (forwarded to Step4)
	 */
	public static void main(String[] args) {
		// Fail fast if the required arguments are missing; args[0] and
		// args[1] are used when building Step4 below, and a missing value
		// would otherwise surface as an ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: TestMain <outputName> <numOfPairs>");
			System.exit(1);
		}

		AWSCredentials credentials = loadCredentials();
		AmazonElasticMapReduce mapReduce = new AmazonElasticMapReduceClient(credentials);

		// Step 1 reads the raw 100k-word Hebrew corpus.
		StepConfig stepConfig1 = buildStep("Step1", "s3n://miri.moshe.bucket/Step1.jar",
			"s3n://dsp122/heb.corpus.100k/", "s3n://miri.moshe.bucket/output1/");
		StepConfig stepConfig2 = buildStep("Step2", "s3n://miri.moshe.bucket/Step2.jar",
			"s3n://miri.moshe.bucket/output1/", "s3n://miri.moshe.bucket/output2/");
		StepConfig stepConfig3 = buildStep("Step3", "s3n://miri.moshe.bucket/Step3.jar",
			"s3n://miri.moshe.bucket/output2/", "s3n://miri.moshe.bucket/output3/");
		// Step 4 receives the user-chosen output folder name and K.
		StepConfig stepConfig4 = buildStep("Step4", "s3n://miri.moshe.bucket/Step4.jar",
			"s3n://miri.moshe.bucket/output3/", "s3n://miri.moshe.bucket/" + args[0] + "/", args[1]);

		JobFlowInstancesConfig instances = new JobFlowInstancesConfig()
			.withInstanceCount(10)
			.withMasterInstanceType(InstanceType.M1Small.toString())
			.withSlaveInstanceType(InstanceType.M1Small.toString())
			.withHadoopVersion("0.20")
			// Terminate the cluster as soon as all steps have finished.
			.withKeepJobFlowAliveWhenNoSteps(false)
			.withPlacement(new PlacementType());

		RunJobFlowRequest runFlowRequest = new RunJobFlowRequest()
			.withName("DistributionalSimilarity")
			.withInstances(instances)
			.withSteps(stepConfig1, stepConfig2, stepConfig3, stepConfig4)
			.withLogUri("s3n://miri.moshe.bucket/logs/");

		RunJobFlowResult runJobFlowResult = mapReduce.runJobFlow(runFlowRequest);
		String jobFlowId = runJobFlowResult.getJobFlowId();
		System.out.println("Run job flow with id: " + jobFlowId);
	}

	/**
	 * Loads AWS credentials from the AwsCredentials.properties classpath
	 * resource, exiting the JVM on failure. Previously a load failure was
	 * only printed and the program went on to call AWS with null
	 * credentials, producing a confusing SDK-internal error.
	 *
	 * @return credentials read from AwsCredentials.properties (never null)
	 */
	private static AWSCredentials loadCredentials() {
		InputStream in = TestMain.class.getResourceAsStream("AwsCredentials.properties");
		if (in == null) {
			// getResourceAsStream returns null (not an exception) when the
			// resource is missing from the classpath.
			System.err.println("AwsCredentials.properties not found on the classpath");
			System.exit(1);
		}
		try {
			return new PropertiesCredentials(in);
		} catch (IOException e) {
			e.printStackTrace();
			System.exit(1);
			return null; // unreachable; satisfies the compiler
		}
	}

	/**
	 * Builds one EMR step that runs a full map-reduce application jar and
	 * terminates the whole job flow if the step fails.
	 *
	 * @param name     step display name
	 * @param jar      S3 URI of the runnable jar
	 * @param stepArgs arguments forwarded to the jar's main class
	 * @return the configured step
	 */
	private static StepConfig buildStep(String name, String jar, String... stepArgs) {
		HadoopJarStepConfig jarStep = new HadoopJarStepConfig()
			.withJar(jar)
			.withArgs(stepArgs);
		return new StepConfig()
			.withName(name)
			.withHadoopJarStep(jarStep)
			.withActionOnFailure("TERMINATE_JOB_FLOW");
	}
}
