import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider;
import com.amazonaws.services.ec2.model.InstanceType;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsRequest;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsResult;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.JobFlowDetail;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.PlacementType;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;

/**
 * Launches a five-step Elastic MapReduce job flow (four pipeline steps plus
 * "part A"), then polls every 10 seconds until the flow reaches a terminal
 * state, and finally aggregates the record counters the steps produced.
 *
 * Usage: JobBuilder &lt;k&gt;  — k is forwarded as the first argument of partA.
 */
public class JobBuilder {
	/** Total number of EC2 instances (master + slaves) in the job flow. */
	private static final int NUM_OF_MACHINES = 15;
	public static final String counterFileName = "record counter";
	// Small input kept for quick test runs; point `inputFile` at it to use it.
	private static final String SMALL_FILE = "s3n://ahgass2/input/testFile.txt";//"s3n://ahgass2/input/eng.corp.10k";
	private static final String BIG_FILE = "s3://datasets.elasticmapreduce/ngrams/books/20090715/eng-gb-all/5gram/data";
	public static AWSCredentialsProvider credentials = new ClasspathPropertiesFileCredentialsProvider();
	public static AmazonElasticMapReduce mapReduce = new AmazonElasticMapReduceClient(
			credentials);
	public static String bucketLocation = "s3n://ahgass2/";
	private static String inputFile = BIG_FILE;
	public static String firstJar = "firstStep.jar";
	public static String firstStepMainClass = "FirstStep";
	public static String secondJar = "secondStep.jar";
	public static String secondStepMainClass = "SecondStep";
	public static String thirdJar = "thirdStep.jar";
	public static String thirdStepMainClass = "ThirdStep";
	public static String fourthJar = "fourthStep.jar";
	public static String fourthStepMainClass = "FourthStep";
	public static String partAJar = "partA.jar";
	public static String partAStepMainClass = "PartA";

	/** Action applied to the whole flow when any step fails. */
	private static final String ON_FAILURE = "TERMINATE_JOB_FLOW";

	/** How long to wait between status polls, in milliseconds. */
	private static final long POLL_INTERVAL_MS = 10000L;

	/**
	 * Builds one EMR step that runs {@code mainClass} from {@code jar}
	 * (resolved relative to {@link #bucketLocation}) with the given
	 * program arguments. The step terminates the job flow on failure.
	 */
	private static StepConfig buildStep(String name, String jar,
			String mainClass, String... stepArgs) {
		HadoopJarStepConfig jarStep = new HadoopJarStepConfig()
				.withJar(bucketLocation + jar)
				.withMainClass(mainClass)
				.withArgs(stepArgs);
		return new StepConfig().withName(name)
				.withHadoopJarStep(jarStep)
				.withActionOnFailure(ON_FAILURE);
	}

	/** Returns true when the given job-flow state is terminal. */
	private static boolean isTerminal(String state) {
		return "COMPLETED".equals(state)
				|| "FAILED".equals(state)
				|| "TERMINATED".equals(state);
	}

	public static void main(String[] args) {
		// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("Usage: JobBuilder <k>");
			System.exit(1);
		}
		String k = args[0];
		// NOTE(review): forwarding raw credentials as step arguments exposes them
		// in the EMR console and step logs — prefer IAM instance roles instead.
		String access = credentials.getCredentials().getAWSAccessKeyId();
		String secret = credentials.getCredentials().getAWSSecretKey();

		StepConfig firstStepConfig = buildStep("firstStep", firstJar,
				firstStepMainClass, inputFile, "no-debug", access, secret);
		StepConfig secondStepConfig = buildStep("secondStep", secondJar,
				secondStepMainClass, access, secret);
		StepConfig thirdStepConfig = buildStep("thirdStep", thirdJar,
				thirdStepMainClass, access, secret);
		StepConfig fourthStepConfig = buildStep("fourthStep", fourthJar,
				fourthStepMainClass, access, secret);
		StepConfig partAStepConfig = buildStep("partAStep", partAJar,
				partAStepMainClass, k, access, secret);

		JobFlowInstancesConfig instances = new JobFlowInstancesConfig()
				.withInstanceCount(NUM_OF_MACHINES)
				.withMasterInstanceType(InstanceType.M1Medium.toString())
				.withSlaveInstanceType(InstanceType.M1Medium.toString())
				//.withHadoopVersion("2.2.0")
				.withEc2KeyName("dsp1Key")
				// Shut the cluster down automatically once all steps finish.
				.withKeepJobFlowAliveWhenNoSteps(false)
				.withPlacement(new PlacementType("us-east-1a"));

		RunJobFlowRequest runFlowRequest = new RunJobFlowRequest()
				.withName("Assingment2")
				.withInstances(instances)
				.withAmiVersion("3.1.0")
				.withSteps(firstStepConfig, secondStepConfig, thirdStepConfig,
						fourthStepConfig, partAStepConfig)
				.withLogUri(bucketLocation + "logs/");

		RunJobFlowResult runJobFlowResult = mapReduce.runJobFlow(runFlowRequest);
		String jobFlowId = runJobFlowResult.getJobFlowId();
		System.out.println("Ran job flow with id: " + jobFlowId);

		// Poll until the flow completes, fails, or is terminated.
		DescribeJobFlowsRequest jobAttributesRequest =
				new DescribeJobFlowsRequest().withJobFlowIds(jobFlowId);
		JobFlowDetail jobDetail = mapReduce
				.describeJobFlows(jobAttributesRequest).getJobFlows().get(0);
		while (!isTerminal(jobDetail.getExecutionStatusDetail().getState())) {
			try {
				Thread.sleep(POLL_INTERVAL_MS);
			} catch (InterruptedException e) {
				// Restore the interrupt flag and stop waiting for the flow.
				Thread.currentThread().interrupt();
				break;
			}
			// Refresh BEFORE printing so we report the current status,
			// not the one observed before the sleep.
			jobDetail = mapReduce
					.describeJobFlows(jobAttributesRequest).getJobFlows().get(0);
			System.out.println(jobDetail.getExecutionStatusDetail().toString());
		}

		// Aggregate and print the record counters written by the steps.
		Utils.sumAllCounters(credentials.getCredentials());
	}
}
