package finalProject;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider;
import com.amazonaws.services.ec2.model.InstanceType;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.PlacementType;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;

/**
 * Launches the final-project Elastic MapReduce job flow: runs Step1 followed by
 * Step2 on a 10-node Hadoop 2.2.0 cluster, after clearing any previous "Output"
 * objects from the S3 bucket so the new run does not collide with stale results.
 *
 * Credentials are read from an AwsCredentials.properties file on the classpath.
 */
public class JobBuilder {

	public static final AWSCredentialsProvider credentials = new ClasspathPropertiesFileCredentialsProvider();
	public static final AmazonElasticMapReduce mapReduce = new AmazonElasticMapReduceClient(credentials);
	/** s3n:// URI prefix used for jar locations and the log directory. */
	public static final String bucketLocation = "s3n://ahgfinal/";
	/** Plain bucket name, required by the S3 client API (not the s3n:// URI). */
	public static final String bucketName = "ahgfinal";
	public static final String step1tJar = "Step1.jar";
	public static final String step1MainClass = "Step1";
	public static final String step2Jar = "Step2.jar";
	public static final String step2MainClass = "Step2";
	public static final String testStepJar = "TestStep.jar";
	public static final String testStepMainClass = "TestStep";
	public static final String numOfFiles = "1";

	/**
	 * Entry point. Validates arguments, cleans previous output, then submits
	 * the two-step job flow and prints the resulting job-flow id.
	 *
	 * @param args args[0] = DPMin threshold, args[1] = feature type
	 */
	public static void main(String[] args) {
		if (args.length < 2) {
			System.err.println("Usage: <DPMin> <FeatureType>");
			return;
		}
		String dpMinAsString = args[0];
		String featureType = args[1];

		deletePreviousOutputDirectories();

		HadoopJarStepConfig step1 = new HadoopJarStepConfig()
				.withJar(bucketLocation + step1tJar)
				.withMainClass(step1MainClass)
				.withArgs(dpMinAsString, featureType, numOfFiles);

		StepConfig step1Config = new StepConfig().withName("Step1")
				.withHadoopJarStep(step1)
				.withActionOnFailure("TERMINATE_JOB_FLOW");

		HadoopJarStepConfig step2 = new HadoopJarStepConfig()
				.withJar(bucketLocation + step2Jar)
				.withMainClass(step2MainClass);

		StepConfig step2Config = new StepConfig().withName("Step2")
				.withHadoopJarStep(step2)
				.withActionOnFailure("TERMINATE_JOB_FLOW");

		JobFlowInstancesConfig instances = new JobFlowInstancesConfig()
				.withInstanceCount(10)
				.withMasterInstanceType(InstanceType.M1Medium.toString())
				.withSlaveInstanceType(InstanceType.M1Medium.toString())
				.withHadoopVersion("2.2.0").withEc2KeyName("dsp1Key")
				// Cluster terminates itself once all steps are done or one fails.
				.withKeepJobFlowAliveWhenNoSteps(false)
				.withPlacement(new PlacementType("us-east-1a"));

		RunJobFlowRequest runFlowRequest = new RunJobFlowRequest()
				.withName("Final").withInstances(instances)
				.withSteps(step1Config, step2Config)
				.withLogUri(bucketLocation + "logs/");

		RunJobFlowResult runJobFlowResult = mapReduce.runJobFlow(runFlowRequest);
		String jobFlowId = runJobFlowResult.getJobFlowId();
		System.out.println("Ran job flow with id: " + jobFlowId);
	}

	/**
	 * Best-effort deletion of every object under the "Output" prefix in the
	 * bucket, so a re-run starts from a clean output area. Failures are logged
	 * and ignored — cleanup problems should not abort the job launch.
	 */
	private static void deletePreviousOutputDirectories() {

		AmazonS3Client s3Client = new AmazonS3Client(credentials);
		try {
			ObjectListing listing = s3Client.listObjects(bucketName, "Output");
			while (true) {
				for (S3ObjectSummary objectSummary : listing.getObjectSummaries()) {
					// FIX: deleteObject takes the bucket NAME ("ahgfinal"), not the
					// s3n:// URI that was passed before — the old call could not
					// resolve a bucket and every delete failed.
					s3Client.deleteObject(bucketName, objectSummary.getKey());
				}
				// listObjects returns at most one page (~1000 keys); keep paging
				// until the listing is no longer truncated.
				if (!listing.isTruncated()) {
					break;
				}
				listing = s3Client.listNextBatchOfObjects(listing);
			}

		} catch (Exception e) {
			// Deliberately best-effort: log and continue with the launch.
			e.printStackTrace();
		}
	}

}
