package admin.iss.nus.edu.sg;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.UUID;

import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.ec2.model.InstanceType;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsRequest;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsResult;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.JobFlowDetail;
import com.amazonaws.services.elasticmapreduce.model.JobFlowExecutionState;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;

/**
 * Launches the StockEx Hadoop jobs on Amazon Elastic MapReduce and polls each
 * job flow until it reaches a terminal state.
 *
 * NOTE(review): the class name violates Java UpperCamelCase convention (should
 * be ExecuteWorkFlow); kept unchanged so existing callers still work.
 */
public class executeWorkFlow {
	/** Hadoop version requested for the EMR job flow. */
	private static final String HADOOP_VERSION = "0.20";
	/** Total EC2 instance count for the cluster (master + slaves). */
	private static final int INSTANCE_COUNT = 3;
	/** EC2 instance type used for both master and slave nodes. */
	private static final String INSTANCE_TYPE = InstanceType.M1Small.toString();
	/** S3 bucket that holds the job jar, the input data and the logs. */
	private static final String BUCKET_NAME = "stockprocessing";
	/** Location of the Hadoop job jar on S3. */
	private static final String S3N_HADOOP_JAR = "s3n://" + BUCKET_NAME + "/StockEx.jar";
	/** Destination for the EMR job flow logs. */
	private static final String S3N_LOG_URI = "s3n://" + BUCKET_NAME + "/log";

	// Each argument array is: <main class> <input path> <output path>.
	private static final String[] JOB1_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.RelativeStrengthIndexBatchJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/rsi_output" };
	private static final String[] JOB2_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.RelativeStrengthIndexSingleJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/rsi_s_output" };
	private static final String[] JOB3_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.LinearRegressionBatchJob",
			"s3n://stockprocessing/stockData",
			// FIX: bucket name was misspelled "stockprocessin" (missing 'g'),
			// sending the linear-regression output to the wrong bucket.
			"s3n://stockprocessing/lr_output" };
	private static final String[] JOB4_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.LinearRegressionSingleJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/lr_s_output" };
	private static final String[] JOB5_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.MovingAverageBatchJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/ma_output" };
	private static final String[] JOB6_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.MovingAverageSingleJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/ma_s_output" };
	private static final String[] JOB7_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.StochasticOscillatorBatchJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/so_output" };
	private static final String[] JOB8_ARGS = new String[] {
			"sg.edu.nus.iss.stockex.StochasticOscillatorSingleJob",
			"s3n://stockprocessing/stockData",
			"s3n://stockprocessing/so_s_output" };

	private static final List<String> JOB_1_ARGS_AS_LIST = Arrays.asList(JOB1_ARGS);
	private static final List<String> JOB_2_ARGS_AS_LIST = Arrays.asList(JOB2_ARGS);
	private static final List<String> JOB_3_ARGS_AS_LIST = Arrays.asList(JOB3_ARGS);
	private static final List<String> JOB_4_ARGS_AS_LIST = Arrays.asList(JOB4_ARGS);
	private static final List<String> JOB_5_ARGS_AS_LIST = Arrays.asList(JOB5_ARGS);
	private static final List<String> JOB_6_ARGS_AS_LIST = Arrays.asList(JOB6_ARGS);
	private static final List<String> JOB_7_ARGS_AS_LIST = Arrays.asList(JOB7_ARGS);
	private static final List<String> JOB_8_ARGS_AS_LIST = Arrays.asList(JOB8_ARGS);

	/** Job flow states that mean the flow will make no further progress. */
	private static final List<JobFlowExecutionState> DONE_STATES = Arrays.asList(
			JobFlowExecutionState.COMPLETED,
			JobFlowExecutionState.FAILED,
			JobFlowExecutionState.TERMINATED);

	/** EMR client, initialised by {@link #initAWS()} before any flow is started. */
	static AmazonElasticMapReduce emr;

	public static void main(String[] a) {
		try {
			initAWS();
		} catch (Exception e) {
			// FIX: a failed init used to be logged and then ignored, so the code
			// below dereferenced a null client and crashed with an NPE.
			System.out.println("Exception caught");
			System.out.println(e.getMessage());
			return;
		}
		try {
			// Batch variants are currently disabled; only the Single jobs run.
			// startWorkFlow(JOB_1_ARGS_AS_LIST);
			startWorkFlow(JOB_2_ARGS_AS_LIST);
			// startWorkFlow(JOB_3_ARGS_AS_LIST);
			startWorkFlow(JOB_4_ARGS_AS_LIST);
			// startWorkFlow(JOB_5_ARGS_AS_LIST);
			startWorkFlow(JOB_6_ARGS_AS_LIST);
			// startWorkFlow(JOB_7_ARGS_AS_LIST);
			startWorkFlow(JOB_8_ARGS_AS_LIST);
		} catch (AmazonServiceException ase) {
			System.out.println("Caught Exception: " + ase.getMessage());
			// FIX: "Reponse" typo in the log message.
			System.out.println("Response Status Code: " + ase.getStatusCode());
			System.out.println("Error Code: " + ase.getErrorCode());
			System.out.println("Request ID: " + ase.getRequestId());
		}
	}

	/** Placeholder kept for interface compatibility; never implemented. */
	public static void checkStatus() {

	}

	/**
	 * Launches a single-step EMR job flow that runs {@link #S3N_HADOOP_JAR}
	 * with the given arguments, then polls the flow every 10 seconds until it
	 * reaches a terminal state (COMPLETED, FAILED or TERMINATED).
	 *
	 * @param JOB_ARGUMENT step arguments: main class, input path, output path
	 */
	public static void startWorkFlow(final List<String> JOB_ARGUMENT) {
		// Configure the cluster the step will run on.
		JobFlowInstancesConfig instances = new JobFlowInstancesConfig();
		System.out.println("Using EMR Hadoop v" + HADOOP_VERSION);
		instances.setHadoopVersion(HADOOP_VERSION);
		System.out.println("Using instance count: " + INSTANCE_COUNT);
		instances.setInstanceCount(INSTANCE_COUNT);
		System.out.println("Using master instance type: " + INSTANCE_TYPE);
		instances.setMasterInstanceType(INSTANCE_TYPE);
		System.out.println("Using slave instance type: " + INSTANCE_TYPE);
		instances.setSlaveInstanceType(INSTANCE_TYPE);

		// FIX: the flow name used to be generated twice with two different
		// random UUIDs, so the printed name never matched the real flow name.
		String flowName = "StockProcessing-" + UUID.randomUUID();
		System.out.println("Configuring flow: " + flowName);
		RunJobFlowRequest jobFlowRequest = new RunJobFlowRequest(flowName, instances);
		System.out.println("\tusing log URI: " + S3N_LOG_URI);
		jobFlowRequest.setLogUri(S3N_LOG_URI);

		// Configure the Hadoop jar step to execute.
		System.out.println("\tusing jar URI: " + S3N_HADOOP_JAR);
		HadoopJarStepConfig jarConfig = new HadoopJarStepConfig(S3N_HADOOP_JAR);
		jarConfig.setArgs(JOB_ARGUMENT);
		// FIX: indexOf('/') matched the first slash of "s3n://", producing the
		// step name "/stockprocessing/StockEx.jar"; lastIndexOf yields the
		// intended bare jar file name.
		String stepName = S3N_HADOOP_JAR
				.substring(S3N_HADOOP_JAR.lastIndexOf('/') + 1);
		StepConfig stepConfig = new StepConfig(stepName, jarConfig);
		jobFlowRequest.setSteps(Arrays.asList(stepConfig));

		// Run the job flow.
		RunJobFlowResult result = emr.runJobFlow(jobFlowRequest);

		// Poll the flow until it terminates, logging each state transition.
		String lastState = "";
		STATUS_LOOP: while (true) {
			DescribeJobFlowsRequest desc = new DescribeJobFlowsRequest(
					Arrays.asList(result.getJobFlowId()));
			DescribeJobFlowsResult descResult = emr.describeJobFlows(desc);
			for (JobFlowDetail detail : descResult.getJobFlows()) {
				String state = detail.getExecutionStatusDetail().getState();
				if (isDone(state)) {
					System.out.println("Job " + state + ": " + detail.toString());
					break STATUS_LOOP;
				} else if (!lastState.equals(state)) {
					lastState = state;
					System.out.println("Job " + state + " at "
							+ new Date().toString());
				}
			}

			try {
				Thread.sleep(10000);
			} catch (InterruptedException e) {
				// FIX: restore the interrupt flag and stop polling instead of
				// swallowing the interruption and looping forever.
				Thread.currentThread().interrupt();
				break;
			}
		}
	}

	/**
	 * Returns true when the given job flow state string is a terminal state.
	 *
	 * @param value state name as reported by the EMR DescribeJobFlows API
	 */
	public static boolean isDone(String value) {
		JobFlowExecutionState state = JobFlowExecutionState.fromValue(value);
		return DONE_STATES.contains(state);
	}

	/**
	 * Builds AWS credentials from the standard environment variables.
	 *
	 * SECURITY FIX: the original version embedded a literal access key and
	 * secret key in source control. Those credentials must be treated as
	 * compromised and revoked; credentials are now read from the environment
	 * (AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY).
	 *
	 * @return credentials built from the environment
	 * @throws IllegalStateException if either variable is unset
	 */
	public static AWSCredentials getCredentials() {
		String accessKey = System.getenv("AWS_ACCESS_KEY_ID");
		String secretKey = System.getenv("AWS_SECRET_ACCESS_KEY");
		if (accessKey == null || secretKey == null) {
			throw new IllegalStateException(
					"AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be set");
		}
		return new BasicAWSCredentials(accessKey, secretKey);
	}

	/** Creates the shared EMR client using {@link #getCredentials()}. */
	private static void initAWS() throws Exception {
		emr = new AmazonElasticMapReduceClient(getCredentials());
	}

}
