import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.dynamodb.AmazonDynamoDBClient;
import com.amazonaws.services.ec2.model.InstanceType;
import com.amazonaws.services.ec2.model.Placement;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsRequest;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsResult;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.JobFlowDetail;
import com.amazonaws.services.elasticmapreduce.model.JobFlowExecutionState;
import com.amazonaws.services.elasticmapreduce.model.JobFlowInstancesConfig;
import com.amazonaws.services.elasticmapreduce.model.PlacementType;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowRequest;
import com.amazonaws.services.elasticmapreduce.model.RunJobFlowResult;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.dynamodb.AmazonDynamoDBClient;
import com.amazonaws.services.dynamodb.model.AttributeValue;
import com.amazonaws.services.dynamodb.model.ComparisonOperator;
import com.amazonaws.services.dynamodb.model.Condition;
import com.amazonaws.services.dynamodb.model.Key;
import com.amazonaws.services.dynamodb.model.QueryRequest;
import com.amazonaws.services.dynamodb.model.QueryResult;
import com.amazonaws.services.dynamodb.model.ScanRequest;
import com.amazonaws.services.dynamodb.model.ScanResult;

/**
 * Servlet implementation class processJobFlow.
 *
 * <p>Looks up the word supplied in request parameter {@code txt1} in the
 * DynamoDB table {@code tableworddata} and prints each stored index entry.
 * A large commented-out EMR job-flow launcher previously lived in
 * {@link #doGet}; it has been removed (see version-control history) — the
 * EMR-related constants and helpers are retained for compatibility.
 *
 * <p>NOTE(review): the class name should be {@code ProcessJobFlow} per Java
 * naming conventions; left unchanged because servlet mappings reference it.
 */
public class processJobFlow extends HttpServlet {
	private static final long serialVersionUID = 1L;
	private static final String HADOOP_VERSION = "0.20";

	// Shared DynamoDB client, (re)initialized on every GET request.
	// NOTE(review): static mutable state in a servlet is not thread-safe
	// under concurrent requests — consider per-request or init()-time setup.
	static AmazonDynamoDBClient client;
	static String tableName = "tableworddata";

	// EMR cluster sizing for the (currently disabled) indexing job flow.
	private static final int INSTANCE_COUNT = 3;
	private static final String INSTANCE_TYPE = InstanceType.M1Small.toString();
	private static final UUID RANDOM_UUID = UUID.randomUUID();
	private static final String FLOW_NAME = "cloudburst-"
			+ RANDOM_UUID.toString();
	private static final String BUCKET_NAME = "wrdcountbcket";
	private static final String S3N_HADOOP_JAR = "s3n://wrdcountbcket/WordIndexer2.jar";
	private static final String S3N_LOG_URI = "s3n://" + BUCKET_NAME + "/log";
	private static final String[] JOB_ARGS = new String[] {
			"s3n://wrdcountbcket/indexerinput",
			"s3n://wrdcountbcket/indexeroutputfol" };

	private static final List<String> ARGS_AS_LIST = Arrays.asList(JOB_ARGS);

	// Job-flow states that mean the flow has finished, successfully or not.
	private static final List<JobFlowExecutionState> DONE_STATES = Arrays
			.asList(JobFlowExecutionState.COMPLETED,
					JobFlowExecutionState.FAILED,
					JobFlowExecutionState.TERMINATED);

	static AmazonElasticMapReduce emr;

	/**
	 * @see HttpServlet#HttpServlet()
	 */
	public processJobFlow() {
		super();
	}

	/**
	 * Handles word-lookup requests.
	 *
	 * <p>If parameter {@code txt1} is present, scans the DynamoDB table for
	 * items whose {@code word} attribute equals the supplied value and writes
	 * each matching attribute as {@code name:} followed by one line per
	 * comma-separated index entry. Prints
	 * {@code "Sorry no matching word Found"} when nothing matches.
	 *
	 * @param request  may carry the {@code txt1} word to look up
	 * @param response receives plain-text output
	 * @see HttpServlet#doGet(HttpServletRequest, HttpServletResponse)
	 */
	protected void doGet(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		PrintWriter out = response.getWriter();

		String val = request.getParameter("txt1");
		if (val == null) {
			return; // no word supplied — nothing to do
		}

		try {
			initDynamoDbclient();
		} catch (Exception e) {
			// Bug fix: previously execution fell through after a failed init
			// and dereferenced the null client; report the error and stop.
			out.println(e.getMessage());
			return;
		}

		// Filter: word == val (exact string match).
		Condition scanFilter = new Condition().withComparisonOperator(
				ComparisonOperator.EQ.toString()).withAttributeValueList(
				new AttributeValue().withS(val));
		Map<String, Condition> conditions = new HashMap<String, Condition>();
		conditions.put("word", scanFilter);

		ScanRequest scanRequest = new ScanRequest()
				.withTableName(tableName).withScanFilter(conditions);

		ScanResult result = client.scan(scanRequest);
		if (result.getCount() == 0) {
			out.println("Sorry no matching word Found");
		} else {
			for (Map<String, AttributeValue> item : result.getItems()) {
				for (Map.Entry<String, AttributeValue> entry : item.entrySet()) {
					out.print(entry.getKey() + ":");
					// The stored index is a comma-separated string; emit one
					// entry per line.
					String wordindex = entry.getValue().getS();
					for (String part : wordindex.split(",")) {
						out.print(part + "\n");
					}
				}
			}
		}
	}

	/**
	 * Builds AWS credentials, preferring the standard environment variables
	 * ({@code AWS_ACCESS_KEY_ID} / {@code AWS_SECRET_ACCESS_KEY}) over the
	 * legacy hard-coded key pair.
	 *
	 * <p>SECURITY(review): the fallback literals below have been committed to
	 * source control and MUST be rotated; once rotated, delete the fallback
	 * and rely on the environment (or an IAM role) exclusively.
	 *
	 * @return credentials for the DynamoDB and EMR clients
	 */
	private static AWSCredentials awsCredentials() {
		String accessKey = System.getenv("AWS_ACCESS_KEY_ID");
		String secretKey = System.getenv("AWS_SECRET_ACCESS_KEY");
		if (accessKey == null || secretKey == null) {
			accessKey = "AKIAI4LDMCJYHSTVLF4Q";
			secretKey = "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";
		}
		return new BasicAWSCredentials(accessKey, secretKey);
	}

	/** Creates the DynamoDB client pointed at the ap-southeast-1 endpoint. */
	private void initDynamoDbclient() {
		client = new AmazonDynamoDBClient(awsCredentials());
		client.setEndpoint("https://dynamodb.ap-southeast-1.amazonaws.com");
	}

	/** Creates the EMR client used by the (disabled) job-flow launch path. */
	private void initAWS() throws Exception {
		emr = new AmazonElasticMapReduceClient(awsCredentials());
	}

	/**
	 * Returns whether the given EMR execution-state string denotes a
	 * terminal state (COMPLETED, FAILED or TERMINATED).
	 *
	 * @param value state string as returned by the EMR describe API
	 * @return {@code true} if the job flow has finished
	 */
	public static boolean isDone(String value) {
		return DONE_STATES.contains(JobFlowExecutionState.fromValue(value));
	}

	/**
	 * Not implemented: this servlet only supports GET lookups.
	 *
	 * @see HttpServlet#doPost(HttpServletRequest, HttpServletResponse)
	 */
	protected void doPost(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		// Intentionally empty — POST requests are ignored.
	}

}
