package info.emamian.droplet.worker;

import info.emamian.droplet.data.DynamoDbClientFactory;
import info.emamian.droplet.data.DynamoDbSms;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.math.BigInteger;
import java.util.Date;
import java.util.Enumeration;
import java.util.concurrent.atomic.AtomicLong;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.amazonaws.auth.AWSCredentialsProviderChain;
import com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider;
import com.amazonaws.auth.InstanceProfileCredentialsProvider;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.s3.AmazonS3Client;

/**
 * An example Amazon Elastic Beanstalk Worker Tier application. This example
 * requires a Java 7 (or higher) compiler.
 */
public class WorkerServlet extends HttpServlet {
	private static final Logger logger = LoggerFactory.getLogger(WorkerServlet.class);

	private static final long serialVersionUID = 1L;

	// Lifecycle/traffic counters. These were previously immutable BigIntegers
	// whose add() result was discarded (so they never changed) and which were
	// used as lock objects; AtomicLong gives correct, lock-free updates.
	private static final AtomicLong servletCount = new AtomicLong();
	private static final AtomicLong hitCount = new AtomicLong();
	private static final AtomicLong activeServletCount = new AtomicLong();

	/**
	 * Mapper used to load {@link DynamoDbSms} items. Obtained lazily from
	 * {@link DynamoDbClientFactory} on the first POST.
	 */
	private DynamoDBMapper mapper;

	/**
	 * A client to use to access Amazon S3. Pulls credentials from the
	 * {@code AwsCredentials.properties} file if found on the classpath,
	 * otherwise will attempt to obtain credentials based on the IAM Instance
	 * Profile associated with the EC2 instance on which it is run.
	 */
	private final AmazonS3Client s3 = new AmazonS3Client(new AWSCredentialsProviderChain(
			new InstanceProfileCredentialsProvider(), new ClasspathPropertiesFileCredentialsProvider()));

	@Override
	public void destroy() {
		super.destroy();
		long active = activeServletCount.decrementAndGet();
		logger.debug("Servlet destroyed; Active Servlet count: {}", active);
	}

	@Override
	public void init() throws ServletException {
		super.init();
		long active = activeServletCount.incrementAndGet();
		long total = servletCount.incrementAndGet();
		logger.debug("Servlet init; Active Servlet count: {}", active);
		logger.debug("Servlet count: {}", total);
	}

	/**
	 * This method is invoked to handle POST requests from the local SQS daemon
	 * when a work item is pulled off of the queue. The body of the request
	 * contains the message pulled off the queue — here, the id of a
	 * {@link DynamoDbSms} item to load.
	 *
	 * <p>Responds 200 on success (so the daemon deletes the message) and 500
	 * on failure (so the daemon retries the message in case the failure was
	 * transient, e.g. a temporary network issue).
	 */
	@Override
	protected void doPost(final HttpServletRequest request, final HttpServletResponse response)
			throws ServletException, IOException {
		try {
			hit();
			logger.info("Received message at {}", new Date());

			mapper = DynamoDbClientFactory.getMapperInstance();

			// The whole request body is the id of the SMS item to process.
			String smsId = readBody(request);

			logger.debug("Loading sms with id: {}", smsId);
			DynamoDbSms sms = mapper.load(DynamoDbSms.class, smsId);
			logger.debug("Loaded sms: {}", sms);

			// Simulate doing some work.
			Thread.sleep(10 * 1000);

			// Signal to beanstalk that processing was successful so this work
			// item should not be retried.
			response.setStatus(200);
			logger.debug("Work item processed successfully");
		}
		catch (InterruptedException exception) {
			// Restore the interrupt flag so the container can see it.
			Thread.currentThread().interrupt();
			reportFailure(response, exception);
		}
		catch (RuntimeException exception) {
			// Covers mapper/load failures (e.g. missing item -> NPE on sms).
			reportFailure(response, exception);
		}
	}

	/** Reads and concatenates the full request body (newlines stripped). */
	private String readBody(HttpServletRequest request) throws IOException {
		StringBuilder body = new StringBuilder();
		try (BufferedReader bodyReader = request.getReader()) {
			String line;
			while ((line = bodyReader.readLine()) != null) {
				body.append(line);
			}
		}
		return body.toString();
	}

	/**
	 * Signals to beanstalk that something went wrong while processing the
	 * request by returning a 500; the work request will then be retried.
	 */
	private void reportFailure(HttpServletResponse response, Exception exception) throws IOException {
		logger.error("Failed to process work item", exception);
		response.setStatus(500);
		try (PrintWriter writer = new PrintWriter(response.getOutputStream())) {
			exception.printStackTrace(writer);
		}
	}

	/** Records one handled request. */
	private void hit() {
		hitCount.incrementAndGet();
	}

	/** Dumps the attributes, headers, and parameters of a request for debugging. */
	private void debugHttpRequest(HttpServletRequest request) {
		logger.debug("=================================");
		logger.debug("Request attributes:");
		Enumeration<?> attributeNames = request.getAttributeNames();
		while (attributeNames.hasMoreElements()) {
			String name = (String) attributeNames.nextElement();
			logger.debug("   {}: {}", name, request.getAttribute(name));
		}

		logger.debug("Headers:");
		Enumeration<?> headerNames = request.getHeaderNames();
		while (headerNames.hasMoreElements()) {
			String name = (String) headerNames.nextElement();
			logger.debug("  {}:{}", name, request.getHeader(name));
		}

		logger.debug("Params:");
		// Was getHeaderNames() here — a copy-paste bug that enumerated header
		// names and looked them up as (usually absent) parameters.
		Enumeration<?> parameterNames = request.getParameterNames();
		while (parameterNames.hasMoreElements()) {
			String name = (String) parameterNames.nextElement();
			logger.debug("  {}:{}", name, request.getParameter(name));
		}

		logger.debug("Content type: {}", request.getContentType());
		logger.debug("Context Path: {}", request.getContextPath());
		logger.debug("Method: {}", request.getMethod());
		logger.debug("Character Encoding: {}", request.getCharacterEncoding());
		logger.debug("Path info: {}", request.getPathInfo());
		logger.debug("Query String: {}", request.getQueryString());
		logger.debug("Request Url: {}", request.getRequestURL());
		logger.debug("=================================");
	}

	/** Dumps the fields of a work request for debugging. */
	private void debugRequest(WorkRequest workRequest) {
		logger.debug("-------------");
		logger.debug("Work request:");
		logger.debug("  key:{}", workRequest.getKey());
		logger.debug("  Message:{}", workRequest.getMessage());
		logger.debug("  Bucket:{}", workRequest.getBucket());
		logger.debug("  Class:{}", workRequest.getClass());
		logger.debug("  toString():{}", workRequest.toString());
		logger.debug("-------------");
	}

	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		// GET is not used by the SQS daemon; kept as a liveness probe.
		logger.debug("it works");
	}
}
