package com.orangeandbronze.demos.mapreduce;

import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.FetchOptions;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Query.SortDirection;
import com.google.appengine.api.files.AppEngineFile;
import com.google.appengine.api.files.FileService;
import com.google.appengine.api.files.FileServiceFactory;
import com.google.appengine.api.users.UserService;
import com.google.appengine.api.users.UserServiceFactory;
import com.google.appengine.tools.mapreduce.MapReduceJob;
import com.google.appengine.tools.mapreduce.MapReduceResult;
import com.google.appengine.tools.mapreduce.MapReduceSettings;
import com.google.appengine.tools.mapreduce.MapReduceSpecification;
import com.google.appengine.tools.mapreduce.Marshallers;
import com.google.appengine.tools.mapreduce.inputs.BlobstoreZipInput;
import com.google.appengine.tools.mapreduce.outputs.BlobFileOutput;
import com.google.appengine.tools.pipeline.JobInfo;
import com.google.appengine.tools.pipeline.NoSuchObjectException;
import com.google.appengine.tools.pipeline.PipelineService;
import com.google.appengine.tools.pipeline.PipelineServiceFactory;

@SuppressWarnings({ "serial", "deprecation" })
public class IndexHandler extends HttpServlet {

	@Override
	protected void doGet(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		// Retrieve list of uploaded files
		UserService userService = UserServiceFactory.getUserService();
		DatastoreService datastoreService = DatastoreServiceFactory.getDatastoreService();
		Query query = new Query(FileMetadata.KIND,
				KeyFactory.createKey("user", userService.getCurrentUser().getUserId()));
		query.addSort("uploadedOn", SortDirection.DESCENDING);
		final Iterable<Entity> entities = datastoreService.prepare(query).asIterable(FetchOptions.Builder.withLimit(10));
		List<Map<String, Object>> items = new LinkedList<Map<String, Object>>();
		PipelineService pipelineService = PipelineServiceFactory.newPipelineService();
		String jobId;
		for (Entity entity : entities) {
			Map<String, Object> properties = new HashMap<String, Object>(entity.getProperties());
			properties.put("key", entity.getKey().getName());
			if ((jobId = (String) entity.getProperty("wordcount_jobId")) != null) {
				getJobInfo(pipelineService, properties, jobId, "wordcount");
			}
			if ((jobId = (String) entity.getProperty("index_jobId")) != null) {
				getJobInfo(pipelineService, properties, jobId, "index");
			}
			if ((jobId = (String) entity.getProperty("phrases_jobId")) != null) {
				getJobInfo(pipelineService, properties, jobId, "phrases");
			}
			items.add(properties);
		}
		request.setAttribute("username", userService.getCurrentUser().getNickname());
		request.setAttribute("items", items);
		request.getRequestDispatcher("/WEB-INF/jsp/index.jsp").forward(request, response);
	}

	@SuppressWarnings("unchecked")
	protected void getJobInfo(PipelineService pipelineService,
			Map<String, Object> properties, String jobId, String prefix) {
		try {
			JobInfo info = pipelineService.getJobInfo(jobId);
			properties.put(prefix + "_jobState", info.getJobState().toString());
			if (JobInfo.State.COMPLETED_SUCCESSFULLY == info.getJobState()) {
				MapReduceResult<List<AppEngineFile>> result = (MapReduceResult<List<AppEngineFile>>) info.getOutput();
				List<AppEngineFile> files = (List<AppEngineFile>) result.getOutputResult();
				Map<String, String> outputFiles = new HashMap<>(files.size());
				FileService fileService = FileServiceFactory.getFileService();
				for (AppEngineFile file : files) {
					try {
						outputFiles.put(file.getFullPath(),
								fileService.getBlobKey(file).getKeyString());
					} catch (Exception e) {
						// TODO: handle exception
					}
				}
				properties.put(prefix + "_outputFiles", outputFiles);
			}
		} catch (NoSuchObjectException e) {
			System.out.println(e.getMessage());
		}
	}

	@SuppressWarnings({ "rawtypes", "unchecked" }) 
	@Override
	protected void doPost(HttpServletRequest request, HttpServletResponse response)
			throws ServletException, IOException {
		// Handle job requests
		String blobkey = request.getParameter("blobkey");
		String filekey = request.getParameter("filekey"); // key to Entity(kind="FileMetadata")
		int shardCount = 2;
		MapReduceSpecification spec = null;
		String type = null;
		if (request.getParameter("word_count") != null) {
			type = "WordCount";
			spec = MapReduceSpecification.of(
					"word_count",
					new BlobstoreZipInput(blobkey, (byte) '\n', shardCount),
					new WordCountMapper(),
					Marshallers.getStringMarshaller(),
					Marshallers.<String[]>getSerializationMarshaller(),
					new WordCountReducer(),
					new BlobFileOutput(filekey + "-%d", "text/plain", shardCount));
		} else if (request.getParameter("index") != null) {
			type = "Index";
			spec = MapReduceSpecification.of(
					"index",
					new BlobstoreZipInput(blobkey, (byte) '\n', shardCount),
					new IndexMapper(),
					Marshallers.getStringMarshaller(),
					Marshallers.getStringMarshaller(),
					new IndexReducer(),
					new BlobFileOutput(filekey + "-%d", "text/plain", shardCount));
		} else if (request.getParameter("phrases") != null) {
			type = "Phrases";
			spec = MapReduceSpecification.of(
					"phrases",
					new BlobstoreZipInput(blobkey, (byte) '\n', shardCount),
					new PhrasesMapper(),
					Marshallers.getStringMarshaller(),
					Marshallers.getStringMarshaller(),
					new PhrasesReducer(),
					new BlobFileOutput(filekey + "-%d", "text/plain", shardCount));
		} else if (request.getParameter("delete") != null) {
			UserService userService = UserServiceFactory.getUserService();
			DatastoreService datastoreService = DatastoreServiceFactory.getDatastoreService();
			try {
				final Key parentKey = KeyFactory.createKey(
						"user", userService.getCurrentUser().getUserId());
				final Key key = KeyFactory.createKey(parentKey, FileMetadata.KIND, filekey);
				Entity entity = datastoreService.get(key);
				PipelineService pipelineService = PipelineServiceFactory.newPipelineService();
				String jobId;
				if ((jobId = (String) entity.getProperty("wordcount_jobId")) != null) {
					try {
						pipelineService.deletePipelineRecords(jobId);
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
				if ((jobId = (String) entity.getProperty("index_jobId")) != null) {
					try {
						pipelineService.deletePipelineRecords(jobId);
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
				if ((jobId = (String) entity.getProperty("phrases_jobId")) != null) {
					try {
						pipelineService.deletePipelineRecords(jobId);
					} catch (Exception e) {
						e.printStackTrace();
					}
				}
				datastoreService.delete(key);
			} catch (EntityNotFoundException e) {
				e.printStackTrace();
			}
		}
		if (spec != null) {
			String pipelineId = MapReduceJob.start(spec, getSettings());
			updateFileMetadata(type, filekey, pipelineId);
		}
		// response.sendRedirect("/_ah/pipeline/status.html?root=" + pipelineId);
		response.sendRedirect("/demo");
	}

	private void updateFileMetadata(String type, String filekey,
			String pipelineId) {
		try {
			new UpdateFileMetadataJob().run(type, filekey, pipelineId);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	private static MapReduceSettings getSettings() {
		MapReduceSettings settings = new MapReduceSettings()
				.setWorkerQueueName("mapreduce-workers")
				.setControllerQueueName("default");
		return settings;
	}

}
