/**
 * Copyright (C) 2009-2012 Couchbase, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALING
 * IN THE SOFTWARE.
 */

package net.crawwle.career.analyze.servlet;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import net.crawwle.career.analyze.couchbase.ConnectionManager;
import net.crawwle.career.analyze.dao.JobSearchDao;
import net.spy.memcached.internal.OperationFuture;

import org.elasticsearch.client.Client;
import org.elasticsearch.search.SearchHit;

import com.couchbase.client.CouchbaseClient;
import com.google.gson.Gson;

/**
 * The JobServlet handles all job-related HTTP queries.
 *
 * The JobServlet is used to handle all HTTP queries under the /jobs namespace.
 * The "web.xml" defines a wildcard route for every /jobs/* route, so the
 * doGet() method needs to determine what should be dispatched.
 */
public class JobServlet extends HttpServlet {

	private static final long serialVersionUID = 1L;

	/** Class-wide logger; replaces the ad-hoc printStackTrace()/System.out calls. */
	private static final Logger LOGGER =
			Logger.getLogger(JobServlet.class.getName());

	/** Maximum number of search hits converted into the JSON response. */
	private static final int MAX_RESULTS = 100;

	/**
	 * Google GSON is used for JSON encoding/decoding.
	 */
	final Gson gson = new Gson();

	/**
	 * Dispatch all incoming GET HTTP requests.
	 *
	 * Since the /jobs/* routes are wildcarded and will all end up here, the
	 * method needs to check against the PATH (through getPathInfo()) and
	 * determine which helper method should be called. The helper method then
	 * does the actual request and response handling.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws ServletException
	 * @throws IOException
	 */
	@Override
	protected void doGet(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		String path = request.getPathInfo();
		try {
			if (path == null) {
				handleIndex(request, response);
			} else if (path.startsWith("/show")) {
				handleShow(request, response);
			} else if (path.startsWith("/delete")) {
				handleDelete(request, response);
			} else if (path.startsWith("/edit")) {
				handleEdit(request, response);
			} else if (path.startsWith("/search")) {
				handleSearch(request, response);
			} else {
				// No handler matched: report 404 instead of silently
				// returning an empty 200 response.
				response.sendError(HttpServletResponse.SC_NOT_FOUND);
			}
		} catch (InterruptedException ex) {
			// Restore the interrupt flag so the container can observe it.
			Thread.currentThread().interrupt();
			LOGGER.log(Level.SEVERE, "Interrupted while handling " + path, ex);
		} catch (ExecutionException ex) {
			LOGGER.log(Level.SEVERE, "Backend operation failed for " + path, ex);
		} catch (ServletException ex) {
			// Let the container handle servlet failures normally.
			throw ex;
		} catch (IOException ex) {
			throw ex;
		} catch (Exception ex) {
			// handleSearch() declares "throws Exception"; log anything else
			// instead of dumping a stack trace to stderr.
			LOGGER.log(Level.SEVERE, "Unexpected failure handling " + path, ex);
		}
	}

	/**
	 * Store and validate the job form.
	 *
	 * Persisting the posted form is not implemented yet; POST requests are
	 * currently handled exactly like a GET on the index page.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws ServletException
	 * @throws IOException
	 */
	@Override
	protected void doPost(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		handleIndex(request, response);
	}

	/**
	 * Handle the /jobs action.
	 *
	 * Placeholder: the index view is not implemented yet, so this is
	 * intentionally a no-op.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws IOException
	 * @throws ServletException
	 */
	private void handleIndex(HttpServletRequest request,
			HttpServletResponse response) throws IOException, ServletException {}

	/**
	 * Handle the /jobs/show/&lt;job-ID&gt; action.
	 *
	 * Placeholder: the show view is not implemented yet, so this is
	 * intentionally a no-op.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws IOException
	 * @throws ServletException
	 */
	private void handleShow(HttpServletRequest request,
			HttpServletResponse response) throws IOException, ServletException {}

	/**
	 * Handle the /jobs/delete/&lt;job-ID&gt; action.
	 *
	 * This method deletes a job based on the given job id and redirects to
	 * /jobs when the delete succeeded.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws IOException
	 * @throws ServletException
	 * @throws InterruptedException
	 * @throws ExecutionException
	 */
	private void handleDelete(HttpServletRequest request,
			HttpServletResponse response) throws IOException, ServletException,
			InterruptedException, ExecutionException {
		CouchbaseClient client = ConnectionManager.getInstance();

		// Path is /jobs/delete/<job-ID>; the id is the second segment.
		// Guard the array access: "/delete" alone used to throw
		// ArrayIndexOutOfBoundsException.
		String[] segments = request.getPathInfo().split("/");
		if (segments.length < 3) {
			response.sendError(HttpServletResponse.SC_BAD_REQUEST,
					"Missing job id");
			return;
		}

		OperationFuture<Boolean> delete = client.delete(segments[2]);
		if (delete.get()) {
			response.sendRedirect("/jobs");
		}
	}

	/**
	 * Handle the /jobs/edit and /jobs/edit/&lt;job-ID&gt; actions.
	 *
	 * With an id, the stored document is loaded and exposed to the JSP as
	 * the "job" attribute; without an id (or for an unknown id) the empty
	 * "create" form is rendered.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws ServletException
	 * @throws IOException
	 */
	private void handleEdit(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		CouchbaseClient client = ConnectionManager.getInstance();

		String[] jobId = request.getPathInfo().split("/");
		if (jobId.length > 2) {
			String document = (String) client.get(jobId[2]);
			if (document != null) {
				@SuppressWarnings("unchecked")
				HashMap<String, String> job =
						gson.fromJson(document, HashMap.class);
				job.put("id", jobId[2]);
				request.setAttribute("job", job);
				request.setAttribute("title",
						"Modify job \"" + job.get("name") + "\"");
			} else {
				// Unknown id: fall back to the "create" form. The original
				// dereferenced the null map here and threw a NullPointerException.
				request.setAttribute("title", "Create a new job");
			}
		} else {
			request.setAttribute("title", "Create a new job");
		}

		request.getRequestDispatcher("/WEB-INF/jobs/edit.jsp").forward(request,
				response);
	}

	/**
	 * Handle the /jobs/search action.
	 *
	 * Records the searched keyword/position, queries Elasticsearch for
	 * matching documents, loads each hit's source document from Couchbase
	 * and returns the result list as JSON for the javascript layer.
	 *
	 * @param request
	 *            the HTTP request object.
	 * @param response
	 *            the HTTP response object.
	 * @throws Exception
	 *             on any search or persistence failure.
	 */
	private void handleSearch(HttpServletRequest request,
			HttpServletResponse response) throws Exception {

		CouchbaseClient client = ConnectionManager.getInstance();
		Client esClient = ConnectionManager.getEsClient();
		CouchbaseClient clientKeyword = ConnectionManager.getKeywordInstance();

		// Missing parameters arrive as null; normalise to "" so the length
		// checks below cannot throw a NullPointerException.
		String keyword = request.getParameter("keyword");
		if (keyword == null) {
			keyword = "";
		}
		String position = request.getParameter("position");
		if (position == null) {
			position = "";
		}
		LOGGER.fine("keyword:" + keyword + " position:" + position);

		if (keyword.length() > 0 || position.length() > 0) {
			KeywordInfo doc = new KeywordInfo();
			doc.setKeyword(keyword);
			doc.setPosition(position);
			// Key on the millisecond timestamp: Date.toString() only has
			// one-second resolution, so concurrent searches silently
			// collided (add() fails for an existing key).
			clientKeyword.add(String.valueOf(System.currentTimeMillis()),
					gson.toJson(doc)).get();
		}

		JobSearchDao dao = new JobSearchDao();
		String index = ConnectionManager.getEsCluster();
		try {
			SearchHit[] hits;
			if (keyword.length() > 0 && position.length() > 0) {
				hits = dao.searchDocument(esClient, index, "couchbaseDocument",
						keyword, position);
			} else if (keyword.length() > 0) {
				hits = dao.searchDocument(esClient, index, "couchbaseDocument",
						keyword);
			} else if (position.length() > 0) {
				hits = dao.searchDocument(esClient, index, "couchbaseDocument",
						position);
			} else {
				hits = dao.searchAllDocument(esClient, index,
						"couchbaseDocument");
			}

			HashMap<String, Object> responseResult =
					new HashMap<String, Object>();
			responseResult.put("jobs", buildJobList(client, hits));

			response.setContentType("application/json; charset=UTF-8");
			PrintWriter out = response.getWriter();
			out.print(gson.toJson(responseResult));
			out.flush();
		} finally {
			// Close even when the search or serialization fails; the
			// original leaked the Elasticsearch client on any exception.
			dao.closeElasticSearchClient();
		}
	}

	/**
	 * Load each hit's source document from Couchbase and convert it into the
	 * flat map consumed by the javascript layer.
	 *
	 * Hits whose document is missing or has no company name are skipped; at
	 * most MAX_RESULTS hits are examined.
	 *
	 * @param client
	 *            the Couchbase connection used to fetch documents.
	 * @param hits
	 *            the Elasticsearch hits (may be null).
	 * @return the (possibly empty) list of result maps, never null.
	 */
	private ArrayList<HashMap<String, String>> buildJobList(
			CouchbaseClient client, SearchHit[] hits) {
		ArrayList<HashMap<String, String>> jobs =
				new ArrayList<HashMap<String, String>>();
		if (hits == null) {
			return jobs;
		}

		int examined = 0;
		for (SearchHit hit : hits) {
			if (++examined > MAX_RESULTS) {
				break;
			}
			String document = (String) client.get(hit.getId());
			if (document == null) {
				continue;
			}
			@SuppressWarnings("unchecked")
			HashMap<String, String> parsedDoc =
					gson.fromJson(document, HashMap.class);

			String companyName = parsedDoc.get("companyName");
			if (companyName == null || "".equals(companyName)) {
				continue;
			}

			HashMap<String, String> job = new HashMap<String, String>();
			job.put("name", companyName);
			job.put("url", parsedDoc.get("url"));
			job.put("companyName", companyName);
			job.put("text", describeJob(parsedDoc));
			jobs.add(job);
		}
		return jobs;
	}

	/**
	 * Concatenate the human-readable fields of a parsed job document into
	 * the summary text shown in the result list.
	 *
	 * @param parsedDoc
	 *            the parsed Couchbase job document.
	 * @return the concatenated summary text, possibly empty.
	 */
	private String describeJob(HashMap<String, String> parsedDoc) {
		StringBuilder text = new StringBuilder();
		appendIfPresent(text, parsedDoc.get("syokusyu"), "\n");    // job category
		appendIfPresent(text, parsedDoc.get("nensyu"), "\n");      // annual salary
		appendIfPresent(text, parsedDoc.get("koyouKeitai"), "\n"); // employment type
		appendIfPresent(text, parsedDoc.get("workDesc"), "\n");    // job description
		appendIfPresent(text, parsedDoc.get("workTime"), "");      // working hours
		appendIfPresent(text, parsedDoc.get("kinnmuchi"), "");     // work location
		return text.toString();
	}

	/** Append value followed by suffix when value is non-null and non-empty. */
	private static void appendIfPresent(StringBuilder buf, String value,
			String suffix) {
		if (value != null && value.length() > 0) {
			buf.append(value).append(suffix);
		}
	}

}
