import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.noggit.JSONUtil;
import org.w3c.dom.Document;

import crawler.Crawler;
import crawler.Crawlers;
import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;
import util.DAO;
import util.JSONQuery;
import util.$;
import util.Properties;

/**
 * Front controller for the application.
 *
 * <p>Dispatches requests by the {@code service}/{@code action} parameters:
 * full-text search and single-item lookup against Solr, starting/listing
 * crawls (crawler4j), and listing stores from the database. All handlers
 * respond with JSON.
 */
@WebServlet("/")
public class Controller extends HttpServlet {
	private static final long serialVersionUID = 1L;
	private static final String charset = "UTF-8";
	private static final String solr = "http://opus.ma/solr";
	private HttpSolrServer solrServer = new HttpSolrServer(solr);

	// Registry of crawl controllers, keyed by store id; created in init().
	private Crawlers crawlers;

	// Database connection settings read from WEB-INF/config.xml in init().
	private String db_host;
	private String db_name;
	private String db_user;
	private String db_pass;

	private DAO dao;

	/**
	 * Loads WEB-INF/config.xml, creates the DAO and the crawler registry.
	 *
	 * @throws ServletException if the configuration cannot be read or parsed.
	 *         The servlet is unusable without it, so fail fast here instead of
	 *         swallowing the error and hitting an NPE on {@code dao} later.
	 * @see Servlet#init(ServletConfig)
	 */
	public void init(ServletConfig servletConfig) throws ServletException {
		try {
			ServletContext context = servletConfig.getServletContext();
			String configFile = context.getRealPath("WEB-INF/config.xml");
			File file = new File(configFile);

			// Harden the XML parser against XXE: the config file is local and
			// never legitimately needs a DOCTYPE or external entities.
			DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
			dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
			DocumentBuilder db = dbf.newDocumentBuilder();
			Document config = db.parse(file);
			db_host = config.getElementsByTagName("db-host").item(0).getTextContent();
			db_name = config.getElementsByTagName("db-name").item(0).getTextContent();
			db_user = config.getElementsByTagName("db-user").item(0).getTextContent();
			db_pass = config.getElementsByTagName("db-pass").item(0).getTextContent();
			dao = new DAO(db_host, db_name, db_user, db_pass);

			crawlers = new Crawlers();
		} catch (Exception e) {
			throw new ServletException("Failed to load WEB-INF/config.xml", e);
		}
	}

	/**
	 * POST is handled identically to GET.
	 *
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		doGet(request, response);
	}

	/**
	 * Routes the request to the matching handler.
	 *
	 * <p>Routing rules: no {@code service} → item lookup when {@code id} is
	 * present, otherwise search; {@code service=crawl} with
	 * {@code action=start|list} → crawl management; {@code service=stores}
	 * with {@code action=list} → store listing; any other service falls back
	 * to item lookup / search.
	 *
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

		Map<String, String[]> params = request.getParameterMap();
		String service = request.getParameter("service");
		String action = request.getParameter("action");
		if ($.isEmpty(service)) {
			if (params.containsKey("id"))
				getItem(request, response);
			else
				search(request, response);
		} else if ("crawl".equals(service)) {
			if ("start".equals(action))
				startCrawl(request, response);
			else if ("list".equals(action))
				getCrawlers(request, response);
		} else if ("stores".equals(service)) {
			if ("list".equals(action))
				getStores(request, response);
		} else if (params.containsKey("id")) {
			getItem(request, response);
		} else {
			search(request, response);
		}
	}

	/**
	 * Runs a Solr search and writes the result as JSON.
	 *
	 * <p>Recognized parameters: {@code q} (query, defaults to match-all),
	 * {@code start} (paging offset, defaults to 0 on missing/malformed
	 * values), {@code fq} (comma-separated filter queries) and {@code sort}
	 * ({@code price} ascending or {@code score} descending).
	 */
	private void search(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

		response.setContentType("application/json");
		response.setCharacterEncoding(charset);
		PrintWriter out = response.getWriter();

		JSONQuery query = new JSONQuery();

		// Default to a match-all query when q is absent; the original passed
		// null straight through to Solr.
		String q = request.getParameter("q");
		query.setQuery($.isEmpty(q) ? "*:*" : q);

		// The original parsed "start" unchecked and threw NPE /
		// NumberFormatException on a missing or malformed parameter.
		int start = 0;
		String startParam = request.getParameter("start");
		if (!$.isEmpty(startParam)) {
			try {
				start = Math.max(0, Integer.parseInt(startParam.trim()));
			} catch (NumberFormatException ignored) {
				// malformed paging value — treat as first page
			}
		}
		query.setStart(start);

		String fq = request.getParameter("fq");
		if (!$.isEmpty(fq)) {
			for (String s : fq.split(","))
				query.addFilterQuery(s);
		}

		String sort = request.getParameter("sort");
		if ("price".equals(sort))
			query.addSort("price", ORDER.asc);
		else if ("score".equals(sort))
			query.addSort("score", ORDER.desc);

		String json;
		try {
			QueryResponse resp = solrServer.query(query);
			json = query.toJSON(resp);
		} catch (SolrServerException e) {
			json = JSONUtil.toJSON(e);
		}
		out.println(json);
	}

	/**
	 * Fetches a single item by {@code id} from Solr and streams the raw JSON
	 * response through to the client.
	 */
	private void getItem(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

		response.setContentType("application/json");
		response.setCharacterEncoding(charset);
		PrintWriter out = response.getWriter();

		// URL-encode the id so user input cannot inject extra query
		// parameters into the Solr request (the original concatenated it raw).
		String id = request.getParameter("id");
		String encodedId = URLEncoder.encode(id == null ? "" : id, charset);
		String query = solr + "/select?q=id:" + encodedId
				+ "&fl=name,resourcename,created,url,description,category,price,img_s&wt=json";

		URL url = new URL(query);
		// try-with-resources: the original never closed the reader/stream.
		try (BufferedReader reader = new BufferedReader(new InputStreamReader(url.openStream(), charset))) {
			String line;
			while (null != (line = reader.readLine())) {
				out.println(line);
			}
		}
	}

	/**
	 * Starts a crawl for the store given by {@code store_id}.
	 *
	 * <p>Responds with {@code {"status":"running"}} if a crawl for that store
	 * is already active, {@code {"status":"started"}} on success, or an error
	 * object on a missing parameter / crawl setup failure. {@code resume=true}
	 * enables crawler4j resumable crawling.
	 */
	private void startCrawl(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

		response.setContentType("application/json");
		response.setCharacterEncoding(charset);
		PrintWriter out = response.getWriter();
		ServletContext context = request.getServletContext();

		String storeId = request.getParameter("store_id");
		if ($.isEmpty(storeId)) {
			Map<String, String> map = new HashMap<String, String>();
			map.put("status", "error");
			map.put("message", $.EMPTY_REQUIRED_PARAMETER);
			out.println($.toJSON(map));
			return;
		}
		boolean resume = "true".equals(request.getParameter("resume"));

		// Refuse to start a second crawl for the same store while one runs.
		CrawlController controller = crawlers.getCrawler(storeId);
		if (controller != null && !controller.isFinished()) {
			out.println($.toJSON("status", "running"));
			return;
		}

		Timestamp timestamp = new Timestamp((new Date()).getTime());

		// Per-store crawl settings from the DB, plus the connection details
		// the crawler workers need to write results back.
		Properties prop = dao.getProperties(storeId);
		prop.put("db_host", db_host);
		prop.put("db_name", db_name);
		prop.put("db_pass", db_pass);
		prop.put("db_user", db_user);
		prop.put("storeId", storeId);
		prop.put("crawlId", timestamp.toString());

		int numberOfCrawlers = prop.getInt("crawlers", 10);

		CrawlConfig config = new CrawlConfig();
		String storageFolder = context.getRealPath("WEB-INF/data/" + storeId);
		config.setCrawlStorageFolder(storageFolder);
		config.setPolitenessDelay(prop.getInt("delay", 0));
		config.setMaxDepthOfCrawling(prop.getInt("depth", -1));
		config.setMaxPagesToFetch(prop.getInt("maxPages", -1));
		config.setResumableCrawling(resume);

		PageFetcher pageFetcher = new PageFetcher(config);
		RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
		RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
		try {
			controller = new CrawlController(config, pageFetcher, robotstxtServer);
			for (String seed : prop.getList("seed")) {
				controller.addSeed(seed);
			}
			controller.setCustomData(prop);
			crawlers.addCrawler(storeId, controller);

			// Launch the crawl asynchronously. Debug leftovers in the
			// original called shutdown() here while still reporting
			// "started", so no crawl ever ran.
			controller.startNonBlocking(Crawler.class, numberOfCrawlers);

			out.println($.toJSON("status", "started"));
		} catch (Exception e) {
			out.println($.toJSON("error", storageFolder + " - " + e.getMessage()));
			e.printStackTrace();
			if (controller != null) controller.shutdown();
		}
	}

	/**
	 * Lists all stores as JSON: {@code {"stores":[{"id":...,"name":...},...]}}.
	 * On a database error a single error object is written instead.
	 */
	private void getStores(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		response.setContentType("application/json");
		response.setCharacterEncoding(charset);
		PrintWriter out = response.getWriter();

		List<Map<String, String>> stores = new ArrayList<Map<String, String>>();
		try {
			for (Map.Entry<String, String> entry : dao.getStores().entrySet()) {
				Map<String, String> store = new HashMap<String, String>();
				store.put("id", entry.getKey());
				store.put("name", entry.getValue());
				stores.add(store);
			}
		} catch (SQLException e) {
			// Return after the error: the original fell through and appended
			// an empty "stores" document, producing two JSON payloads in one
			// response.
			out.println($.toJSON("error", e.getMessage()));
			return;
		}

		out.println($.toJSON("stores", stores));
	}

	/** Writes the JSON representation of all registered crawlers. */
	private void getCrawlers(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

		response.setContentType("application/json");
		response.setCharacterEncoding(charset);
		PrintWriter out = response.getWriter();

		out.println(crawlers.toJSON());
	}

	/** Stops every running crawl before the servlet is taken out of service. */
	@Override
	public void destroy() {
		crawlers.shutdownAll();
		super.destroy();
	}
}
