package pl.pelcra.cesar.spider.webcrawler;

import org.apache.http.HttpStatus;
import org.apache.log4j.Logger;

import pl.pelcra.cesar.spider.webcrawler.interfaces.WebTask;
import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.Page;
import edu.uci.ics.crawler4j.fetcher.CustomFetchStatus;
import edu.uci.ics.crawler4j.fetcher.PageFetchResult;
import edu.uci.ics.crawler4j.frontier.DocIDServer;
import edu.uci.ics.crawler4j.parser.HtmlParseData;
import edu.uci.ics.crawler4j.parser.ParseData;
import edu.uci.ics.crawler4j.parser.Parser;
import edu.uci.ics.crawler4j.url.WebURL;

/**
 * Crawls pages. First processes the current page, then checks the parallel
 * option to decide whether further pages should be processed, and continues
 * processing pages until paging ends.
 * 
 * @author margielewski
 * 
 */
public class WebCrawler {

	private static final Logger logger = Logger.getLogger(WebCrawler.class
			.getName());

	// Crawl configuration (supplies the maximum crawl depth).
	private CrawlConfig crawlConfig;
	// Maps URLs to document ids; a positive id means the URL was seen before.
	private DocIDServer docIdServer;
	// Fetches page headers and content over HTTP.
	private PageFetcher pageFetcher;
	// Parses fetched content into ParseData / HtmlParseData.
	private Parser parser;

	public WebCrawler() {

	}

	public WebCrawler(CrawlController crawlController) {
		this.init(crawlController);
	}

	/**
	 * Initializes this crawler with the collaborators held by the given
	 * controller.
	 * 
	 * @param crawlController
	 *            controller holding the page fetcher, doc-id server and crawl
	 *            configuration required to crawl a page
	 */
	public void init(CrawlController crawlController) {

		this.pageFetcher = crawlController.getPageFetcher();
		this.docIdServer = crawlController.getDocIdServer();
		this.parser = new Parser(crawlController.getConfig());
		this.crawlConfig = crawlController.getConfig();

	}

	/**
	 * Fetches, parses and processes the page referenced by the given task.
	 * Outgoing links that pass the task's filter and the configured crawl
	 * depth are queued on the task; finally the task processes its URL list
	 * and receives an after-process callback.
	 * 
	 * @param webTask
	 *            task with the current URL and the rules to process the page;
	 *            when {@code null} an error is logged and nothing happens
	 */
	public void processTask(WebTask webTask) {
		if (webTask == null) {
			logger.error("Null WebTask");
			return;
		}
		WebURL curURL = webTask.getCurWebURL();

		PageFetchResult fetchResult = null;
		try {
			fetchResult = this.pageFetcher.fetchHeader(curURL,
					webTask.getHttpRequestMethod(),
					webTask.getParameterValuePairs(),
					webTask.isCookieRequired());
			int statusCode = fetchResult.getStatusCode();

			// Anything other than 200 OK is logged (where interesting) and
			// skipped; redirects are not followed here.
			if (statusCode != HttpStatus.SC_OK) {
				if (statusCode == HttpStatus.SC_MOVED_PERMANENTLY
						|| statusCode == HttpStatus.SC_MOVED_TEMPORARILY) {
					logger.info(String.format("Status Code Page is %s",
							statusCode));
				} else if (statusCode == CustomFetchStatus.PageTooBig) {
					logger.info("Skipping a page which was bigger than max allowed size: "
							+ curURL.getURL());
				}
				return;
			}

			Page page = new Page(curURL);
			int docid = curURL.getDocid();
			if (fetchResult.fetchContent(page)
					&& this.parser.parse(page, curURL.getURL())) {
				ParseData parseData = page.getParseData();
				HtmlParseData htmlParseData = null;
				if (parseData instanceof HtmlParseData) {
					htmlParseData = (HtmlParseData) parseData;
					webTask.beforeProcessPage(htmlParseData);

					int maxCrawlDepth = this.crawlConfig
							.getMaxDepthOfCrawling();

					for (WebURL webURL : htmlParseData.getOutgoingUrls()) {
						webURL.setParentDocid(docid);
						webURL.setParentUrl(curURL.getURL());
						int newdocid = this.docIdServer.getDocId(webURL
								.getURL());
						if (newdocid > 0) {
							// This is not the first time that this Url is
							// visited. So, we set the depth to a negative
							// number.
							webURL.setDepth((short) -1);
							webURL.setDocid(newdocid);
						} else {
							webURL.setDocid(-1);
							webURL.setDepth((short) (curURL.getDepth() + 1));
							// A maxCrawlDepth of -1 means unlimited depth.
							if (maxCrawlDepth == -1
									|| curURL.getDepth() < maxCrawlDepth) {
								if (webTask.filterUrl(webURL)) {
									webTask.addWebURL(webURL);
								}
							}
						}
					}
					webTask.processListURLs();
				}
				// htmlParseData may be null here when the page was not HTML.
				webTask.afterProcessPage(webTask, page, htmlParseData);
			}
		} catch (Exception e) {
			// Pass the exception to the logger so the full stack trace is
			// preserved (instead of printStackTrace() to stderr).
			logger.error("Exception while processing: " + curURL.getURL(), e);
		} finally {
			if (fetchResult != null) {
				fetchResult.discardContentIfNotConsumed();
			}
		}
	}

	/**
	 * Processes the task's current page, then — while the task reports a
	 * parallel process — keeps processing each next parallel (paged) URL until
	 * paging ends, finally notifying the task that processing finished.
	 * 
	 * @param webTask
	 *            task with the rules used to process every page
	 */
	public void processTasks(WebTask webTask) {
		// Process the initial page first.
		processTask(webTask);
		// Then follow the chain of parallel (paged) URLs, if any.
		WebURL currentParallelWebUrl = webTask.getCurWebURL();
		while (webTask.isParallelProcess()) {
			WebURL filterParallelWebUrl = webTask.getFilterParallelWebUrl();
			if (webTask.isNextParallelPage(currentParallelWebUrl,
					filterParallelWebUrl)) {
				WebTask parallelWebTask = webTask.getParallelWebTask();
				if (parallelWebTask == null) {
					parallelWebTask = WebTask.newInstance(webTask);
				}

				parallelWebTask.setCurWebURL(filterParallelWebUrl);
				parallelWebTask.addParametrToRules(filterParallelWebUrl);
				processTask(parallelWebTask);
				// Collect the URLs gathered by the parallel task back into
				// the originating task.
				webTask.addWebURL(parallelWebTask.getListWebUrl());

				currentParallelWebUrl = filterParallelWebUrl;
			} else {
				break;
			}
		}
		webTask.finishProcessWebTasks();
	}
}