package pl.pelcra.cesar.spider.webcrawler;

import java.io.File;

import org.apache.log4j.Logger;

import pl.pelcra.cesar.data.interfaces.ICommonData;
import pl.pelcra.cesar.spider.data.MapLangUrl;
import pl.pelcra.cesar.spider.webcrawler.interfaces.WebTask;
import pl.pelcra.cesar.tools.Tools;

import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;

import edu.uci.ics.crawler4j.crawler.Configurable;
import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.frontier.DocIDServer;
import edu.uci.ics.crawler4j.url.URLCanonicalizer;
import edu.uci.ics.crawler4j.url.WebURL;
import edu.uci.ics.crawler4j.util.IO;

/***
 * Configurable controller that recursively downloads pages; it can be run as a
 * thread via {@link Runnable}.
 * 
 * @author margielewski
 * 
 */
public class CrawlController extends Configurable implements Runnable {

	private static final Logger logger = Logger.getLogger(CrawlController.class
			.getName());

	private final CrawlConfig config;
	private final DocIDServer docIdServer;
	private final PageFetcher pageFetcher;

	// Shared sink for discovered language/URL pairs; set via setThreadData().
	private ICommonData<MapLangUrl> spiderThreadData;

	// Crawler implementation instantiated reflectively in start().
	// Bounded (was a raw Class) so start()'s <T extends WebCrawler> inference works
	// without unchecked warnings; same erasure, so callers are unaffected.
	private Class<? extends WebCrawler> webCrawler;

	// Root task of the crawl; must be set via setWebTask() before run().
	private WebTask webTask;

	/***
	 * Creates the controller, validates the configuration and prepares the
	 * on-disk frontier (a BerkeleyDB JE environment) used for document-id
	 * tracking.
	 * 
	 * @param config
	 *            crawl configuration; must pass {@code CrawlConfig.validate()}
	 * @param pageFetcher
	 *            fetcher shared by the crawlers started by this controller
	 * @throws Exception
	 *             if the configuration is invalid or a storage folder cannot
	 *             be created
	 */
	public CrawlController(CrawlConfig config, PageFetcher pageFetcher)
			throws Exception {
		super(config);

		this.config = config;

		config.validate();
		File folder = new File(config.getCrawlStorageFolder());
		// Re-check existence after mkdirs() so a concurrent creation of the
		// same folder does not make us throw spuriously.
		if (!folder.exists() && !folder.mkdirs() && !folder.exists()) {
			throw new Exception("Couldn't create this folder: "
					+ folder.getAbsolutePath());
		}

		boolean resumable = config.isResumableCrawling();

		// Transactions and locking are only needed when the crawl must be
		// able to survive a restart.
		EnvironmentConfig envConfig = new EnvironmentConfig();
		envConfig.setAllowCreate(true);
		envConfig.setTransactional(resumable);
		envConfig.setLocking(resumable);

		File envHome = new File(config.getCrawlStorageFolder() + "/frontier");
		if (!envHome.exists() && !envHome.mkdir() && !envHome.exists()) {
			throw new Exception("Couldn't create this folder: "
					+ envHome.getAbsolutePath());
		}
		if (!resumable) {
			// A fresh (non-resumable) crawl starts from an empty frontier.
			IO.deleteFolderContents(envHome);
		}

		Environment env = new Environment(envHome, envConfig);
		this.docIdServer = new DocIDServer(env, config);

		this.pageFetcher = pageFetcher;
	}

	/***
	 * @return the DocIDServer tracking seen URLs and their document ids
	 */
	public DocIDServer getDocIdServer() {
		return this.docIdServer;
	}

	/***
	 * @return the PageFetcher shared by the crawlers of this controller
	 */
	public PageFetcher getPageFetcher() {
		return this.pageFetcher;
	}

	/***
	 * Builds a seed {@code WebURL} from a URL string, assigning a fresh
	 * document id.
	 * 
	 * @param pageUrl
	 *            URL of the page as a String
	 * @return the seed WebURL, or {@code null} if the URL is invalid or has
	 *         already been seen
	 */
	public WebURL getSeed(String pageUrl) {
		return getSeed(pageUrl, -1);
	}

	/***
	 * Builds a seed {@code WebURL} from a URL string.
	 * 
	 * @param pageUrl
	 *            URL of the page as a String
	 * @param docId
	 *            document id to register for the URL, or a negative value to
	 *            let the DocIDServer assign one
	 * @return the seed WebURL at depth 0, or {@code null} if the URL is
	 *         invalid or (when {@code docId < 0}) already seen
	 */
	public WebURL getSeed(String pageUrl, int docId) {
		String canonicalUrl = URLCanonicalizer.getCanonicalURL(pageUrl);
		if (canonicalUrl == null) {
			logger.error("Invalid seed URL: " + pageUrl);
			return null;
		}
		if (docId < 0) {
			docId = this.docIdServer.getDocId(canonicalUrl);
			if (docId > 0) {
				// This URL is already seen.
				return null;
			}
			docId = this.docIdServer.getNewDocID(canonicalUrl);
		} else {
			try {
				this.docIdServer.addUrlAndDocId(canonicalUrl, docId);
			} catch (Exception e) {
				// Log with the throwable so the stack trace is preserved;
				// the seed is still returned with the requested id.
				logger.error("Could not add seed: " + e.getMessage(), e);
			}
		}

		WebURL webUrl = new WebURL();
		webUrl.setURL(canonicalUrl);
		webUrl.setDocid(docId);
		webUrl.setDepth((short) 0);

		return webUrl;
	}

	/***
	 * Recursively walks the task chain: for every URL produced by the given
	 * task, instantiates the follow-up task, lets the crawler process it, and
	 * recurses until a task has no successor.
	 * 
	 * @param webTask
	 *            the task whose result URLs are expanded
	 * @param webCrawler
	 *            crawler used to process each derived task
	 */
	private <T extends WebCrawler> void recursionProcessWebTask(
			WebTask webTask, final T webCrawler) {
		if (webTask.isNextWebTask()) {

			// Honor the configured politeness delay between request batches.
			Tools.threadSleep(this.config.getPolitenessDelay());

			for (WebURL webUrl : webTask.getListWebUrl()) {
				WebTask tmpWikiWebTask = WebTask.newInstance(webTask
						.getNextWebTask());
				tmpWikiWebTask.setCurWebURL(webUrl);
				tmpWikiWebTask.addParametrToRules(webUrl);
				webCrawler.processTasks(tmpWikiWebTask);

				this.recursionProcessWebTask(tmpWikiWebTask, webCrawler);
			}
		}
	}

	/***
	 * Overridden method from interface Runnable; starts the crawl using the
	 * crawler class, root task and thread data previously injected via the
	 * setters.
	 */
	@Override
	public void run() {
		start(this.webCrawler, this.webTask, this.spiderThreadData);
	}

	/***
	 * Sets the SpiderThreadData in this CrawlController.
	 * 
	 * @param spiderThreadData
	 *            object which stores MapLangUrls
	 */
	public void setThreadData(ICommonData<MapLangUrl> spiderThreadData) {
		this.spiderThreadData = spiderThreadData;
	}

	/***
	 * Sets the crawler class in this CrawlController.
	 * 
	 * @param webCrawler
	 *            class responsible for page crawling; must have a public
	 *            no-arg constructor
	 */
	public void setWebCrawler(Class<? extends WebCrawler> webCrawler) {
		this.webCrawler = webCrawler;
	}

	/***
	 * Sets the start webTask object in this CrawlController.
	 * 
	 * @param webTask
	 *            the initial webTask for page crawling
	 */
	public void setWebTask(WebTask webTask) {
		this.webTask = webTask;
	}

	/***
	 * Starts the recursive crawling process. Always signals
	 * {@code endPutting()} on the thread data (when present), even on failure,
	 * so consumers of the shared queue are released.
	 * 
	 * @param clazz
	 *            webCrawler class; an instance is created reflectively
	 * @param webTask
	 *            root webTask object (used consistently — the field is not
	 *            consulted, fixing the earlier parameter/field mix-up)
	 * @param spiderThreadData
	 *            object which stores MapLangUrls; may be null
	 */
	public <T extends WebCrawler> void start(final Class<T> clazz,
			WebTask webTask, ICommonData<MapLangUrl> spiderThreadData) {
		try {
			if (webTask == null) {
				logger.info("Cannot start WikiCrawlController, because 'webTask' is null");
			} else {
				logger.info("WikiCrawlController starts working");

				// getDeclaredConstructor().newInstance() replaces the
				// deprecated Class.newInstance(); its checked exceptions are
				// covered by the catch below.
				T wikiWebCrawler = clazz.getDeclaredConstructor().newInstance();
				wikiWebCrawler.init(this);
				wikiWebCrawler.processTasks(webTask);

				this.recursionProcessWebTask(webTask, wikiWebCrawler);

				logger.info("WikiCrawlController ends working");
			}
		} catch (Exception e) {
			// Log through log4j instead of printStackTrace().
			logger.error("WikiCrawlController failed: " + e.getMessage(), e);
		} finally {
			if (spiderThreadData != null) {
				spiderThreadData.endPutting();
			}
		}
	}
}
