package br.com.intelsys.crawler;

import java.io.File;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;

import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import br.com.intelsys.utils.Knife;
import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;

/**
 * Fluent wrapper around a crawler4j {@link CrawlController} run for a single seed URL.
 * <p>
 * Usage: {@code new CrawlerBrowser(url).maxPagesToFetch(100).threadSize(4).start()}.
 * Each instance isolates its crawl storage under a per-URL subfolder derived from an
 * MD5-based id, so concurrent crawls of different URLs do not collide on disk.
 * <p>
 * Not thread-safe: configure and start from a single thread.
 */
public class CrawlerBrowser {
	/** Seed URL this browser crawls; immutable for the lifetime of the instance. */
	public final String url;
	/** Page limit passed to crawler4j; -1 means "no limit" (crawler4j default is used). */
	private int maxPagesToFetch = -1;
	private String crawlStorageFolder = "C:/tmp/va";
	/** Short, stable per-URL id used both for storage isolation and log correlation. */
	private String urlId;
	private int threadSize = 1;
	private ControllerData controllerData = null;
	private static final Log log = LogFactory.getLog(CrawlerBrowser.class);

	/**
	 * @param url seed URL to crawl; also used to derive the unique storage/log id
	 */
	public CrawlerBrowser(String url) {
		this.url = url;
		// substring(8) keeps the id short; MD5 of the URL makes it deterministic per URL.
		urlId = Knife.getMD5(url).substring(8);
	}

	/**
	 * Supplies a custom controller-data object shared with the crawler threads.
	 * If never called, {@link #start()} falls back to a {@link BasicControllerData}.
	 *
	 * @return this, for fluent chaining
	 */
	public CrawlerBrowser controllerData(ControllerData controllerData) {
		this.controllerData = controllerData;
		return this;
	}

	/**
	 * Caps the number of pages fetched. Negative values (the default) mean unlimited.
	 *
	 * @return this, for fluent chaining
	 */
	public CrawlerBrowser maxPagesToFetch(int maxPages) {
		this.maxPagesToFetch = maxPages;
		return this;
	}

	/**
	 * Sets the number of crawler threads (default 1).
	 *
	 * @return this, for fluent chaining
	 */
	public CrawlerBrowser threadSize(int size) {
		this.threadSize = size;
		return this;
	}

	/**
	 * Resolves (and creates if necessary) this crawl's private storage folder.
	 *
	 * @return absolute path of the per-URL storage folder
	 */
	private String getCrawlStorageFolder() {
		File f = new File(crawlStorageFolder + "/" + urlId);
		// Surface a creation failure early; otherwise crawler4j fails later with a
		// far less obvious error when it tries to open its frontier database here.
		if (!f.mkdirs() && !f.isDirectory()) {
			log.warn("Could not create crawl storage folder: " + f.getAbsolutePath());
		}
		return f.getAbsolutePath();
	}

	/**
	 * Runs the crawl synchronously and returns the (possibly caller-supplied)
	 * controller data populated by the crawler threads.
	 * <p>
	 * Failures during controller setup or crawling are logged and swallowed; the
	 * controller data is returned in whatever state it reached.
	 *
	 * @return the {@link ControllerData} used for this crawl, never null
	 */
	public ControllerData start() {
		long startTime = 0;
		if (log.isDebugEnabled()) {
			log.debug("Start crawling [" + urlId + "]: [maxPagesToFetch: " + maxPagesToFetch + ", threadSize: " + threadSize + "] " + url);
			startTime = System.currentTimeMillis();
		}
		CrawlConfig config = new CrawlConfig();
		config.setCrawlStorageFolder(getCrawlStorageFolder());
		if (maxPagesToFetch >= 0) {
			config.setMaxPagesToFetch(maxPagesToFetch);
		}
		PageFetcher pageFetcher = new PageFetcher(config);
		RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
		RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
		// Fall back to a basic controller data when the caller did not supply one.
		ControllerData ctrlData = controllerData != null ? controllerData : new BasicControllerData(url);
		ctrlData.setUniqueUrlId(urlId);
		try {
			CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);
			controller.setCustomData(ctrlData);
			controller.addSeed(url);
			// Blocks until all crawler threads finish.
			controller.start(GuardWebCrawler.class, threadSize);
			// clearData();  // intentionally disabled: keep crawl storage for inspection/resume
			if (log.isDebugEnabled()) {
				NumberFormat formatter = new DecimalFormat("#0.00000");
				log.debug("Finished crawling [" + urlId + "]: totalTime: "
						+ formatter.format((System.currentTimeMillis() - startTime) / 1000d) + " seconds");
			}
		} catch (Exception e) {
			// Log through the configured logger (with context) instead of printStackTrace,
			// which bypasses logging configuration and loses the crawl identity.
			log.error("Crawl failed [" + urlId + "]: " + url, e);
		}
		return ctrlData;
	}

	/**
	 * Deletes this crawl's storage folder. Currently unused — its only call site in
	 * {@link #start()} is deliberately disabled; kept for manual cleanup/reuse.
	 */
	private void clearData() {
		try {
			FileUtils.deleteDirectory(new File(getCrawlStorageFolder()));
		} catch (IOException e) {
			if (log.isErrorEnabled()) {
				log.error("Failed to delete crawl storage folder for [" + urlId + "]", e);
			}
		}
	}
}
