package sk.tonyb.crawler;

import java.util.Arrays;

import org.apache.log4j.Logger;

import sk.tonyb.crawler.configuration.ConfigValues;
import sk.tonyb.crawler.storage.CrawlerLocalStorage;
import sk.tonyb.library.file.FileReader;
import sk.tonyb.library.file.FileWriter;
import sk.tonyb.library.textprocessor.html.HtmlParser;
import sk.tonyb.library.web.downloader.WebDocumentDownloader;

/** 
 * Class, which contains algorithms used to crawl web. 
 * 
 * @author Anton Balucha
 * @since 12.10.2011
 * @last_modified 12.10.2011
 */
public class Executor implements Runnable {
	
	/** Log4j logger. */
	public static Logger logger = Logger.getLogger(Executor.class);
	
	/**
	 * Remaining number of pages to download. Set from configuration in
	 * {@link #initValues()}; the value -1 acts as an "unlimited" sentinel
	 * (see {@link #isAllowedDownload()}). No field initializer is needed --
	 * the previous default of 50 was dead code, always overwritten by the
	 * constructor before first use.
	 */
	private int pageCounter;
	
	/**
	 * Constructor. Loads the page limit from configuration and immediately
	 * starts the crawler thread.
	 * 
	 * NOTE(review): starting a thread inside a constructor publishes an
	 * incompletely constructed {@code this} to the new thread. A separate
	 * start() method would be safer, but the auto-start contract is kept
	 * here because existing callers rely on construction == start.
	 */
	public Executor() {
		this.initValues();
		this.initClass();
	}

	/** Method, which initialize values of this class. */
	private synchronized void initValues() {
		// Maximum number of pages to crawl, taken from the configuration singleton.
		this.pageCounter = ConfigValues.getInstance().getPageCountMax();
	}
	
	/** Method, which initialize thread of this class. */
	private synchronized void initClass() {
		try {
			Thread executor = new Thread(this, "executor");
			executor.start();
		}
		catch(Exception e) {
			logger.error("Exception", e);
		}
	}
	
	/** Method, which provides running of thread. */
	@Override
	public synchronized void run() {
		this.crawlWeb();
	}
	
	/**
	 * Method, which contains algorithm used to crawl web.
	 * 
	 * Seeds the shared link storage with the configured start page, then
	 * repeatedly: takes the next link, downloads its HTML, extracts new
	 * links, saves the page to disk and queues the extracted links --
	 * until {@link #isAllowedDownload()} reports the limit is exhausted.
	 * A failure on any single page is logged and skipped so the crawl can
	 * continue with the next link.
	 */
	private synchronized void crawlWeb() {
		
		CrawlerLocalStorage.getInstance().insertLink(ConfigValues.getInstance().getPageStart());
		String linkToProcess = null;
		String page = null;
		String[] listOfLinks = null;
		
		while(this.isAllowedDownload()) {
			
			linkToProcess = CrawlerLocalStorage.getInstance().getLinkToProcess();
			logger.debug("LINK TO PROCESS: " + linkToProcess);
			
			if(linkToProcess != null && linkToProcess.trim().length() > 0) {
				
				try {
				
					page = new WebDocumentDownloader(linkToProcess).getContentHtml();
					logger.trace("PAGE: " + page);
									
					listOfLinks = new HtmlParser(page).getListOfLinks2();
					// Arrays.toString() is required here: concatenating the array
					// directly would only log its identity hash, not the links.
					logger.debug("LIST OF LINKS: " + Arrays.toString(listOfLinks));
					
					// Library FileWriter persists the page as a side effect of construction.
					new FileWriter(ConfigValues.getInstance().getDownloadLocation() + this.pageCounter + ".html", page);
					logger.debug("LOCATION: " + ConfigValues.getInstance().getDownloadLocation() + this.pageCounter + ".html");
					
					CrawlerLocalStorage.getInstance().insertLinks(listOfLinks);
					logger.debug("LIST OF LINKS: " + Arrays.toString(listOfLinks));
				}
				
				// Kept as a separate catch only for its distinct log message;
				// it is otherwise subsumed by the Exception handler below.
				catch(NullPointerException e) {
					logger.error("NullPointerException", e);
				}
				
				catch(Exception e) {
					logger.error("Exception", e);
				}
			}
			
			this.finishDownloadCycle();
		}
	}
	

	/**
	 * Method, which determine, if download is allowed.
	 * 
	 * @return true while pages remain (counter > 0) or the counter is the
	 *         -1 "unlimited" sentinel; false once the limit is exhausted.
	 */
	private synchronized boolean isAllowedDownload() {
		return this.pageCounter == -1 || this.pageCounter > 0;
	}
	
	/**
	 * Method, which provides actions needed to finish download cycle.
	 * 
	 * Bug fix: the -1 "unlimited" sentinel must not be decremented --
	 * previously a single cycle turned -1 into -2, which made
	 * {@link #isAllowedDownload()} false and stopped the crawl after one
	 * page instead of running without a limit.
	 */
	private synchronized void finishDownloadCycle() {
		if(this.pageCounter > 0) {
			this.pageCounter--;
		}
	}
}
