package be.destin.harvesting;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

import be.destin.skos.core.Count;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.crawler.Page;
import edu.uci.ics.crawler4j.crawler.WebCrawler;
import edu.uci.ics.crawler4j.parser.HtmlParseData;
import edu.uci.ics.crawler4j.parser.TextParseData;
import edu.uci.ics.crawler4j.url.WebURL;

public class UsageCrawler extends WebCrawler {

	/** log4j category */
	private static final Logger log = Logger.getLogger(UsageCrawler.class);

	/** Set on the first unrecoverable error (cache write failure); stops all further crawling. */
	boolean fatalError = false;
	/** Number of pages visited by this crawler so far; used as a prefix in trace/log messages. */
	int count = 0;

	/**
	 * Decides whether the given URL should be crawled. Every URL is accepted
	 * until a fatal error occurs, after which crawling stops entirely.
	 *
	 * @param url candidate URL (unused: the decision depends only on {@link #fatalError})
	 * @return {@code true} while no fatal error has been recorded
	 */
	@Override
	public boolean shouldVisit(WebURL url) {
		return !fatalError;
	}

	/**
	 * Called when a page has been fetched and parsed. Looks up the harvest
	 * items registered for the page's document id in the shared
	 * {@code UsageCrawlerData} and, for each item, extracts a usage count from
	 * the raw page content, records it on the concept and optionally appends
	 * it to the cache file. Every outcome is reported through
	 * {@code ucd.trace(...)}.
	 *
	 * @param page the fetched page to process
	 */
	@Override
	public void visit(Page page) {
		int docId = page.getWebURL().getDocid();
		count++;
		String rawData = extractRawData(page);
		CrawlController controller = this.getMyController();
		UsageCrawlerData ucd = (UsageCrawlerData) controller.getCustomData();
		String message;
		if (rawData == null || rawData.isEmpty()) {
			message = count+": "+"NO DATA HARVESTED";
			log.error(message);
			ucd.trace(message);
			return;
		}
		List<UsageCrawlerItem> theHarvestItemList = ucd.harvestItems.get(docId);
		if (theHarvestItemList == null) {
			message = count+": "+"NO HARVESTED DATA LIST, docId="+docId+", url="+page.getWebURL().getURL()+", ucd Map size="+ucd.harvestItems.size();
			log.error(message);
			ucd.trace(message);
			return;
		}
		message = count+": Harvested data list size="+theHarvestItemList.size()+", url="+page.getWebURL().getURL();
		ucd.trace(message);
		for (UsageCrawlerItem aHarvestItem : theHarvestItemList) {
			// Trace one message per item (previously a stale pre-loop message could be re-traced).
			ucd.trace(processItem(aHarvestItem, rawData, ucd));
		}
	}

	/**
	 * Extracts the raw content of a fetched page.
	 *
	 * @param page the fetched page
	 * @return the full HTML for HTML pages, the text content for plain-text
	 *         pages, or {@code null} for any other parse type
	 */
	private String extractRawData(Page page) {
		if (page.getParseData() instanceof HtmlParseData) {
			return ((HtmlParseData) page.getParseData()).getHtml();
		}
		if (page.getParseData() instanceof TextParseData) {
			return ((TextParseData) page.getParseData()).getTextContent();
		}
		return null;
	}

	/**
	 * Harvests one item from the raw page content: checks for failure markers,
	 * computes the usage count, stores it on the concept and appends it to the
	 * cache file when a cache writer is configured. Sets {@link #fatalError}
	 * (and {@code ucd.fatalError}) when the cache cannot be written, which
	 * stops the crawl via {@link #shouldVisit(WebURL)}.
	 *
	 * @param aHarvestItem the harvest item (usage + concept) to process
	 * @param rawData non-empty raw page content
	 * @param ucd shared crawler data (cache writer, trace sink)
	 * @return the message describing the outcome, for tracing by the caller
	 */
	private String processItem(UsageCrawlerItem aHarvestItem, String rawData, UsageCrawlerData ucd) {
		String message;
		if (aHarvestItem.usage.harvestFailed(rawData)) {
			message = count+": "+aHarvestItem.concept.getScheme_About()+": FAILED";
			log.error(message);
			return message;
		}
		Count harvestCount = aHarvestItem.usage.harvestCount(aHarvestItem.concept, rawData);
		if (harvestCount == null) {
			message = count+": "+aHarvestItem.concept.getScheme_About()+": NO COUNT";
			log.error(message);
			return message;
		}
		// Success path: log at debug level (was log.error — a leftover debug statement).
		log.debug(aHarvestItem.concept.getScheme_About()+':'+harvestCount);
		aHarvestItem.concept.putApplicationUsage(aHarvestItem.usage.getApplication(), harvestCount.getRole(),
				harvestCount.getCount(), harvestCount.getCategory(), harvestCount.isPure());
		message = count+": "+aHarvestItem.concept.getScheme_About()+'\t'+harvestCount.getRole()+'\t'+harvestCount.getCount();
		if (ucd.cacheWriter != null) {
			try {
				//TODO: category and "pure" are not written for now but could be added to the end of the line
				ucd.cacheWriter.write(message);
				ucd.cacheWriter.newLine();
			} catch (IOException e) {
				message = count+": "+e.toString();
				// A cache write failure is unrecoverable: record it and stop the crawl.
				log.error(message, e);
				fatalError = true;
				ucd.fatalError = true;
			}
		}
		return message;
	}
}