package edu.kit.aifb.swala.data.crawler;

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Iterator;
import java.util.logging.Logger;

import org.semanticweb.yars.util.CallbackNxOutputStream;

import com.ontologycentral.ldspider.Crawler;
import com.ontologycentral.ldspider.CrawlerConstants;
import com.ontologycentral.ldspider.frontier.BasicFrontier;
import com.ontologycentral.ldspider.frontier.Frontier;
import com.ontologycentral.ldspider.hooks.content.ContentHandler;
import com.ontologycentral.ldspider.hooks.content.ContentHandlerRdfXml;
import com.ontologycentral.ldspider.hooks.error.ErrorHandler;
import com.ontologycentral.ldspider.hooks.error.ErrorHandlerLogger;
import com.ontologycentral.ldspider.hooks.error.ObjectThrowable;
import com.ontologycentral.ldspider.hooks.fetch.FetchFilterSuffix;
import com.ontologycentral.ldspider.hooks.links.LinkFilter;
import com.ontologycentral.ldspider.hooks.links.LinkFilterDefault;
import com.ontologycentral.ldspider.hooks.sink.Sink;
import com.ontologycentral.ldspider.hooks.sink.SinkCallback;

@SuppressWarnings("deprecation")
public class LDCrawler {

	// Crawl configuration; defaults give a tiny demo crawl starting at
	// Tim Berners-Lee's FOAF card.
	private String seedURI = "http://www.w3.org/People/Berners-Lee/card";
	private int numberOfThreads = 2;
	private int maxuris = 10;

	// Output destinations: crawled statements go to outputFilename (N-Quads),
	// errors/lookup accounting go to logFilename.
	private String outputFilename = "data.nq";
	private String logFilename = "crawl.log";

	private final static Logger _log = Logger.getLogger(LDCrawler.class.getSimpleName());

	/**
	 * Creates a crawler with the default seed URI, thread count, URI limit
	 * and output file names.
	 */
	public LDCrawler() {
	}

	/**
	 * Creates a crawler with explicit settings.
	 *
	 * @param seedURI         URI the crawl starts from
	 * @param numberOfThreads number of crawler threads
	 * @param maxuris         maximum number of URIs to look up
	 * @param outputFilename  file the crawled N-Quads are written to
	 */
	public LDCrawler(String seedURI, int numberOfThreads, int maxuris, String outputFilename) {
		this.seedURI = seedURI;
		this.numberOfThreads = numberOfThreads;
		this.maxuris = maxuris;
		this.outputFilename = outputFilename;
	}

	/**
	 * Runs a load-balanced LDSpider crawl from the configured seed URI,
	 * writing crawled statements to {@code outputFilename} and the error log
	 * to {@code logFilename}. Errors encountered during the crawl and a
	 * lookups-per-second summary are additionally printed to {@code System.err}.
	 *
	 * @throws URISyntaxException if the configured seed URI is malformed
	 * @throws IOException        if an output file cannot be opened or written
	 */
	public void crawl() throws URISyntaxException, IOException {
		// try-with-resources guarantees both output files are flushed and
		// closed even when the crawl throws. Closing the PrintStream flushes
		// the BufferedOutputStream it wraps; the previous version closed the
		// raw FileOutputStream directly and never flushed the PrintStream,
		// which could silently drop buffered log data.
		try (OutputStream os = new BufferedOutputStream(new FileOutputStream(outputFilename));
				PrintStream ps = new PrintStream(new BufferedOutputStream(new FileOutputStream(logFilename)))) {

			// Sink that serializes crawled statements as NxParser output.
			CallbackNxOutputStream cbos = new CallbackNxOutputStream(os, false);
			Sink sink = new SinkCallback(cbos);

			// Collects lookup errors and accounting; shared by frontier,
			// link filter and crawler.
			ErrorHandler eh = new ErrorHandlerLogger(ps, null, false);

			Frontier frontier = new BasicFrontier();
			frontier.setErrorHandler(eh);
			frontier.add(new URI(seedURI));
			_log.info("frontier done");

			LinkFilter links = new LinkFilterDefault(frontier);
			links.setErrorHandler(eh);

			long time = System.currentTimeMillis();

			// Skip URIs with suffixes on LDSpider's built-in blacklist
			// (non-RDF content such as images).
			FetchFilterSuffix blacklist = new FetchFilterSuffix(CrawlerConstants.BLACKLIST);

			_log.info("init crawler");

			Crawler c = new Crawler(numberOfThreads);
			c.setContentHandler(new ContentHandlerRdfXml());
			c.setErrorHandler(eh);
			c.setOutputCallback(sink);
			c.setLinkFilter(links);
			c.setBlacklistFilter(blacklist);

			_log.info("load balanced crawl with " + numberOfThreads + " threads, maxuris " + maxuris);

			c.evaluateLoadBalanced(frontier, maxuris);

			// Echo every recorded error to stderr for quick inspection.
			for (Iterator<ObjectThrowable> it = eh.iterator(); it.hasNext(); ) {
				ObjectThrowable ot = it.next();
				System.err.println(ot.getThrowable().getMessage() + " " + ot.getObject());
			}
			System.err.println(eh);

			c.close();

			long time1 = System.currentTimeMillis();
			System.err.println("time elapsed " + (time1 - time) + " ms "
					+ (float) eh.lookups() / ((time1 - time) / 1000.0) + " lookups/sec");
		}
	}

	/** @return the URI the crawl starts from */
	public String getSeedURI() {
		return seedURI;
	}

	/** @param seedURI the URI the crawl starts from */
	public void setSeedURI(String seedURI) {
		this.seedURI = seedURI;
	}

	/** @return the number of crawler threads */
	public int getNumberOfThreads() {
		return numberOfThreads;
	}

	/** @param numberOfThreads the number of crawler threads */
	public void setNumberOfThreads(int numberOfThreads) {
		this.numberOfThreads = numberOfThreads;
	}

	/** @return the maximum number of URIs to look up */
	public int getMaxuris() {
		return maxuris;
	}

	/** @param maxuris the maximum number of URIs to look up */
	public void setMaxuris(int maxuris) {
		this.maxuris = maxuris;
	}

}
