package edu.kit.aifb.ldstream.repository;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import java.net.URISyntaxException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

import org.openrdf.model.URI;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.query.BindingSet;
import org.openrdf.query.impl.MutableTupleQueryResult;
import org.openrdf.repository.RepositoryException;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.semanticweb.yars.util.CallbackNxOutputStream;

import com.ontologycentral.ldspider.Crawler;
import com.ontologycentral.ldspider.CrawlerConstants;
import com.ontologycentral.ldspider.frontier.BasicFrontier;
import com.ontologycentral.ldspider.frontier.Frontier;
import com.ontologycentral.ldspider.hooks.content.ContentHandler;
import com.ontologycentral.ldspider.hooks.content.ContentHandlerAny23;
import com.ontologycentral.ldspider.hooks.content.ContentHandlers;
import com.ontologycentral.ldspider.hooks.error.ErrorHandler;
import com.ontologycentral.ldspider.hooks.error.ErrorHandlerDummy;
import com.ontologycentral.ldspider.hooks.fetch.FetchFilter;
import com.ontologycentral.ldspider.hooks.fetch.FetchFilterSuffix;
import com.ontologycentral.ldspider.hooks.links.LinkFilter;
import com.ontologycentral.ldspider.hooks.sink.Sink;
import com.ontologycentral.ldspider.hooks.sink.SinkCallback;

import edu.kit.aifb.ldstream.env.Environment;
import edu.kit.aifb.ldstream.learner.query.SPARQLQuery;
import edu.kit.aifb.ldstream.repository.event.EventManager;
import edu.kit.aifb.ldstream.repository.ldcrawler.LinkFilterAllowURI;
import edu.kit.aifb.ldstream.repository.listeners.AbstractListeningThread;
import edu.kit.aifb.ldstream.repository.listeners.FileListeningThread;
import edu.kit.aifb.ldstream.repository.listeners.HTTPListeningThread;
import edu.kit.aifb.ldstream.repository.nquad.NQuadHandler;
import edu.kit.aifb.ldstream.repository.nquad.NQuadsParser;

public class RepositoryBuilder extends Thread{

	/**
	 * Fallback base URI used when callers pass {@code null}.
	 * (Was the malformed single-slash form {@code "http:/noBaseURIgiven/"}.)
	 */
	private static final String DEFAULT_BASE_URI = "http://noBaseURIgiven/";

	// volatile: written by this worker thread in run()/shutdown(), read by
	// other threads through atWork().
	private volatile boolean working = false;

	private LinkedList<AbstractListeningThread> listeners;
	private final EventManager eventManager = new EventManager();
	private final ExecutorService threadPool;

	/**
	 * @param threadPool executor used to run the SPARQL predicate query
	 *                   asynchronously during {@link #lookups()}.
	 */
	public RepositoryBuilder(ExecutorService threadPool){
		this.threadPool = threadPool;
		this.listeners = new LinkedList<AbstractListeningThread>();
	}


	/**
	 * Adds a stream and creates an {@code HTTPListeningThread} if listening is not
	 * in progress; otherwise the call is ignored.
	 * The streams will be deleted after listening.
	 * 
	 * @param stream URL to be added.
	 * @param baseURI of the RDF data.
	 * @param listeningTime in milliseconds.
	 */
	public void addStream(String stream, String baseURI, int listeningTime){
		if(!this.working){
			this.listeners.add(new HTTPListeningThread(stream, baseURI, listeningTime, this.eventManager));
		}
	}

	/**
	 * Adds an RDF file to the global repository by creating a
	 * {@code FileListeningThread}. Ignored while listening is in progress.
	 * 
	 * @param filePath path of the file to read.
	 * @param baseURI base URI of the RDF data; may be {@code null}, in which case
	 *                {@link #DEFAULT_BASE_URI} is substituted.
	 * @param format RDF serialization of the file; may be {@code null} to let the
	 *               listener use its default format.
	 */
	public void addFile(String filePath, String baseURI, RDFFormat format){
		if(this.working){
			return;
		}
		String base = (baseURI != null) ? baseURI : DEFAULT_BASE_URI;
		if(format == null){
			this.listeners.add(new FileListeningThread(filePath, base));
		}else{
			this.listeners.add(new FileListeningThread(filePath, base, format));
		}
	}

	/** {@inheritDoc} */
	@Override
	public void run(){

		this.working = true;
		for(AbstractListeningThread listener : this.listeners){
			listener.start();
		}
		for(AbstractListeningThread listener : this.listeners){
			try {
				listener.join();
			} catch (InterruptedException e) {
				// Restore the interrupt flag so callers can still observe it.
				Thread.currentThread().interrupt();
				e.printStackTrace();
			}
		}
		// Synchronous shutdown. Waiting for them to finish. 
		this.eventManager.shutDown();
		if(Environment.VERBOSE){
			System.out.println("Listening Complete.");
		}

		try {
			if(Environment.LOOKUPS){
				lookups();
				saveCrawlFileToRepository();
			}
		} finally {
			// Always reset the builder state, even if the lookup/crawl phase fails.
			shutdown();
		}
	}

	/**
	 * Queries all distinct predicates from the repository, then crawls their
	 * vocabulary (TBox) definitions with LDSpider, writing the crawl result to
	 * {@code Environment.CRAWL_FILE_LOCATION}.
	 */
	private void lookups(){

		if(Environment.VERBOSE){
			System.out.println("Lookups started.");
		}
		String query = "SELECT DISTINCT $pred WHERE { $subj $pred $obj}";
		Future<MutableTupleQueryResult> future;
		Set<java.net.URI> uriSet = new HashSet<java.net.URI>();
		try {
			SPARQLQuery propertyQuery = new SPARQLQuery(Environment.REPO.getConnection(), query, true);
			future = this.threadPool.submit(propertyQuery);
		} catch (RepositoryException e) {
			e.printStackTrace();
			// Without a repository connection there is no result to crawl from.
			return;
		}
		try {
			MutableTupleQueryResult result = future.get();
			BindingSet set;
			String uri;
			while(result.hasNext()){
				set = result.next();
				try{
					// Strip a ".rdfs" suffix so the lookup targets the vocabulary URI itself.
					uri = ((URI)set.getValue("pred") ).stringValue().replace(".rdfs", "");
					uriSet.add(new java.net.URI(uri));
				} catch (URISyntaxException e) {
					System.out.println(set.getValue("pred").stringValue() + " is not a proper URI.");
				}

			}
			result.close();
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
			e.printStackTrace();
		} catch (ExecutionException e) {
			e.printStackTrace();
		}

		Crawler crawler = new Crawler(Environment.NUM_THREADS);

		ContentHandler ch;
		try {
			ch = new ContentHandlers(/*new ContentHandlerNx() , new ContentHandlerRdfXml(),*/
					new ContentHandlerAny23(new java.net.URI("http://127.0.0.1:8080/any23/")));
			crawler.setContentHandler(ch);
		} catch (URISyntaxException e) {
			e.printStackTrace();
		}


		Frontier frontier = new BasicFrontier();
		frontier.addAll(uriSet);

		// LinkFilterAllowURI extends the DefaultLinkFilter 
		// we can specify the URIs we want to follow.
		LinkFilterAllowURI lf = new LinkFilterAllowURI(frontier);
		lf.setFollowABox(false);
		lf.setFollowTBox(true);

		// Adding the rdfs links for type definitions.

		try {
			lf.addAllowedURI(new java.net.URI(RDFS.RANGE.stringValue()));
			lf.addAllowedURI(new java.net.URI(RDFS.SUBPROPERTYOF.stringValue()));
			lf.addAllowedURI(new java.net.URI(RDFS.SUBCLASSOF.stringValue()));
			lf.addAllowedURI(new java.net.URI(RDFS.DOMAIN.stringValue()));
		} catch (URISyntaxException e1) {
			e1.printStackTrace();
		}

		crawler.setLinkFilter(lf);

		FetchFilter blff = new FetchFilterSuffix(CrawlerConstants.BLACKLIST);
		crawler.setBlacklistFilter(blff);

		// setting the output to the default file.
		setCrawlOutput(crawler);

		// configure the error handler to the default file.
		setErrorHandling(crawler, frontier, lf);

		// start the crawling
		crawler.evaluateBreadthFirst(frontier, 100, 100, 20);
		crawler.close();

		if(Environment.VERBOSE){
			System.out.println("Finished Crawling.");
		}
	}


	/**
	 * Directs the crawler's output into {@code Environment.CRAWL_FILE_LOCATION}.
	 * The file is deleted when the JVM exits.
	 */
	private void setCrawlOutput(Crawler crawler){
		File f = new File(Environment.CRAWL_FILE_LOCATION);
		f.deleteOnExit();
		OutputStream os = null;
		try {
			f.createNewFile();
			// NOTE(review): the stream is intentionally not closed here — the crawler
			// writes to it for the duration of the crawl. Presumably crawler.close()
			// flushes/closes the sink; verify against the LDSpider Sink contract.
			os = new FileOutputStream(f);
			Sink sink = new SinkCallback(new CallbackNxOutputStream(os));
			crawler.setOutputCallback(sink);
		} catch (IOException e) {
			System.err.println("Unable to set Lookup Output");
			e.printStackTrace();
		}


	}

	/**
	 * Registers a single (no-op) error handler on all crawl components.
	 */
	private void setErrorHandling(Crawler crawler, Frontier frontier, LinkFilter lf){
		ErrorHandler eh = new ErrorHandlerDummy();
		// register the new error handler to all components
		crawler.setErrorHandler(eh);
		frontier.setErrorHandler(eh);
		lf.setErrorHandler(eh);
	}

	/**
	 * Parses the crawled N-Quads file and stores its statements into the
	 * global repository via {@code NQuadHandler}.
	 */
	private void saveCrawlFileToRepository(){

		if(Environment.VERBOSE){
			System.out.println("Started to save the crawled file to the repository.");
		}
		NQuadsParser parser = new NQuadsParser();
		parser.setRDFHandler(new NQuadHandler(Environment.REPO));
		// try-with-resources: the original leaked this stream.
		try (InputStream is = new FileInputStream(Environment.CRAWL_FILE_LOCATION)) {
			parser.parse(is, "");
		} catch (RDFParseException | RDFHandlerException | IOException e) {
			e.printStackTrace();
		}
		if(Environment.VERBOSE){
			System.out.println("Crawl file saved to repository.");
		}
	}


	/**
	 * Shows if the Listener is still listening.
	 * 
	 * @return true if work is in progress.
	 */
	public boolean atWork(){
		return this.working;
	}

	/**
	 * Shutting down the RepositoryBuilder after calling this method.
	 * New listeners can be added and the RepositoryBuilder can be restarted.
	 * NOTE(review): run() already calls eventManager.shutDown() before this
	 * method, so shutDown() is invoked twice — assumed idempotent; confirm.
	 */
	public void shutdown(){
		this.listeners = new LinkedList<AbstractListeningThread>();
		this.working=false;
		this.eventManager.shutDown();
	}

}
