/*
 * Improbability Drive
 * Phillip Cardon, Thach Nguyen, Cristopher Claeys
 * 4/26/2011
 */

package ui;

import java.net.URL;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import org.jsoup.nodes.Document;

import structures.Reporter;
import structures.Tuple;

import background.DataGatherer;
import background.PageParser;
import background.PageRetriever;
import background.SingleThread;
import buffers.SynchronizedBuffer;
import buffers.URLsRetrieved;

/**
 * Static entry point and coordinator for the crawler.
 *
 * <p>Wires together the shared buffers and worker threads, exposes
 * {@link #single(int)} (one-threaded crawl) and
 * {@link #multi(int, int, int)} (multi-threaded crawl), and provides
 * kill switches for both modes. State is held in static fields, so only
 * one crawl should be driven at a time (single-UI usage; not thread-safe
 * for concurrent callers).
 */
public class Launcher {

	/** Queue of URLs waiting to be downloaded; seeded with {@link #seedURL}. */
	private static SynchronizedBuffer<URL> toRetrieve;
	/** Record of URLs already fetched, shared with the workers. */
	private static URLsRetrieved got;
	/** Downloaded documents (paired with their source URL) awaiting parsing. */
	private static SynchronizedBuffer<Tuple<Document, URL>> downloaded;
	// ArrayList over LinkedList: these lists are only appended to and iterated.
	private final static List<PageRetriever> retrievers = new ArrayList<PageRetriever>();
	private final static List<PageParser> parsers = new ArrayList<PageParser>();
	/** Search keywords for the crawl; set via {@link #addKeys(String[])}. */
	private static String[] keywords = null;
	/** Result collector returned to the UI; rebuilt per crawl. */
	private static Reporter reporting = null;
	/** Worker for single-threaded mode; null until {@link #single(int)} runs. */
	private static SingleThread slow;
	/** Aggregator thread for multi-threaded mode; null until {@link #multi} runs. */
	private static DataGatherer dg = null;
	/** Starting URL for the crawl; set via {@link #plantSeed(URL)}. */
	private static URL seedURL;

	/**
	 * Program entry point: opens the crawler UI window.
	 *
	 * @param args unused command-line arguments
	 */
	public static void main(String[] args) {
		CrawlerWindow cw = new CrawlerWindow();
		cw.start();
	}

	/**
	 * Starts a single-threaded crawl from the planted seed URL.
	 * Callers must have set the seed ({@link #plantSeed}) and keywords
	 * ({@link #addKeys}) first.
	 *
	 * @param toCrawl maximum number of pages to crawl
	 * @return the {@link Reporter} that will accumulate this crawl's results
	 */
	public static Reporter single(int toCrawl) {
		toRetrieve = new SynchronizedBuffer<URL>();
		toRetrieve.enqueue(seedURL);
		got = new URLsRetrieved();
		downloaded = new SynchronizedBuffer<Tuple<Document, URL>>();
		reporting = new Reporter(keywords, toCrawl);

		slow = new SingleThread(toRetrieve, downloaded, got, reporting, keywords, toCrawl);
		slow.start();
		return reporting;
	}

	/**
	 * Starts a multi-threaded crawl: {@code retrieve} downloader threads,
	 * {@code parse} parser threads, plus one {@link DataGatherer}.
	 * Callers must have set the seed and keywords first.
	 *
	 * @param toCrawl  maximum number of pages to crawl
	 * @param retrieve number of {@link PageRetriever} threads to spawn
	 * @param parse    number of {@link PageParser} threads to spawn
	 * @return the {@link Reporter} that will accumulate this crawl's results
	 */
	public static Reporter multi(int toCrawl, int retrieve, int parse) {
		toRetrieve = new SynchronizedBuffer<URL>();
		toRetrieve.enqueue(seedURL);
		got = new URLsRetrieved();
		downloaded = new SynchronizedBuffer<Tuple<Document, URL>>();
		reporting = new Reporter(keywords, toCrawl);

		dg = new DataGatherer(keywords, reporting);

		// Drop workers from any previous crawl before building the new pools.
		retrievers.clear();
		parsers.clear();
		for (int i = 0; i < retrieve; i++) {
			retrievers.add(new PageRetriever(toRetrieve, downloaded, got, toCrawl));
		}

		for (int i = 0; i < parse; i++) {
			parsers.add(new PageParser(toRetrieve, downloaded, dg));
		}

		for (Thread t : retrievers) {
			t.start();
		}

		for (Thread t : parsers) {
			t.start();
		}

		dg.start();
		return reporting;
	}

	/**
	 * Stops the single-threaded crawl, if one is running.
	 * Safe to call before {@link #single(int)} has ever run.
	 */
	public static void killSingle() {
		// Null check: slow is only assigned by single(); the UI may invoke
		// this kill switch before any crawl has started.
		if (slow != null && slow.isAlive()) {
			slow.interrupt();
		}
	}

	/**
	 * Stops all multi-threaded crawl workers, if any are running.
	 * Safe to call before {@link #multi(int, int, int)} has ever run.
	 */
	public static void killThreads() {
		for (PageRetriever t : retrievers) {
			if (t.isAlive()) {
				t.kill();
			}
		}

		// Guard parsers the same way as retrievers (original killed them
		// unconditionally, inconsistent with the retriever loop).
		for (PageParser t : parsers) {
			if (t.isAlive()) {
				t.kill();
			}
		}

		// Null check: dg is only assigned by multi(); avoid NPE when the
		// kill switch fires before any multi-threaded crawl has started.
		if (dg != null && dg.isAlive()) {
			dg.interrupt();
		}
	}

	/**
	 * Records the search keywords for the next crawl.
	 *
	 * @param keys keywords to match against crawled pages
	 */
	public static void addKeys(String[] keys) {
		keywords = keys;
	}

	/**
	 * Records the URL the next crawl will start from.
	 *
	 * @param the_url the seed URL
	 */
	public static void plantSeed(URL the_url) {
		seedURL = the_url;
	}
}
