/*
 * University Of Washington - Tacoma
 * TCSS-422, Operating Systems
 * Assignment 1 - Web Crawler
 * April 26, 2011
 * 
 * Team: Kernel Monkeys
 * Andrew Boguk
 * Sergiy Kulyk
 * Nicholas Swartzendruber
 */

package crawler.engine;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import crawler.main.UI;

/** 
 * Controller. Contains thread-safe BlockingQueues: one for website URLs,
 * and another that holds the information downloaded from each website.
 * 
 * The engine keeps track of operations, the thread limit, and the seed,
 * and is responsible for passing information to the PageRetriever,
 * PageParser, and DataGatherer classes.
 * 
 * @author Andrew Boguk, 
 * @author Nicholas Swartzendruber
 * @version 1.0
 */
public class Engine extends Observable implements Runnable, Observer
{
	/** Hard cap on the number of pages crawled in a single run. */
	public static final int MAX_PAGES = 5000;
	
	/** Maximum number of worker threads allowed at any one time. */
	private final int maxTotalThreads;
	
	/** Thread-safe queue for holding links to be retrieved/downloaded. */
	private final BlockingQueue<String> weblinks;
	/** Thread-safe queue for holding pages that are awaiting parsing. */
	private final BlockingQueue<List<String>> webinfo;
	
	/** Already visited websites, to avoid crawling a page twice. */
	private final Set<String> parserVisited;
	/** Words to search for on each page. */
	private final Set<String> parserWords;
	/** Gathers statistics from parsed pages and reports to the UI. */
	private final DataGatherer dataGatherer;
	
	/** Total retriever threads currently in progress. */
	private int totalRetrieveThreads;
	/** Total parser threads currently in progress. */
	private int totalParseThreads;
	
	/** Initial seed URL that starts the crawl. */
	private final String seed;
	
	/** Highest number of threads observed running at ONE time. */
	private int maxThreads;
	
	/**
	 * Constructor. It constructs any data needed before engine runtime.
	 * 
	 * The seed is queued and the crawl started later, in {@link #run()}.
	 * 
	 * @param ui The UI that's calling the engine.
	 * @param seed String to be put into queue for processing.
	 * @param words The set of words to look for.
	 * @param threads The max number of threads that can be used.
	 */
	public Engine(final UI ui, final String seed, final Set<String> words,
			      final int threads)
	{
		this.seed = seed;
		
		// Guard against a non-positive limit: at least one worker thread is
		// required for the crawl to make any progress at all.
		maxTotalThreads = Math.max(1, threads);
		
		weblinks = new LinkedBlockingQueue<String>();
		webinfo = new LinkedBlockingQueue<List<String>>();
		
		parserVisited = Collections.synchronizedSet(new HashSet<String>());
		parserWords = Collections.synchronizedSet(new HashSet<String>(words));
		
		dataGatherer = new DataGatherer(ui);
		
		totalRetrieveThreads = 0;
		totalParseThreads = 0;
		
		// Short timeouts so the crawler does not hang on a slow page.
		System.setProperty("sun.net.client.defaultConnectTimeout", "500");
		System.setProperty("sun.net.client.defaultReadTimeout", "1000");
		parserVisited.add("http://questioneverything.typepad.com/"); //One exception.
	}

	/**
	 * Class is threaded because if it is spawned from the UI on the same
	 * thread, it can cause slow downs and lock ups.
	 */
	public void run() 
	{
		weblinks.add(seed);
		parserVisited.add(seed);
		spawnRetriever();
	}

	/**
	 * When a worker thread dies, it notifies the engine to create
	 * more threads. Counters for threads are updated.
	 * 
	 * Synchronized: this method is invoked from dying worker threads and
	 * mutates the shared thread counters, so it must hold the same monitor
	 * as the spawn methods to avoid racy counter updates.
	 * 
	 * If both queues are empty and all threads are dead, the engine
	 * reports that the program is done crawling.
	 * 
	 * @param caller Caller that is notifying engine of its death.
	 * @param info Information being passed by dead thread.
	 */
	public synchronized void update(final Observable caller, final Object info)
	{	
		// Concurrency level just before this caller died; -1 excludes the
		// caller itself, whose work is already finished.
		final int currThreads = totalParseThreads + totalRetrieveThreads - 1;
		
		//Order matters, incase there is only one thread max.
		if (caller instanceof PageParser)
		{
			totalParseThreads--;
			maxThreads = Math.max(maxThreads, currThreads);
			spawnRetriever();
			spawnParser();
		}
		else if (caller instanceof PageRetriever)
		{
			totalRetrieveThreads--;
			maxThreads = Math.max(maxThreads, currThreads);
			spawnParser();
			spawnRetriever();
		}
		
		// Finished: both queues drained and no workers left, or page cap hit.
		if ((weblinks.isEmpty() && webinfo.isEmpty()
				&& totalRetrieveThreads == 0 && totalParseThreads == 0)
				|| dataGatherer.getCurrPage() >= MAX_PAGES)
		{
			this.setChanged();
			this.notifyObservers(maxThreads);
		}
	}
	
	/**
	 * Creates new retriever threads while the thread limit permits, links
	 * remain queued, and the page cap has not been reached. Increments the
	 * retriever thread count for each worker started.
	 */
	private synchronized void spawnRetriever()
	{
		// Strict < keeps the total worker count within maxTotalThreads
		// (<= would allow one thread over the configured limit).
		while (totalRetrieveThreads + totalParseThreads < maxTotalThreads
				&& !weblinks.isEmpty()
				&& dataGatherer.getCurrPage() < MAX_PAGES)
		{
			final String url = weblinks.poll();
			if (url == null)
			{
				break; // Queue drained between the emptiness check and poll.
			}
			try
			{
				final PageRetriever worker = new PageRetriever(new URL(url), webinfo);
				worker.addObserver(this);
				new Thread(worker).start();
				totalRetrieveThreads++;
			}
			catch (final MalformedURLException e)
			{
				// Skip the unparseable link; the crawl continues with the rest.
				e.printStackTrace();
			}
		}
	}
	
	/**
	 * Creates new parser threads while the thread limit permits, downloaded
	 * pages remain queued, and the page cap has not been reached. Increments
	 * the parser thread count for each worker started.
	 */
	private synchronized void spawnParser()
	{
		// Strict < keeps the total worker count within maxTotalThreads.
		while (totalRetrieveThreads + totalParseThreads < maxTotalThreads
				&& !webinfo.isEmpty()
				&& dataGatherer.getCurrPage() < MAX_PAGES)
		{			
			final List<String> page = webinfo.poll();
			if (page == null)
			{
				break; // Queue drained between the emptiness check and poll.
			}
			final PageParser worker = new PageParser(weblinks, parserVisited,
					parserWords, dataGatherer, page);
			worker.addObserver(this);
			new Thread(worker).start();
			totalParseThreads++;
		}
	}
}