import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;

import org.jsoup.nodes.Document;

/*
 * Team Huskies: Aakanksha Gaur, Alice Robinson, Paul Winters, and Steven Bradshaw
 * Course: TCSS 422
 * Assignment: Project 1
 * Date: April 26th 2011
 */

/**
 * This class creates and controls the page parser(s) and page
 * retriever(s).  It also creates a page-buffer queue and a
 * URL queue.  URLs found by the parser are added to the URL
 * queue.  When a URL is pulled from the URL queue, it is
 * checked against the HashSet for duplicates and added to the
 * HashSet if it is new.  If it is already in the HashSet, it
 * is not sent to the retriever.
 * @author Team Huskies: Aakanksha Gaur, Alice Robinson, Paul Winters, and Steven Bradshaw
 * @version 1, April 26th 2011
 */
public class PageControl
{
	/** Number of worker threads in each of the retriever and parser pools. */
	private static final int POOL_SIZE = 200;

	/** Collector used to track retrieved pages and the overall search limit. */
	private final DataGatherer my_data_gatherer;

	/**
	 * Constructs a PageControl that reports crawl progress to the
	 * given data gatherer.
	 *
	 * @param the_data_gatherer the collector used to track retrieved
	 *        pages and to supply the search limit.
	 */
	public PageControl(final DataGatherer the_data_gatherer)
	{
		my_data_gatherer = the_data_gatherer;
	}

	/**
	 * Starts the crawl from the seed URL, either sequentially on the
	 * calling thread or concurrently on two fixed thread pools.
	 *
	 * @param the_url_seed The seed url that we will start from.
	 * @param the_multi_threading true to run retrievers and parsers
	 *        on thread pools, false to run them inline.
	 * @throws NullPointerException if the seed URL is null.
	 */
	public void start(final String the_url_seed, final boolean the_multi_threading)
	{
		Objects.requireNonNull(the_url_seed, "The seed URL is null.");

		@SuppressWarnings("unchecked")
		final BlockingQueue<String> url_queue = new CrawlerQueue<String>();
		final BlockingQueue<Document> page_buffer = new LinkedBlockingQueue<Document>();

		// the_url_seed is already a String; the original called toString() redundantly.
		url_queue.add(the_url_seed);

		if(the_multi_threading)
		{
			crawlConcurrently(url_queue, page_buffer);
		}
		else
		{
			crawlSequentially(url_queue, page_buffer);
		}
	}

	/**
	 * Runs retriever and parser tasks inline on the calling thread
	 * until the search limit is reached or both queues drain.
	 *
	 * @param the_url_queue queue of URLs waiting to be retrieved.
	 * @param the_page_buffer queue of fetched documents waiting to be parsed.
	 */
	private void crawlSequentially(final BlockingQueue<String> the_url_queue,
	                               final BlockingQueue<Document> the_page_buffer)
	{
		while(my_data_gatherer.getNumberOfPagesRetrived() < my_data_gatherer.getSearchLimit()
				&& (!the_url_queue.isEmpty() || !the_page_buffer.isEmpty()))
		{
			if(!the_url_queue.isEmpty())
			{
				// run() executes on this thread; no new thread is started.
				final ExternalRetriever retriever = new ExternalRetriever(the_url_queue.poll(), the_page_buffer);
				retriever.run();
			}

			if(!the_page_buffer.isEmpty())
			{
				final ExternalParser parser = new ExternalParser(the_url_queue, the_page_buffer.poll(), my_data_gatherer);
				parser.run();
			}
		}
	}

	/**
	 * Dispatches retriever and parser tasks to two fixed thread pools
	 * until the search limit is reached, then shuts the pools down.
	 *
	 * @param the_url_queue queue of URLs waiting to be retrieved.
	 * @param the_page_buffer queue of fetched documents waiting to be parsed.
	 */
	private void crawlConcurrently(final BlockingQueue<String> the_url_queue,
	                               final BlockingQueue<Document> the_page_buffer)
	{
		final ExecutorService buffer_executor = Executors.newFixedThreadPool(POOL_SIZE);
		final ExecutorService parser_executor = Executors.newFixedThreadPool(POOL_SIZE);

		try
		{
			while(my_data_gatherer.getNumberOfPagesRetrived() < my_data_gatherer.getSearchLimit())
			{
				try
				{
					if(!the_url_queue.isEmpty())
					{
						final ExternalRetriever retriever = new ExternalRetriever(the_url_queue.take(), the_page_buffer);
						buffer_executor.execute(retriever);
					}

					while(!the_page_buffer.isEmpty())
					{
						final ExternalParser parser = new ExternalParser(the_url_queue, the_page_buffer.take(), my_data_gatherer);
						parser_executor.execute(parser);
					}
				}
				catch(final InterruptedException the_exception)
				{
					// Restore the interrupt flag and stop the crawl; the
					// original swallowed the interrupt and kept spinning.
					Thread.currentThread().interrupt();
					break;
				}
			}
		}
		finally
		{
			// The original leaked both pools, leaving non-daemon threads
			// alive so the JVM could never exit cleanly.
			buffer_executor.shutdown();
			parser_executor.shutdown();
		}
	}
}
