

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentLinkedQueue;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Coordinates a small multithreaded crawl pipeline: WebThreads fetch pages,
 * ParserThreads parse them, and AnalyzerThreads consume the parsed results.
 * Work is handed between stages via concurrent queues; shared mutable state
 * (visited set, document queue admission) is guarded by CustomLocks.
 */
public class MultiThreadEnv
{
	// Started worker threads, retained so they can be stopped later.
	ArrayList<WebThread> my_web_thread_array 		= new ArrayList<WebThread>();
	ArrayList<ParserThread> my_parser_thread_array 	= new ArrayList<ParserThread>();
	// NOTE(review): analyzer threads are never added to this list in
	// takeInputURL, so they cannot be stopped later — confirm intent.
	ArrayList<ParserThread> my_analyzer_array 		= new ArrayList<ParserThread>();
	ArrayList<String> my_keyword_array = new ArrayList<String>();
	// Keyed by keyword.hashCode(); value starts at 0 (presumably a hit
	// counter maintained elsewhere — verify against the analyzer).
	HashMap<Integer, Integer> my_keyword_map = new HashMap<Integer, Integer>();
	
	// Inter-stage work queues; concurrent, so add/poll need no extra locking.
	Queue<String> web_pages 					= new ConcurrentLinkedQueue<String>();
	Queue<PageObject> web_documents 			= new ConcurrentLinkedQueue<PageObject>();
	Queue<PageObject> web_hash_arrays 			= new ConcurrentLinkedQueue<PageObject>();
	
	int total_parsed_pages 						= 0;
	int parse_page_limit						= 200;	// crawl budget: stop after this many pages
	int total_words_parsed						= 0;
	int total_urls_found						= 1;
	long total_parse_time					 	= 0;
	
	// Pool sizes for each pipeline stage.
	int allowed_web_threads						= 6;
	int allowed_parser_threads					= 4;
	int allowed_analyzer_threads				= 1;
	
	CustomLock visited_page_lock 				= new CustomLock();
	// Keyed by url.hashCode(): a hash collision can mark a distinct,
	// unvisited URL as visited. Acceptable for a best-effort crawler.
	HashMap<Integer, Integer> visited_pages 	= new HashMap<Integer, Integer>();
	
	long starttime = 0;	// wall-clock start, set when the threads are launched
	public CustomLock main_thread_lock 	= new CustomLock();
	public CustomLock web_lock 			= new CustomLock();
	public CustomLock document_lock 	= new CustomLock();
	public CustomLock analyzer_lock 	= new CustomLock();
	
	public int parsed_pages = 0;
	
	public MultiThreadEnv()
	{
	}

	/**
	 * Use this method to add a keyword to search for before running this environment.
	 * Keywords are normalized to lower case.
	 * @param the_keyword The keyword to search for such as Banana or Mango.
	 */
	public void addKeyword(String the_keyword)
	{
		String keyword = the_keyword.toLowerCase();
		my_keyword_array.add(keyword);
		my_keyword_map.put(keyword.hashCode(), 0);
	}
	
	/**
	 * This is the startup method. Pass it in a full url with http:// and then
	 * this will start the multithreaded processing of the link.
	 * @param the_url The url to start multithread processing with.
	 */
	public void takeInputURL(String the_url)
	{
		try
		{
			// Seed the fetch queue with the first URL.
			main_thread_lock.lock();
			try
			{
				web_pages.add(the_url);
			}
			finally
			{
				main_thread_lock.unlock();
			}

			addKeyword("the");
			addKeyword("maybe");
			addKeyword("ansdlk;fja;sldkjfl;aksndf;asdf");

			// Start the pipeline back-to-front so every downstream stage is
			// ready and waiting before the retrievers produce any work.
			for (int i = 0; i < allowed_analyzer_threads; i++)
			{
				AnalyzerThread analyzer_thread = new AnalyzerThread();
				analyzer_thread.setDelegate(this);
				analyzer_thread.start();
			}
			for (int i = 0; i < allowed_parser_threads; i++)
			{
				ParserThread parser_thread = new ParserThread();
				parser_thread.setDelegate(this);
				parser_thread.start();
				my_parser_thread_array.add(parser_thread);
			}
			for (int i = 0; i < allowed_web_threads; i++)
			{
				WebThread web_thread = new WebThread();
				web_thread.setDelegate(this);
				web_thread.start();
				my_web_thread_array.add(web_thread);
			}
			starttime = System.currentTimeMillis();
		}
		catch (Exception e)
		{
			// Previously swallowed silently; at least record why startup failed.
			System.out.println("takeInputURL failed: " + e);
		}
	}

	/**
	 * Checks whether a URL has already been crawled.
	 * @param the_url The url to test.
	 * @return true if the url was previously saved as visited (subject to
	 *         hashCode collisions); false otherwise or on failure.
	 */
	public boolean haveVisitedPage(String the_url)
	{
		try
		{
			visited_page_lock.lock();
			try
			{
				return visited_pages.containsKey(the_url.hashCode());
			}
			finally
			{
				// finally guarantees release even if the lookup throws;
				// the original leaked the lock on that path.
				visited_page_lock.unlock();
			}
		}
		catch (Exception e)
		{
			return false;	// treat failure as "not visited" (original behavior)
		}
	}

	/**
	 * Records a URL as visited so it will not be fetched again.
	 * @param the_page_url The url to mark as visited.
	 */
	public void savePageAsVisited(String the_page_url)
	{
		try
		{
			visited_page_lock.lock();
			try
			{
				visited_pages.put(the_page_url.hashCode(), 1);
			}
			finally
			{
				visited_page_lock.unlock();
			}
		}
		catch (Exception the_exception)
		{
			// Previously swallowed silently; log so a lost update is visible.
			System.out.println("savePageAsVisited failed: " + the_exception);
		}
	}

	/**
	 * Stops all retriever (web) threads and reports total elapsed time.
	 */
	public void killWebProcesses()
	{
		System.out.println("Finished after: " + ((System.currentTimeMillis() - starttime) / 1000) + " seconds.");
		// Iterate the actual list rather than the configured count so a
		// partially-started pool cannot cause an IndexOutOfBoundsException.
		for (WebThread web_thread : my_web_thread_array)
		{
			web_thread.killRetrieve();
		}
	}

	/**
	 * @return the shared queue of URLs awaiting retrieval.
	 */
	public Queue<String> getWebPages()
	{
		return web_pages;
	}

	/**
	 * Admits a fetched document into the parse queue, or shuts down the
	 * retrievers once the page budget is exhausted.
	 * @param the_document The fetched page; null documents are ignored.
	 */
	public void processDocument(PageObject the_document)
	{
		try
		{
			document_lock.lock();//CS
			try
			{
				// Use the configured limit field (same value as the old
				// hard-coded 200) so the budget lives in one place.
				if (parsed_pages > parse_page_limit)
				{
					killWebProcesses();
					// finally still releases the lock — the original
					// returned while holding it, deadlocking later callers.
					return;
				}
				if (the_document != null)
				{
					web_documents.add(the_document);
				}
			}
			finally
			{
				document_lock.unlock();//END CS
			}
		}
		catch (Exception e)
		{
			System.out.println(e.getMessage());
		}
	}
}
