

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class MultiThreadEnv
{
	/** Selects whether {@link #takeInputURL} runs the crawl on one thread or many. */
	enum ThreadTypes {
		SINGLETHREAD,
		MULTITHREAD;
	}
	ThreadTypes my_multi_or_single_thread = ThreadTypes.MULTITHREAD;
	public MyUI my_gui_env;
	private ArrayList<WebThread> my_web_thread_array 		= new ArrayList<WebThread>();
	private ArrayList<ParserThread> my_parser_thread_array 	= new ArrayList<ParserThread>();
	// NOTE(review): declared with element type ParserThread and never populated —
	// analyzer threads are currently untracked; confirm AnalyzerThread's hierarchy
	// before repurposing this field.
	private ArrayList<ParserThread> my_analyzer_array 		= new ArrayList<ParserThread>();
	ArrayList<String> my_keyword_array 						= new ArrayList<String>();
	// Keyed by keyword.hashCode(); value is the running hit count.
	HashMap<Integer, Integer> my_keyword_map 				= new HashMap<Integer, Integer>();
	
	// Work queues shared between the thread pools; concurrent so no extra locking
	// is needed for add/poll themselves.
	private Queue<String> web_pages 					= new ConcurrentLinkedQueue<String>();
	private Queue<PageObject> web_documents 			= new ConcurrentLinkedQueue<PageObject>();
	private Queue<PageObject> web_hash_arrays 			= new ConcurrentLinkedQueue<PageObject>();
	
	private int parse_page_limit						= 250;
	private int total_words_parsed						= 0;
	private int total_urls_found						= 1;
	private long total_parse_time					 	= 0;
	
	int allowed_web_threads						= 1;
	int allowed_parser_threads					= 1;
	int allowed_analyzer_threads				= 1;
	
	CustomLock visited_page_lock 				= new CustomLock();
	// Keyed by url.hashCode(); presence means the URL was already crawled.
	// NOTE(review): hashCode collisions can mark distinct URLs as visited — TODO confirm acceptable.
	HashMap<Integer, Integer> visited_pages 	= new HashMap<Integer, Integer>();
	
	long starttime = 0;
	public CustomLock main_thread_lock 	= new CustomLock();
	public CustomLock web_lock 			= new CustomLock();
	public CustomLock document_lock 	= new CustomLock();
	public CustomLock analyzer_lock 	= new CustomLock();

	// NOTE(review): never used by this class; retained for binary/source compatibility.
	WebThread webThread 	= new WebThread();
	public int parsed_pages = 0;
	
	/**
	 * Creates the environment and pre-marks the seed blog URL as visited so
	 * it is never re-queued by the crawl.
	 */
	public MultiThreadEnv() {
		visited_pages.put("http://questioneverything.typepad.com/".hashCode(), 1);
	}
	
	/**
	 * Use this method to set how many web threads can be retrieving information.
	 * @param the_web_thread_count number of retriever threads to start
	 */
	public void setAllowedWebThreads(int the_web_thread_count) {
		allowed_web_threads = the_web_thread_count;
	}
	
	/**
	 * Set the limit on the amount of pages the parser can parse. 
	 * This is the same as setting the web page count.
	 * @param the_limit maximum number of pages to process
	 */
	public void setPageLimit(int the_limit) {
		parse_page_limit = the_limit;
	}
	
	/**
	 * Set how many parser threads can be processing information given from
	 * web request.
	 * @param the_parser_thread_count number of parser threads to start
	 */
	public void setAllowedParserThreads(int the_parser_thread_count) {
		allowed_parser_threads = the_parser_thread_count;
	}
	
	/**
	 * Set how many analyzer threads can be analyzing information given from
	 * web request.
	 * @param the_analyzer_thread_count number of analyzer threads to start
	 */
	public void setAllowedAnalyzerThreads(int the_analyzer_thread_count) {
		allowed_analyzer_threads = the_analyzer_thread_count;
	}
	
	/**
	 * Use this method to add a keyword to search for before running this environment.
	 * The keyword is stored lower-cased and its hit counter starts at zero.
	 * @param the_keyword The keyword to search for such as Banana or Mango.
	 */
	public void addKeyword(String the_keyword) {
		my_keyword_array.add(the_keyword.toLowerCase());
		my_keyword_map.put(the_keyword.toLowerCase().hashCode(), 0);
	}
	
	/**
	 * Use this method to specify whether this environment should do a single
	 * thread run or if it should do a multi thread run. Use MultiThreadEnv.ThreadTypes.MULTITHREAD or
	 * MultiThreadEnv.ThreadTypes.SINGLETHREAD.
	 * @param the_type the threading mode to use
	 */
	public void setMultiOrSingleThread(ThreadTypes  the_type)
	{
		my_multi_or_single_thread = the_type;
	}
	
	/**
	 * Spins up the analyzer, parser, and web thread pools. Started in reverse
	 * pipeline order so the downstream consumers are ready and waiting before
	 * the retrievers fetch the first page.
	 */
	private void startMultiThread()
	{
		for (int i = 0; i < allowed_analyzer_threads; i++)
		{
			AnalyzerThread analyzer_thread = new AnalyzerThread();
			analyzer_thread.setDelegate(this);
			analyzer_thread.start();
			// NOTE(review): analyzer threads are not retained anywhere, so they
			// cannot be stopped later the way web threads can — verify intended.
		}
		for (int i = 0; i < allowed_parser_threads; i++)
		{
			ParserThread parser_thread = new ParserThread();
			parser_thread.setDelegate(this);
			parser_thread.start();
			my_parser_thread_array.add(parser_thread);
		}
		for (int i = 0; i < allowed_web_threads; i++)
		{
			WebThread web_thread = new WebThread();
			web_thread.setDelegate(this);
			web_thread.start();
			my_web_thread_array.add(web_thread);
		}	
	}
	
	/** Runs the whole crawl on a single thread, seeded with the first queued URL. */
	private void startSingleThread()
	{
		SingleThreadEnv single_env = new SingleThreadEnv(parse_page_limit, web_pages.poll(), this, my_keyword_array, my_keyword_map);
		single_env.start();
	}
	
	/**
	 * This is the startup method. Pass it in a full url with http:// and then
	 * this will start the multithreaded processing of the link.
	 * @param the_url The url to start multithread processing with.
	 */
	public void takeInputURL(String the_url)
	{
		try
		{
			// Add the first URL; unlock is in finally so a failure cannot
			// leave main_thread_lock held forever.
			main_thread_lock.lock();
			try {
				web_pages.add(the_url);
			} finally {
				main_thread_lock.unlock();
			}
			switch(my_multi_or_single_thread)
			{
				case MULTITHREAD:
					startMultiThread();
					break;
				case SINGLETHREAD:
					startSingleThread();
					break;
			}
			
			starttime = System.currentTimeMillis();
		}
		catch (Exception e) {
			// Surface startup failures instead of swallowing them silently.
			System.out.println("takeInputURL failed: " + e);
		}
	}
	
	/**
	 * Checks whether a URL has already been crawled.
	 * @param the_url the URL to test; a null URL is treated as not visited
	 * @return true if the URL's hash is in the visited set
	 */
	public boolean haveVisitedPage(String the_url) {
		if (the_url == null) {
			return false; // original swallowed the NPE and returned false
		}
		visited_page_lock.lock();
		try {
			return visited_pages.containsKey(the_url.hashCode());
		} finally {
			// Unlock in finally so an exception cannot leak the lock.
			visited_page_lock.unlock();
		}
	}
	
	/**
	 * Records a URL as crawled so it is never queued again.
	 * @param the_page_url the URL to mark; null is ignored
	 */
	public void savePageAsVisited(String the_page_url) {
		if (the_page_url == null) {
			return; // original swallowed the NPE and did nothing
		}
		visited_page_lock.lock();
		try {
			visited_pages.put(the_page_url.hashCode(), 1);
		} finally {
			// Unlock in finally so an exception cannot leak the lock.
			visited_page_lock.unlock();
		}
	}
	
	/**
	 * Stops every web (retriever) thread that was actually started and prints
	 * the elapsed crawl time.
	 */
	public void killWebProcesses() {
		
		System.out.println("Finished after: " + ((System.currentTimeMillis() - starttime) / 1000) + " seconds.");
		// Iterate the list itself: allowed_web_threads can exceed the number of
		// threads actually started (e.g. SINGLETHREAD mode), which previously
		// risked an IndexOutOfBoundsException.
		for (WebThread web_thread : my_web_thread_array)
		{
			web_thread.killRetrieve();
		}
	}
	
	/** @return the shared queue of URLs waiting to be fetched */
	public Queue<String> getWebPages() {
		return web_pages;
	}
	
	/**
	 * Hands a fetched document to the parser queue, or shuts down the web
	 * threads once the page limit is about to be reached.
	 * @param the_document the fetched page; null documents are ignored
	 */
	public void processDocument(PageObject the_document) {
		
		document_lock.lock(); // CS
		try
		{
			if (parsed_pages + allowed_parser_threads >= parse_page_limit )
			{
				killWebProcesses();
				return; // lock still released via finally — the original leaked it here
			}
			if (the_document != null && parsed_pages <= parse_page_limit) 
			{
				web_documents.add(the_document);	
			}
		}
		catch (Exception e)
		{
			System.out.println(e.getMessage());
		}
		finally
		{
			document_lock.unlock(); // END CS — always runs, even on early return
		}
	}

	/**
	 * Forwards a result string to the GUI, if one has been attached.
	 * @param the_string text to display
	 */
	public void handleOutputString(String the_string){
		if (my_gui_env != null) { // guard: GUI may never have been attached
			my_gui_env.setOutputText(the_string);
		}
	}

	public void setTotalParseTime(long the_parse_time){
		total_parse_time = the_parse_time;
	}

	/** Misspelled legacy accessor kept for compatibility; prefer {@link #getTotalParseTime()}. */
	public long getTotalParseTme(){
		return total_parse_time;
	}

	/** @return total time spent parsing, in milliseconds */
	public long getTotalParseTime(){
		return total_parse_time;
	}

	public void setTotalWordsParsed(int the_word_count){
		total_words_parsed = the_word_count;
	}

	/** Misspelled legacy accessor kept for compatibility; prefer {@link #getTotalWordsParsed()}. */
	public int getTotalWordsParse(){
		return total_words_parsed;
	}

	/** @return total number of words parsed so far */
	public int getTotalWordsParsed(){
		return total_words_parsed;
	}

	public void setTotalFoundURLs(int the_count){
		total_urls_found = the_count;
	}

	public int getTotalFoundURLs(){
		return total_urls_found;
	}

	public int getParsePageLimit(){
		return parse_page_limit;
	}

	/** @return the queue of analyzed page objects */
	public Queue<PageObject> getHashArrayQueue(){
		return web_hash_arrays;
	}

	/** @return the queue of fetched documents awaiting parsing */
	public Queue<PageObject> getWebDocuments(){
		return web_documents;
	}
}
