import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;

import org.jsoup.nodes.Document;

/*
 * Team Huskies: Aakanksha Gaur, Alice Robinson, Paul Winters, and Steven Bradshaw
 * Course: TCSS 422
 * Assignment: Project 1
 * Date: April 26th 2011
 */

/**
 * Takes the complete data passed to it by the PageParser
 * and extracts only the relevant data,
 * passing it on to the Reporter.
 * @author Team Huskies: Aakanksha Gaur, Alice Robinson, Paul Winters, and Steven Bradshaw
 * @version 1, April 26th 2011
 */
public class DataGatherer
{	
	/**
	 * Running hit counts for each keyword, kept sorted alphabetically
	 * (TreeMap) so reports always list keywords in the same order.
	 */
	private final Map<String, Integer> my_keyword_map;
	
	/**
	 * The number of pages parsed so far.
	 */
	private int my_pages_retrieved;
	
	/**
	 * The total words encountered so far (not just keywords).
	 */
	private int my_total_words;
	
	/**
	 * The total number of URLs encountered so far.
	 */
	private int my_total_urls;
	
	/**
	 * The cumulative time, in milliseconds, taken to parse every page so far.
	 */
	private long my_total_time;
	
	/**
	 * The URL that is currently being parsed.
	 */
	private String my_current_url;
	
	/**
	 * The maximum number of unique pages to crawl.
	 */
	private final int my_search_limit;
	
	/**
	 * The UI that called the DataGatherer.
	 */
	private final UserInterface my_ui;
	
	/**
	 * FileWriter used in recording results to a file.
	 * Remains null if the output file could not be opened.
	 */
	private FileWriter my_writer;
	
	/**
	 * Flag: write each per-page report to the output file.
	 */
	private boolean my_file_write;
	
	/**
	 * Flag: echo each per-page report to the UI.
	 */
	private boolean my_ui_echo;
	
	/**
	 * Constructs a new DataGatherer that will do a webcrawl looking for occurrences of List of String Keywords 
	 * until it reaches a maximum breadth. Every keyword's hit count starts at zero.
	 * The output file ("output&lt;limit&gt;.txt") is opened eagerly; start(...) closes
	 * it again if file output turns out not to be wanted.
	 * @param the_keywords The List of Strings to gather hits. 
	 * @param the_search_limit The unique page limit.
	 * @param the_ui The UI that called the DataGatherer.
	 */
	public DataGatherer(final List<String> the_keywords, final int the_search_limit, final UserInterface the_ui)
	{
		my_search_limit = the_search_limit; 
		my_keyword_map = new TreeMap<String, Integer>();
		my_total_words = 0;
		my_total_urls = 0;
		my_pages_retrieved = 0;
		my_total_time = 0;
		
		for (final String keyword : the_keywords)
		{
			my_keyword_map.put(keyword, 0);
		}
		
		my_ui = the_ui;
		my_writer = null;

		try 
		{
			my_writer = new FileWriter("output" + my_search_limit + ".txt");
		} 
		catch (final IOException the_exception) 
		{
			// Leave my_writer null; file output is silently unavailable for this run.
			the_exception.printStackTrace();
		}
	}
	
	/**
	 * Begins the webcrawler with the given seed String URL, along with three boolean flags for 
	 * multithreading, file writing, and GUI echoing.
	 * @param the_url_seed The initial String URL seed.
	 * @param the_multi_threading If the webcrawler is to use multithreading.
	 * @param the_file_write If the DataGatherer is to write to a file.
	 * @param the_ui_echo If the DataGatherer is to echo to the GUI.
	 */
	public final void start(final String the_url_seed, final boolean the_multi_threading, final boolean the_file_write, final boolean the_ui_echo)
	{
		final PageControl page_controller = new PageControl(this);
		my_file_write = the_file_write;
		my_ui_echo = the_ui_echo;
		
		// The output file was opened eagerly in the constructor; release it
		// right away when file output was not requested. Guard against the
		// constructor having failed to open it at all.
		if (!the_file_write && my_writer != null)
		{
			try 
			{
				my_writer.close();
			} 
			catch (final IOException the_exception) 
			{
				// Best-effort close of an unused writer, but don't hide the failure.
				the_exception.printStackTrace();
			}
		}
		
		page_controller.start(the_url_seed, the_multi_threading);
	}
			
	/**
	 * Returns the total number of unique URLs visited.
	 * @return The total number of unique URLs visited.
	 */
	public int getNumberOfPagesRetrived()
	{
		return my_pages_retrieved;
	}
	
	/**
	 * Returns the limit of the webcrawler's breadth.
	 * @return The limit of the webcrawler's breadth.
	 */
	public int getSearchLimit() 
	{	
		return my_search_limit;
	}
	
	/**
	 * Returns the set of keywords used to gather data on.
	 * @return The set of keywords used to gather data on.
	 */
	public Set<String> getKeywords()
	{
		return my_keyword_map.keySet();
	}
	
	/**
	 * Updates the DataGatherer's running totals with the various arguments given. If in the start(...) method 
	 * the_file_write was set to true the DataGatherer's results will be outputted to a txt file. If in
	 * the start(...) the_ui_echo was set to true the DataGatherer's results will be echoed to the UI. This method
	 * is synchronized so that only one thread may access it at a time to avoid inconsistent data collection. 
	 * @param the_run_time The amount of time that the last parse took in milliseconds.
	 * @param the_keyword_map The map of keyword hits found in the last parse.
	 * @param the_number_of_words The total number of non html tag words found in the last parse.
	 * @param the_url The URL used in the last parse.
	 * @param the_number_of_urls The number of URLs found within the last parse.
	 */
	public synchronized void parsePassedData(final Long the_run_time, final Map<String, Integer> the_keyword_map, 
			final int the_number_of_words, final String the_url, final int the_number_of_urls)
	{
		my_current_url = the_url;
		my_total_urls += the_number_of_urls;
		my_total_words += the_number_of_words;
		my_total_time += the_run_time;
		
		for (final Map.Entry<String, Integer> entry : the_keyword_map.entrySet())
		{
			final Integer current = my_keyword_map.get(entry.getKey());
			// Ignore keys we were not asked to track rather than NPE on them.
			if (current != null)
			{
				my_keyword_map.put(entry.getKey(), current + entry.getValue());
			}
		}
		my_pages_retrieved++;
		
		// Build the report text once and reuse it for both output channels.
		final String report = buildReport();
		
		if (my_file_write && my_writer != null)
		{
			try 
			{
				my_writer.write(report);
				my_writer.write("\n");	// blank separator line between per-page reports
				my_writer.flush();		// keep the file current even if the run is interrupted
			}
			catch (final IOException the_exception) 
			{
				the_exception.printStackTrace();
			}
		}
		
		if (my_ui_echo)
		{
			my_ui.printString(report);
		}
	}	
	
	/**
	 * Builds the formatted statistics report for the most recently parsed page:
	 * running averages, per-keyword hit counts, and total running time.
	 * Must only be called after my_pages_retrieved has been incremented
	 * (all averages divide by it).
	 * @return The formatted report, terminated by a newline.
	 */
	private String buildReport()
	{
		final StringBuilder builder = new StringBuilder();
		builder.append("Parsed:  " + my_current_url + "\n");
		builder.append("Pages Retrieved:  " + my_pages_retrieved + "\n");
		builder.append("Average words per page:  " + my_total_words / my_pages_retrieved + "\n");
		builder.append("Average URLs per page:  " + my_total_urls / my_pages_retrieved + "\n");
		builder.append("Keyword\t\t\tAve. hits per page\tTotal hits" + "\n");
		
		for (final Map.Entry<String, Integer> entry : my_keyword_map.entrySet())
		{
			builder.append("  " + entry.getKey() + " " + "\t\t\t"
					+ String.format("%.3f", (float) entry.getValue() / my_pages_retrieved)
					+ "\t\t   " + entry.getValue() + "\n");
		}
		
		builder.append("Page limit: " + my_search_limit + "\n");
		builder.append("Average parse time per page: " + (my_total_time / my_pages_retrieved) + "msec\n");
		builder.append("Total running time: " + my_ui.getTime() + "sec" + "\n");
		return builder.toString();
	}
}
