//Fusion of the 422 Proportion
//Dustin Striplin, Tyler Simrell, Lawrence Grass, and Jacob Hall
//Finished 5/1/13

package model;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.Collection;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * The DataGatherer class for the webcrawler program
 * @author Jacob Hall
 * @version 4/15/2013
 */
/**
 * Gathers running statistics for pages processed by the web crawler and
 * writes the accumulated results to a report file.
 *
 * <p>Pages arrive via {@link #addPageInfo}; each page's counters are folded
 * into running totals carried on the PageInfo objects themselves, using the
 * previously processed page as the accumulator seed.
 *
 * @author Jacob Hall
 * @version 4/15/2013
 */
public class DataGatherer
{
	/**
	 * The Queue that holds the PageInfo Objects as they are input into the DataGatherer.
	 */
	private Queue<PageInfo> page_info_input_queue = new ConcurrentLinkedQueue<PageInfo>();
	
	/**
	 * The Queue that holds the fully processed PageInfo Objects before they are
	 * written to file, in the order in which they were processed.
	 */
	private Queue<PageInfo> page_info_write_to_file_queue = new ConcurrentLinkedQueue<PageInfo>();
	
	/**
	 * The last PageInfo Object processed; its running totals seed the
	 * statistics of the next page to be processed.
	 */
	private PageInfo last_page_in = null;
	
	/**
	 * The current PageInfo Object as it is processed.
	 */
	private PageInfo current_working_page = null;
	
	/**
	 * Time stamp (System.nanoTime) taken when this DataGatherer was constructed.
	 */
	private long program_start_time = 0;
	
	/**
	 * Elapsed nanoseconds from construction to the most recently processed page.
	 */
	private long program_run_time = 0;
	
	/**
	 * True while no drain pass is running on the input queue.  Guards
	 * processPageInfo() so only one drain pass runs at a time.
	 * (Package-private visibility kept for compatibility.)
	 */
	boolean queue_empty = true;
	
	/**
	 * Constructs a DataGatherer and records the program start time.
	 */
	public DataGatherer()
	{
		program_start_time = System.nanoTime();
	}
	
	/**
	 * Adds a parsed page to the input queue and, if no drain pass is already
	 * running, processes every page currently queued.
	 * @param the_input_page_info The PageInfo Object that needs to be processed
	 */
	public synchronized void addPageInfo(final PageInfo the_input_page_info)
	{
		page_info_input_queue.add(the_input_page_info);
		// BUG FIX: the original tested "queue_empty = true" — an assignment,
		// not a comparison — so the guard was always taken and the idle flag
		// never actually gated anything.  Test the flag's value instead.
		if (queue_empty)
		{
			queue_empty = false; // mark a drain pass as running
			processPageInfo();   // drains everything queued so far; while it
			                     // runs, the parser may add more pages, which
			                     // the same pass will pick up
		}
	}
	
	/**
	 * Drains the input queue, folding each page's counts into the running
	 * totals carried over from the previously processed page, and appends
	 * the finished page to the write-to-file queue.
	 */
	private void processPageInfo()
	{
		while (!page_info_input_queue.isEmpty())
		{
			PageInfo the_input_page_info = page_info_input_queue.poll();
			if (last_page_in == null)   //first iteration needs to establish a starting
			{							//point for gathering statistics.
				last_page_in = the_input_page_info;
			}
			
			current_working_page = the_input_page_info;
				//Increment the number of pages retrieved
			current_working_page.setNumPagesRetrieved(last_page_in.getNumPagesRetrieved()+ 1);
				//Add the current page's # words to the running total
			current_working_page.setTotalNumWords(last_page_in.getTotalNumWords() + 
					current_working_page.getNumWordsOnPage());
				//calculate and update the average words per page up to this point
			current_working_page.setAveWordsPerPage(current_working_page.getTotalNumWords()/
					current_working_page.getNumPagesRetrieved());
				//Add the # of URL's on the current page to the running total
			current_working_page.setTotalNumURLs(last_page_in.getTotalNumURLs() + 
					current_working_page.getNumURLOnPage());
				//calculate and update the average URL's per page up to this point
			current_working_page.setAveURLsPerPage(current_working_page.getTotalNumURLs() /
					current_working_page.getNumPagesRetrieved());
				//for each keyword on this page, carry forward the previous running
				//total of hits (0 if this keyword has not been seen before) and
				//recompute the average hits per page for that keyword.
			Collection <String> tempKeyWord = current_working_page.getKeyWordMap().keySet();
			for (String s: tempKeyWord)
			{
				int previous_hits = 0;
				if (last_page_in.getKeywordTotalHitsMap().get(s) != null)
				{
					previous_hits = last_page_in.getKeywordTotalHitsMap().get(s);
				}

				current_working_page.getKeywordTotalHitsMap().put(s, 
						previous_hits + current_working_page.getKeyWordMap().get(s));
				current_working_page.getKeywordAveHitsMap().put(s, 
						((double)current_working_page.getKeywordTotalHitsMap().get(s) / 
						(double)current_working_page.getNumPagesRetrieved()));
			}	
			//total running time =
			//(current time - the point in time the DataGatherer was instantiated)
			program_run_time = (System.nanoTime() - program_start_time);
			current_working_page.setTotalRunTime(formatTime(program_run_time));
			
			//calculate the time it took to parse the page
			current_working_page.setTimeToParsePage(current_working_page.getPageEndTime() 
												- current_working_page.getPageStartTime());
			
			//Use the latest time to parse the page to update the running total and 
			//average time to parse each page
			current_working_page.setTotalTimeToParsePages(last_page_in.getTotalTimeToParsePages() + 
					current_working_page.getTimeToParsePage());
			current_working_page.setAveParseTimePerPage(
					formatTime(current_working_page.getTotalTimeToParsePages() /
							current_working_page.getNumPagesRetrieved()));
			
			last_page_in = current_working_page;
			page_info_write_to_file_queue.add(current_working_page);
		}
		queue_empty = true; // drain pass finished; next addPageInfo() may start one
	}
	
	/**
	 * Prints a .txt file by pulling one PageInfo Object off of the head of the
	 * page_info_write_to_file_queue at a time.  Pages are printed to the file
	 * sequentially in the order in which they were parsed.
	 * @param the_file_name path of the report file to create/overwrite
	 */
	public void printFile(final String the_file_name)
	{
		BufferedWriter out = null;
		try
		{
			out = new BufferedWriter(new FileWriter(the_file_name));
			
			out.write("Team: FusionOfThe422Proportion");
			out.newLine();
			out.write("Team Members:");
			out.newLine();
			out.write("      Dustin Striplin");
			out.newLine();
			out.write("      Lawrence Grass");
			out.newLine();
			out.write("      Tyler Simrell");
			out.newLine();
			out.write("      Jacob Hall");
			out.newLine();
			out.write("Date: 1/1/13");
			out.newLine();
			out.write("--------------------------------------------------------");
			out.newLine();
			out.newLine();
			
			for (PageInfo P : page_info_write_to_file_queue)
			{
				out.write("Parsed: " + P.getURL());
				out.newLine();
				out.write("Total number of words: "
						+ P.getTotalNumWords());
				out.newLine();
				out.write("Pages Retrieved: "
						+ P.getNumPagesRetrieved());
				out.newLine();
				out.write("Average words per page: "
						+ P.getAveWordsPerPage());
				out.newLine();
				out.write("Average URLs per page: "
						+ P.getAveURLsPerPage());
				out.newLine();
				Collection<String> tempKeyWord = P
						.getKeyWordMap().keySet();
				out.write("Keyword:            " + "Ave. Hits per page:          "
						+ "total hits:    ");
				out.newLine();
				for (String s : tempKeyWord) {
					double d = P.getKeywordAveHitsMap().get(s);
					out.write(String.format("%-20s", s) + " " + String.format("%15.6f", d)
							+ "             "
							+ P.getKeywordTotalHitsMap().get(s));
					out.newLine();
				}
				out.write("Page Limit: " + P.getPageLimit());
				out.newLine();
				out.write("Average parse time per page: "
						+ P.getAveParseTimePerPage());
				out.newLine();
				out.write("Total running time: "
						+ P.getTotalRunTime() + "\n");
				out.newLine();
				out.write("--------------------------------------------------------");
				out.newLine();
				out.newLine();
			}
		}
		catch (Exception e)
		{
			System.err.println("Error in DataGatherer: " + e.getMessage());
		}
		finally
		{
			// BUG FIX: close() was inside the try block, so the writer leaked
			// whenever a write failed.  Always close (flushes on success).
			if (out != null)
			{
				try
				{
					out.close();
				}
				catch (Exception ignored)
				{
					// nothing useful can be done if close itself fails
				}
			}
		}
	}
	
	/**
	 * Formats a duration given in nanoseconds as "Q.RRR unit", scaling by
	 * powers of 1000 into the largest unit (up to seconds) whose value is
	 * below 1000 — e.g. 1500000 ns becomes "1.500 millisec".
	 *
	 * <p>BUG FIXES versus the original implementation:
	 * durations below one microsecond no longer return the empty string
	 * (they are reported in nanosec); the dead "nanosec" branch is gone;
	 * and durations of 1000 seconds or more are no longer divided an extra
	 * time while still labelled "sec" (5000 s now prints "5000.000 sec",
	 * not "5.000 sec").
	 *
	 * @param the_program_run_time the duration in nanoseconds
	 * @return the formatted duration with its unit
	 */
	public static String formatTime(final long the_program_run_time)
	{
		final String[] units = {"nanosec", "microsec", "millisec", "sec"};
		long value = the_program_run_time;
		long remainder = 0;
		int unit_index = 0;
		// Scale down by 1000 at a time, capping the unit at seconds.
		while (value >= 1000 && unit_index < units.length - 1)
		{
			remainder = value % 1000;
			value = value / 1000;
			unit_index++;
		}
		if (unit_index == 0)
		{
			// Sub-microsecond: no fractional digits were produced.
			return value + " " + units[0];
		}
		// Zero-pad the fractional part to exactly three digits.
		String fraction = Long.toString(remainder);
		while (fraction.length() < 3)
		{
			fraction = "0" + fraction;
		}
		return value + "." + fraction + " " + units[unit_index];
	}
	
	/**
	 * @return the number of pages processed so far, or 0 if no page has
	 *         been processed yet.
	 */
	public int getNumPagesProcessed()
	{
		// BUG FIX: guard against a NullPointerException when this is called
		// before any page has been processed.
		if (current_working_page == null)
		{
			return 0;
		}
		return current_working_page.getNumPagesRetrieved();
	}
	
	/**
	 * A print to console method used in testing.  It loosely matches the formatting
	 * laid out in the project specs for how the information is to be displayed.
	 * Reports the statistics of the most recently processed page.
	 */
	public void printOutToConsole()
	{
		System.out.println("Parsed: " + current_working_page.getURL());
		System.out.println("Total number of words: " + current_working_page.getTotalNumWords());
		System.out.println("Pages Retrieved: " + current_working_page.getNumPagesRetrieved());
		System.out.println("Average words per page: " + current_working_page.getAveWordsPerPage());
		System.out.println("Average URLs per page: " + current_working_page.getAveURLsPerPage());
		Collection <String> tempKeyWord = current_working_page.getKeyWordMap().keySet();
		System.out.println("Keyword:     " + "Ave. Hits per page:     " +  "total hits:    ");
		for (String s: tempKeyWord)
		{
			System.out.println(s + "    " + current_working_page.getKeywordAveHitsMap().get(s) +
					"                 " + current_working_page.getKeywordTotalHitsMap().get(s));
		}
		System.out.println("Page Limit: " + current_working_page.getPageLimit());
		System.out.println("Time to parse page: " + formatTime(current_working_page.getTimeToParsePage()));
		System.out.println("Average parse time per page: " + current_working_page.getAveParseTimePerPage());
		System.out.println("Total running time: " + current_working_page.getTotalRunTime() + "\n");
	}
}
