/**
 * Application Name: TCSS422_WebCrawler
 * Group Name: The Other Guys
 * Members: Scott Freeman, Anthony Melcher, Jason Green
 * Date: November 10, 2011
 * 
 * Related libraries: Jericho HTML Page Parser (author unknown) 
 * 					  http://jericho.htmlparser.net/docs/index.html
 */
package report;

import java.io.BufferedWriter;
import java.util.concurrent.TimeUnit;

import controller.Spider;

import model.Page;

/**
 * This class gathers data about a particular web page
 * and calculates various statistics, such as the average
 * number of appearances of a keyword amongst all the
 * processed pages.
 */
public class DataGatherer implements Runnable {

	/**
	 * The number of pages this class has collected data from.
	 */
	public int pagesRetrieved;

	/**
	 * The total number of words from all pages retrieved so far.
	 */
	private int cumulativeWordCount;

	/**
	 * The time (epoch milliseconds) when this instance was created;
	 * used to compute the total elapsed duration.
	 */
	private long start_time;

	/**
	 * The total number of URLs found across all pages retrieved so far.
	 */
	private int cumulativeURLCount;

	/**
	 * A file writer construct.
	 */
	protected static BufferedWriter out;

	/**
	 * Instantiates an instance of this class and records the start time.
	 */
	public DataGatherer() {
		pagesRetrieved = 0;
		start_time = System.currentTimeMillis();
	}

	/**
	 * Collects all required data fields from the next available page and
	 * sends updated statistics to the Reporter. Returns immediately when
	 * no page is available.
	 */
	@Override
	public void run() {
		if (Spider.DATA_PAGE_LIST.isEmpty()) {
			return;
		}
		try {
			final int maxPages = Spider.maxPages.get();
			// Poll with a near-zero timeout so this task never blocks if the
			// queue was drained between the isEmpty() check and the poll.
			final Page current_page = Spider.DATA_PAGE_LIST.poll(1, TimeUnit.NANOSECONDS);

			if (current_page == null) {
				return;
			}

			pagesRetrieved++;
			if (pagesRetrieved <= maxPages) {
				cumulativeWordCount += current_page.getWordCount();
				cumulativeURLCount += current_page.getUrlCount();
				// Cast to double BEFORE dividing: int/int division would
				// truncate these averages to whole numbers.
				final double avgWrdPerPage = (double) cumulativeWordCount / pagesRetrieved;
				final double avgURLPerPage = (double) cumulativeURLCount / pagesRetrieved;
				final double total_duration = (System.currentTimeMillis() - start_time) / 1000.00;
				// Average parse time per page, converted back to milliseconds.
				final double avg_parse_time = total_duration / pagesRetrieved * 1000.0;

				Reporter.displayOnConsole(current_page.getMy_url().toString(),
						pagesRetrieved, cumulativeWordCount, avgWrdPerPage, avgURLPerPage,
						avg_parse_time, total_duration);

				if (pagesRetrieved == maxPages) {
					Reporter.recordDuration(avg_parse_time, total_duration);
				}
			}
		} catch (final InterruptedException e) {
			// Restore the interrupt flag so the executing thread (or pool)
			// can observe the interruption instead of it being swallowed.
			Thread.currentThread().interrupt();
		}
	}
}
