//Steve Hipolito, Daniel Beraun - Project 1

import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.Locale;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * The PageAnalyzer class examines the words and embedded URLs 
 * within each retrieved document, tabulating the summary statistics 
 * that will be reported when the web crawler finishes.
 * 
 * @author Steve Hipolito
 */
public class RunnablePageAnalyzer implements Runnable{
	/** Array list of Keywords. */
	private ArrayList<String> keywords = new ArrayList<String>();
	
	/** Array list of Keywords's hits. */
	private Integer[] hits;
	
	/** Array list of total words found on the web page. */
	private ArrayList<String> words = new ArrayList<String>();

	/** Number of pages crawled. */
	public int crawledPages = 0;
	
	/** Number of links found. */
	public int foundLinks = 0;
	
	/** Number of unique links found (every push to the links queue). */
	public int uniqueLinks = 0;
	
	private Queue<String> wordsToBeAnalyzed = new LinkedList<String>();
	
	public long timerStart = 0;
	public long timerStop = 0;
	private boolean stop = false;
	
	DecimalFormat df = new DecimalFormat("#.###");
	
	public RunnablePageAnalyzer(final ArrayList<String> the_keywords) {
		keywords = the_keywords;
		hits = new Integer[keywords.size()];
		for (int i = 0; i < keywords.size(); i++)
			hits[i] = 0;
	}
	
	public synchronized void addWordToQueue(String word) { //called by parser
		wordsToBeAnalyzed.add(word);
	}
	
	public ArrayList<String> getKeywords() { //called by GUI
		final ArrayList<String> clone = new ArrayList<String>();
		for (int i = 0; i < keywords.size(); i++) {
			clone.add(keywords.get(i));
		}
		return clone;
	}
	
	public double getAvgWordsPerPage() { //called by GUI
		double num = 0;
		if (crawledPages > 0)
			num = ((double)words.size() / (double)crawledPages);
		return Double.valueOf(df.format(num));
	}
	
	public double getAvgEmbeddedURLsPerPage() { //called by GUI
		double num = 0;
		if (crawledPages > 0)
			num = ((double)foundLinks / (double)crawledPages);
		return Double.valueOf(df.format(num));
	}
	
	public long getAvgTimePerPage() { //called by GUI
		long num = 0;
		if (crawledPages > 0)
			num = getTotalRunningTime() / crawledPages;
		return num;
	}
	
	public double getAvgKeywordHitsPerPage(String the_keyword) { //called by GUI
		double num = 0;
		int hits = getTotalHitsForKeyword(the_keyword);
		if (crawledPages > 0)
			num = ((double)hits / (double)crawledPages);
		return Double.valueOf(df.format(num));
	}
	
	public int getTotalHitsForKeyword(String the_keyword) { //called by GUI
		int num = 0;
		for (int i = 0; i < keywords.size(); i++) {
			if (keywords.get(i).equals(the_keyword))
				num = hits[i];
		}
		return num;
	}
	
	public int getTotalWordsFound() { //called by GUI
		return words.size();
	}
	
	public long getTotalRunningTime() { //called by GUI
		return timerStop - timerStart;
	}
	
	/**
	 * It analyzes the word from the queue and compares it 
	 * to the array keywords to increase the total hits.
	 */
	private synchronized void addWord() {
		String the_word = "";
		the_word = wordsToBeAnalyzed.peek();
		words.add(the_word);
		for (int i = 0; i < keywords.size(); i++) {
			if (keywords.get(i).equals(the_word))
				hits[i]++; //increase count of total hits for this keyword
		}
		wordsToBeAnalyzed.remove();
	}
	
	public void run() {	
		while(!stop) {
			while (!wordsToBeAnalyzed.isEmpty()) {
				addWord();
			}
		}
	}
	
	public boolean done() {
		return wordsToBeAnalyzed.isEmpty();
	}
	
	public void stop() {
		stop = true;
	}
}
