/* Liviu Patrasco & Holly Beach
 * TCSS422 Project 1: Web Crawler
 * February 6, 2012
 */

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * The MThreadedPageAnalyzer acts as a consumer
 * class that collects statistical data
 * in a multi-threaded environment using
 * a blocking queue.
 * @author Liviu and Holly
 */
public class MThreadedPageAnalyzer implements Runnable {
	// Queue of parsed pages produced by the parser thread; terminated by marker pages.
	private final BlockingQueue<Page> parsed_pages_q;
	// Keywords to count, in user-supplied order (order preserved for reporting).
	private final String[] keywords;
	// Pre-compiled whole-word, case-insensitive patterns, parallel to keywords.
	// Compiled once here instead of once per keyword per page; Pattern.quote()
	// guards against keywords containing regex metacharacters (e.g. "c++").
	private final Pattern[] keyword_patterns;
	// Total hit count per keyword across all analyzed pages.
	private final Map<String, Integer> keywords_map = new HashMap<String, Integer>();
	// volatile: written by the analyzer thread in run(), read via done() from others.
	private volatile boolean done = false;
	private double avg_words_per_page = 0;
	private double avg_links_per_page = 0;
	private double avg_parse_time = 0;
	private long total_parse_time = 0;
	private int total_pages;
	private boolean out_of_urls = false;
	private boolean comm_error = false;
	private boolean interrupted = false;

	/**
	 * Constructs an analyzer that consumes parsed pages from the given queue
	 * and tallies statistics for the given keywords.
	 * @param pages queue of parsed pages (producer pushes marker pages to signal end)
	 * @param words keywords to count occurrences of
	 */
	public MThreadedPageAnalyzer(
			final BlockingQueue<Page> pages, final String[] words) {
		parsed_pages_q = pages;
		keywords = words;
		keyword_patterns = new Pattern[keywords.length];
		for (int i = 0; i < keywords.length; i++) {
			keywords_map.put(keywords[i], 0);
			keyword_patterns[i] = Pattern.compile(
					"\\b" + Pattern.quote(keywords[i]) + "\\b", Pattern.CASE_INSENSITIVE);
		}
	}

	/** @return true once a termination marker has been consumed and run() has exited its loop. */
	public boolean done(){
		return done;
	}

	/**
	 * Consumes pages from the queue until a termination marker arrives,
	 * accumulating word/link/parse-time totals, running averages, and
	 * keyword hit counts.
	 */
	@Override
	public void run() {
		int total_words = 0;
		int total_links = 0;
		total_pages = 0;
		Page page;
		while (!done) {
			try {
				page = parsed_pages_q.take();
				if (page.equals(MThreadedPageParser.NO_MORE_PAGES_MARKER)) {
					done = true;
				} else if (page.equals(MThreadedPageParser.OUT_OF_URLS_MARKER)) {
					done = true;
					out_of_urls = true;
				} else if (page.equals(MThreadedPageParser.INTERRUPTED_MARKER)) {
					done = true;
					interrupted = true;
				} else if (page.equals(MThreadedPageParser.COMM_ERROR_MARKER)) {
					done = true;
					comm_error = true;
				} else {
					// Count words on any whitespace run, not just single spaces,
					// so newlines/tabs/multiple spaces don't inflate the count.
					final String text = page.getText().trim();
					total_words += text.isEmpty() ? 0 : text.split("\\s+").length;
					total_links += page.getLinks().size();
					total_parse_time += page.getParseTime();
					total_pages++;
					analyzeKeywords(page);
					// Cast to double before dividing so averages keep their
					// fractional part (integer division would truncate).
					avg_words_per_page = (double) total_words / total_pages;
					avg_links_per_page = (double) total_links / total_pages;
					avg_parse_time = (double) total_parse_time / total_pages;
				}
			} catch (InterruptedException e) {
				// Deliberately keep waiting: termination is signaled via marker
				// pages, not interruption. (Re-asserting the interrupt flag here
				// would make the next take() throw immediately and busy-spin.)
				continue;
			}
		}
		// analyzer is done
	}

	/** @return a formatted report of page, word, link, keyword, and timing statistics. */
	@Override
	public String toString() {
		StringBuilder str = new StringBuilder();
		str.append(getMessageString());

		str.append("\nPages retrieved: ");
		str.append(total_pages);
		str.append("\n");
		str.append("Average words per page: ");
		str.append(avg_words_per_page);
		str.append("\n");
		str.append("Average URLs per page: ");
		str.append(avg_links_per_page);
		str.append("\n\n");
		str.append(String.format("%-20s%-20s%15s","Keyword", "Avg Hits Per Page", "TotalHits\n"));
		for (String kw : keywords){
			int cnt = keywords_map.get(kw);
			if (total_pages == 0){
				str.append(String.format(
					"%-3s%-20s%8d%20d\n"," ", kw,total_pages,cnt));		
			}else{
				str.append(String.format(
					"%-3s%-20s%8.2f%20d\n"," ", kw,((float)cnt/total_pages),cnt));
			}
		}
		str.append("\nAverage parse time per page: ");
		str.append(String.format("%.4f sec\n", avg_parse_time/1000f));

		return str.toString();
	}

	/**
	 * Adds this page's whole-word, case-insensitive keyword hits to the
	 * running totals, using the patterns pre-compiled in the constructor.
	 * @param p the page whose text is scanned
	 */
	private void analyzeKeywords(Page p) {
		for (int i = 0; i < keywords.length; i++) {
			int k_word_count = 0;
			Matcher matcher = keyword_patterns[i].matcher(p.getText());
			while (matcher.find()) {
				k_word_count++;
			}
			keywords_map.put(keywords[i], keywords_map.get(keywords[i]) + k_word_count);
		}
	}

	/**
	 * Builds the status line describing how the crawl ended
	 * (interrupted, timed out, and/or ran out of URLs).
	 * @return status message, or just a newline if the crawl ended normally
	 */
	private String getMessageString(){
		StringBuilder str = new StringBuilder("\n");
		if(interrupted)
			str.append("THE RETRIEVER THREAD WAS INTERRUPTED.  ");
		if(comm_error)
			str.append("COMMUNICATION TIMEOUT.  ");
		if(out_of_urls){
			str.append("THERE WERE ");
			str.append(total_pages); 
			str.append(" PAGE(S) TO CRAWL");
		}
		return str.toString();
	}
	
}
