/*  ResultsString.java
 * 
 *  WebCrawler Program
 *  @author Amandeep Jhajj and Craig Markham
 *  @date 26Apr2012
 */

package Crawler;

import java.util.Map;

import Crawler.DataGatherer.WordPair;

/**
 * Immutable, pre-formatted report of a single crawl run.
 *
 * <p>The report text is assembled once in the constructor and exposed
 * via {@link #getResultsString()}; instances are therefore thread-safe.
 */
public class ResultsString {

	/** The fully formatted report, built once at construction time. */
	private final String the_results;

	/**
	 * Builds the crawl-results report.
	 *
	 * @param the_page   the last URL parsed (shown on the "Parsed:" line)
	 * @param webpageNum total number of pages retrieved
	 * @param wordCount  average number of words per page
	 * @param urlCount   average number of URLs per page
	 * @param words      keyword statistics; each {@code WordPair} supplies
	 *                   the average hits per page and the total hit count
	 * @param pageTime   average parse time per page, in milliseconds
	 * @param totalTime  total running time, in milliseconds
	 */
	public ResultsString(final String the_page, final int webpageNum,
			final int wordCount, final int urlCount,
			final Map<String, WordPair> words, final int pageTime,
			final int totalTime) {
		final StringBuilder sb = new StringBuilder();
		sb.append("Parsed: ").append(the_page).append("\n");
		sb.append("Pages Retrieved: ").append(webpageNum).append("\n");
		sb.append("Average words per page: ").append(wordCount).append("\n");
		sb.append("Average URLs per page: ").append(urlCount).append("\n");

		// Keyword table header; column widths match the row format below.
		sb.append(String.format("  %-20s%-25s%-20s\n", "Keyword", "Ave. hits per page", "Total hits"));

		final String format = "  %-20s%-25.4f%-20d\n";

		// Iterate entries directly instead of keySet() + get(key):
		// same order and output, but a single map lookup per keyword.
		for (final Map.Entry<String, WordPair> entry : words.entrySet()) {
			final WordPair wp = entry.getValue();
			sb.append(String.format(format, entry.getKey(), wp.my_avg, wp.my_total));
		}
		sb.append("Page limit: ").append(Controller.MAX_LINKS).append("\n");
		sb.append("Average parse time per page: ").append(pageTime).append(" msec\n");
		// totalTime arrives in msec; report it in seconds to 4 decimal places.
		sb.append("Total running time: ").append(String.format("%.4f sec", totalTime / 1000.0)).append("\n\n\n");
		the_results = sb.toString();
	}

	/**
	 * Returns the formatted report built at construction time.
	 *
	 * @return the complete results report as a single string
	 */
	public String getResultsString() {
		return the_results;
	}
}
