
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import data.Page;
import threads.*;


public class testSteps {

	
	/**
	 * Builds a human-readable summary of the crawl results, e.g.:
	 *
	 * <pre>
	 *  Pages Retrieved: 12
	 *  Average words per page: 321
	 *  Average URLs per page: 11
	 *  Keyword    Avg. hits per page  Total hits
	 *  albatross  0.250               3
	 *  ...
	 *  Total running time 0.96s
	 * </pre>
	 *
	 * @param my_pages    pages that were fetched and analysed; may be empty
	 * @param my_keywords keywords to report hit counts for, printed in list order
	 * @param start_time  crawl start timestamp (epoch millis), used for total runtime
	 * @return the formatted report as a single string
	 */
	public static String getDisplayResults(List<Page> my_pages, List<String> my_keywords, Long start_time){

		int pages_retrieved = my_pages.size();
		// Avoid ArithmeticException when no pages were retrieved; all averages become 0.
		int divisor = Math.max(pages_retrieved, 1);

		long total_words = 0;
		long total_urls = 0;
		long total_parse_time = 0;
		Map<String, Integer> total_hits = new HashMap<String, Integer>();

		for (String word : my_keywords){
			total_hits.put(word, 0);
		}

		for (Page page : my_pages){
			total_words += page.getNumWords();
			total_urls += page.getNumUrls();
			total_parse_time += page.getParseTime();
			// Accumulate per-keyword hit counts across all pages.
			for (String key : page.getData().keySet()){
				Integer current = total_hits.get(key);
				// A page may report a key we were not asked about; treat the
				// missing entry as 0 rather than NPE-ing on unboxing null.
				total_hits.put(key, (current == null ? 0 : current) + page.getData().get(key));
			}
		}

		long end_time = System.currentTimeMillis();
		long total_runtime = end_time - start_time;

		StringBuilder output = new StringBuilder();
		output.append("Pages Retrieved: ").append(pages_retrieved).append("\n");
		output.append("Average words per page: ").append(total_words / divisor).append("\n");
		output.append("Average URLs per page: ").append(total_urls / divisor).append("\n");
		output.append("Keyword    Avg. hits per page\tTotal hits\n");
		// Iterate my_keywords (not the HashMap's key set) for a deterministic row order.
		for (String word : my_keywords){
			int hits = total_hits.get(word);
			// Floating-point division: integer division would render e.g. 0.250 as 0.
			output.append(word).append("\t\t")
			      .append(String.format(Locale.ROOT, "%.3f", (double) hits / divisor))
			      .append("\t\t").append(hits).append("\n");
		}
		output.append("Average Parse time per page ").append(total_parse_time / divisor).append("\n");
		// Floating-point division so sub-second runtimes are not truncated to 0s.
		output.append("Total running time ")
		      .append(String.format(Locale.ROOT, "%.2f", total_runtime / 1000.0))
		      .append("s\n");

		return output.toString();
	}
	
	
	/**
	 * Driver: fetches, parses, and analyses a fixed set of Wikipedia pages for a
	 * fixed set of keywords, then prints the summary report.
	 *
	 * <p>Note: each stage is invoked via {@code run()} directly, so all work
	 * executes sequentially on the main thread — no worker threads are started.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		long starttime = System.currentTimeMillis();

		List<String> keywords = new ArrayList<String>();
		List<Page> my_pages = new ArrayList<Page>();
		List<String> my_urls = new ArrayList<String>();

		keywords.add("Hello");
		keywords.add("Hitler");
		keywords.add("Jesus");
		keywords.add("This");
		keywords.add("The");

		my_urls.add("http://en.wikipedia.org/wiki/Adolf_Hitler");
		my_urls.add("http://en.wikipedia.org/wiki/Paul_von_Hindenburg");
		my_urls.add("http://en.wikipedia.org/wiki/Hans_Luther");
		my_urls.add("http://en.wikipedia.org/wiki/Wilhelm_Marx");

		// Pipeline per URL: fetch the page, parse its content, then count keyword hits.
		for (String url : my_urls){
			Page newPage = new Page(url);
			my_pages.add(newPage);

			Fetcher newFetcher = new Fetcher(newPage);
			newFetcher.run();

			Parser newParser = new Parser(newPage);
			newParser.run();

			Analyser newAnalyser = new Analyser(newPage, keywords);
			newAnalyser.run();
		}

		System.out.println(getDisplayResults(my_pages, keywords, starttime));
	}
	
}
