import java.text.DecimalFormat;
import java.util.Hashtable;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

/** The DataGatherer is an object that looks for specific characteristics within the page and
 *  acts as a helper for the parser. Your program should allow the user to designate up to 10 
 *  keywords that the parser will search for in the text (including headers and between anchor 
 *  tags). If you use an already-built parser, you may have to modify it to report each 
 *  occurrence of each word in the list.
 *  
 *
 * The DataGatherer's job is to collect the necessary data to provide the below statistics. 
 * 	Parsed: www.tacoma.washington.edu/calendar/
	Pages Retrieved: 12
	Average words per page: 321
	Average URLs per page: 11
	Keyword               Ave. hits per page       Total hits
	  albatross               0.001                     3
	  carrots                 0.01                      5
	  everywhere              1.23                      19
	  etc..........
	
	  intelligence            0.000                     0
	
	Page limit: 5000
	Average parse time per page .001msec
	Total running time:       0.96 sec 
*/

public class DataGatherer {
	// INSTANCE FIELDS **********************************************
	/**
	 * Queue of DataObjs to handle.
	 */
	private ConcurrentLinkedQueue<DataObj> objs;
	
	/**
	 * Current object we're formatting output for.
	 */
	private DataObj currentObj;	
	
	/**
	 * Reporter to send output to.
	 */
	private Reporter rep;
	
	/**
	 * When the search started (epoch millis); used for the total running time.
	 */
	private long start_time;
	
	/**
	 * Cumulative parse time (msec) across all pages so far.
	 */
	private long total_parseTime;
	
	/**
	 * How many pages we have found and parsed.
	 */
	private int total_pages;
	
	/**
	 * How many words we have found.
	 */
	private int total_words;
	
	/**
	 * How many urls we have found.
	 */
	private int total_urls;
	
	/**
	 * List of keywords that we are looking for.
	 */
	private List<String> keywords;
	
	/**
	 * Running total of hits per keyword, accumulated across all pages.
	 */
	private Hashtable<String, Integer> keyword_hash;
	
	/**
	 * How many pages to gather data on.
	 */
	private int pageCap;
	
	/**
	 * Is the gatherer currently inside tick()?
	 */
	private boolean isRunning;
	
	// CONSTRUCTORS *************************************************	
	/**
	 * Private default constructor to prevent it being called.
	 */
	private DataGatherer() {}
	
	/**
	 * Constructor.
	 * @param reporter The reporter to send output to.
	 */
	public DataGatherer(Reporter reporter) {
		this();	// gets rid of the unused private constructor warning.
		start_time = System.currentTimeMillis();
		objs = new ConcurrentLinkedQueue<DataObj>();
		rep = reporter;	
		keywords = rep.getKeywords();
		
		keyword_hash = new Hashtable<String, Integer> ();	
	}
	
	// ACCESSORS ****************************************************
	/**
	 * @return The list of keywords being searched for.
	 */
	public List<String> getKeywords() {
		return keywords;
	}
	
	/**
	 * Sets the maximum number of pages to gather data on.
	 * @param depth The page cap.
	 */
	public void setPageCap(int depth) {
		pageCap = depth;
	}
	
	// METHODS ******************************************************
	/**
	 * Adds a DataObj to the queue.
	 * @param data The parsed-page data to enqueue.
	 */
	public void addObj(DataObj data) {
		objs.add(data);
	}
	
	/**
	 * @return Is that DataGatherer done working?
	 */
	public boolean isDone() {
		return total_pages >= pageCap && !isRunning;
	}
	
	/**
	 * Executes one cycle: dequeues one DataObj, folds its statistics into the
	 * running totals, and sends a formatted report to the Reporter.
	 *
	 * BUG FIX: the previous version only seeded keyword_hash with zeros on the
	 * first page and skipped adding that page's counts, so page one's keyword
	 * hits were silently dropped from every report. The seeding and the merge
	 * are now separate steps: seed lazily once, then always merge. The merge
	 * is also null-safe, so a key missing from the seeded table no longer
	 * causes a NullPointerException.
	 */
	public void tick() {
		isRunning = true;
		if(!objs.isEmpty()) {
			currentObj = objs.poll();
			
			// accumulate data on the whole run.
			total_parseTime += currentObj.runTime();  // runTime() assumed to be per-page parse msec — TODO confirm against DataObj.
			total_pages++;
			total_words += currentObj.numWords();
			total_urls += currentObj.numUrls();
			
			Hashtable<String, Integer> currentHash = currentObj.htable();
			
			// Lazily seed the running table so every keyword appears in the
			// report even when it has zero hits so far.
			if (keyword_hash.isEmpty()) {
				keywords = rep.getKeywords();
				for (int i = 0; i < keywords.size(); i++) {
					keyword_hash.put(keywords.get(i), 0);
				}
			}
			
			// Always fold this page's counts into the running totals
			// (previously skipped for the very first page).
			for (String key : currentHash.keySet()) {
				Integer prev = keyword_hash.get(key);
				keyword_hash.put(key, (prev == null ? 0 : prev) + currentHash.get(key));
			}
			
			rep.putStuff(buildOutput());
		} // end if		
		isRunning = false;
	} // end tick()
	
	/**
	 * Builds the statistics string that is sent to the Reporter.
	 * Must only be called after at least one page has been counted
	 * (total_pages > 0); otherwise the per-page averages divide by zero.
	 * @return The formatted report, terminated by a separator line.
	 */
	private String buildOutput() {
		StringBuilder sb = new StringBuilder();
		final DecimalFormat df = new DecimalFormat("##0.00");
		
		// Parsed: www.tacoma.washington.edu/calendar/
		sb.append("Parsed: ");
		sb.append(currentObj.url());
		sb.append("\n");
		
		// current page data - added for debugging
		sb.append("CURRENT PAGE:\n ");
		sb.append("Words on this page: \t");
		sb.append(Integer.toString(currentObj.numWords()));
		sb.append("\n");
		sb.append("URLs on this page: \t");
		sb.append(Integer.toString(currentObj.numUrls()));
		sb.append("\n");
		sb.append("Parse time for this page: \t");
		sb.append(Long.toString(currentObj.runTime()));
		sb.append("msec.\n");
		
		sb.append("RUNNING AVERAGES:\n ");
		
		// Pages Retrieved: 12
		sb.append("Pages Retrieved: \t");
		sb.append(Integer.toString(total_pages));
		sb.append("\n");

		// Average words per page: 321  (integer average, matching the spec sample)
		sb.append("Average words per page: \t");
		sb.append(Integer.toString(total_words / total_pages));
		sb.append("\n");
		
		// Average URLs per page: 11
		sb.append("Average URLs per page: \t");
		sb.append(Integer.toString(total_urls / total_pages));
		sb.append("\n");
		
		// Keyword               Ave. hits per page       Total hits
		sb.append("Keyword\t\t\tAvg. hits per page\t\tTotal hits\n");
		
		for (String key : keyword_hash.keySet()) {
			sb.append(key);
			// Extra tab keeps the columns roughly aligned for short keywords.
			if (key.length() < 8) {
				sb.append("\t");
			}
			sb.append("\t\t");
			sb.append(df.format(keyword_hash.get(key) / (double) total_pages));
			sb.append("\t\t\t\t");
			sb.append(keyword_hash.get(key));
			sb.append("\n");
		}	
		
		// Page limit: 5000
		// sb.append("Page limit:  ");  // this is displayed in the gui.
		// *** need to access the page limit from the gui(?) ***
		sb.append("\n");
		
		// Average parse time per page .001msec 
		sb.append("Average parse time per page: ");
		sb.append(Long.toString(total_parseTime / total_pages));
		sb.append(" msec.\n");
		
		// Total running time:       0.96 sec
		sb.append("Total running time: ");
		sb.append(Long.toString(System.currentTimeMillis() - start_time));
		sb.append("msec.\n------------------------------------------\n");
		
		return sb.toString();
	}
}
