/*
 * Improbability Drive
 * Phillip Cardon, Thach Nguyen, Cristopher Claeys
 * 4/26/2011
 */

package background;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.StringTokenizer;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import structures.GathererStruct;
import structures.Reporter;
import structures.Tuple;
import ui.Launcher;
import buffers.SynchronizedBuffer;
import buffers.URLsRetrieved;

/**
 * Single Thread, compiled from separate thread code and setup to run in a serial manner.
 * @author Phillip Cardon
 * @author Chris Claeys
 * @author Thach Nguyen
 * @author Jonathan Hedley - Contributed JSoup library
 */
public class SingleThread extends Thread {
    // CLASS CONSTANTS

    /** Nanoseconds per millisecond; used to convert parse timings. */
    private static final int RATIO = 1000000;

    /** Domain the crawler must never fetch pages from. */
    private static final String PROHIBITED = "http://questioneverything.typepad.com/";

    // FIELDS

    /** Buffer of URLs whose pages still need to be retrieved. */
    private final SynchronizedBuffer<URL> pageToRetrieveBuffer;

    /** Buffer of retrieved pages, each paired with the URL it came from. */
    private final SynchronizedBuffer<Tuple<Document, URL>> pageBuffer;

    /** Record of URLs that have already been handled (fetched or rejected). */
    private final URLsRetrieved visited;

    /** Queue of parsed-page data awaiting keyword/statistics gathering. */
    private final SynchronizedBuffer<GathererStruct> ready;

    /** Keywords to search for. */
    private final String[] keys;

    /** Per-keyword hit counts; {@code count[i]} corresponds to {@code keys[i]}. */
    private final int[] count;

    /** Reporter used to push progress updates to the UI. */
    private final Reporter report;

    /** Maximum number of pages to crawl. */
    private final int crawlingPages;

    /** Running total of words seen across all parsed pages. */
    private int words;

    /** Running total of links found across all parsed pages. */
    private int links;

    /** Most recently processed URL. */
    private URL latest;

    /** Number of pages fully processed so far. */
    private int pages;

    /** Cumulative parse time in milliseconds. */
    private long parseTime;

    /**
     * Constructs a serial crawler.
     *
     * @param theRetrieveBuffer buffer of URLs to retrieve.
     * @param thePages buffer to place retrieved pages into.
     * @param theVisited record of URLs already handled.
     * @param reporting reporter used to update the UI.
     * @param keywords keywords to count occurrences of.
     * @param toCrawl maximum number of pages to crawl.
     */
    public SingleThread(SynchronizedBuffer<URL> theRetrieveBuffer, SynchronizedBuffer<Tuple<Document, URL>> thePages,
                     URLsRetrieved theVisited, Reporter reporting, String[] keywords, int toCrawl) {
        pageToRetrieveBuffer = theRetrieveBuffer;
        pageBuffer = thePages;
        visited = theVisited;
        ready = new SynchronizedBuffer<GathererStruct>();
        keys = keywords;
        count = new int[keys.length];
        report = reporting;
        crawlingPages = toCrawl;
        pages = 0;
    }

    /**
     * Crawls serially (retrieve, parse, gather) until the requested number of
     * pages has been processed, or until every work queue is empty and no
     * further progress is possible.
     */
    @Override
    public void run() {
        while (pages < crawlingPages) {
            runRetrieve();
            runParser();
            runDataGather();
            // BUG FIX: without this guard the loop spins forever when the
            // crawl frontier is exhausted before reaching crawlingPages —
            // in serial operation nothing can refill an all-empty pipeline.
            if (pageToRetrieveBuffer.size() == 0 && pageBuffer.size() == 0
                    && ready.size() == 0) {
                break;
            }
        }
    }

    /**
     * Adds parsed-page data to the queue to be processed.
     *
     * @param input the new data to add.
     */
    public void add(GathererStruct input) {
        ready.enqueue(input);
    }

    /**
     * Drains the ready queue: accumulates link/word/keyword statistics for
     * each parsed page and reports progress to the UI after each one.
     */
    public void runDataGather() {
        while (ready.size() != 0) {
            GathererStruct nextData = ready.dequeue();
            links += nextData.getLinks();
            latest = nextData.getLatest();
            pages++;
            parseTime += nextData.getParseTime();
            StringTokenizer token = new StringTokenizer(nextData.getParsed());

            while (token.hasMoreTokens()) {
                // Strip non-word characters so punctuation does not defeat
                // keyword matching (e.g. "java," matches "java").
                String toCheck = token.nextToken().replaceAll("\\W", "");
                words++;
                for (int i = 0; i < keys.length; i++) {
                    if (toCheck.equalsIgnoreCase(keys[i])) {
                        count[i]++;
                        break; // a token can match at most one keyword
                    }
                }
            }
            report.update(words, links, latest, count, pages, parseTime);
        }
    }

    /**
     * Parses at most one page from the page buffer: extracts its links,
     * enqueues crawlable ones for retrieval, and hands the page text plus
     * timing data to the gatherer.
     */
    public void runParser() {
        if (pageBuffer.size() == 0) {
            return;
        }
        Tuple<Document, URL> fromBuffer = pageBuffer.dequeue();
        Document page = fromBuffer.getFirst();
        if (page == null) { // defend against a null page in the buffer
            return;
        }

        int linkCount = 0;
        long startTime = System.nanoTime();
        Elements anchors = page.select("a"); // extract all links
        for (Element anchor : anchors) {
            String theLink = anchor.attr("abs:href");
            // Drop any fragment so "page.html#sec" dedupes with "page.html".
            if (theLink.contains("#")) {
                theLink = theLink.substring(0, theLink.indexOf('#'));
            }
            // Only follow plain document URLs (html/htm/txt/directory).
            if (theLink.endsWith("html") || theLink.endsWith("htm")
                    || theLink.endsWith("txt") || theLink.endsWith("/")) {
                try {
                    pageToRetrieveBuffer.enqueue(new URL(theLink));
                    linkCount++;
                } catch (MalformedURLException mue) {
                    System.err.println("These aren't links.");
                }
            }
        }
        long elapsedMillis = (System.nanoTime() - startTime) / RATIO;
        add(new GathererStruct(linkCount, fromBuffer.getSecond(),
                page.body().text(), elapsedMillis));
    }

    /**
     * Retrieves at most one page from the URL buffer, skipping prohibited
     * and already-visited URLs, and enqueues the fetched document.
     */
    public void runRetrieve() {
        if (pageToRetrieveBuffer.size() > 0) {
            URL u = pageToRetrieveBuffer.dequeue();
            if (u.toString().contains(PROHIBITED)) {
                // BUG FIX: the original called visited.visited(u) — the query
                // predicate (see its use below) — and discarded the result, a
                // no-op. visit(u) is the marking method per its use below;
                // confirm against URLsRetrieved.
                visited.visit(u);
                return;
            }
            if (!visited.visited(u)) {
                // BUG FIX: mark the URL visited on dequeue, not only after a
                // successful fetch — otherwise a dead link is re-fetched
                // every time another page links to it.
                visited.visit(u);
                try {
                    Document page = Jsoup.connect(u.toString()).get();
                    pageBuffer.enqueue(new Tuple<Document, URL>(page, u));
                } catch (IOException e) {
                    System.err.println("Dead link");
                }
            }
        }
    }

}
