package MultiThreaded;

import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.StringTokenizer;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

//import net.htmlparser.jericho.Source; AS - Commented this out since we're not using it

/**
 * The Main class. This class initializes the main fields and threads.
 * @author zsyed
 */
public class MultiThreadSetup {

	/** The maximum number of pages to crawl (read by the worker threads). */
	protected static int page_limit;
	/** Pages already processed by PageRetriever (updated by the worker threads). */
	protected static int  page_counter;
	/**
	 * The keywords to search for, mapped to their running hit counts
	 * (counts are updated by PageAnalyzer, which shares this map).
	 */
	private HashMap<String, Integer> keywords;
	/** The starting url (normalized to include a scheme in Crawl()). */
	private String my_starting_url;
	/** Running average of words per page — NOTE(review): updated by worker threads, not here. */
	protected static double avg_words_per_page;
	/** Running average of embedded urls per page — updated by worker threads. */
	protected static double avg_urls_per_page;
	/** Per-keyword total hit counts, indexed in keyword-map iteration order. */
	protected static int[] keywords_total_hits;
	/** Running average parse time per page — updated by worker threads. */
	protected static double avg_parse_time;

	/**
	 * Initialize fields.
	 * @param the_max_pages number of pages to crawl.
	 * @param the_url the starting url to crawl, input by user.
	 * @param the_keywords space-separated keywords, input by user.
	 */
	public MultiThreadSetup(int the_max_pages, String the_url, String the_keywords) {
		page_limit = the_max_pages;
		avg_words_per_page = 0;
		avg_urls_per_page = 0;
		avg_parse_time = 0;
		page_counter = 0;
		my_starting_url = the_url;
		keywords = setupKeywords(the_keywords);
		keywords_total_hits = new int[keywords.size()];
	}

	/**
	 * Set up the keywords.
	 * @param the_keywords space-separated keyword string.
	 * @return a map of the lower-cased keywords, each with an initial count of 0.
	 */
	private HashMap<String, Integer> setupKeywords(String the_keywords) {
		HashMap<String, Integer> result = new HashMap<String, Integer>();
		// Locale.ROOT keeps lower-casing locale-independent; lower-case once,
		// not per token (the original lower-cased the string AND each token).
		StringTokenizer st = new StringTokenizer(the_keywords.toLowerCase(Locale.ROOT), " ");
		while (st.hasMoreTokens()) {
			result.put(st.nextToken(), 0);
		}
		return result;
	}

	/**
	 * Starts the thread calling process: spawns the retriever, parser and
	 * analyzer threads, then polls until either all threads have died or
	 * all three report idle, at which point a poison pill is injected so
	 * each pipeline stage can shut down cleanly.
	 */
	public void Crawl() {
		if (!my_starting_url.startsWith("http")) {
			my_starting_url = "http://" + my_starting_url;
		}
		if (!isURL(my_starting_url)) {
			return; // invalid starting url — nothing to crawl
		}
		BlockingQueue<String> sources = new LinkedBlockingQueue<String>();
		BlockingQueue<String> completed_sources = new LinkedBlockingQueue<String>();
		BlockingQueue<URL> downloaded_sources = new LinkedBlockingQueue<URL>();
		BlockingQueue<List<String>> content = new LinkedBlockingQueue<List<String>>();
		BlockingQueue<List<String>> embedded_urls = new LinkedBlockingQueue<List<String>>();

		sources.add(my_starting_url);

		PageRetriever pr = new PageRetriever(sources, completed_sources, downloaded_sources);
		PageParser pp = new PageParser(sources, content, downloaded_sources, embedded_urls, MultiThreadSetup.page_limit);
		PageAnalyzer pa = new PageAnalyzer(content, embedded_urls, keywords);

		Thread pageRetriever = new Thread(pr);
		Thread pageParser = new Thread(pp);
		Thread pageAnalyzer = new Thread(pa);
		pageRetriever.start();
		pageParser.start();
		pageAnalyzer.start();
		try {
			Thread.sleep(100); // give the workers a moment to start up
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore interrupt status, stop crawling
			return;
		}

		while (pageRetriever.isAlive() || pageParser.isAlive() || pageAnalyzer.isAlive()) {
			try {
				Thread.sleep(50);
				if (pr.getIsWaiting() && pp.getIsWaiting() && pa.getIsWaiting()) {
					// All three stages are idle: the link frontier is exhausted.
					// Inject a poison pill so each stage shuts down cleanly.
					List<String> poison = new ArrayList<String>();
					poison.add("POISON");
					embedded_urls.add(poison);
					content.add(poison);
					Thread.sleep(50);
					while (!pa.getIsFinished()) {
						Thread.sleep(10); // was a hot busy-spin; yield the CPU instead
					}
					return;
				}
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt(); // restore interrupt status, stop crawling
				return;
			}
		}
		System.out.println("Content: " + content);
	}

	/** @return the (normalized) starting url. */
	public String getURLstart() {
		return my_starting_url;
	}

	/** @return the average parse time per page. */
	public double getAvgParseTime() {
		return avg_parse_time;
	}

	/**
	 * Average hits per page for the keyword at the given index
	 * (keyword-map iteration order).
	 * @param index index of the keyword.
	 * @return average hits per page, or 0 if no pages were retrieved
	 *         (the original divided by zero here, yielding NaN/Infinity).
	 * @throws IndexOutOfBoundsException if index is out of range.
	 */
	public double getKeywordsAvgHits(int index) {
		if (page_counter == 0) {
			return 0; // avoid division by zero before any page is processed
		}
		int i = 0;
		for (String key : keywords.keySet()) {
			if (i == index) {
				// compute only the requested average instead of building a full array
				return (double) keywords.get(key) / (double) page_counter;
			}
			i++;
		}
		throw new IndexOutOfBoundsException("No keyword at index " + index);
	}

	/** @return the shared keyword-to-hit-count map. */
	public HashMap<String, Integer> getKeywords() {
		return keywords;
	}

	/** @return the average number of embedded urls per page. */
	public double getAvgUrlsPerPage() {
		return avg_urls_per_page;
	}

	/** @return the average number of words per page. */
	public double getAvgWordsPerPage() {
		return avg_words_per_page;
	}

	/** @return the number of pages retrieved so far. */
	public int getPagesRetrieved() {
		return page_counter;
	}

	/**
	 * Helper to check the validity of a url using url pattern matching.
	 * @param url the candidate url.
	 * @return true if the url matches the http/https url pattern.
	 */
	private boolean isURL(String url) {
		// "s?" is equivalent to the original "s{0,1}" — optional https 's'.
		String urlPattern = "^http(s?)://[a-zA-Z0-9_/\\-\\.]+\\.([A-Za-z/]{2,5})[a-zA-Z0-9_/\\&\\?\\=\\-\\.\\~\\%]*";
		return url.matches(urlPattern);
	}

}
