package multithreaded;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

import utility.Keyword;

/**
 * Threaded version of the page analyzer.
 * Constantly checks the text and url Blocking Queues for work to be done.
 * When a String is available from my_text it is dequeued and analyzed for keywords and word count.
 * When a url is available from my_unfiltered_urls it is checked against duplicate urls, and then checked if it is valid
 * to be added into the main url queue.
 * @author Daniel Anderson and Luc H. Le
 * @version TCSS422 2012
 */
public class ThreadedPageAnalyzer implements Runnable {

	/**How long (in ms) to block on a queue poll before re-checking the finish flag.*/
	private static final long POLL_TIMEOUT_MS = 100;

	/**Number of pages analyzed so far; volatile so getPageCount() callers on other threads see updates.*/
	private volatile int my_analyzed_count;
	
	/**Number of words analyzed so far; volatile so getWordCount() callers on other threads see updates.*/
	private volatile int my_word_count;
	
	/**Queue of urls that the analyzer fills as it checks them.*/
	private final BlockingQueue<String> my_urls;
	
	/**Queue of unchecked urls the analyzer connects to.*/
	private final BlockingQueue<String> my_unfiltered_urls;
	
	/**Queue of strings that the analyzer tokenizes and checks for keywords.*/
	private final BlockingQueue<String> my_text;
	
	/**URLs already processed; skipped if they are dequeued from the unfiltered queue again.*/
	private final Set<String> my_old_urls;
	
	/**List of user inputed keywords to scan for in text.*/
	final List<Keyword> my_keywords;
	
	/**Ends the thread if it is set to true; volatile so finish() from another thread is seen by run().*/
	private volatile boolean my_finish;
	
	/**
	 * Constructor for ThreadedPageAnalyzer.
	 * @param urls URL queue to add valid urls to.
	 * @param unfiltered_urls URLs to check for connection and form.
	 * @param text Text to scan for keywords and count.
	 * @param keywords List of user inputed keywords to scan text for.
	 */
	public ThreadedPageAnalyzer(BlockingQueue<String> urls, BlockingQueue<String> unfiltered_urls, BlockingQueue<String> text, List<Keyword> keywords) {
		my_word_count = 0;
		my_analyzed_count = 1; //initial page
		my_urls = urls;
		my_unfiltered_urls = unfiltered_urls;
		my_text = text;
		my_keywords = keywords;
		my_old_urls = new HashSet<String>(); //O(1) duplicate check, vs O(n) List.contains
		my_finish = false;
	}//end of constr
	

	/**
	 * Run method for the ThreadedPageAnalyzer thread.
	 * Ends when my_finish is set to true by finish()
	 * or an unexpected interrupt.
	 */
	public void run() {
		while (!my_finish) { //TODO handle limits
			doWork();
		}
	}//end of run
	
	/**Ends the thread.*/
	public void finish() {
		my_finish = true;
	}//end of finish
	
	/**
	 * Get the number of words scanned so far.
	 * @return running word count.
	 */
	public int getWordCount() {
		return my_word_count;
	}//end of get
	
	/**
	 * Get the number of pages analyzed so far.
	 * @return running page count (starts at 1 for the initial page).
	 */
	public int getPageCount() {
		return my_analyzed_count;
	}//end of get
	
	/**
	 * Method where the main work of the thread takes place.
	 * On interruption the interrupt status is restored and the loop is stopped.
	 */
	private void doWork() {
		try {
			checkURL();
			checkText();
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); //preserve interrupt status for the owning thread
			my_finish = true;
		}
	}//end of doWork
	
	/**
	 * Constantly called by doWork.
	 * Waits up to POLL_TIMEOUT_MS for a String from the my_text queue;
	 * when one arrives it is scanned for keywords and word count.
	 * The bounded poll replaces the old isEmpty()/take() pair, which busy-spun
	 * while idle and could block forever if another consumer emptied the queue
	 * between the isEmpty() check and the take().
	 * @throws InterruptedException The thread was interrupted and should exit.
	 */
	private void checkText() throws InterruptedException {
		String text = my_text.poll(POLL_TIMEOUT_MS, TimeUnit.MILLISECONDS);
		if (text != null) {
			tokenizeScan(text);
		}
	}//end of checkText
	
	/**
	 * Constantly called by doWork.
	 * Waits up to POLL_TIMEOUT_MS for a URL from the my_unfiltered_urls queue;
	 * when one arrives it is checked for duplicates, connection errors and validity.
	 * @throws InterruptedException The thread was interrupted and should exit.
	 */
	private void checkURL() throws InterruptedException {
		String url_string = my_unfiltered_urls.poll(POLL_TIMEOUT_MS, TimeUnit.MILLISECONDS);
		if (url_string == null || !my_old_urls.add(url_string)) {
			return; //nothing queued, or this URL was already processed (Set.add returns false on duplicates)
		}
		try {
			URL url = new URL(url_string);
			URLConnection c = url.openConnection();
			c.connect(); //only reachable URLs are forwarded to the main queue
			my_urls.add(url_string);
			my_analyzed_count++;
		} catch (MalformedURLException e) {
			//Invalid URL
			System.out.println("Invalid URL: " + url_string);
		} catch (IOException e) {
			//Connection refused
			System.out.println("Connection refused to: " + url_string);
		}
	}//end of checkURL
	
	/**
	 * Tokenize the text on whitespace, scanning each token for keywords
	 * and adding it to the running word count.
	 * @param text The text to be tokenized.
	 */
	private void tokenizeScan(String text) {
		StringTokenizer t = new StringTokenizer(text);
		while (t.hasMoreTokens()) {
			checkKeyword(t.nextToken());
			my_word_count++;
		}
	}//end of tokenizeScan
	
	/**
	 * Records a hit on every keyword in my_keywords that matches the given word.
	 * @param word The word to be checked.
	 */
	private void checkKeyword(String word) {
		for (Keyword k : my_keywords) {
			if (k.isWord(word)) {
				k.addHit();
			}
		}
	}//end of checkKeyword
}//end of class
