package httpclient;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;

import queue.ListQueue;
import queue.QueueADT;
import controller.Manager;

public class Parser implements Callable<List<URL>>, Runnable {

	/**
	 * The ASCII hex value of an upper case 'A'.
	 */
	private static final int MY_UPPER_A = 0x41;
	/**
	 * The ASCII hex value of a lower case 'z'.
	 */
	private static final int MY_LOWER_Z = 0x7A;
	/**
	 * The ASCII hex value of an upper case 'Z'.
	 */
	private static final int MY_UPPER_Z = 0x5A;
	/**
	 * The ASCII hex value of a lower case 'a'.
	 */
	private static final int MY_LOWER_A = 0x61;

	/** The website whose HTML content is currently being parsed; set via setWebsite. */
	private Website my_website;
	/** Word -> occurrence count for the website currently being parsed. */
	private final Map<String, Integer> my_dictionary;
	/** Sink that aggregates the per-site word counts. */
	private final DataGatherer my_gatherer;

	/**
	 * Coordinator for the threaded crawl. May be null when the
	 * single-argument constructor is used; call() and run() guard / depend
	 * on this accordingly.
	 */
	private Manager my_manager;
	/**
	 * Cooperative stop flag for run(). Volatile because stop() is invoked
	 * from a different thread than the one executing run(); without
	 * volatile the worker thread might never observe the update.
	 */
	private volatile boolean shouldStop;
	/** Time, in milliseconds, taken by the most recent call(). */
	private double my_time;

	/**
	 * Constructs a parser that reports word counts to the given gatherer.
	 * A parser built this way has no Manager, so only call()/setWebsite()
	 * may be used, not run().
	 *
	 * @param the_gatherer The gatherer that accumulates word counts.
	 */
	public Parser(DataGatherer the_gatherer) throws IllegalArgumentException
	{
		my_dictionary = new HashMap<String, Integer>();
		my_gatherer = the_gatherer;
	}

	/**
	 * Constructs a parser that reports word counts to the given gatherer
	 * and timing/URL data to the given manager.
	 *
	 * @param the_gatherer The gatherer that accumulates word counts.
	 * @param man The manager coordinating the crawl.
	 */
	public Parser(DataGatherer the_gatherer, Manager man)
	{
		my_dictionary = new HashMap<String, Integer>();
		my_manager = man;
		my_gatherer = the_gatherer;
		shouldStop = false;
	}

	/**
	 * Sets the website to be parsed by the next call() and resets the
	 * per-site word dictionary.
	 *
	 * @param the_website The website to parse next.
	 */
	public void setWebsite(Website the_website)
	{
		my_dictionary.clear();
		my_website = the_website;
	}

	/**
	 * Get a list of URL's provided in the Website's htmlContentBuffer.
	 *
	 * @return Returns a list of well-formed URLs found in anchor tags;
	 *         malformed hrefs are silently dropped.
	 */
	private List<URL> parseWebsite()
	{
		final List<String> anchor_tags = parseHTMLContent();
		final List<URL> urls = new ArrayList<URL>(anchor_tags.size());
		for (String offset_tag : anchor_tags)
		{
			final URL offset_url = parseAnchorTag(offset_tag);
			// parseAnchorTag returns null for malformed URLs; skip those.
			if (offset_url != null)
			{
				urls.add(offset_url);
			}
		}
		return urls;
	}

	/**
	 * Parses the current website: extracts its URLs, reports its word
	 * counts to the gatherer, and records the elapsed parse time.
	 *
	 * @return The list of URLs extracted from the current website.
	 * @throws Exception Propagated from parsing; declared by Callable.
	 */
	@Override
	public List<URL> call() throws Exception {
		final long parse_time = System.nanoTime();
		final List<URL> urls = parseWebsite();
		my_gatherer.gather(my_dictionary);
		// Convert elapsed nanoseconds to milliseconds.
		my_time = (System.nanoTime() - parse_time) / (double) 1000000;

		// The single-argument constructor leaves my_manager null; guard so
		// that construction path does not throw a NullPointerException here.
		if (my_manager != null)
		{
			my_manager.setTime(my_time);
			my_manager.printData(my_website.my_url);
		}
		return urls;
	}

	/**
	 * Parses an HTML content buffer into a list of <a href="[URL]"> formed strings,
	 * and records space-delimited words into the per-site dictionary as a side effect.
	 *
	 * @return Returns a list of anchor tags to feed into parseAnchorTag.
	 */
	private List<String> parseHTMLContent()
	{
		final List<String> a_tag_list = new LinkedList<String>();
		String content_buffer = my_website.my_htmlContent;
		content_buffer = content_buffer.toLowerCase();
		final char[] content_char = content_buffer.toCharArray();
		boolean a_tag = false;
		boolean found_word = false;

		int a_tag_ind = 0;
		int word_ind = 0;

		for (int i = 0; i < content_char.length; i++)
		{
			// Bounds guard: the original indexed i+1 unconditionally, which threw
			// ArrayIndexOutOfBoundsException when the buffer ended with '<'.
			if (content_char[i] == '<' && i + 1 < content_char.length
					&& content_char[i + 1] == 'a')
			{
				a_tag = true;
				a_tag_ind = i;
				continue;
			}

			if (a_tag && content_char[i] == '>')
			{
				a_tag = false;
				a_tag_list.add(content_buffer.substring(a_tag_ind, i + 1));
			}

			// NOTE(review): words are delimited by *alternating* spaces, so only
			// every other space-separated token is recorded -- confirm this is
			// the intended sampling behavior before changing it.
			if (!a_tag && content_char[i] == ' ')
			{
				found_word = !found_word;

				if (found_word)
				{
					word_ind = i;
				}
				else
				{
					putWord(my_dictionary, content_buffer.substring(word_ind, i).trim());
					word_ind = 0;
				}
			}
		}

		return a_tag_list;
	}

	/**
	 * Parses an <a href="" [other params]> formatted string.
	 *
	 * @param aTagContent Contents, in string form, of: <a href="[URL]">
	 * @return Returns a URL representation of the anchor tag, or null if the
	 *         URL is malformed or no href value could be located.
	 */
	private static URL parseAnchorTag(String aTagContent)
	{
		String href = "";

		final char[] aTagContent_char = aTagContent.toCharArray();

		boolean found_href = false;
		int beg_ind = 0;

		for (int i = 0; i < aTagContent_char.length; i++)
		{
			// The first '=' marks the start of the href value.
			if (!found_href && aTagContent_char[i] == '=')
			{
				found_href = true;
				beg_ind = i + 1;
				continue;
			}

			// The value ends at the first space or the closing '>'.
			if (found_href && (aTagContent_char[i] == ' ' || aTagContent_char[i] == '>'))
			{
				href = aTagContent.substring(beg_ind, i);
				break;
			}
		}

		// Strip the surrounding quotes from the attribute value.
		href = href.replaceAll("\"", "");
		URL new_url;
		try
		{
			new_url = new URL(href);
		}
		catch (Exception e)
		{
			// Malformed URL: signal with null; the caller drops these.
			new_url = null;
		}
		return new_url;
	}

	/**
	 * Parses and places a word into the map provided with the occurrence count.
	 * Non-letter characters are stripped and the word is lower-cased first;
	 * words with no letters at all are ignored.
	 *
	 * @param the_map The map which contains the words.
	 * @param the_word The word to place in the specified map.
	 */
	private static void putWord(final Map<String, Integer> the_map, final String the_word) {
		// StringBuilder: no synchronization needed for this method-local buffer.
		final StringBuilder new_word = new StringBuilder();
		for (int i = 0; i < the_word.length(); i++) {
			final char offset = the_word.charAt(i);
			// Keep only ASCII letters: inside [A-z] but outside the (Z, a) gap.
			if (!(offset < MY_UPPER_A || offset > MY_LOWER_Z
					|| (offset > MY_UPPER_Z && offset < MY_LOWER_A))) {
				new_word.append(offset);
			}
		}
		// Force all chars to lower-case so word counting is case-insensitive.
		final String new_word_str = new_word.toString().toLowerCase();
		// Nothing to record if the word contained no letters at all.
		// (toString() never returns null, so only emptiness needs checking.)
		if (new_word_str.length() == 0) {
			return;
		}
		// Single lookup instead of the original containsKey + get pair.
		final Integer word_occurence = the_map.get(new_word_str);
		the_map.put(new_word_str, word_occurence == null ? 1 : word_occurence + 1);
	}

	/**
	 * Worker loop: repeatedly grabs websites from the manager, parses each
	 * one, and enqueues the discovered URLs back to the manager. Requires
	 * the two-argument constructor (a non-null manager). Runs until stop()
	 * is called.
	 */
	@Override
	public void run() {
		while (!shouldStop)
		{
			final QueueADT<Website> websites = my_manager.grabWebsitesToParse(1);
			final List<URL> urls = new LinkedList<URL>();
			// Parse all websites first.
			while (!websites.isEmpty())
			{
				try
				{
					setWebsite(websites.deque());
					urls.addAll(call());
				}
				catch (Exception e)
				{
					// Best effort: a single malformed page must not kill the
					// worker thread; move on to the next queued website.
				}
			}

			// Enqueue all urls which were parsed.
			for (URL url : urls)
			{
				my_manager.enqueURL(url);
			}
		}
	}

	/**
	 * Requests that run() exit after its current iteration. Safe to call
	 * from another thread (shouldStop is volatile).
	 */
	public void stop()
	{
		shouldStop = true;
	}

	/**
	 * @return Time, in milliseconds, taken by the most recent call().
	 */
	public double my_time() {
		return my_time;
	}
}
