import java.util.ArrayList;
import java.util.HashMap;
import java.util.Queue;
import java.util.StringTokenizer;
import java.util.concurrent.ConcurrentLinkedQueue;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Single-threaded crawl worker. Starting from one seed URL it repeatedly:
 * fetches a page with Jsoup, extracts its links into the frontier queue,
 * tokenizes its visible text to count words and tracked keywords, then hands
 * a formatted HTML progress report to the {@code MultiThreadEnv} delegate.
 * Crawling stops once {@code parse_page_limit} pages have been parsed.
 *
 * Note: visited-page tracking and keyword counting are keyed by
 * {@code String.hashCode()}, so distinct strings with colliding hash codes
 * are conflated — this mirrors the keying used by {@code PageObject}.
 */
public class SingleThreadEnv extends Thread
{
	/** Delegate that receives the formatted HTML crawl report. */
	MultiThreadEnv my_delegater;
	// Frontier of URLs still to fetch (thread-safe queue).
	private Queue<String> web_pages 					= new ConcurrentLinkedQueue<String>();
	// Fetched documents awaiting the parse phase.
	private Queue<PageObject> web_documents 			= new ConcurrentLinkedQueue<PageObject>();
	// Parsed documents awaiting the analyze/report phase.
	private Queue<PageObject> web_hash_arrays 			= new ConcurrentLinkedQueue<PageObject>();
	// Keywords to track, and their running hit counts keyed by hashCode.
	ArrayList<String> my_keyword_array 					= new ArrayList<String>();
	HashMap<Integer, Integer> my_keyword_map 			= new HashMap<Integer, Integer>();

	// URLs already queued/fetched, keyed by hashCode (value is unused).
	HashMap<Integer, Integer> visited_pages 			= new HashMap<Integer, Integer>();

	private boolean still_retrieving 					= true;

	private int parse_page_limit						= 200;
	private int total_words_parsed						= 0;
	private long my_start_time 							= 0;
	private int total_urls_found						= 1;
	private long total_parse_time					 	= 0;
	/** Number of pages parsed so far; also used as the next page number. */
	public int parsed_pages = 0;

	/**
	 * @param the_page_limit maximum number of pages to parse before stopping.
	 * @param the_start_url  seed URL; queued for fetching and marked visited.
	 * @param the_delegate   receiver of the generated HTML report.
	 * @param the_key_array  keywords to track (lower-case expected by counting).
	 * @param the_key_map    hit-count map keyed by keyword hashCode; must be
	 *                       pre-populated with an entry per keyword.
	 */
	public SingleThreadEnv(int the_page_limit, String the_start_url, MultiThreadEnv the_delegate, ArrayList<String> the_key_array, HashMap<Integer, Integer> the_key_map)
	{
		my_keyword_array 	= the_key_array;
		my_keyword_map 		= the_key_map;
		// Mark the actual seed as visited so discovered links never re-queue
		// it (previously a hard-coded site was marked instead of the seed).
		savePageAsVisited(the_start_url);
		parse_page_limit 	= the_page_limit;
		web_pages.add(the_start_url);
		my_delegater 		= the_delegate;
	}

	/**
	 * Main crawl loop: poll the frontier, fetch parseable resources, then run
	 * the parse and analyze phases until the page limit is reached.
	 */
	public void run()
	{
		my_start_time = System.currentTimeMillis();
		while (still_retrieving)
		{
			String url = null;
			try
			{
				url = web_pages.poll();
				if (url == null)
				{
					continue;
				}
				// Only fetch likely-parseable resources. The second test was
				// a duplicated ".html"; it now covers ".htm" as intended.
				if (url.endsWith(".html") || url.endsWith(".htm") || url.endsWith(".txt") || url.trim().endsWith("/"))
				{
					// Check the limit before fetching to avoid a wasted
					// network round-trip on the final iteration.
					if (parsed_pages >= parse_page_limit)
					{
						still_retrieving = false;
						return;
					}
					PageObject temp_page_object = new PageObject(url);
					Document document 			= Jsoup.connect(temp_page_object.getUrl()).get();

					temp_page_object.setDocument(document);
					if (document != null)
					{
						web_documents.add(temp_page_object);
					}
					parsePhase();
					analyzePhase();
				}
			}
			catch (Exception e)
			{
				// Fetch/parse failures skip the page but keep the crawl alive.
				if (url != null)
				{
					System.out.println("Could not navigate to URL: " + url + " " + e.getMessage());
					this.startParsing();
				}
			}
		}
	}

	/**
	 * Builds the HTML progress report for the most recently parsed page and
	 * delivers it to the delegate. Averages use integer division; page
	 * numbers are 0-based, hence the {@code + 1} denominators.
	 */
	public void analyzePhase()
	{
		PageObject my_values = web_hash_arrays.poll();
		if (my_values == null)
		{
			return;
		}
		try
		{
			int pages_retrieved = my_values.getPageNumber() + 1;
			StringBuilder output_string = new StringBuilder("<html>");
			output_string.append("<br/>");
			output_string.append("<br/>");
			output_string.append("Parsed:" + my_values.getUrl());
			output_string.append("<br/>");
			output_string.append("Pages Retrieved: " + pages_retrieved);
			output_string.append("<br/>");
			output_string.append("Average words per page: " + (my_values.getTotalWordsReadAtTime() / pages_retrieved));
			output_string.append("<br/>");
			output_string.append("Average URLs per page: " + (my_values.getTotalURLCount() / pages_retrieved));
			output_string.append("<br/>");
			output_string.append("<table><tr><td>Keyword</td><td>AverageHitsPerPage</td><td>TotalHits</td></tr>");

			for (int i = 0; i < my_values.getKeywordArray().size(); i++)
			{
				String keyword 		= my_values.getKeywordArray().get(i);
				Integer total_hits 	= my_values.getKeywordMap().get(keyword.hashCode());
				StringBuilder output = new StringBuilder("<tr><td>");
				output.append(keyword);
				output.append("</td>");
				output.append("<td>");
				output.append(total_hits / pages_retrieved);
				output.append("</td>");
				output.append("<td>");
				output.append(total_hits);
				output.append("</td></tr>");

				output_string.append(output.toString());
			}
			output_string.append("</table>");
			output_string.append("<br/>");
			output_string.append("Page limit: " + parse_page_limit);
			output_string.append("<br/>");
			output_string.append("Average parse time per page: " + my_values.getAverageParseTime() / pages_retrieved + "ms");
			output_string.append("<br/>");
			output_string.append("Total running time: " + my_values.getRunningTime() / 1000 + "sec.");
			output_string.append("<br/>");
			output_string.append("</html>");
			// Deliver the COMPLETE document (previously the delegate was
			// called before "</html>" was appended, truncating every report).
			my_delegater.handleOutputString(output_string.toString());
		}
		catch (Exception the_e)
		{
			// Report failures must not kill the crawl, but don't hide them.
			System.out.println("Could not build report: " + the_e.getMessage());
		}
	}

	/**
	 * Parses the next fetched document: queues its unseen links, counts its
	 * words and tracked keywords, stamps crawl statistics onto the
	 * {@code PageObject}, and queues it for the analyze phase.
	 */
	private void parsePhase() {

		PageObject parse_doc = web_documents.poll();
		if (parse_doc == null)
		{
			return;
		}
		long starttime = System.currentTimeMillis();

		// Number this page exactly once per document. (Previously this ran
		// inside the link loop, so a page with no <a> elements was never
		// counted toward the page limit.)
		parse_doc.setPageNumber(parsed_pages);
		parsed_pages++;

		int url_count 	= 0;
		Elements links 	= parse_doc.getDocument().select("a");
		for (int i = 0; i < links.size(); i++)
		{
			Element element = links.get(i);
			String abshref 	= element.attr("abs:href");
			url_count++;
			if (!haveVisitedPage(abshref))
			{
				savePageAsVisited(abshref);
				web_pages.add(abshref);
			}
		}

		// Tokenize the visible text and tally each distinct word, keyed by
		// the lower-cased word's hashCode (matches the keyword map keying).
		int word_count 							= 0;
		String html_elements 					= parse_doc.getDocument().text();
		StringTokenizer tokenizer 				= new StringTokenizer(html_elements, " ,./\'\\=!@#$%^&*()\":;");
		HashMap<Integer, Integer> found_words 	= new HashMap<Integer, Integer>();

		while (tokenizer.hasMoreTokens())
		{
			int word_key 	= tokenizer.nextToken().trim().toLowerCase().hashCode();
			Integer seen 	= found_words.get(word_key);
			found_words.put(word_key, seen == null ? 1 : seen.intValue() + 1);
			word_count++;
		}
		parse_doc.setHashMap(found_words);

		// Accumulate this page's hit counts for the tracked keywords.
		for (int i = 0; i < my_keyword_array.size(); i++)
		{
			int keyword_key = my_keyword_array.get(i).hashCode();
			Integer hits 	= found_words.get(keyword_key);
			if (hits != null)
			{
				my_keyword_map.put(keyword_key, my_keyword_map.get(keyword_key) + hits);
			}
		}

		long endtime 		= System.currentTimeMillis();
		total_parse_time 	+= endtime - starttime;
		parse_doc.setRunningTime(System.currentTimeMillis() - my_start_time);
		parse_doc.setAverageParseTime(total_parse_time);
		parse_doc.setKeywordArray(my_keyword_array);
		parse_doc.setKeywordMap(my_keyword_map);

		total_urls_found 	+= url_count;
		parse_doc.setTotalReadURLCount(total_urls_found);

		total_words_parsed 	+= word_count;
		parse_doc.setTotalWordsAtReadTime(total_words_parsed);

		web_hash_arrays.add(parse_doc);
	}

	/** No-op hook retained for interface compatibility; invoked on fetch errors. */
	public void startParsing(){
	}

	/**
	 * Records a URL as visited. Null URLs are ignored (hashCode would throw).
	 *
	 * @param the_page_url URL to record; keyed by its hashCode.
	 */
	public void savePageAsVisited(String the_page_url)
	{
		if (the_page_url != null)
		{
			visited_pages.put(the_page_url.hashCode(), 1);
		}
	}

	/**
	 * @param the_url URL to test; null is treated as not visited.
	 * @return true when the URL's hashCode has been recorded as visited.
	 */
	public boolean haveVisitedPage(String the_url)
	{
		return the_url != null && visited_pages.containsKey(the_url.hashCode());
	}
}
