import java.util.*;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import javax.swing.text.html.HTMLEditorKit;

/**
 * This class acts as the indexer, taking the loaded list of URLs
 * and creating and saving the index to file. Each URL is visited and its page
 * data is retrieved, processed, and stored; once all URLs have been visited,
 * this data is used to create the index file.
 * 
 * @author Mitchell Whitehouse
 * @author Andrew Evans
 * @author Duncan Grant
 * @version     1
 * @since       1.6
 */

public class MADIndexer extends SearchEngine implements Runnable
{
	// Delay between page requests so we do not hammer remote servers
	private static final long DELAY = 200;
	// Fetched page data: URL -> list of (stop-word-filtered) words on that page
	private final HashMap<String,ArrayList<String>> dataDict;
	private final SearchGUI gui;
	
	/**
	 * Creates the indexer and loads the list of URLs to visit from
	 * INPUT_FILENAME (inherited from SearchEngine).
	 *
	 * @param gui the GUI used to report progress while indexing
	 */
	public MADIndexer(SearchGUI gui)
	{
		this.gui = gui;
		dataDict = new HashMap<String,ArrayList<String>>();
		URLs = loadFile(INPUT_FILENAME);
	}
	
	/**
	 * Runnable entry point: fetches and processes every page, then builds
	 * and saves the index.
	 */
	public void run()
	{
		getPageData();
		createIndex();
	}
	
	/**
	 * Writes the index to file: one line per word, holding the word followed
	 * by every URL containing it, separated by single spaces.
	 *
	 * @param fileName path of the output file
	 * @param index    word -> list of URLs containing that word
	 */
	public void saveIndex(String fileName, HashMap<String,ArrayList<String>> index)
	{
		// try-with-resources guarantees the writer is closed even on error
		// (the original only closed it on the success path, leaking the
		// stream whenever an exception was thrown mid-write)
		try (BufferedWriter out = new BufferedWriter(new FileWriter(fileName)))
		{
			for (Map.Entry<String,ArrayList<String>> entry : index.entrySet())
			{
				// Build the whole line first; StringBuilder avoids the
				// O(n^2) cost of repeated String concatenation
				StringBuilder sentence = new StringBuilder(entry.getKey()).append(' ');
				for (String address : entry.getValue())
				{
					sentence.append(address).append(' ');
				}
				// NOTE: the newline is written BEFORE each entry (so the file
				// starts with a blank line, exactly as the original wrote it)
				// to keep the file format the index loader expects.
				out.newLine();
				out.write(sentence.toString());
			}
		}
		catch (IOException e) // narrowed from Exception: only I/O can fail here
		{
			System.err.println("Error: " + e.getMessage());
		}
	}
	
	/**
	 * Parses and processes data from URLs and saves it in a HashMap.
	 * 
	 * For each URL the human-readable text is extracted, punctuation is
	 * stripped, runs of spaces are collapsed, and stop words are removed;
	 * the resulting word list is stored in dataDict keyed by the URL.
	 * Pages that cannot be fetched or parsed are reported and skipped.
	 * 
	 * NOTE(review): the class doc claims words are lower-cased, but the
	 * original code never did so; behaviour is kept as-is — confirm whether
	 * the searcher expects a case-sensitive index.
	 */
	public void getPageData()
	{
		for (int x = 0; x < URLs.size(); x++)
		{
			String status = "Processing (" + (x+1) + " of " + URLs.size() + "): " + URLs.get(x);
			gui.setText(status);
			System.out.println(status);
			
			URL u;
			try
			{
				u = new URL(URLs.get(x));
			}
			catch (MalformedURLException e1)
			{
				// Bad URL in the input file: report it and skip this entry.
				// (The original fell through with u == null and crashed with
				// a NullPointerException on u.openStream().)
				e1.printStackTrace();
				continue;
			}
			
			// HTML parser that collects the human-readable page text
			HTMLStripper hp = new HTMLStripper();
			// try-with-resources closes the reader (the original leaked it),
			// and a fetch/parse failure skips the page instead of handing a
			// null reader to the parser
			try (BufferedReader in = new BufferedReader(new InputStreamReader(u.openStream())))
			{
				HTMLEditorKit.Parser parse = new HTMLParse().getParser();
				parse.parse(in, hp, true);
			}
			catch (IOException e)
			{
				e.printStackTrace();
				continue;
			}
			
			String pageData = hp.getDataString();
			// Strip punctuation and other characters, leaving only digits,
			// letters and spaces
			pageData = pageData.replaceAll("[^A-Za-z0-9 ]", "");
			// Collapse runs of spaces introduced by the stripping above.
			// (The original pattern " +{2,}" put a quantifier after a
			// quantifier, which Java's regex engine rejects; " {2,}" is
			// what was intended.)
			pageData = pageData.replaceAll(" {2,}", " ");
			
			ArrayList<String> pageWords = new ArrayList<String>();
			for (String word : pageData.split(" "))
			{
				// Remove stop words: they are too ubiquitous to be useful
				if (!stopWords.contains(word))
				{
					pageWords.add(word);
				}
			}
			System.out.println(u);
			// Keyed by u.toExternalForm(); for well-formed input lines this
			// matches URLs.get(x), which createIndex() uses for lookup
			dataDict.put(u.toExternalForm(), pageWords);
			
			try
			{
				// Delay to reduce server load
				Thread.sleep(DELAY);
			}
			catch (InterruptedException e)
			{
				// Restore the interrupt flag so callers can observe it
				Thread.currentThread().interrupt();
			}
		}
	}
	
	/**
	 * Builds the inverted index (word -> URLs containing it) from the page
	 * data gathered by getPageData(), then saves it to OUTPUT_FILENAME.
	 */
	public void createIndex()
	{
		for (int x = 0; x < URLs.size(); x++)
		{
			String address = URLs.get(x);
			ArrayList<String> words = dataDict.get(address);
			if (words == null)
			{
				// Page failed to download or parse in getPageData(); skip it.
				// (The original threw a NullPointerException here.)
				continue;
			}
			for (int y = 0; y < words.size(); y++)
			{
				String word = words.get(y);
				ArrayList<String> containingSites = index.get(word);
				if (containingSites == null)
				{
					// First sighting of this word: start its URL list
					containingSites = new ArrayList<String>();
					index.put(word, containingSites);
				}
				// Record each URL at most once per word
				if (!containingSites.contains(address))
				{
					containingSites.add(address);
				}
			}
		}
		saveIndex(OUTPUT_FILENAME, index);
	}
}