package nz.ac.massey.spidernetpn.webcrawler;

import java.io.BufferedReader;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.Authenticator;
import java.net.MalformedURLException;

import java.net.Socket;
import java.net.SocketAddress;
import java.net.PasswordAuthentication;

import java.net.URL;
import java.util.Queue;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import org.apache.commons.validator.UrlValidator;


/**
 * A prototype (dummy) web crawler: starting from a user-supplied URL, it
 * follows {@code http://} links found in page source until more than
 * {@link #MAX_URLS} pages have been visited, then prints every visited URL
 * and exits.
 *
 * @author Rosanna Fong - email.to.rose@gmail.com
 */
public class PrototypeWebCrawler {
	/** Stop crawling once more than this many URLs have been visited. */
	private static final int MAX_URLS = 50;
	private static final UrlValidator VALIDATOR = new UrlValidator();
	/** URLs already visited, kept sorted for the final printout. */
	private static final Set<String> URLS = new TreeSet<String>();
	/** URLs discovered but not yet processed. */
	private static final Queue<String> WAITING_URLS_QUEUE = new ConcurrentLinkedQueue<String>();
	/**
	 * Patterns for URLs that must never be crawled.
	 * BUGFIX: {@code Pattern} does not implement {@code Comparable}, so the
	 * original {@code TreeSet} threw {@code ClassCastException} on the very
	 * first {@code add()}; a {@code HashSet} has no ordering requirement.
	 */
	private static final Set<Pattern> BLACKLIST = new HashSet<Pattern>();
	/**
	 * Matches http links in page source (host part only; no path support).
	 * BUGFIX: the character class previously read {@code 1-9} and silently
	 * truncated any host containing the digit 0; it now reads {@code 0-9}.
	 */
	private static final Pattern HTTP_PATTERN = Pattern.compile("http://[a-zA-Z0-9&$_\\.\\-]+");

	/**
	 * Entry point: configures the proxy, reads a start URL from stdin, then
	 * drains the work queue, crawling each URL in turn.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		// Configure the HTTP proxy used by URL.openConnection().
		// NOTE(review): "http.proxySet" is not a real JVM property (only
		// proxyHost/proxyPort matter) but is kept for behavioral parity.
		System.getProperties().setProperty("http.proxySet", "true");
		System.setProperty("http.proxyHost", "tur-cache2.massey.ac.nz");
		System.setProperty("http.proxyPort", "8080");

		// SECURITY: hard-coded proxy credentials in source. Move the user name
		// and password to environment variables or a config file before this
		// code is shared or committed anywhere public.
		Authenticator.setDefault(new Authenticator() {
			protected PasswordAuthentication getPasswordAuthentication() {
				return new PasswordAuthentication("SEAT\\06147763", "9988".toCharArray());
			}
		});

		// Setup the blacklist: never crawl anything mentioning w3c.
		BLACKLIST.add(Pattern.compile(".*w3c.*"));

		// Data to be entered by the user.
		BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));

		try {
			// Prompt until the user supplies a syntactically valid URL.
			boolean valid = false;
			String start = null;
			while (!valid) {
				System.out.print("Please enter your url: ");
				start = reader.readLine();
				if (start == null) {
					// BUGFIX: EOF on stdin used to spin this loop forever.
					System.out.println("No input available; exiting.");
					return;
				}
				valid = VALIDATOR.isValid(start);
			}

			// You'll at least have the url you entered.
			WAITING_URLS_QUEUE.add(start);
			System.out.println("Starting crawl.");

			// processUrl() feeds newly discovered links back into the queue.
			while (!WAITING_URLS_QUEUE.isEmpty()) {
				processUrl(WAITING_URLS_QUEUE.remove());
			}

		} catch (IOException e) {
			System.out.println("An unexpected error occured.");
			e.printStackTrace();
		}
	}

	/**
	 * Fetches {@code url}, scans each line of the response for http links, and
	 * enqueues every non-blacklisted link on {@link #WAITING_URLS_QUEUE}.
	 * Invalid or already-visited URLs are silently ignored. Once more than
	 * {@link #MAX_URLS} URLs have been visited, prints them all and terminates
	 * the JVM. Network errors are logged and swallowed so the crawl continues.
	 *
	 * @param url the page to fetch
	 */
	public static void processUrl(String url) {
		if (!VALIDATOR.isValid(url))
			return;
		if (URLS.contains(url))
			return;

		URLS.add(url);

		// BUGFIX: try-with-resources closes the reader (and with it the
		// underlying stream); the original leaked the BufferedReader and only
		// closed the raw InputStream by hand.
		try (BufferedReader pageReader = new BufferedReader(
				new InputStreamReader(new URL(url).openConnection().getInputStream()))) {
			System.out.println("URL stream: " + url + " opened.");

			String line;
			while ((line = pageReader.readLine()) != null) {
				// Extract candidate links with the precompiled pattern.
				final Matcher matcher = HTTP_PATTERN.matcher(line);

				while (matcher.find()) {
					String anotherUrl = matcher.group();

					// BUGFIX: the original blacklist loop's 'continue' only
					// advanced the blacklist iteration, so blacklisted URLs
					// were enqueued anyway; now they are genuinely skipped.
					if (isBlacklisted(anotherUrl))
						continue;

					WAITING_URLS_QUEUE.add(anotherUrl);

					// If max is reached, then print everything and quit.
					if (MAX_URLS < URLS.size()) {
						for (String s : URLS)
							System.out.println(s);

						// BUGFIX: exit with 0 - hitting the crawl limit is the
						// normal, successful end of the run, not an error.
						System.exit(0);
					}
				}
			}
		} catch (IOException e) {
			// MalformedURLException is an IOException, so one catch covers both.
			e.printStackTrace();
		}
	}

	/** Returns true if {@code url} matches any pattern in {@link #BLACKLIST}. */
	private static boolean isBlacklisted(String url) {
		for (Pattern badUrl : BLACKLIST)
			if (badUrl.matcher(url).find())
				return true;
		return false;
	}

}
