import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Queue;
import java.util.Set;
import java.util.StringTokenizer;

import jsoup.Jsoup;
import jsoup.nodes.Document;

/**
 * Page retriever: a {@link Runnable} worker that claims one URL from the
 * shared URL queue, checks it against the host's robots.txt rules, downloads
 * the page with Jsoup, and places the resulting {@link WebPage} on the shared
 * buffer queue for later parsing.
 *
 * NOTE(review): the class name should be {@code PageRetriever} per Java
 * naming conventions; kept as-is so existing callers still compile.
 *
 * @version 1
 */
public class pageRetriever implements Runnable {

	/** URLs still waiting to be fetched; one is claimed in the constructor. */
	private final Queue<String> urls;
	/** Destination queue for successfully downloaded pages. */
	private final Queue<WebPage> pages;
	/** The single URL this task will fetch, or null if the queue was empty. */
	private String targetURL;
	/** Shared record of URL paths rejected by robots.txt rules. */
	Set<String> disallowed;

	/**
	 * Constructor for the pageRetriever object. Claims (removes) one URL from
	 * {@code urlQueue} as this worker's target; if the queue is empty the
	 * worker becomes a no-op.
	 *
	 * @param bufferQueue  shared queue that downloaded pages are offered to
	 * @param urlQueue     shared queue of URLs awaiting retrieval
	 * @param disallowList shared set collecting robots-disallowed URL paths
	 */
	public pageRetriever(Queue<WebPage> bufferQueue, Queue<String> urlQueue,
			Set<String> disallowList) {
		pages = bufferQueue;
		urls = urlQueue;
		disallowed = disallowList;
		if (!urls.isEmpty()) {
			targetURL = urls.remove();
		}
	}

	/**
	 * Fetches the claimed URL (if any) and, when permitted by robots.txt,
	 * offers the parsed page to the buffer queue. Fetch failures are reported
	 * to stderr rather than silently swallowed.
	 */
	@Override
	public void run() {
		// Nothing claimed from the queue, or robots.txt forbids this URL.
		if (targetURL == null || !robotSafe(targetURL)) {
			return;
		}
		try {
			Document doc = Jsoup.connect(targetURL).get();
			pages.offer(new WebPage(targetURL, doc));
		} catch (IOException ioe) {
			// Jsoup signals both malformed URLs and unreachable pages via
			// IOException (MalformedURLException is a subclass), so one
			// catch covers both of the original's cases.
			System.err.println("pageRetriever: failed to fetch " + targetURL
					+ ": " + ioe);
		}
	}

	/**
	 * Checks the target host's robots.txt and decides whether {@code url} may
	 * be crawled. Downloads {@code http://host/robots.txt} and scans its
	 * "Disallow:" directives; a URL whose path starts with a disallowed path
	 * is rejected and recorded in {@link #disallowed}.
	 *
	 * A missing or unreadable robots.txt means nothing is disallowed.
	 * (BUGFIX: the original never fetched the file — {@code URL.getFile()}
	 * returns the URL's path string, not the resource contents — so the
	 * Disallow scan ran over the literal text "/robots.txt" and the check
	 * was a no-op.)
	 *
	 * @param url the absolute URL to vet
	 * @return true if crawling is permitted, false otherwise
	 */
	public boolean robotSafe(String url) {
		URL pageUrl;
		try {
			pageUrl = new URL(url);
		} catch (MalformedURLException e) {
			// Unparseable URL: don't trust it. (The original swallowed this
			// and then dereferenced a null URL — guaranteed NPE.)
			return false;
		}

		URL robotsUrl;
		try {
			robotsUrl = new URL("http://" + pageUrl.getHost() + "/robots.txt");
		} catch (MalformedURLException e) {
			// Something weird is happening, so don't trust it.
			return false;
		}

		// Actually download robots.txt. If the host has none (or it can't be
		// read), nothing is disallowed.
		StringBuilder body = new StringBuilder();
		try (BufferedReader in = new BufferedReader(new InputStreamReader(
				robotsUrl.openStream(), StandardCharsets.UTF_8))) {
			String line;
			while ((line = in.readLine()) != null) {
				body.append(line).append('\n');
			}
		} catch (IOException e) {
			return true;
		}

		String file = body.toString();
		// An empty robots.txt disallows nothing.
		if (file.isEmpty()) {
			return true;
		}

		// Assume this robots.txt applies to us and scan every
		// "Disallow:" directive for a path that prefixes our URL.
		String strURL = pageUrl.getFile();
		int index = 0;
		while ((index = file.indexOf("Disallow:", index)) != -1) {
			index += "Disallow:".length();
			StringTokenizer st = new StringTokenizer(file.substring(index));

			// "Disallow:" with no path disallows nothing; stop scanning.
			if (!st.hasMoreTokens()) {
				break;
			}

			String strBadPath = st.nextToken();

			// Robots Exclusion Protocol is prefix matching: the URL path must
			// START with the disallowed path. (The original used contains(),
			// contradicting its own comment and over-rejecting URLs.)
			if (strURL.startsWith(strBadPath)) {
				disallowed.add(strURL);
				return false;
			}
		}

		return true;
	}

}
