package crawler;

import java.net.URL;
import java.time.Instant;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Date;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class URLFrontier {
	// Timestamp of the visit for each URL that has already been crawled.
	// NOTE(review): URL#equals/hashCode may trigger blocking DNS resolution;
	// consider keying by URI or URL#toExternalForm instead — TODO confirm.
	private final Map<URL, Instant> visitedURLs = new HashMap<>();

	// FIFO queue of URLs still to be crawled.
	private final Deque<URL> queue = new ArrayDeque<>();

	// Mirror of the queue's current contents so duplicate checks are O(1)
	// (Deque#contains would be O(n) per offer).
	private final Set<URL> queued = new HashSet<>();

	/**
	 * Creates a frontier seeded with the given URLs.
	 * Duplicates in the seed list are enqueued only once.
	 *
	 * @param initialURLs the seed URLs to enqueue
	 */
	public URLFrontier(ArrayList<URL> initialURLs) {
		for (URL u : initialURLs) {
			offerQueue(u);
		}
	}

	/**
	 * Enqueues a single URL unless it is already waiting in the queue or has
	 * already been visited.
	 *
	 * @param url the URL to enqueue
	 */
	private void offerQueue(URL url) {
		// TODO: Now we never visit the same page again. Improve to take the
		// date into account
		if (!queued.contains(url) && !visitedURLs.containsKey(url)) {
			System.out.println("Adding URL to frontier: " + url);
			queue.offer(url);
			queued.add(url);
		}
	}

	/**
	 * Removes and returns the URL at the head of the frontier.
	 *
	 * @return the head URL, or {@code null} if the frontier is empty
	 */
	public URL pollQueue() {
		URL head = queue.poll();
		if (head != null) {
			// Keep the dedup mirror in sync with the queue so a polled (but
			// not yet visited) URL may be re-offered, as in the original.
			queued.remove(head);
		}
		return head;
	}

	/**
	 * @return {@code true} if no URLs are waiting to be crawled
	 */
	public boolean isEmpty() {
		return queue.isEmpty();
	}

	/**
	 * Records that the given URL has been crawled, timestamped with "now".
	 * Visited URLs are never enqueued again.
	 *
	 * @param visitedURL the URL that was just crawled
	 */
	public void setVisited(URL visitedURL) {
		visitedURLs.put(visitedURL, Instant.now());
	}

	/**
	 * Enqueues every URL in the list, skipping duplicates and already-visited
	 * URLs.
	 *
	 * @param linkList the URLs to add to the frontier
	 */
	public void offerQueue(ArrayList<URL> linkList) {
		System.out.println("Adding " + linkList.size() + " URLs to the frontier");
		for (URL url : linkList) {
			offerQueue(url);
		}
	}
}