/**
 * 
 */
package webCrawler;

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * @author mingfan
 *
 */
/**
 * A simple multi-threaded web crawler. Crawling starts from a single seed URL;
 * each discovered link is handed to a {@link LinkFinder} task running on a
 * fixed-size thread pool, which reports back through the {@link LinkHandler}
 * callbacks implemented here.
 *
 * <p>NOTE(review): {@code threadPool} is never shut down, so the JVM will not
 * exit on its own once crawling finishes — presumably intentional for a
 * long-running crawl, but confirm and add a shutdown path if not.
 *
 * @author mingfan
 */
public class WebCrawler6 implements LinkHandler {

	/** Links already crawled; wrapped in a synchronized view for access from pool threads. */
	private final Collection<String> visitedLinks = Collections.synchronizedSet(new HashSet<String>());
	/** The seed URL crawling starts from. */
	private final String url;
	/** Worker pool executing one {@code LinkFinder} task per queued link. */
	private final ExecutorService threadPool;

	/**
	 * Creates a crawler that starts at the given URL with a fixed-size worker pool.
	 *
	 * @param startingURL    the seed URL to crawl from; must not be {@code null}
	 * @param threadPoolSize number of worker threads; must be positive
	 * @throws NullPointerException     if {@code startingURL} is {@code null}
	 * @throws IllegalArgumentException if {@code threadPoolSize} is not positive
	 */
	public WebCrawler6 (final String startingURL, final int threadPoolSize) {
		url = Objects.requireNonNull(startingURL, "startingURL");
		if (threadPoolSize <= 0) {
			throw new IllegalArgumentException("threadPoolSize must be positive: " + threadPoolSize);
		}
		threadPool = Executors.newFixedThreadPool(threadPoolSize);
	}

	/**
	 * Schedules {@code link} to be crawled asynchronously on the worker pool.
	 *
	 * @param link the URL to crawl
	 */
	@Override
	public void queueLink(String link) throws Exception {
		startCrawlingOneLink(link);
	}

	/** @return the number of links crawled so far */
	@Override
	public int getNumberOfVisitedLinks() {
		return visitedLinks.size();
	}

	/**
	 * Reports whether {@code link} has already been crawled.
	 *
	 * <p>NOTE(review): a separate isVisited/addVisitedLink pair is not atomic —
	 * two workers can both observe "not visited" and crawl the same link twice.
	 * The visited set stays consistent, but work is duplicated; callers could
	 * instead rely on the boolean result of {@code Set.add}. Confirm against
	 * {@code LinkFinder}'s usage before changing.
	 *
	 * @param link the URL to check
	 * @return {@code true} if the link was already recorded as visited
	 */
	@Override
	public boolean isVisited(String link) {
		return visitedLinks.contains(link);
	}

	/**
	 * Records {@code link} as crawled.
	 *
	 * @param link the URL to record
	 */
	@Override
	public void addVisitedLink(String link) {
		visitedLinks.add(link);
	}

	/** Submits a {@code LinkFinder} task for {@code url}; returns immediately. */
	private void startCrawlingOneLink(final String url) {
		threadPool.execute(new LinkFinder(url, this));
	}

	/** Kicks off the crawl from the seed URL supplied at construction. */
	private void startCrawling() {
		startCrawlingOneLink(this.url);
	}

	public static void main(String[] args) {
		new WebCrawler6("http://www.montrealgazette.com/index.html", 80).startCrawling();
	}
}
