package crawler;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;

/**
 * Simple single-threaded web crawler. Polls URLs from a frontier, fetches
 * and parses each page, hands the parsed result to the index, and enqueues
 * each outgoing link only if the link host's robots.txt permits it.
 */
public class Crawler {
	URLFrontier urlFrontier;
	Fetcher fetcher = new Fetcher();
	Parser parser = new Parser();
	// Per-host cache of robots.txt rules so each host is fetched at most once.
	ArrayList<Robot> robotList = new ArrayList<Robot>();
	Index index = new Index();

	/**
	 * Builds the crawler and immediately runs the crawl loop until the
	 * frontier is exhausted.
	 *
	 * @param initialURLs seed URLs; must be non-null and non-empty
	 * @throws Exception if no seed URL is given, or if a robots.txt URL
	 *                   cannot be formed for some link's host
	 */
	public Crawler(ArrayList<URL> initialURLs) throws Exception {
		if (initialURLs == null || initialURLs.isEmpty())
			throw new Exception("No initial URL given");

		urlFrontier = new URLFrontier(initialURLs);

		while (!urlFrontier.isEmpty()) {

			URL url = urlFrontier.pollQueue();

			String rawHTTP = fetcher.doRequestURL(url);

			// Mark visited even when the fetch failed so the URL is not
			// retried forever.
			urlFrontier.setVisited(url);
			if (rawHTTP != null) {
				ParsedStuff pars = parser.parse(rawHTTP, url);
				index.newDocumentToProcess(pars);
				// FIX: previously a robots.txt check on only the FIRST
				// outgoing link decided whether ALL links were enqueued.
				// Each link is now filtered individually.
				urlFrontier.offerQueue(allowedLinks(pars.getLinkList()));
			}

		}
		System.out.println("Crawler stopping. Nothing left in URLFrontier");
	}

	/**
	 * Returns the subset of {@code links} that the robots.txt of each
	 * link's host allows to be crawled, preserving order.
	 *
	 * @param links outgoing links of a parsed page (assumed non-null, as in
	 *              the original code — TODO confirm ParsedStuff's contract)
	 * @return the allowed links, possibly empty
	 * @throws MalformedURLException if a robots.txt URL cannot be built
	 */
	private ArrayList<URL> allowedLinks(ArrayList<URL> links)
			throws MalformedURLException {
		ArrayList<URL> allowed = new ArrayList<URL>();
		for (URL link : links) {
			if (robotFor(link).isAllowed(link))
				allowed.add(link);
		}
		return allowed;
	}

	/**
	 * Looks up the cached {@code Robot} for the link's host (case-insensitive
	 * host match), fetching and caching its robots.txt on a cache miss.
	 *
	 * @param link the URL whose host's rules are wanted
	 * @return the Robot for that host, never null
	 * @throws MalformedURLException if the robots.txt URL cannot be built
	 */
	private Robot robotFor(URL link) throws MalformedURLException {
		for (Robot r : robotList) {
			if (r.getHost().equalsIgnoreCase(link.getHost()))
				return r;
		}
		// Cache miss: fetch http(s)://host/robots.txt and remember the rules.
		String rules = fetcher.doRequestURL(new URL(link.getProtocol(),
				link.getHost(), "/robots.txt"));
		Robot robot = new Robot(rules, link.getHost());
		robotList.add(robot);
		return robot;
	}
}
