/*
 * TCSS 422 Web Spider Project
 * Group Money: Al McKenzie, Michael Pitts, Taylor Zielske
 */
package model.domain;

import model.URLInputCooker;

/**
 * A black-list domain denies certain URLs from being searched or spidered. The
 * denied URLs and directories are specified at creation time.
 * 
 * @author Michael Pitts
 * @version Oct 19, 2011
 */
public class BlackListDomain extends AbstractListDomain {

	/**
	 * Constructs a black-list domain over the given domain string.
	 * 
	 * @param the_domain the string representation of the domain.
	 * @param the_cooker the URL cooker used to normalize URLs.
	 * @param the_directories the directories that must not be searched.
	 * @param the_urls the URLs that must not be searched.
	 */
	public BlackListDomain(final String the_domain, final URLInputCooker the_cooker,
			final String[] the_directories, final String[] the_urls) {
		super(the_domain, the_cooker, the_directories, the_urls);
	}

	/**
	 * {@inheritDoc}
	 * 
	 * A URL appearing on the black list is silently rejected; all others are
	 * delegated to the superclass for addition.
	 */
	@Override public void addURL(final String the_url) {
		// Guard clause: black-listed URLs are dropped without being added.
		if (listedURL(the_url)) {
			return;
		}
		super.addURL(the_url);
	}
}
