/*
 * TCSS 422 Web Spider Project
 * Group Money: Al McKenzie, Michael Pitts, Taylor Zielske
 */
package model;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import model.domain.Domain;
import model.domain.PlaceHolderDomain;

/**
 * This class stores urls in their respective domains, providing a place for new urls
 * to be spidered.  It checks if the urls have been searched already and also only 
 * allows the urls okayed by the domain's robots.txt rules.
 * 
 * @author Michael Pitts
 * @version Oct 20, 2011
 */
public class URLStore {
	
	/**
	 * The URLInputCooker that search-able urls will be passed to.
	 */
	private final URLInputCooker my_cooker;
	
	/**
	 * Maps a host name to its Domain. The map itself is synchronized, but
	 * compound check-then-act sequences still need an explicit lock on it
	 * (see the Collections.synchronizedMap contract).
	 */
	private final Map<String, Domain> my_domains;
	
	/**
	 * Creates a new URLStore which uses the designated url cooker.
	 * @param the_cooker is the cooker that will be used to get data from urls.
	 */
	public URLStore(final URLInputCooker the_cooker) {
		my_cooker = the_cooker;
		my_domains = Collections.synchronizedMap(new HashMap<String, Domain>());
	}
	
	/**
	 * Adds a new url to the url store. If the url's host has not been seen
	 * before, a PlaceHolderDomain is registered for it so that its urls are
	 * buffered (not parsed) until the robots.txt data has come back.
	 * @param the_url is the new url to add.
	 */
	public void addURL(final URL the_url) {
		// Lock the map around the whole get/put sequence: synchronizedMap only
		// makes individual calls atomic, so without this two threads could both
		// see a missing host and register duplicate place holders, losing the
		// urls buffered in the one that gets overwritten.
		synchronized (my_domains) {
			final Domain domain = my_domains.get(the_url.getHost());
			if (domain != null) {
				domain.addURL(the_url.toString());
			} else { // domain is not listed yet, add it.
				try {
					final Domain place_holder =
						new PlaceHolderDomain(the_url.getHost(), my_cooker);
					// Buffer this url in the place holder until the host's
					// robots.txt data has come back.
					place_holder.addURL(the_url.toString());
					my_domains.put(place_holder.getDomain(), place_holder);
				} catch (final MalformedURLException ignored) {
					// Deliberate best-effort: the host produced a mal-formed
					// url, so this url is silently dropped.
				}
			}
		}
	}
	
	/**
	 * Replaces a place holder domain with a new domain, based on the results
	 * of the domain's robots.txt rules. Any urls buffered in the old place
	 * holder are transferred to the new domain.
	 * @param the_new_domain is the new domain to replace the old.
	 */
	public void replaceDomain(final Domain the_new_domain) {
		final Domain old_domain;
		synchronized (my_domains) {
			// put() both installs the new domain and returns the previous
			// mapping for this host. (The former remove(domain-object) call
			// passed a Domain into a String-keyed map and was always a no-op.)
			old_domain = my_domains.put(the_new_domain.getDomain(), the_new_domain);
		}
		// The old mapping may not have existed, and may already be a real
		// Domain if this method is called twice for the same host; only a
		// place holder has buffered urls to transfer. The instanceof guard
		// avoids the former unconditional cast, which could throw
		// ClassCastException in that second-call case.
		if (old_domain instanceof PlaceHolderDomain) {
			for (final String url : ((PlaceHolderDomain) old_domain).getPotential()) {
				the_new_domain.addURL(url);
			}
		}
	}

}
