package com.barkerton.crawler;

import java.net.URL;
import java.util.Locale;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.barkerton.crawler.util.PropertyManager;

/**
 * Crawler seed (url) to be crawled.
 * 
 * @author c.barker
 *
 */
public class Seed {

	private Log log = LogFactory.getLog(Seed.class);
	
	private PropertyManager props;
	private URL url;				// URL to crawl
	private int crawlDepth;			// Link depth
	private boolean siteOnly;		// Limit crawl to site
	
	/** Hidden: a Seed must always be created with a URL. */
	private Seed() { }
	
	/**
	 * Creates a seed for the given URL, pulling the crawl depth and
	 * site-only restriction from the application properties
	 * ({@code crawl.depth}, default 0; {@code crawl.site.only}, default false).
	 * 
	 * @param url the URL to crawl
	 */
	public Seed(URL url) {
		this.props = PropertyManager.getInstance();
		this.url = url;
		// parseInt/parseBoolean yield primitives directly; valueOf boxes needlessly
		this.crawlDepth = Integer.parseInt( props.getValue("crawl.depth", "0") );
		this.siteOnly = Boolean.parseBoolean( props.getValue("crawl.site.only", "false") );
	}
	
	/**
	 * Creates a seed with explicit settings, bypassing the property file.
	 * 
	 * @param url the URL to crawl
	 * @param depth link depth to crawl to (0 = unlimited)
	 * @param site whether to restrict the crawl to this site
	 */
	public Seed(URL url, int depth, boolean site) {
		this.url = url;
		this.crawlDepth = depth;
		this.siteOnly = site;
	}
	
	public URL getUrl() {
		return url;
	}

	public void setUrl(URL url) {
		this.url = url;
	}

	public int getCrawlDepth() {
		return crawlDepth;
	}

	/**
	 * Sets the depth aka number of URLs to crawl for a given site.
	 * If 0 then will crawl all of URLs found within a site.
	 * @param crawlDepth link depth to crawl to
	 */
	public void setCrawlDepth(int crawlDepth) {
		this.crawlDepth = crawlDepth;
	}
	
	public boolean isSiteOnly() {
		return siteOnly;
	}

	/**
	 * Denotes whether or not crawler should spider out past
	 * a site or not (expand outside of site).
	 * @param siteOnly true to restrict the crawl to the seed's site
	 */
	public void setSiteOnly(boolean siteOnly) {
		this.siteOnly = siteOnly;
	}
	
	/**
	 * Two seeds are equal when their URLs match case-insensitively (compared
	 * as strings — {@link URL#equals(Object)} is deliberately avoided because
	 * it performs blocking DNS resolution) and their crawl depth and
	 * site-only settings are identical.
	 * <p>
	 * Overrides {@code Object.equals(Object)} (the previous version only
	 * overloaded it, so collections ignored it) and fixes a bug where two
	 * seeds with {@code siteOnly == false} could never compare equal.
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (!(obj instanceof Seed))
			return false;
		Seed s = (Seed) obj;
		String mine = (this.url == null) ? null : this.url.toString();
		String theirs = (s.url == null) ? null : s.url.toString();
		boolean sameUrl = (mine == null) ? (theirs == null) : mine.equalsIgnoreCase(theirs);
		return sameUrl
				&& this.siteOnly == s.siteOnly
				&& this.crawlDepth == s.crawlDepth;
	}
	
	/**
	 * Consistent with {@link #equals(Object)}: hashes the lower-cased URL
	 * string (Locale.ROOT keeps the folding locale-independent) together
	 * with the crawl depth and site-only flag.
	 */
	@Override
	public int hashCode() {
		int result = (url == null) ? 0 : url.toString().toLowerCase(Locale.ROOT).hashCode();
		result = 31 * result + crawlDepth;
		result = 31 * result + (siteOnly ? 1 : 0);
		return result;
	}
}
