package com.google.code.spidertr;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;

/**
 * 
 *This class is the class which represents Crawler concept.
 */
/**
 * Represents the Crawler concept: connects to a URL, exposes the
 * connection status, and lists the links found in the page.
 */
public class Spider {

	/**
	 * Whether {@link #connect(URL)} has completed successfully.
	 */
	private boolean status;
	// The URL this crawler is connected to; null until connect() is called.
	private URL url;

	/**
	 * Creates a new crawler in the disconnected state.
	 */
	public Spider() {
		this.status = false;
	}

	/**
	 * Connects to the given URL. The stream is opened only to verify
	 * reachability and is closed immediately.
	 *
	 * @param url
	 *            {@link URL} which the crawler will be connected.
	 * @throws IOException
	 *             if the URL cannot be opened.
	 */
	public void connect(URL url) throws IOException {
		this.url = url;
		// try-with-resources closes the probe stream; the original leaked it.
		try (InputStream in = url.openStream()) {
			this.status = true;
		}
	}

	/**
	 * 
	 * @return Status of Web Crawler: true once {@link #connect(URL)} succeeded.
	 */
	public boolean getStatus() {
		return this.status;
	}

	/**
	 * Return URLs in the given URL.
	 * 
	 * @return {@link String} {@link List} of URLs; empty (never null) when the
	 *         page cannot be fetched or parsed.
	 */
	public List<String> getURLs() {
		try {
			return new LinkFinder(url.toString(), getURLContent()).findLinks();
		} catch (IOException e) {
			// MalformedURLException is an IOException, so one catch covers both.
			e.printStackTrace();
			// Empty list instead of null so callers can iterate safely.
			return Collections.emptyList();
		}
	}

	/**
	 * Returns String content of URL.
	 * 
	 * @return {@link String} content of URL, lines separated by '\n' so that
	 *         tokens split across line breaks are not fused together.
	 * @throws IOException
	 *             if reading the stream fails.
	 */
	private String getURLContent() throws IOException {
		StringBuilder builder = new StringBuilder();
		// Explicit UTF-8: pre-Java-18, InputStreamReader defaults to the
		// platform charset. try-with-resources closes the reader (the
		// original leaked it).
		try (BufferedReader bufferedReader = new BufferedReader(
				new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
			String s;
			while ((s = bufferedReader.readLine()) != null) {
				builder.append(s).append('\n');
			}
		}
		return builder.toString();
	}

}
