package edu.hawaii.webspider;

import com.meterware.httpunit.GetMethodWebRequest;
import com.meterware.httpunit.WebConversation;
import com.meterware.httpunit.WebRequest;
import com.meterware.httpunit.WebResponse;
import java.io.IOException;
import java.util.Objects;
import org.xml.sax.SAXException;

/**
 * A demonstration class illustrating how to retrieve and process web pages using HttpUnit.
 * For SVN and Assignment 14 (CM Practices) purposes, I have reverted this webspider version 
 * back to where only TASK 0 has been completed.  This allows this version to fully pass the 
 * unit tests and also the verify test, which is required for Assignment 14.  
 * 
 * @author Philip Johnson
 */
public class WebSpiderExample {

  /** Contains the starting url for the crawl. */
  private final String startUrl;

  /**
   * A WebSpider crawls the web and returns info.
   * 
   * @param startUrl The home url for the crawl. Must not be null.
   * @throws NullPointerException If startUrl is null.
   */
  public WebSpiderExample(String startUrl) {
    // Fail fast here rather than with an obscure error inside HttpUnit later.
    this.startUrl = Objects.requireNonNull(startUrl, "startUrl");
  }

  /**
   * Returns the number of links found at the startUrl.
   * 
   * @return The number of links on the page retrieved from startUrl.
   * @throws IOException If the startUrl cannot be retrieved (network or HTTP problems).
   * @throws SAXException If the retrieved page cannot be parsed as HTML.
   */
  public int getNumLinks() throws IOException, SAXException {
    // The conversation object maintains client-side state (e.g. cookies) across requests.
    WebConversation wc = new WebConversation();
    WebRequest request = new GetMethodWebRequest(this.startUrl);
    WebResponse response = wc.getResponse(request);
    return response.getLinks().length;
  }

  /**
   * Retrieves the httpunit home page and counts the number of links on it.
   * 
   * @param args Ignored.
   * @throws Exception If problems occur.
   */
  public static void main(String[] args) throws Exception {
    WebSpiderExample example = new WebSpiderExample("http://www.httpunit.org");
    System.out.println("The HttpUnit home page contains " + example.getNumLinks() + " links.");
  }
}
