package edu.hawaii.webspider;

import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.ConsoleHandler;

import com.meterware.httpunit.GetMethodWebRequest;
import com.meterware.httpunit.WebConversation;
import com.meterware.httpunit.WebLink;
import com.meterware.httpunit.WebRequest;
import com.meterware.httpunit.WebResponse;

/**
 * A demonstration class illustrating how to retrieve and process web pages using HttpUnit.
 *
 * @author Philip Johnson
 * @author Lisa Chen
 */
public class WebSpider {

  /** Contains the starting url for the crawl. */
  private final String startUrl;
  /** User-specified max number of pages to crawl; 0 means no limit was requested. */
  private final int maxNumCrawlPages;

  /** Command line argument to retrieve all links. */
  private static final String TOTAL_LINKS = "-totallinks";
  /** Command line argument to retrieve most popular link. */
  private static final String MOST_POPULAR = "-mostpopular";
  /** Command line argument to set logging mode. */
  private static final String LOGGING = "-logging";

  /** Status flag to set logging mode. */
  private boolean logMode = false;
  /** Status flag to set count total links mode. */
  private boolean totalLinksMode = false;
  /** Status flag to set find most popular link mode. */
  private boolean mostPopularMode = false;

  /**
   * Links discovered so far, in discovery order; doubles as the crawl frontier
   * (index {@code pageNumber} is the next page to fetch).
   */
  private final List<String> countedLinks;
  /** Maps a link to the number of repeat sightings across crawled pages. */
  private final Map<String, Integer> popularLinks;
  /** Running count of distinct links found over all pages crawled so far. */
  private int numLinksInNPages;
  /** Index into countedLinks of the page currently being crawled. */
  private int pageNumber;

  /** Logger for the web spider. */
  private final Logger webSpiderLogger = Logger.getLogger("edu.hawaii.webspider");
  /** Console handler for logging output. */
  private final ConsoleHandler webSpiderHandler;

  /**
   * A WebSpider crawls the web and returns information.
   *
   * <p>JavaScript processing is disabled to avoid script-related exceptions during the crawl
   * (approach suggested by Kevin English).
   *
   * @param startUrl Url for the crawl.
   * @param maxNumCrawlPages Maximum number of pages to crawl; 0 means no limit was requested.
   */
  public WebSpider(String startUrl, int maxNumCrawlPages) {
    // These 3 options cut back on JavaScript exceptions thrown while fetching pages.
    com.meterware.httpunit.HttpUnitOptions.setExceptionsThrownOnScriptError(false);
    com.meterware.httpunit.HttpUnitOptions.setExceptionsThrownOnErrorStatus(false);
    com.meterware.httpunit.HttpUnitOptions.setScriptingEnabled(false);

    this.startUrl = startUrl;
    this.maxNumCrawlPages = maxNumCrawlPages;
    this.countedLinks = new ArrayList<String>();
    // The start url is the first entry of the crawl frontier.
    this.countedLinks.add(this.startUrl);
    this.popularLinks = new HashMap<String, Integer>();
    this.numLinksInNPages = 0;
    this.pageNumber = 0;
    // Set up the logger with the project formatter and keep output off the root handlers.
    this.webSpiderHandler = new ConsoleHandler();
    this.webSpiderHandler.setFormatter(new WebSpiderFormatter());
    this.webSpiderLogger.addHandler(webSpiderHandler);
    this.webSpiderLogger.setUseParentHandlers(false);
  }

  /**
   * A WebSpider crawls the web and returns info, with no page limit configured.
   *
   * @param startUrl Url for the crawl.
   */
  public WebSpider(String startUrl) {
    // Delegate to the main constructor so the setup logic lives in one place.
    this(startUrl, 0);
  }

  /**
   * Retrieves a page specified by the command line and counts the number of links on it.
   *
   * <p>Expected invocations: {@code <mode> <url> <maxPages> [-logging]} (e.g.
   * {@code -totallinks http://www.httpunit.org 100 -logging}) or a single {@code <url>} argument
   * to count the links on that page alone.
   *
   * @param args Command line request and parameters to run WebSpider.
   * @throws Exception If problems occur.
   */
  public static void main(String[] args) throws Exception {

    // Check the length BEFORE indexing args[1]/args[2]; otherwise a short command line
    // throws ArrayIndexOutOfBoundsException instead of reaching the branches below.
    if (args.length >= 3 && args[1].contains("http://") && args[2].matches("[0-9]+")) {
      // args[1] is the url to spider, args[2] is the max number of pages to crawl.
      WebSpider exampleSpider = new WebSpider(args[1], Integer.parseInt(args[2]));
      // Parse the remaining arguments to pick the crawl mode.
      exampleSpider.parseCommandLine(args);
      exampleSpider.runSpider(exampleSpider);
    }
    else if (args.length == 1) {
      // A lone argument is the url; just count the links on that page.
      WebSpider exampleSpider = new WebSpider(args[0]);
      exampleSpider.getNumLinks();
    }
    else {
      System.out.println("Please use the following pattern to start WebSpider:"
          + "java -jar webspider-lisachen.jar -totallinks http://www.httpunit.org 100 -logging");
    }

  }

  /**
   * Runs the crawl selected by the parsed command line flags.
   *
   * @param webSpider object holding url to parse and max number of web pages to crawl.
   * @throws Exception If problems occur.
   * @return String converted results of -totallinks or -mostpopular mode web crawl, or null if
   *         no crawl mode was selected or the crawl failed.
   */
  public String runSpider(WebSpider webSpider) throws Exception {
    String spiderResult = null;

    if (mostPopularMode) {
      try {
        // The crawl must run first to populate the popularity counts.
        numLinksInNPages = webSpider.getNumLinks(this.maxNumCrawlPages);
        spiderResult = webSpider.getMostPopularPage();
      }
      catch (RuntimeException e) {
        System.out.println("Unable to run getMostPopularPage." + e);
        spiderResult = null;
      }
      if (logMode) {
        this.writeToLog("The most popular link at " + webSpider.startUrl + " is " + spiderResult
            + " in " + this.maxNumCrawlPages + " pages crawled.");
      }
    }
    else if (totalLinksMode) {
      try {
        // Recursive crawl counting distinct links over at most maxNumCrawlPages pages.
        spiderResult = Integer.toString(webSpider.getNumLinks(this.maxNumCrawlPages));
      }
      catch (RuntimeException e) {
        System.out.println("Unable to run getNumLinks." + e);
        spiderResult = null;
      }
      if (logMode) {
        this.writeToLog("The " + webSpider.startUrl + " page crawl produced " + spiderResult
            + " links over " + this.maxNumCrawlPages + " pages.");
      }
    }
    else if (logMode) {
      this.writeToLog("User enabled logging.");
    }

    return spiderResult;
  }

  /**
   * Parse the command line arguments from main method, setting the mode flags.
   *
   * @param args Command line args from main.
   * @throws Exception if problems occur.
   * @return true if command line parse is successful, false if args is null.
   */
  public boolean parseCommandLine(String[] args) throws Exception {
    // Preserve the false-on-failure contract without a catch block that can never fire.
    if (args == null) {
      System.out.println("Unable to parse command line arguments.");
      return false;
    }
    // Each recognized flag simply flips its mode; unknown arguments are ignored.
    for (String arg : args) {
      if (MOST_POPULAR.equals(arg)) {
        mostPopularMode = true;
      }
      else if (TOTAL_LINKS.equals(arg)) {
        totalLinksMode = true;
      }
      else if (LOGGING.equals(arg)) {
        logMode = true;
      }
    }
    return true;
  }

  /**
   * Returns the number of links found at the startUrl.
   *
   * @throws Exception if problems occur retrieving the startUrl.
   * @return The number of links found after crawling the startUrl.
   */
  public int getNumLinks() throws Exception {
    // The conversation object maintains HTTP state for us.
    WebConversation wc = new WebConversation();
    WebRequest request = new GetMethodWebRequest(this.startUrl);
    WebResponse response = wc.getResponse(request);
    return response.getLinks().length;
  }

  /**
   * Returns the number of distinct links found after crawling up to numNPages pages starting
   * from the startUrl. Links already seen have their repeat count recorded for
   * {@link #getMostPopularPage()}.
   *
   * @param numNPages Max number of pages to crawl.
   * @throws Exception if problems occur retrieving the urls.
   * @return The number of distinct links found over the pages crawled.
   */
  public int getNumLinks(int numNPages) throws Exception {
    // Stop when the crawl frontier is exhausted; indexing past the end of countedLinks
    // would otherwise throw IndexOutOfBoundsException.
    if (this.pageNumber >= this.countedLinks.size()) {
      return this.numLinksInNPages;
    }
    String urlToCrawl = this.countedLinks.get(this.pageNumber);

    try {
      // The conversation object maintains HTTP state for us.
      WebConversation wc = new WebConversation();
      WebRequest request = new GetMethodWebRequest(urlToCrawl);
      WebResponse response = wc.getResponse(request);
      WebLink[] linkNames = response.getLinks();
      if (logMode) {
        this.writeToLog("Retrieving " + urlToCrawl);
        this.writeToLog("Found " + linkNames.length + " link(s).");
      }
      // Classify each link on the page: repeats bump a popularity count, new links
      // join the frontier to be crawled later.
      for (WebLink link : linkNames) {
        String tempLinkName = link.getRequest().getURL().toString();
        if (this.countedLinks.contains(tempLinkName)) {
          Integer seenCount = this.popularLinks.get(tempLinkName);
          this.popularLinks.put(tempLinkName, (seenCount == null) ? 1 : seenCount + 1);
        }
        else {
          this.countedLinks.add(tempLinkName);
        }
      }
      // The distinct-link total is simply the frontier size so far.
      this.numLinksInNPages = this.countedLinks.size();
    }
    catch (Exception ex) {
      // Reached a bad link; log the error and move on to the next link.
      if (logMode) {
        this.writeToLog("Unable to retrieve " + urlToCrawl);
      }
    }

    // Advance to the next frontier page.
    this.pageNumber++;
    // Recurse while pages remain in the budget. The first page consumed one unit,
    // so recursing while numNPages > 1 crawls exactly numNPages pages (the original
    // "> 0" condition crawled one page too many).
    if (numNPages > 1) {
      this.getNumLinks(numNPages - 1);
    }

    return this.numLinksInNPages;
  }

  /**
   * Returns the most popular link after crawling N pages from the startUrl.
   *
   * @return The most popular link, or null if no repeated links were recorded.
   */
  public String getMostPopularPage() {
    String mostPopularLink = null;
    int currMaxCount = Integer.MIN_VALUE;
    // entrySet iteration avoids a second map lookup per key.
    for (Map.Entry<String, Integer> entry : this.popularLinks.entrySet()) {
      if (entry.getValue() > currMaxCount) {
        mostPopularLink = entry.getKey();
        currMaxCount = entry.getValue();
      }
    }
    return mostPopularLink;
  }

  /**
   * Writes the message to the spider's log at INFO level.
   *
   * @param message String message to be logged.
   * @return true if the message was handed to the logger.
   * @see <a href="http://java.sun.com/j2se/1.5.0/docs/guide/logging/overview.html">logging</a>
   */
  public boolean writeToLog(String message) {
    webSpiderLogger.log(Level.INFO, message);
    return true;
  }

}
