package edu.hawaii.webspider;

import java.util.LinkedList;
import java.util.List;

import java.util.logging.Logger;
import java.util.logging.Level;

import com.meterware.httpunit.GetMethodWebRequest;
import com.meterware.httpunit.WebConversation;
import com.meterware.httpunit.WebLink;
import com.meterware.httpunit.WebRequest;
import com.meterware.httpunit.WebResponse;

/**
 * This program takes a URL and produces the number of links found
 * on that page. It can also find the most popular page, and logging is available.
 * The program is invoked from the command line. An example invocation that obtains the
 * number of links without logging is:
 *     ant jar
 *     java -jar webspider -totallinks http://www.hackystat.org 100
 * To find the most popular page instead, pass -mostpopular instead of -totallinks.
 * To enable logging, pass -logging as the last parameter.
 * 
 * @author Ka Yee Leung
 */
public class WebSpiderExample {

  /** Number of pages to crawl, parsed from the command line. */
  static int numOfPage = 0;
  /** Contains the starting URL for the crawl. */
  final String startUrl;
  /** Count and address of page is stored in each item. Used for finding the most popular page. */
  protected static final List<WebItem> countOfAllPages = new LinkedList<WebItem>();
  /** Stores all links accessed. */
  protected static final List<WebLink> allAccessedPages = new LinkedList<WebLink>();
  /** Stores the URLs of all pages accessed. */
  protected static final List<String> allAccessedPagesURL = new LinkedList<String>();
  /** The Logger used for logging. */
  private static final Logger logger = Logger.getLogger("edu.hawaii.webspider");

  /**
   * A WebSpider crawls the web and returns info.
   * 
   * @param startUrl The home URL for the crawl.
   */
  public WebSpiderExample(String startUrl) {
    this.startUrl = startUrl;
  }

  /**
   * Retrieves this spider's start page and counts the links found on it.
   * 
   * @return the number of links on the start page
   * @throws Exception if the page cannot be retrieved
   */
  public int getNumLinks() throws Exception {
    // Be lenient while crawling: do not fail on script errors or non-200 statuses.
    com.meterware.httpunit.HttpUnitOptions.setExceptionsThrownOnScriptError(false);
    com.meterware.httpunit.HttpUnitOptions.setExceptionsThrownOnErrorStatus(false);
    com.meterware.httpunit.HttpUnitOptions.setScriptingEnabled(false);

    // The conversation object maintains HTTP state (cookies etc.) for us.
    WebConversation wc = new WebConversation();
    WebRequest request = new GetMethodWebRequest(this.startUrl);
    WebResponse response = wc.getResponse(request);
    return response.getLinks().length;
  }

  /**
   * A WebItem stores the URL of a page and the number of pages linked to it.
   */
  public static class WebItem {
    /** The URL of the web page. */
    public String link;
    /** The number of pages that link to this page. */
    public Integer linkCount;

    /**
     * Constructor for WebItem; the initial count is one.
     * 
     * @param link The link of the WebItem
     */
    public WebItem(String link) {
      this.link = link;
      this.linkCount = 1;
    }
  }

  /**
   * Records one more occurrence of the given page, creating a new entry with
   * count 1 the first time the page is seen.
   * (Method name kept as-is, typo included, for backward compatibility.)
   * 
   * @param link The URL of the current link.
   */
  static void recordCounfOfpage(String link) {
    for (WebItem item : countOfAllPages) {
      if (item.link.equals(link)) {
        item.linkCount++;
        return; // stop scanning once the entry is found
      }
    }
    countOfAllPages.add(new WebItem(link));
  }

  /**
   * Crawls up to {@code numOfPage} pages starting from the given URL, counting the
   * links discovered, and prints either the total link count or the most popular page.
   * 
   * @param url the spider holding the start URL
   * @param findMostPopular if true, report the most popular page instead of the link total
   * @param withLog if true, log progress and warnings
   * @throws Exception if a page retrieval fails irrecoverably
   */
  static void webtotallinks(WebSpiderExample url, Boolean findMostPopular, Boolean withLog)
    throws Exception {

    // Total number of links found while crawling the desired number of pages.
    int totalLinks = 0;
    // Total number of pages crawled so far.
    int totalPageTraversed = 0;

    WebConversation wc = new WebConversation();
    WebRequest request = new GetMethodWebRequest(url.startUrl);
    WebResponse response = wc.getResponse(request);
    WebLink[] pageLinks = response.getLinks();

    // Count the links found at each visited URL. If fewer than numOfPage links
    // exist on the current page, continue from the oldest queued page until a
    // total of numOfPage pages has been traversed.
    // BUG FIX: the original condition was '<=', which traversed numOfPage + 1 pages.
    while (totalPageTraversed < numOfPage) {
      for (int i = 0; i < pageLinks.length && totalPageTraversed < numOfPage; i++) {
        String link = pageLinks[i].getRequest().getURL().toString();

        totalPageTraversed++;
        if (withLog) {
          logger.log(Level.INFO, "Status: retrieving " + link);
        }
        // Retrieve the file extension of the link.
        // BUG FIX: the original substring call threw on URLs shorter than 3 characters.
        String typeOfLink = link.length() >= 3 ? link.substring(link.length() - 3) : "";

        // Skip pdf, rss, and zip resources; they are not HTML pages.
        if (("pdf").equals(typeOfLink) || ("rss").equals(typeOfLink)
            || ("zip").equals(typeOfLink)) {
          continue;
        }
        try {
          pageLinks[i].click();
        }
        catch (Exception e) {
          if (withLog) {
            logger.log(Level.WARNING, link + " is a dead link");
          }
          // BUG FIX: the original did 'i = i + 2' and then kept using pageLinks[i],
          // risking ArrayIndexOutOfBoundsException and counting the wrong link.
          continue;
        }

        // Record the number of pages linked to each page visited.
        recordCounfOfpage(link);

        WebSpiderExample currentLink = new WebSpiderExample(link);
        // BUG FIX: fetch the link count once; the original called getNumLinks()
        // twice, doubling the network traffic per page.
        int linksOnPage = currentLink.getNumLinks();
        totalLinks = totalLinks + linksOnPage;
        if (withLog) {
          logger.log(Level.INFO, "Found " + linksOnPage + " links");
        }

        // Check whether the current page has already been retrieved; if so,
        // undo the double-count before re-adding it.
        for (int j = 0; j < allAccessedPages.size(); j++) {
          if (pageLinks[i].equals(allAccessedPages.get(j))
              || link.equals(allAccessedPagesURL.get(j))) {
            totalPageTraversed--;
            allAccessedPages.remove(j);
            allAccessedPagesURL.remove(j);
            totalLinks = totalLinks - linksOnPage;
            break;
          }
        }
        allAccessedPages.add(pageLinks[i]);
        allAccessedPagesURL.add(link);
      }
      // Continue the crawl from the oldest queued page.
      // BUG FIX: the original indexed get(0) unconditionally (both if/else
      // branches were identical) and threw when the queue was empty.
      if (allAccessedPages.isEmpty()) {
        break;
      }
      response = wc.getResponse(allAccessedPages.get(0).getRequest());
      if (allAccessedPages.size() > 1) {
        allAccessedPages.remove(0);
      }
      pageLinks = response.getLinks();
    }

    // Print out the result.
    if (findMostPopular) {
      WebItem mostPopular = getMostPopular(withLog);
      System.out.println("The most popular website discovered is: " + 
           mostPopular.link + " with " + mostPopular.linkCount + " pages that point to it");
    }
    else {
      System.out.println("The total number of links discovered while crawling the first " +
        numOfPage + " pages accessable from " + url.startUrl + " is: " + totalLinks);
    }
  }

  /**
   * Finds the most popular page seen so far, i.e. the entry in
   * {@link #countOfAllPages} with the highest link count.
   * 
   * @param withLog if true, log the lookup
   * @return the most popular page
   * @throws IndexOutOfBoundsException if no pages have been recorded yet
   */
  public static WebItem getMostPopular(Boolean withLog) {
    // BUG FIX: the original declared a local Logger that shadowed the static field.
    if (withLog) {
      logger.log(Level.INFO, "Status: getting the most popular link.");
    }
    WebItem mostPopular = countOfAllPages.get(0);
    for (int i = 1; i < countOfAllPages.size(); i++) {
      if (countOfAllPages.get(i).linkCount > mostPopular.linkCount) {
        mostPopular = countOfAllPages.get(i);
      }
    }
    return mostPopular;
  }

  /**
   * Entry point: parses the command line and runs the requested crawl task.
   * 
   * @param args mode (-totallinks or -mostpopular), start URL, page count,
   *             and optionally -logging.
   * @throws Exception If problems occur.
   */
  public static void main(String[] args) throws Exception {
    // BUG FIX: the original accepted 2 arguments and then crashed (or silently
    // crawled 0 pages) when args[2] was missing; three arguments are required.
    if (args.length < 3 || args.length > 4) {
      logger.warning("Missing parameter(s). " +
          "Example: java -jar webspider.jar -totallinks http://www.httpunit.org 100");
      return;
    }
    WebSpiderExample URL = new WebSpiderExample(args[1]);
    // Check whether an invalid page count was entered.
    try {
      numOfPage = Integer.parseInt(args[2]);
    }
    catch (NumberFormatException e) {
      System.out.println("Please enter the number of pages to crawl!");
      // BUG FIX: the original fell through and crawled with numOfPage == 0.
      return;
    }
    boolean logging = (args.length > 3) && ("-logging".equals(args[3]));
    if ("-totallinks".equals(args[0])) {
      if (logging) {
        logger.log(Level.INFO, "Task: to compute total number of links");
      }
      webtotallinks(URL, false, logging);
    }
    else if ("-mostpopular".equals(args[0])) {
      if (logging) {
        logger.log(Level.INFO, "Task: to find the most popular page");
      }
      webtotallinks(URL, true, logging);
    }
  }
}


