package edu.hawaii.webspider;
import com.meterware.httpunit.GetMethodWebRequest;
import com.meterware.httpunit.WebConversation;
import com.meterware.httpunit.WebRequest;
import com.meterware.httpunit.WebResponse;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.Date;
import java.util.logging.Logger;

//import org.hackystat.utilities.logger.HackystatLogger;

/**
 * A web crawler class that uses HTTPUNIT to search through links and gather information
 * about them.  Has several command-line options; the syntax for calling the WebSpider is as follows:
 * [-option] [URL] [depth] [logging]
 * 
 * [-option] : Either -totallinks or -mostpopular.  They provide different information 
 * about the links gathered.
 * [URL] : The URL to begin the search at.
 * [depth] : How many links to follow until stopping the web traversal.
 * [logging] : optional flag; pass -logging to enable logging output.
 * 
 * @author Ben Karsin
 *
 */

public class WebSpider {

  /**
   * Queue of pages still to be visited.  Pages are removed from the front and followed
   * until the depth bound is reached.
   */
  public List<WebSpiderPage> webLinks = new ArrayList<WebSpiderPage>();

  /**
   * TreeMap containing each link URL mapped to its corresponding WebSpiderPage object.
   */
  public Map<String, WebSpiderPage> pageMap = new TreeMap<String, WebSpiderPage>();

  /**
   * Flag that is used to enable logging.
   */
  public static boolean logging = false;

  /**
   * Date object used to provide timestamps when logging is enabled.
   */
  public static Date timeObject = new Date();

  /**
   * Logger object used when logging is enabled.
   */
  public static Logger myLogger;


  /**
   * Constructor that takes the URL to begin the web crawler at and creates the initial page.
   * Adds the first page to all the necessary data structures.
   * @param startUrl the first page to be searched by the crawler.
   */
  public WebSpider(String startUrl) {
    WebSpiderPage startPage = new WebSpiderPage(startUrl);
    pageMap.put(startUrl, startPage);
    webLinks.add(startPage);
  }


  /**
   * Parse the command line and perform the specified operations.  Details are outlined
   * in the class documentation.
   * @param args command line input, outlined in class header.
   * @throws Exception Exception thrown by HTTPUNIT.
   */
  public static void main(String[] args) throws Exception {

    // Guard against missing arguments before doing any work (previously this threw
    // ArrayIndexOutOfBoundsException when invoked with fewer than three arguments).
    if (args.length < 3) {
      System.out.println("Usage: [-totallinks | -mostpopular] [URL] [depth] [-logging]");
      return;
    }

    // Validate the option up front, before constructing the spider or touching the network.
    if (!args[0].equals("-totallinks") && !args[0].equals("-mostpopular")) {
      System.out.println(args[0] + " is an invalid argument, command must be either" +
        " -totallinks or -mostpopular");
      return;
    }

    WebSpider mySpider = new WebSpider(args[1]);
    int totalLinks = Integer.parseInt(args[2]);
    WebSpiderPage searchPage;
    int searchCount = 0;
    int allLinksFound = 0;
    int mostHits = 0;
    WebSpiderPage mostPopular = null;

    // Crawled pages are arbitrary HTML: tolerate script errors and HTTP error statuses
    // instead of aborting the whole crawl.
    com.meterware.httpunit.HttpUnitOptions.setExceptionsThrownOnScriptError(false);
    com.meterware.httpunit.HttpUnitOptions.setExceptionsThrownOnErrorStatus(false);
    com.meterware.httpunit.HttpUnitOptions.setScriptingEnabled(false);
/*
    if (args.length > 3 && args[3].equals("-logging")) {
      myLogger = HackystatLogger.getLogger("webspider-bkarsin");
      HackystatLogger.setLoggingLevel(myLogger, "active");
      myLogger.info(timeObject.toString().substring(0, 15) + " - Beginning Logging");
      logging = true;
    }
*/
    searchPage = new WebSpiderPage(args[1]);

    // Stop early when the frontier runs dry (previously the loop kept spinning on a
    // null page until the depth bound, inflating the reported page count).
    while (searchCount < totalLinks && searchPage != null) {
      mySpider.addLinks(searchPage);
      searchPage = mySpider.getNextPage();
      searchCount++;
    }

    if (args[0].equals("-totallinks")) {
      // Sum hit counts over every distinct URL encountered.
      for (WebSpiderPage page : mySpider.pageMap.values()) {
        allLinksFound += page.hits;
      }
      // URL is passed as a %s argument, not concatenated into the format string, so a
      // '%' in the URL cannot corrupt the printf format.
      System.out.printf("Total links found from visiting %d pages, starting at %s = %d.%n",
          searchCount, args[1], allLinksFound);
    }
    else {
      // Find the page with the highest hit count.
      for (WebSpiderPage page : mySpider.pageMap.values()) {
        if (page.hits > mostHits) {
          mostPopular = page;
          mostHits = page.hits;
        }
      }
      // Guard the empty-result case (previously an NPE on mostPopular.url when no
      // links at all were discovered).
      if (mostPopular == null) {
        System.out.printf("After visiting %d sites, starting at %s, no links were found.%n",
            searchCount, args[1]);
      }
      else {
        System.out.printf("After visiting %d sites, starting at %s, "
            + "the most popular site was: %n%s with %d hits",
            searchCount, args[1], mostPopular.url, mostHits);
      }
    }
  }


  /**
   * Gathers all links from the given page and adds them to the data structures.  If a link has
   * already been seen, its existing pageMap entry has its hit count incremented; otherwise a new
   * WebSpiderPage is created and registered.  Pages that cannot be retrieved are skipped.
   * @param page Should contain a valid URL and other attributes; a null page is a no-op.
   * @throws Exception thrown by HTTPUNIT.
   */
  public void addLinks(WebSpiderPage page) throws Exception {
    if (page == null) {
      return;
    }
    WebConversation myConv = new WebConversation();
    WebRequest request = new GetMethodWebRequest(page.url);
    WebResponse response;

    try {
      response = myConv.getResponse(request);
    }
    catch (Exception ignored) {
      // Best-effort crawl: unreachable or malformed pages are skipped, not fatal.
      return;
    }

    for (com.meterware.httpunit.WebLink link : response.getLinks()) {
      // Absolute links are kept as-is; relative links are resolved against the current
      // page's URL by simple concatenation.
      String tempUrl;
      if (link.getURLString().contains("://")) {
        tempUrl = link.getURLString();
      }
      else {
        tempUrl = page.url + link.getURLString();
      }

      // Reuse the existing entry in place (previously remove-then-put, an unnecessary
      // extra O(log n) tree operation per already-seen link).
      WebSpiderPage tempPage = this.pageMap.get(tempUrl);
      if (tempPage == null) {
        tempPage = new WebSpiderPage(tempUrl);
        this.pageMap.put(tempUrl, tempPage);
      }
      tempPage.hits++;
      this.webLinks.add(tempPage);
    }
    page.searched = true;
  }


  /**
   * Obtains the next page to be traversed for its links.  It continues to pull pages off the
   * front of the webLinks list until it finds one that has yet to be searched.
   * @return the next un-searched page from webLinks, or null if the list is exhausted.
   */
  public WebSpiderPage getNextPage() {
    WebSpiderPage nextPage;
    do {
      if (this.webLinks.isEmpty()) {
        return null;
      }
      nextPage = this.webLinks.remove(0);
    } while (nextPage.searched);
    return nextPage;
  }


  /**
   * Determines the number of links on a given page and returns the total.
   * @param url of the page to be searched.
   * @return Number of links found on the given URL.
   * @throws Exception thrown by HTTPUNIT
   */
  public int getNumLinks(String url) throws Exception {
    // The conversation object maintains HTTP state (cookies, etc.) for us.
    WebConversation wc = new WebConversation();
    WebRequest request = new GetMethodWebRequest(url);
    WebResponse response = wc.getResponse(request);
    return response.getLinks().length;
  }

}


