package edu.hawaii.webspider;

// WebSpiderDispatcher.java
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.Parser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.ParseException;
import java.util.regex.Pattern;
import java.util.logging.Logger;
import java.util.logging.Level;

/**
 * The main web spider dispatch class. Accepts command line arguments:
 * <pre>
 * usage: WebSpiderDispatcher -totallinks url numberOfPages [-logging]
 *        WebSpiderDispatcher -mostpopular url numberOfPages [-logging]
 *
 *  -h            Print help for this application
 *  -logging      Show Logging
 *  -mostpopular  Find the most popular page. Requires 2 arguments:
 *                arg1=url, arg2=number of pages to count
 *  -totallinks   Count the total links from the given Url. Requires 2
 *                arguments: arg1=url, arg2=number of pages to count
 * </pre>
 * @author kenglish
 */
public class WebSpiderDispatcher {
  private static final String LOGGING_CLI_OPT = "logging";
  private static final String MOST_POPULAR_CLI_OPT = "mostpopular";
  private static final String TOTAL_LINKS_CLI_OPT = "totallinks";
  /** Both spider options take exactly two values: url and page count. */
  private static final int REQUIRED_OPTION_ARGS = 2;
  // Package-wide logger; package-private so sibling classes can share it.
  static final Logger logger = Logger.getLogger("edu.hawaii.webspider");

  /**
   * Constructor is private: this class is a non-instantiable dispatcher.
   */
  private WebSpiderDispatcher() {
  }

  /**
   * Runs the TotalLinks spider from the given start point.
   * @param startUrl The url from which we will start crawling the web.
   * @param numberOfPagesToCrawl The number of pages to crawl.
   */
  public static void runTotalLinks(String startUrl, int numberOfPagesToCrawl) {
    TotalLinks totalLinks = new TotalLinks();
    totalLinks.setStartUrl(startUrl);
    totalLinks.getLinks(numberOfPagesToCrawl);
  }

  /**
   * Runs the MostPopular spider from the given start point.
   * @param startUrl The url from which we will start crawling the web.
   * @param numberOfPagesToCrawl The number of pages to crawl.
   */
  public static void runMostPopular(String startUrl, int numberOfPagesToCrawl) {
    MostPopular popularityCount = new MostPopular();
    popularityCount.setStartUrl(startUrl);
    popularityCount.getMostPopularLink(numberOfPagesToCrawl);
  }

  /**
   * Prints the Help for command line arguments.
   * @param options Command line options.
   * @param header The header message printed above the option list.
   */
  public static void printHelp(Options options, String header) {
    String footer = "Thank you for shopping at Walmart";

    HelpFormatter f = new HelpFormatter();
    f.setArgName("url");
    f.printHelp("WebSpiderDispatcher -totallinks url numberOfPages [-logging]\n " +
        "WebSpiderDispatcher -mostpopular url numberOfPages [-logging]\n ", header, options, footer);
  }

  /**
   * Parses the command line, validates both arguments (url and page count)
   * and dispatches to the requested spider program.
   * @param args Command Line arguments.
   */
  public static void main(String[] args) {
    logger.setLevel(Level.FINEST);

    Options options = new Options();
    String extraArgInfo = "Requires 2 arguments: arg1=url, arg2=number of page to count";
    options.addOption("h", false, "Print help for this application");
    Option option = new Option(TOTAL_LINKS_CLI_OPT, true,
                          "Count the total links from the given Url. " + extraArgInfo);
    option.setArgs(REQUIRED_OPTION_ARGS);
    options.addOption(option);
    option = new Option(MOST_POPULAR_CLI_OPT, true, "Find the most popular page. " + extraArgInfo);
    option.setArgs(REQUIRED_OPTION_ARGS);
    options.addOption(option);
    options.addOption(LOGGING_CLI_OPT, false, "Show Logging");

    try {
      // See http://commons.apache.org/cli/manual/index.html for parser usage.
      Parser parser = new BasicParser();
      CommandLine cl = parser.parse(options, args);

      String programToRun;
      if (cl.hasOption(TOTAL_LINKS_CLI_OPT)) {
        programToRun = TOTAL_LINKS_CLI_OPT;
      }
      else if (cl.hasOption(MOST_POPULAR_CLI_OPT)) {
        programToRun = MOST_POPULAR_CLI_OPT;
      }
      else {
        printHelp(options, "Invalid Option");
        // BUGFIX: was misspelled "Invaldi Options".
        System.out.println("Invalid Options");
        return;
      }

      if (cl.hasOption(LOGGING_CLI_OPT)) {
        logger.setLevel(Level.INFO);
        logger.info("Logging is enabled.");
      }

      // Commons CLI returns null when the option carried no values; guard
      // before indexing. Fetch once instead of calling getOptionValues twice.
      String[] optionValues = cl.getOptionValues(programToRun);
      if (optionValues == null || optionValues.length != REQUIRED_OPTION_ARGS) {
        printHelp(options, "Invalid Option");
        System.exit(-1);
      }
      String urlArg = optionValues[0];
      String numberOfPagesArg = optionValues[1];

      if (!urlArg.startsWith("http://")) {
        // We try to help the user a bit by supplying the scheme.
        urlArg = "http://" + urlArg;
      }

      if (!WebAnalysisTool.isValidHostUrl(urlArg)) {
        printHelp(options, "Arg1 is an invalid url");
        System.exit(-1);
      }

      // BUGFIX: "^\\d*$" matched the empty string, letting "" through to
      // parseInt and crashing. Require at least one digit, and catch the
      // overflow case (digit strings larger than Integer.MAX_VALUE).
      int numberOfPages = 0;
      if (Pattern.matches("^\\d+$", numberOfPagesArg)) {
        try {
          numberOfPages = Integer.parseInt(numberOfPagesArg);
        }
        catch (NumberFormatException e) {
          printHelp(options, "Arg2 must be a number.");
          System.exit(-1);
        }
      }
      else {
        printHelp(options, "Arg2 must be a number.");
        System.exit(-1);
      }

      System.out.println("Running...");
      if (programToRun.equals(TOTAL_LINKS_CLI_OPT)) {
        runTotalLinks(urlArg, numberOfPages);
      }
      else if (programToRun.equals(MOST_POPULAR_CLI_OPT)) {
        runMostPopular(urlArg, numberOfPages);
      }
      logger.info("Exiting");
    }
    catch (ParseException e) {
      // Malformed command line (unknown option, missing argument, ...).
      printHelp(options, "Invalid options");
      System.exit(-1);
    }
  }
}
