package webSpiderUI;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
import java.util.Map;
import java.util.Queue;
import java.util.Scanner;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import multiThreaded.PageBuffer;
import multiThreaded.PageParserThread;
import multiThreaded.PageRetrieverThread;
import multiThreaded.UrlBuffer;

import singleThreaded.DataGatherer;
import singleThreaded.PageParser;
import singleThreaded.PageRetriever;
import singleThreaded.Reporter;
import singleThreaded.UrlStorage;


/**
 * The web spider program.
 * @author DoubleS
 * @version 1.0
 */
/**
 * Entry point for the web spider program. Crawls pages starting from a seed
 * URL, counts hits of a fixed keyword list on each page, and reports
 * progress. Runs either single-threaded or multi-threaded depending on
 * {@code IS_MULTITHREAD}.
 *
 * @author DoubleS
 * @version 1.1
 */
public class WebSpider
{
  /**
   * Keywords whose occurrences are counted on every retrieved page.
   */
  private static String[] my_keywords =
      new String[] {"intelligence", "artificial", "agent", "university", "research",
          "science", "robot", "students"};

  /**
   * The default seed URL the crawl starts from.
   */
  private static String my_first_url = "http://faculty.washington.edu/gmobus/";

  /**
   * Maximum number of pages to retrieve before the crawl stops.
   */
  private static int my_limit_page = 500;

  /**
   * True to run the multi-threaded spider, false for the single-threaded one.
   */
  private static boolean IS_MULTITHREAD = true;

  /**
   * Number of page retriever threads used in multi-threaded mode (1-10).
   */
  private static int my_num_retriever = 1;

  /**
   * The main method of the web spider.
   * @param the_args The command line arguments (unused).
   */
  public static void main(final String[] the_args)
  {
    // Uncomment to configure seed URL, keywords and limits interactively.
    //setURLinfo();

    if (IS_MULTITHREAD)
    {
      webSpiderMultiThread();
    }
    else
    {
      webSpiderSingleThread();
    }
  }

  /**
   * Run the single-threaded web spider: repeatedly take an unvisited URL,
   * retrieve and parse its page, enqueue the page's links, gather keyword
   * statistics, and print a per-page report. Stops after
   * {@code my_limit_page} pages or when no unvisited URL remains.
   */
  public static void webSpiderSingleThread()
  {
    try
    {
      // Accumulated crawl time in milliseconds across all pages.
      double totalTime = 0;

      PageRetriever the_retriever = new PageRetriever(my_first_url);
      PageParser the_parser =
          new PageParser(the_retriever.retrievePageSource(), the_retriever.getProtocol(),
                         the_retriever.getHost());
      final UrlStorage the_url_storage = new UrlStorage();
      final DataGatherer the_data = new DataGatherer(my_keywords);
      Reporter the_reporter = new Reporter();

      // Seed the crawl frontier with the starting URL.
      the_url_storage.addToUnvisitedPages(my_first_url);
      int counter = 0;

      while (counter < my_limit_page && the_url_storage.hasUnvisitedPage())
      {
        final long startTime = System.currentTimeMillis();

        // Mark the URL visited before retrieval so it is never re-queued.
        final String next_url = the_url_storage.nextUnvisitedPage();
        the_url_storage.addToVisitedPages(next_url);

        // Retrieve and parse the page.
        the_retriever = new PageRetriever(next_url);
        final String visited_url = the_retriever.getUrl().toString();
        the_parser =
            new PageParser(the_retriever.retrievePageSource(), the_retriever.getProtocol(),
                           the_retriever.getHost());
        the_parser.parse();
        final Queue<String> the_links = the_parser.getLinks();

        // Enqueue each distinct, not-yet-visited link exactly once.
        while (!the_links.isEmpty())
        {
          final String temp_link = the_links.poll();
          // Drop remaining duplicates of this link from the page's queue;
          // the removal happens in the loop condition itself.
          while (the_links.remove(temp_link))
          {
            // intentionally empty
          }

          if (!the_url_storage.isAlreadyVisited(temp_link))
          {
            the_url_storage.addToUnvisitedPages(temp_link);
          }
        }
        counter++;

        the_data.processWords(the_parser.getText());
        final Map<String, Integer> result = the_data.getMapKeywordHit();

        totalTime += System.currentTimeMillis() - startTime;

        the_reporter =
            new Reporter(result.entrySet(), the_data.getPagesNumber(), the_data
                .getWordsNumber(), the_url_storage.getTotalLinks(), visited_url,
                         my_limit_page, totalTime);
        System.out.println(the_reporter.report());
      }

      System.out.println("Seed url: " + my_first_url);
      System.out.println(the_reporter.finalReport());
    }
    catch (MalformedURLException e)
    {
      System.err.println("Invalid link!");
    }
  }

  /**
   * Run the multi-threaded web spider: {@code my_num_retriever} retriever
   * threads and one parser thread cooperate through shared URL and page
   * buffers. The executor is shut down once all workers are submitted so
   * the pool's threads are released when the tasks finish.
   */
  public static void webSpiderMultiThread()
  {
    final UrlBuffer the_url_buffer = new UrlBuffer();
    final PageBuffer the_page_buffer = new PageBuffer();

    // Cached pool spawns threads on demand for the retrievers and parser.
    final ExecutorService webSpider = Executors.newCachedThreadPool();

    try
    {
      // Validate the seed URL before starting any worker thread; throws
      // MalformedURLException on a bad seed.
      new URL(my_first_url);

      the_url_buffer.addToUnvisitedPages(my_first_url);
      // Start the timer (ms since epoch; PageParserThread expects a double).
      final double startTime = System.currentTimeMillis();

      for (int i = 0; i < my_num_retriever; i++)
      {
        // Hand the Runnable to the executor directly — wrapping it in a
        // Thread is pointless, the pool supplies its own threads.
        webSpider.execute(new PageRetrieverThread(the_url_buffer, the_page_buffer,
                                                  my_limit_page));
      }
      webSpider.execute(new PageParserThread(the_url_buffer, the_page_buffer,
                                             my_keywords, startTime, my_limit_page));
      webSpider.shutdown();
    }
    catch (MalformedURLException e)
    {
      System.err.println("Invalid url! - " + my_first_url);
      e.printStackTrace();
    }
  }

  /**
   * Interactively read the page limit, keyword list, seed URL and threading
   * options from standard input. All input goes through one Scanner —
   * layering a second reader on System.in would lose data already buffered
   * by the Scanner.
   */
  public static void setURLinfo()
  {
    final Scanner inp = new Scanner(System.in);

    // Ask page limit.
    System.out.println("How many pages to parse?  ");
    my_limit_page = Integer.parseInt(inp.next());
    // Consume the remainder of the current line so the keyword prompt
    // reads a fresh full line.
    inp.nextLine();

    // Ask keywords (space-separated, one line).
    System.out.println("Enter keywords separated by space?  ");
    my_keywords = inp.nextLine().trim().split(" ");

    // Ask seed URL; fall back to the default on a malformed one.
    System.out.println("Enter a url: ");
    try
    {
      my_first_url = new URL(inp.next()).toString();
    }
    catch (final MalformedURLException e)
    {
      System.out.println("Bad URL" + e);
      System.out.println("Default URL:" + my_first_url);
    }

    // Ask threading mode; any answer containing 'y'/'Y' means multi-threaded.
    System.out.println("Multi-threaded?(Y/N)  ");
    final String temp_istread = inp.next();
    if (temp_istread.toLowerCase().contains("y"))
    {
      IS_MULTITHREAD = true;
      System.out.println("How many retrievers you want to use? (1-10)");
      int temp = Integer.parseInt(inp.next());
      while (temp < 1 || temp > 10)
      {
        System.out.println("Sorry, invalid input.");
        System.out.println("Please re-enter the number of retrievers you want to use? (1-10)");
        temp = Integer.parseInt(inp.next());
      }
      my_num_retriever = temp;
    }
    else
    {
      // Previously the flag was left at its default when the user said no.
      IS_MULTITHREAD = false;
    }
  }
}
