package multiThreaded;

import java.util.Queue;

import singleThreaded.PageParser;

/**
 * Page parser thread for the web spider.
 * @author DoubleS
 * @version 1.0
 */
public class PageParserThread implements Runnable
{
  /**
   * The url buffer; parsed-out links are pushed back into it.
   */
  private final UrlBuffer my_url_buffer;
  
  /**
   * The web page buffer this thread consumes retrieved pages from.
   */
  private final PageBuffer my_page_buffer;
  
  /**
   * The max number of pages to parse before this thread stops.
   */
  private final int my_limit;
  
  /**
   * The starting time of this web spider.
   */
  private final double my_start_time;
  
  /**
   * The counter of how many pages have been parsed so far.
   */
  private int my_counter;
  
  /**
   * The keywords to search for (forwarded to the data gatherer).
   */
  private final String[] my_keywords;
  
  /**
   * The data gatherer that accumulates statistics for parsed pages.
   */
  private final DataGathererThread my_data_gatherer;
  
  /**
   * True while this thread is blocked waiting on the page buffer.
   * Volatile: written by this thread inside run() and read by other
   * threads through IsBlock(), so visibility must be guaranteed.
   */
  private volatile boolean is_block;
  
  /**
   * Construct a PageParserThread.
   * @param the_url_buffer The url buffer.
   * @param the_page_buffer The page buffer.
   * @param the_keywords The keywords.
   * @param the_start_time The starting time.
   * @param the_limit The max number of pages to parse.
   */
  public PageParserThread(final UrlBuffer the_url_buffer, final PageBuffer the_page_buffer, 
                          final String[] the_keywords, final double the_start_time,
                          final int the_limit)
  {
    my_url_buffer = the_url_buffer;
    my_page_buffer = the_page_buffer;
    my_data_gatherer = new DataGathererThread(the_keywords, the_start_time, the_limit);
    my_limit = the_limit;
    my_start_time = the_start_time;
    my_counter = 0;
    my_keywords = the_keywords;
  }
  
  /**
   * Consume pages from the page buffer until my_limit pages have been
   * parsed; for each page, extract its links (feeding them back to the
   * url buffer while the total stays within the limit), hand the page
   * text to the data gatherer, and spawn a data gatherer thread.
   * Finally print the multi-threaded report.
   */
  @Override
  public void run()
  {
    is_block = false;
    PageParser the_parser;
    WebPage the_page;
    Queue<String> the_links;
    String the_text;
    Thread data_gatherer_thread;
    
    // Parse pages as long as the number of parsed pages doesn't
    // exceed my_limit.
    while (my_counter < my_limit)
    {
      my_counter++;
      // retrievePageFromBuffer() may block; expose that state to
      // observers via the is_block flag.
      is_block = true;
      the_page = my_page_buffer.retrievePageFromBuffer();
      is_block = false;
      the_parser = new PageParser(the_page.getSource(), the_page.getURL().getProtocol(),
                                  the_page.getURL().getHost());
      the_parser.parse();
      the_links = the_parser.getLinks();
      
      // Only feed new links back while the crawl hasn't already
      // discovered more links than the limit.
      if (!(my_url_buffer.getTotalLinks() > my_limit))
      {
        while (!the_links.isEmpty())
        {
          my_url_buffer.addToUnvisitedPages(the_links.poll());
        }
      }
      
      the_text = the_parser.getText();
      my_data_gatherer.updateDataGatherer(the_text, the_page.getURL().toString(),
                                          my_url_buffer.getTotalLinks(), my_counter);
 
      // Run the data gatherer thread. NOTE(review): a new Thread is
      // started every iteration and never joined, so the report below
      // may run while gatherer threads are still active — confirm
      // DataGathererThread tolerates this.
      data_gatherer_thread = new Thread(my_data_gatherer);
      data_gatherer_thread.start();
    }
    my_data_gatherer.getReporter().MultiThreadTextReport();
  }
  
  /**
   * Report whether this thread is currently blocked on the page buffer.
   * (Name kept as IsBlock for backward compatibility; conventional
   * Java naming would be isBlocked.)
   * @return true while blocked in retrievePageFromBuffer().
   */
  public boolean IsBlock()
  {
    return is_block;
  }
  
  /**
   * Get the data gatherer used by this parser thread.
   * (Name kept as getDataGether for backward compatibility.)
   * @return the data gatherer.
   */
  public DataGathererThread getDataGether()
  {
    return my_data_gatherer;
  }
}
