package multiThread;
/**
 * File: PageRetriever.java
 * TCSS 422 Team Project 1
 * Autumn 2009
 * Team Members: Darin Cyr, Kirk Leonard, John Patanian
 */

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.Scanner;

/**
 * Retrieves web pages and stores them for later analysis by PageParser.
 * @author Darin Cyr
 * @author Kirk Leonard
 * @author John Patanian
 * @version Autumn 2009
 *
 */
public class ThreadedPageRetriever extends Thread
{
  /** Stop retrieving once the bucket reports this many unvisited URLs. */
  private static final int MAXIMUM_CAPACITY = 100;

  /** Upper bound, in milliseconds, for the random pause between retrievals. */
  private static final long SLEEP_TIME_IN_MS = 100;

  /**
   * Holds the list of URLs to retrieve.
   */
  private final URLBucket my_url_list;

  /**
   * Holds the list of pages to cache in the buffer.
   */
  private final Buffer<String> my_page_buffer;

  /**
   * Constructs a retriever that consumes URLs from the given bucket and
   * produces the corresponding page text into the given buffer.
   * @param the_url_list The list of URLs to retrieve.
   * @param the_page_buffer The buffer of pages to parse.
   */
  public ThreadedPageRetriever(final URLBucket the_url_list,
    final Buffer<String> the_page_buffer)
  {
    my_url_list = the_url_list;
    my_page_buffer = the_page_buffer;
  }

  /**
   * Retrieves one internet page from the next URL in the bucket and adds
   * its text to the page buffer. I/O failures for a single page are
   * deliberately ignored (best effort) so one bad URL does not stop the
   * crawl.
   * @throws InterruptedException if interrupted while blocked on a buffer.
   */
  private void doRetrievePages() throws InterruptedException
  {
    try
    {
      // As a consumer, take the next URL from the URL Bucket Buffer.
      final URL url = my_url_list.get();

      if (url != null)
      {
        final URLConnection connection = url.openConnection();
        connection.connect();
        // NOTE(review): reads with the platform default charset; the
        // response's Content-Type header would be the correct source of
        // the page encoding -- TODO confirm downstream expectations.
        final Scanner sc = new Scanner(connection.getInputStream());
        final StringBuilder sb = new StringBuilder();
        try
        {
          // The URL itself is recorded as the start of the page text.
          // NOTE(review): no line separators are appended anywhere, so the
          // whole page becomes one long line -- PageParser appears to rely
          // on this format, so it is preserved as-is.
          sb.append(url.toString());

          while (sc.hasNextLine())
          {
            sb.append(sc.nextLine());
          }
        }
        finally
        {
          // Closing the Scanner also closes the connection's input stream,
          // fixing a resource leak in the original code.
          sc.close();
        }

        // And as a Producer, adds to the Page Buffer.
        my_page_buffer.add(sb.toString());
      }
    }
    catch (final IOException ignored)
    {
      // Best effort: skip this page and let run() move on to the next URL.
    }
  }

  /**
   * Repeatedly sleeps for a random interval and retrieves a page, until
   * the bucket reports MAXIMUM_CAPACITY unvisited URLs or this thread is
   * interrupted.
   */
  @Override
  public void run()
  {
    while (my_url_list.getNumUnvisited() < MAXIMUM_CAPACITY)
    {
      try
      {
        // Sleep for a randomly chosen time in [0, SLEEP_TIME_IN_MS).
        // The original wrote (int) Math.random() * SLEEP_TIME_IN_MS: the
        // cast bound tighter than the multiply, truncating the random
        // value to 0, so the thread never actually slept.
        Thread.sleep((long) (Math.random() * SLEEP_TIME_IN_MS));

        doRetrievePages();
      }
      catch (final InterruptedException e)
      {
        // Restore the interrupt status for any owner of this thread
        // before terminating.
        Thread.currentThread().interrupt();
        return;
      }
    }
  }
}
