/*
 * Winter 2014 
 * TCSS 422 - Computer Operating System
 * Project1 - Web Crawler
 */
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * 
 * @author mayuri
 * @version Winter 2014
 * 
 */
 
public class CrawlerApplication {

    /**
     * Upper bound on the number of web pages that will be retrieved.
     * The application stops as soon as this number is reached.
     * Declared {@code final}: it is a constant, never reassigned.
     */
    private static final int DEFAULT_MAX_PAGES_TO_BE_CRAWLED = 10;

    /** Maximum running time of the crawler, in milliseconds. */
    private static final long MAX_RUNNING_TIME = 100000;

    /**
     * Queue of URLs (in the form of Strings) still to be fetched.
     * The PageRetriever consumes URLs from this queue; the PageParser adds
     * every URL it discovers in a parsed page.
     *
     * LinkedBlockingQueue is already thread-safe, and the reference is never
     * reassigned after construction, so the field is {@code final} rather
     * than {@code volatile} (volatile only guarantees visibility of the
     * reference, not atomicity of queue operations).
     */
    private final BlockingQueue<String> myUrlRetrievalQueue;

    /**
     * Queue of raw web pages awaiting parsing.
     * The PageRetriever writes page content here; the PageParser reads it.
     */
    private final BlockingQueue<PageContent> myPagesParserQueue;

    /**
     * Queue of parsed page content awaiting analysis.
     * The PageParser writes here; the (not yet implemented) analyzer reads.
     */
    private final BlockingQueue<ParsedPageContent> myPagesAnalyzerQueue;

    /** The URL the crawl starts from. */
    private String startUrl;

    /** Maximum number of pages to crawl. */
    private int maxPagesToCrawl;

    // Set of 10 keywords to be searched in the crawled pages.
    // NOTE(review): currently never populated or read — presumably intended
    // for the analyzer stage; confirm before removing.
    private List<String> keyWords = new ArrayList<String>();

    /**
     * Creates a crawler application.
     *
     * @param startUrl        the URL the crawl starts from
     * @param maxPagesToCrawl upper bound on pages to retrieve
     */
    public CrawlerApplication(final String startUrl, int maxPagesToCrawl) {
        this.startUrl = startUrl;
        this.maxPagesToCrawl = maxPagesToCrawl;

        this.myUrlRetrievalQueue = new LinkedBlockingQueue<String>();
        this.myPagesParserQueue = new LinkedBlockingQueue<PageContent>();
        this.myPagesAnalyzerQueue = new LinkedBlockingQueue<ParsedPageContent>();
    }

    /**
     * Runs the crawl entirely on the calling thread: retrieve, parse and
     * print sequentially until the URL queue is exhausted or the time
     * budget ({@link #MAX_RUNNING_TIME}) is spent.
     *
     * @param startUrl the URL the crawl starts from
     * @throws InterruptedException if a blocking queue operation is interrupted
     */
    private void startSingleThreaded(String startUrl) throws InterruptedException {

        long startTime = System.currentTimeMillis();
        long timeElapsed = 0;

        PageRetriever pageRetriever = new PageRetriever(startUrl, maxPagesToCrawl, myUrlRetrievalQueue, myPagesParserQueue);

        // do-while, not while: the URL queue is empty before the first
        // retrieve() call (presumably the retriever seeds it from startUrl —
        // confirm against PageRetriever), so a leading emptiness test would
        // prevent the body from ever executing.
        do {
            // Retrieve the next batch of pages.
            System.out.println("Creating the page retriever...");
            pageRetriever.retrieve();

            // Parse what was retrieved; discovered URLs go back into
            // myUrlRetrievalQueue, parsed content into myPagesAnalyzerQueue.
            System.out.println("Calling the page parser...");
            PageParser pageParser = new PageParser(myUrlRetrievalQueue, myPagesParserQueue, myPagesAnalyzerQueue);
            pageParser.parse();

            // TODO: create and run the page analyzer here.

            // Debug output: drain and print the analyzer queue.
            System.out.println("Printing the analyzer queue...");
            printAnalyzerQueue();

            timeElapsed = System.currentTimeMillis() - startTime;

        } while (!myUrlRetrievalQueue.isEmpty() && timeElapsed < MAX_RUNNING_TIME);
    }

    /**
     * Runs the crawl with the parser on a separate thread. Each iteration
     * retrieves pages, hands parsing to a worker thread, waits for it to
     * finish, then prints the analyzer queue.
     *
     * @param startUrl the URL the crawl starts from
     * @throws InterruptedException if interrupted while waiting for the
     *                              parser thread or on a queue operation
     */
    private void startMultiThreaded(String startUrl) throws InterruptedException {
        long startTime = System.currentTimeMillis();
        long timeElapsed = 0;

        PageRetriever pageRetriever = new PageRetriever(startUrl, maxPagesToCrawl, myUrlRetrievalQueue, myPagesParserQueue);

        // do-while for the same reason as the single-threaded variant: the
        // URL queue is empty until the first retrieve() call.
        do {
            // Retrieve the next batch of pages.
            System.out.println("Creating the page retriever...");
            pageRetriever.retrieve();

            // Parse on a worker thread.
            System.out.println("Calling the page parser...");
            Runnable pageParser = new PageParser(myUrlRetrievalQueue, myPagesParserQueue, myPagesAnalyzerQueue);
            Thread t = new Thread(pageParser);
            t.start();
            // Wait for the parser to finish so the analyzer queue is fully
            // populated before it is printed, and so the loop condition does
            // not race with the parser still adding URLs.
            t.join();

            // TODO: create and run the page analyzer here.

            // Debug output: drain and print the analyzer queue.
            System.out.println("Printing the analyzer queue...");
            printAnalyzerQueue();

            timeElapsed = System.currentTimeMillis() - startTime;

        } while (!myUrlRetrievalQueue.isEmpty() && timeElapsed < MAX_RUNNING_TIME);
    }

    /**
     * Drains the analyzer queue, printing each parsed page (debug output).
     * Uses poll() so each printed element is removed; the previous peek()
     * left the head in place and looped forever.
     */
    private void printAnalyzerQueue() {
        ParsedPageContent content;
        while ((content = myPagesAnalyzerQueue.poll()) != null) {
            System.out.println(content);
        }
    }

    /**
     * Entry point: prompts for a start URL and a threading mode, then runs
     * the corresponding crawl.
     *
     * @param args unused
     * @throws InterruptedException if the crawl is interrupted
     */
    public static void main(String[] args) throws InterruptedException {

        // try-with-resources so the Scanner (and System.in) is closed on exit.
        try (Scanner in = new Scanner(System.in)) {

            System.out.print("Please enter the start URL:");
            String startUrl = in.next();

            System.out.print("Please enter 1 for Single and 2 for multi threaded version:");
            int singleOrMulti = in.nextInt();

            CrawlerApplication crawlerApplication =
                    new CrawlerApplication(startUrl, DEFAULT_MAX_PAGES_TO_BE_CRAWLED);
            if (singleOrMulti == 1) {
                crawlerApplication.startSingleThreaded(startUrl);
            } else {
                crawlerApplication.startMultiThreaded(startUrl);
            }
        }
    }
}
