/* Team 5
 * James McQueen
 * Corwyn Simpson
 * May 1 ish, 2012
 */
package spider.ui;

import java.io.IOException;
import java.io.Writer;
import java.io.FileWriter;
import java.net.MalformedURLException;
import java.net.URL;

import spider.controllers.Parser;
import spider.controllers.ParserGroup;
import spider.controllers.Retriever;
import spider.controllers.RetrieverGroup;
import spider.stores.DataGatherer;
import spider.stores.LinkBuffer;
import spider.stores.PageBuffer;

/**
 * Starts up a Reporter GUI.
 * @author James McQueen
 * @author Corwyn Simpson
 */
public class Spider 
{
    /**
     * Entry point: launches the Reporter GUI.
     * The pair of comment markers below is a manual toggle: moving the
     * {@code /**} turns the GUI section off and the batch-testing section on.
     * @param the_args command-line arguments (unused)
     */
    public static void main(String[] the_args)
    {
        /**/
        Reporter report = new Reporter();
        report.start();
        /**
        try
        {
            batchTesting(15, 300);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }/**/
    }
    
    /**
     * Runs repeated crawl benchmarks and appends timing results to
     * "batch-test.txt". Each iteration performs one single-threaded baseline
     * run followed by a grid of multi-threaded runs (8..96 parser threads by
     * 8..96 retriever threads, in steps of 8).
     * @param the_iterations number of benchmark iterations to perform
     * @param the_page_count number of pages to crawl in each run
     * @throws IOException if the results file cannot be written
     * @throws InterruptedException if a multi-threaded run is interrupted
     */
    public static void batchTesting(int the_iterations, int the_page_count) 
            throws IOException, InterruptedException
    {
        Writer out = new FileWriter("batch-test.txt");
        // try/finally guarantees the results file is closed even if a run
        // throws; the original leaked the FileWriter on any exception.
        try
        {
            DataGatherer data = new DataGatherer();
            PageBuffer pages = new PageBuffer(100);
            LinkBuffer links = new LinkBuffer();
            String[] keywords = ("intelligence,artificial,agent,university," +
                    "research,science,robot").split(",");
            ParserGroup parser_group;
            RetrieverGroup retriever_group;
            
            long total_time;
            
            for (int i = 0; i < the_iterations; i++)
            {
                // Single-threaded baseline: reset all shared state first so
                // each run starts from an identical empty crawl.
                data.clearResults();
                pages.clear();
                links.clear();
                parser_group = new ParserGroup(data, links, pages, keywords, 1);
                retriever_group = new RetrieverGroup(links, pages, the_page_count, 1);
                
                total_time = runSingleThreaded(the_page_count, pages, links,
                                parser_group, retriever_group, out);
    
                out.write("Single Threaded done getting "+ the_page_count +" pages" +
                          " in "+ total_time +" ms\n");
                // Flush after each result so partial output survives a crash.
                out.flush();
                
                // Multi-threaded grid: thread counts 8, 16, ..., 96.
                for (int parse_count = 8; parse_count < 100; parse_count += 8)
                {
                    for (int ret_count = 8; ret_count < 100; ret_count += 8)
                    {
                        data.clearResults();
                        pages.clear();
                        links.clear();
                        links.addLink(new URL("http://faculty.washington.edu/gmobus/"));
    
                        parser_group = new ParserGroup(data, links, pages, keywords,
                                        parse_count);
                        retriever_group = new RetrieverGroup(links, pages, 
                                        the_page_count, ret_count);
    
                        // Time the whole run: retrievers fill the page buffer
                        // while parsers drain it; the run is over when the
                        // parser group's thread finishes.
                        long start_time = System.currentTimeMillis();
                        new Thread(retriever_group).start();
                        Thread parser_thread = new Thread(parser_group);
                        parser_thread.start();
                        parser_thread.join();
                        total_time = System.currentTimeMillis() - start_time;
                        
                        out.write(ret_count +" retriever, "+ parse_count +" parser"+
                                  " Multi Threaded done getting "+ the_page_count +
                                  " pages in "+ total_time +" ms\n");
                        out.flush();
                    }       
                }
            }
        }
        finally
        {
            out.close();
        }
    }

    /**
     * Runs one single-threaded crawl: alternately retrieves pages into the
     * page buffer and parses them, until either the link buffer is empty or
     * {@code the_page_count} pages have been visited.
     * @param the_page_count maximum number of pages to visit
     * @param the_pages shared page buffer (filled by retrieving, drained by parsing)
     * @param the_links shared link buffer (seeded with the start URL here)
     * @param the_parser_group parser group configured for one thread
     * @param the_retriever_group retriever group configured for one thread
     * @param out destination for failure messages
     * @return elapsed wall-clock time of the whole crawl in milliseconds,
     *         or 0 if the crawl could not run
     * @throws IOException if writing a failure message to {@code out} fails
     */
    private static long runSingleThreaded(int the_page_count,
                    PageBuffer the_pages, LinkBuffer the_links,
                    ParserGroup the_parser_group,
                    RetrieverGroup the_retriever_group, Writer out) throws IOException
    {
        long total_time = 0;
        try 
        {
            the_links.addLink(new URL("http://faculty.washington.edu/gmobus/"));
            // Start the clock once, before the crawl loop; the original reset
            // it every iteration and so reported only the LAST page's time
            // instead of the whole run (unlike the multi-threaded timing).
            long start_time = System.currentTimeMillis();
            while (!the_links.isEmpty() && the_links.visitedCount() < the_page_count) 
            {
                while (the_pages.isEmpty())
                {
                    //Retrieve a page and do retriever stuff.
                    Retriever retriever = new Retriever(the_retriever_group);
                    retriever.run();
                    the_retriever_group.storePage(retriever);
                }

                //Parse a page and do parser stuff
                Parser parser = new Parser(the_parser_group);
                parser.run();
                the_parser_group.gatherResults(parser);
            } 
            total_time = System.currentTimeMillis() - start_time;
        }
        catch (InterruptedException exl) 
        {
            // Restore the interrupted status so callers can still observe it.
            Thread.currentThread().interrupt();
            System.err.println("BlockingQueue was interrupted");
        }
        catch (MalformedURLException ex)
        {
            out.write("Single Threaded testing failed with bad URL\n");
        }
        return total_time;
    }
}
