/*
 * TCSS 422 - Spring quarter 2011
 * Team: 	Googlers
 * Members: Deepa Sahni, Krisnil Gounder, Michael Schweiger
 * Date: 	April 25, 2011
 */
package main;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashSet;
import java.util.Scanner;
import java.util.Set;

import ui.ConsoleReporter;
import ui.Reporter;
import web.Gatherer2;
import web.PageParser;
import web.PageRetriever;
import buffers.Buffer;
import buffers.PageBuffer;
import buffers.URLBuffer;

public class Main {

	//*********************************************************************************
	//		Static constants
	//*********************************************************************************
	
	/**
	 * The max number of pages that this webcrawler will look for.
	 */
	public static final int MAX_PAGES = 5000;
	
	/**
	 * The max amount of milliseconds that this program will sit idle if no pages are
	 * in the buffer.
	 */
	public static final long MAX_IDLE_TIME = 12000;

	/**
	 * Help message for invalid arguments. The order described here matches the
	 * order main() actually parses: seed URL, mode flag, page count, then terms
	 * (previously the text listed the count before the flag, contradicting both
	 * the parser and the example line).
	 */
	public static final String HELP_MESSAGE = "Arguments available:\n\n"
		+ "<seed url>\n"
		+ "followed by -s or -m (must have exactly one): Single-threaded/Multi-threaded\n"
		+ "followed by the number of pages to load as a positive integer 0-"
		+ MAX_PAGES + " (a negative number equals max)\n"
		+ "Followed by a list of 1 or more terms (space separated)"
		+ "\n\nEx: <name>.jar http://www.yahoo.com -s 10 news stories";
	
	/**
	 * The initial system time when this application started, in ms.
	 */
	public static final long INITIAL_TIME = System.currentTimeMillis();
	
	/**
	 * Maximum number of concurrent utility threads.
	 */
	public static final int MAX_UTIL_THREADS = 5;
	
	
	//*********************************************************************************
	//		Private fields
	//*********************************************************************************
	
	/**
	 * Determines whether or not this is multi-threaded.
	 */
	private final boolean isThreaded;
	
	/**
	 * The reporter object (console or GUI) that this application will use.
	 */
	private final Reporter my_reporter;
	
	/**
	 * The gatherer object used to analyze the terms.
	 */
	private final Gatherer2 my_gatherer;
	
	/**
	 * The URLBuffer that will be used to synchronize PageRetriever and PageParser threads.
	 */
	private final Buffer<URL> my_url_buffer = URLBuffer.getInstance();
	
	/**
	 * The PageBuffer that will be used to synchronize PageRetriever and PageParser
	 * threads.
	 */
	private final Buffer<String> my_page_buffer = PageBuffer.getInstance();
	
	/**
	 * The file to save logs to.
	 */
	private static File my_log;
	
	/**
	 * The count of pages that have been pulled out of the buffer to be retrieved/parsed.
	 */
	private static int pages_reported = 0;
	
	/**
	 * current number of concurrent parser threads.
	 */
	private static int parsers_active = 0;
	
	/**
	 * current number of concurrent retriever threads.
	 */
	private static int retrievers_active = 0;
	
	/**
	 * The current max pages to process.
	 */
	private static int current_max_pages = 0;
	
	
	//*********************************************************************************
	//		Program entry point
	//*********************************************************************************
	
	/**
	 * Facilitates the main loop that initiates all threads and keeps the program moving.
	 * Expected arguments: seed URL, -s|-m flag, page count, then one or more terms.
	 * @param args The command-line arguments (see {@link #HELP_MESSAGE}).
	 */
	public static void main(String[] args) {
		//Deal with arguments or show help message appropriately.
		boolean threaded = false;
		Set<String> terms = new HashSet<String>();
		String seed = null;
		if (args.length < 4) {
			System.out.println(HELP_MESSAGE);
			System.exit(0);
		}
		else {
			seed = args[0];
			String arg1 = args[1];
			if (arg1.equalsIgnoreCase("-m")) {
				threaded = true;
				my_log = new File("spiderRunMT.txt");
			} else if (arg1.equalsIgnoreCase("-s")) {
				threaded = false;
				my_log = new File("spiderRunST.txt");
			} else {
				System.out.println(HELP_MESSAGE);
				System.exit(0);
			}
			//A non-numeric count previously crashed with an uncaught
			//NumberFormatException; treat it like any other bad argument.
			int temp = 0;
			try {
				temp = Integer.parseInt(args[2]);
			} catch (final NumberFormatException e) {
				System.out.println(HELP_MESSAGE);
				System.exit(0);
			}
			if (temp < 0 || temp > MAX_PAGES) {
				current_max_pages = MAX_PAGES;
			} else {
				current_max_pages = temp;
			}
			for (int i = 3; i < args.length; i++) {
				terms.add(args[i]);
			}
		}
		//Initialize the seed URL
		URL seed_url = null;
		try {
			seed_url = new URL(seed);
		} catch (final MalformedURLException e) {
			System.err.println("SEED URL is invalid or connection error"
					+ ", program cannot start!\nTerminating.");
			System.exit(0);
		}
		//Bug fix: this sanity check previously tested 'seed' (the raw argument,
		//never null here) instead of the parsed 'seed_url'.
		if (seed_url == null) {
			throw new IllegalStateException("SEED url did not retrieve properly!");
		}
		
		//We will need a reporter, a gatherer, a URLBuffer,
		//and a PageBuffer to make the application work (PageBuffer and URLBuffer
		//are already fields in each instance of Main).
		//The PageRetrievers and PageParsers are one time use and will be created
		//as needed.
		Reporter reporter = null;
		try {
			reporter = new ConsoleReporter(my_log);
		} catch (IOException e) {
			System.err.println("Could not initialize log file, continuing without logging");
			reporter = new ConsoleReporter();
		}
		Gatherer2 gatherer = new Gatherer2(terms, reporter);
		
		Main app = new Main(seed_url, reporter, gatherer, threaded);
		if (threaded) {
			app.startMT();
		} else {
			app.startST();
		}
	}
	
	/**
	 * Constructor that takes the already checked and established seed URL.
	 * In multi-threaded mode the gatherer and reporter threads are started here.
	 * @param seed_url The seed url (already validated).
	 * @param the_reporter The reporter to publish results through.
	 * @param the_gatherer The gatherer that analyzes the search terms.
	 * @param threaded True for multi-threaded operation.
	 */
	private Main(final URL seed_url, final Reporter the_reporter,
				 final Gatherer2 the_gatherer, final boolean threaded) {
		isThreaded = threaded;
		my_url_buffer.add(seed_url);
		my_reporter = the_reporter;
		my_gatherer = the_gatherer;
		if (isThreaded) {
			new Thread(my_gatherer, "Gatherer_thread").start();
			new Thread(my_reporter, "Reporter_thread").start();
		}
	}
	
	/**
	 * Starts the application loop with multi-threading. Dispatches up to
	 * MAX_UTIL_THREADS retriever and parser threads, then waits for all pages
	 * to be reported before stopping the reporter and gatherer.
	 */
	private void startMT() {
		int pages_retrieved = 0;
		int pages_processed = 0;
		while (pages_processed < current_max_pages) {
			if (!my_url_buffer.isEmpty() && pages_retrieved < current_max_pages) {
				if (getRetrieverCount() < MAX_UTIL_THREADS) {
					URL temp = my_url_buffer.remove();
					incRetrievers();
					//Bug fix: pages_retrieved was decremented on empty pages but
					//never incremented, so the retrieval cap was never enforced.
					pages_retrieved++;
					new Thread(new PageRetriever(temp), "PageRetriever_thread").start();
				} else {  //if all retriever threads are used, wait till one frees.
					synchronized(Main.class) {
						try {
							Main.class.wait();
						} catch (InterruptedException e) {
							//Deliberately ignored; the outer loop re-checks the condition.
						}
					}
				}
			}
			if (!my_page_buffer.isEmpty() && getParserCount() < MAX_UTIL_THREADS) {
				incParsers();
				String temp = my_page_buffer.remove();
				if (temp.length() > 1) {	
					//Strip off the first token (url address)
					Scanner tokenizer = new Scanner(temp);
					if (tokenizer.hasNext()) {  //guard against whitespace-only pages
						String url = tokenizer.next();
						//Consistency fix: strip the url from the content, exactly
						//as the single-threaded path does before parsing.
						temp = temp.substring(url.length(), temp.length());
						pages_processed++;
						new Thread(new PageParser(url, my_gatherer, temp), "PageParser_thread").start();
					} else {  //no url token present, treat as an empty page.
						finishParser();
						pages_retrieved--;
					}
					tokenizer.close();
				} else {  //page was empty
					finishParser();  //a new thread wasn't actually created.
					pages_retrieved--;  //retrieve another page, hopefully it won't be empty.
				}
			} 
		}
		//All pages dispatched; wait until every one has been reported.
		while (pages_reported < current_max_pages) {
			synchronized(Main.class) {
				try {
					Main.class.wait();
				} catch (InterruptedException e) {
					//Deliberately ignored; the outer loop re-checks the condition.
				}
			}
		}
		my_reporter.stop();
		my_gatherer.stop();
	}
	
	/**
	 * Starts the application loop without multi-threading. Retrieves, parses,
	 * gathers and reports each page sequentially on the calling thread.
	 */
	private void startST() {
		int pages_retrieved = 0;
		while (pages_reported < current_max_pages) {
			if (!my_url_buffer.isEmpty() && pages_retrieved < current_max_pages) {
				URL temp = my_url_buffer.remove();
				//Bug fix: count the retrieval so the cap in the condition above
				//is actually enforced (the counter was previously never updated).
				pages_retrieved++;
				new PageRetriever(temp).run();
			}
			if (!my_page_buffer.isEmpty()) {
				String temp = my_page_buffer.remove();
				if (temp.length() > 1) {	
					//Strip off the first token (url address)
					Scanner tokenizer = new Scanner(temp);
					if (tokenizer.hasNext()) {  //guard against whitespace-only pages
						String url = tokenizer.next();
						temp = temp.substring(url.length(), temp.length());  //cut out the url.
						new PageParser(url, my_gatherer, temp).run();
						my_gatherer.process_next_page();  //work through the page added.
						my_reporter.display();
					}
					tokenizer.close();
				}
			} 
		}
	}
	
	/**
	 * Returns whether this application is currently multi-threaded.
	 * @return True if multi-threaded, false otherwise.
	 */
	public boolean isThreaded() {
		return isThreaded;
	}
	
	/**
	 * Increases the count of reported pages and wakes any thread waiting on
	 * the Main.class monitor.
	 */
	public synchronized static void increaseReportedCount() {
		pages_reported++;
		//This static synchronized method already holds the Main.class monitor,
		//so notifyAll() is legal here; the previous nested synchronized block
		//and IllegalMonitorStateException catch were dead code.
		Main.class.notifyAll();
	}
	
	/**
	 * @return the count of reported pages.
	 */
	public synchronized static int getReportedCount() {
		return pages_reported;
	}
	
	/**
	 * Decrease the count of active parser threads and wake any thread waiting
	 * on the Main.class monitor.
	 */
	public synchronized static void finishParser() {
		parsers_active--;
		//Lock already held by this static synchronized method (see above).
		Main.class.notifyAll();
	}
	
	/**
	 * @return The count of active parsers.
	 */
	public synchronized static int getParserCount() {
		return parsers_active;
	}
	
	/**
	 * Increase the count of active parser threads.
	 */
	public synchronized static void incParsers() {
		parsers_active++;
	}
	
	/**
	 * Decrease the count of active retriever threads and wake any thread
	 * waiting on the Main.class monitor.
	 */
	public synchronized static void finishRetriever() {
		retrievers_active--;
		//Lock already held by this static synchronized method (see above).
		Main.class.notifyAll();
	}
	
	/**
	 * @return the count of active retriever threads.
	 */
	public synchronized static int getRetrieverCount() {
		return retrievers_active;
	}
	
	/**
	 * Increases the count of active retriever threads.
	 */
	public synchronized static void incRetrievers() {
		retrievers_active++;
	}
}
