package main;

import java.net.MalformedURLException;
import java.util.LinkedList;
import java.util.List;
import java.util.Observable;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**Spider is the controller for the web spider. It uses two manually synchronized
 * queues to act as a buffer between retrieving and parsing the pages.
 * @version 5-11-2012
 * @author T. Lewis & Ken Norton BitWiseGuys.
 * **/
public class Spider extends Observable implements Runnable {
	private int my_RetrieverThreadCount;
	private int my_ParserThreadCount;
	private String my_SeedURL;
	// Fetched pages awaiting parsing; all access guarded by synchronized (pageBuffer).
	private Queue<WebPage> pageBuffer;
	// URLs awaiting retrieval; all access guarded by synchronized (pageToRetrieve).
	private Queue<String> pageToRetrieve;
	// FIX: was the raw type Queue<List>; parameterized to match what getPages()
	// offers and getAnalyticsList() returns.
	private Queue<List<String>> my_analyticsQueue;
	private int PageLimit;
	public static int NumPages;
	private long startTime;
	private Validate validate;
	private String[] my_SeedList;
	public static int totalLinks;
	public static ConcurrentHashMap<String, Integer> freqMap;
	public int totalPagesParsed;
	public static int totalWords;
	// Elapsed crawl time in seconds; set once by crawl() on completion.
	private float endTime;
	private boolean my_isMultithreaded;

	/**
	 * Parameterized constructor.
	 *
	 * @param the_SeedURL              URL the crawl starts from.
	 * @param the_keyWordList          keywords forwarded to the Parser and Analytics.
	 * @param the_RetrieverThreadCount size of the retriever thread pool.
	 * @param the_ParserThreadCount    size of the parser thread pool.
	 * @param the_PageLimit            number of pages after which the crawl stops.
	 * @param isMultiThreaded          when false, getdata() throttles each parse by 2s.
	 **/
	Spider(String the_SeedURL, String[] the_keyWordList, int the_RetrieverThreadCount, 
			int the_ParserThreadCount, int the_PageLimit, boolean isMultiThreaded) {
		startTime = System.nanoTime();
		// FIX: the original declared a local "Validate validate" here, shadowing the
		// field and leaving this.validate permanently null. Assign the field instead.
		validate = new Validate();
		pageToRetrieve = new LinkedList<String>();
		pageBuffer = new LinkedList<WebPage>();
		freqMap = new ConcurrentHashMap<String, Integer>();
		my_analyticsQueue = new LinkedList<List<String>>();
		my_RetrieverThreadCount = the_RetrieverThreadCount; 
		my_ParserThreadCount = the_ParserThreadCount;
		my_isMultithreaded = isMultiThreaded;

		my_SeedURL = the_SeedURL;
		PageLimit = the_PageLimit;
		my_SeedList = the_keyWordList;

		// NOTE(review): these are static, so constructing a second Spider resets the
		// counters of any crawl already in flight — acceptable for a single-crawl app.
		NumPages = 0;
		totalLinks = 0;
		totalWords = 0;
	}

	/**
	 * Default constructor: single-threaded pools, a 50-page limit, and a fixed seed URL.
	 **/
	Spider() {
		pageToRetrieve = new LinkedList<String>();
		pageBuffer = new LinkedList<WebPage>();
		my_SeedURL = "http://faculty.washington.edu/gmobus/";

		my_RetrieverThreadCount = 1; 
		my_ParserThreadCount = 1;
		PageLimit = 50;
		startTime = System.nanoTime();
		// FIX: same field-shadowing bug as the other constructor; assign the field.
		validate = new Validate();
		freqMap = new ConcurrentHashMap<String, Integer>();
		my_analyticsQueue = new LinkedList<List<String>>();

		NumPages = 0;
		totalLinks = 0;
		totalWords = 0;
	}

	/**
	 * Creates two thread pools — one to retrieve URLs and one to parse the resulting
	 * pages — and shuttles work between them until PageLimit pages have been parsed.
	 *
	 * @throws MalformedURLException propagated from Validate.visit on the excluded URL.
	 **/
	public void crawl() throws MalformedURLException{
		// Pre-mark this site as visited so the crawler never retrieves it.
		Validate.visit("http://questioneverything.typepad.com/");

		pageToRetrieve.offer(my_SeedURL);
		ExecutorService consumer = Executors.newFixedThreadPool(my_RetrieverThreadCount);
		ExecutorService producer = Executors.newFixedThreadPool(my_ParserThreadCount);
		getLinks(consumer);
		getPages(producer);

		while (NumPages < PageLimit) {
			// Parse up to one batch of buffered pages...
			int tempPgCount = Math.min(pageBuffer.size(), my_ParserThreadCount);
			if (!pageBuffer.isEmpty()){
				for (int i = 0; i < tempPgCount; i++){
					getPages(producer);
				}
			}
			// ...otherwise retrieve more pages to refill the buffer.
			int tempLkCount = Math.min(pageToRetrieve.size(), my_RetrieverThreadCount);
			if (pageBuffer.isEmpty()){
				for (int i = 0; i < tempLkCount; i++){
					getLinks(consumer);
				}
			}
			// FIX: the original loop busy-spun at full CPU whenever both queues were
			// momentarily empty; yielding keeps the polling behavior without the spin.
			Thread.yield();
		}
		producer.shutdown();
		consumer.shutdown();
		endTime = ((float) (System.nanoTime() - startTime)) / 1000000000;
	}

	/**Retriever side of the pipeline.
	 * Polls one URL off pageToRetrieve, submits it to the retriever pool, and offers
	 * the fetched page to pageBuffer unless the retriever returned its "Dummy"
	 * sentinel page (used for skipped/invalid URLs — inferred from the comparison;
	 * Retriever's source is not visible here). **/
	private void getLinks(ExecutorService consumer){

		Retriever retriever;
		synchronized (pageToRetrieve){
			// NOTE(review): poll() can return null if the queue drains between the
			// caller's size check and this poll; Retriever is assumed to tolerate a
			// null URL (it already receives one on some interleavings) — confirm.
			retriever = new Retriever(pageToRetrieve.poll());
		}
		Future<WebPage> submit = consumer.submit(retriever);

		try {
			// FIX: the original called submit.get() twice — the first call blocked
			// with no timeout, making the 1s timeout on the second call dead code.
			// Fetch once with the intended timeout and reuse the result.
			WebPage page = submit.get(1000L, TimeUnit.MILLISECONDS);
			if (!page.getURL().equals("Dummy")){
				synchronized (pageBuffer){
					pageBuffer.offer(page);
				}
			}
		} catch (InterruptedException e) {
			// FIX: restore the interrupt flag instead of the original Thread.yield(),
			// which discarded the interruption entirely.
			Thread.currentThread().interrupt();
		} catch (ExecutionException e) {
			// FIX: the original evaluated e.getCause().toString() and threw the
			// result away, silently swallowing retrieval failures.
			e.printStackTrace();
		} catch (TimeoutException e) {
			e.printStackTrace();
		}
	}

	/**Parser side of the pipeline.
	 * Polls one page off pageBuffer, submits it to the parser pool, enqueues the
	 * links found on the parsed page, publishes an analytics report to observers,
	 * and bumps the global page counter. **/
	private void getPages(ExecutorService producer){

		// Throttles by 2s when running single-threaded (demo/compare mode).
		getdata();

		Parser my_parser;
		synchronized(pageBuffer){
			// NOTE(review): poll() is null when the buffer is empty (this happens on
			// the first call from crawl()); Parser is assumed to handle a null page
			// by producing an empty result — confirm against Parser's source.
			my_parser = new Parser(pageBuffer.poll(), my_SeedList);
		}
		Future<WebPage> parse_submit = producer.submit(my_parser);

		WebPage tempPage = null;
		try {
			tempPage = parse_submit.get();

			synchronized(pageToRetrieve){
				pageToRetrieve.addAll(tempPage.getLinkList());
			}
		} catch (InterruptedException e) {
			// FIX: restore the interrupt flag; the original only printed.
			Thread.currentThread().interrupt();
			System.out.print("InterruptedException: from Parser"+ e);

		} catch (ExecutionException e) {

			System.out.print("ExecutionException: from spider"+ e);
			e.printStackTrace();
		}
		Analytics var_Analytics = new Analytics(tempPage, this.my_SeedList, totalLinks, NumPages, totalWords, freqMap, startTime, PageLimit);
		List<String> var_AnalyticsList = var_Analytics.createReportList();

		synchronized (my_analyticsQueue) {
			my_analyticsQueue.offer(var_AnalyticsList);	
			setChanged();
			notifyObservers(var_AnalyticsList);
		}

		// NOTE(review): unsynchronized read-modify-write of a static counter; safe
		// only while getPages() is invoked from a single thread, as crawl() does.
		NumPages += 1;
	}

	/** @return the oldest pending analytics report, or null if none is queued. **/
	public List<String> getAnalyticsList(){
		List<String> tempList;
		synchronized (my_analyticsQueue) {
			tempList = my_analyticsQueue.poll();
		}
		return tempList;
	}

	/** @return true when no analytics reports are waiting to be consumed. **/
	public boolean isEmptyAnalyticsQ(){
		return my_analyticsQueue.isEmpty();
	}

	/** @return total crawl duration in seconds (0 until crawl() completes). **/
	public float getRunTime(){
		return endTime;
	}

	/** Runs the crawl on this thread and notifies observers when it finishes. **/
	@Override
	public void run() {
		try {
			crawl();
			setChanged();
			notifyObservers(this);
		} catch (MalformedURLException e) {
			// FIX: the original swallowed this silently; at minimum record it.
			e.printStackTrace();
		}
	}

	/** Sleeps 2 seconds per parsed page in single-threaded mode so the
	 * multi-threaded speedup is visible in the UI; no-op otherwise. **/
	private void getdata(){
		if (!my_isMultithreaded){
			try {
				Thread.sleep(2000);
			} catch (InterruptedException e) {
				// FIX: restore the interrupt flag instead of ignoring it.
				Thread.currentThread().interrupt();
			}
		}
	}
}
