package controller;

import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import javax.swing.SwingUtilities;

import org.jsoup.nodes.Document;

import model.PageAnalyzer;
import model.PageParser;
import model.PageRetriever;
import view.CrawlFrame;

/**
 * Controls the web crawler program by tying together the
 * various classes. Holds Objects of the other classes. Receives
 * user input from and sends results to the GUI.
 * 
 * @author Thomas Nunn
 * @author David Everitt
 * @author Dexter Hu
 * @version TCSS 422 Operating Systems Winter 2014
 */
public class CrawlControl {

	/**
	 * Crawl timeout in nanoseconds: the crawler is stopped automatically
	 * after 18 seconds of running time.
	 */
	private static final long TIMEOUT_NANOS = 18_000_000_000L;

	/**
	 * Polling interval in milliseconds used while waiting for the
	 * multi-threaded crawler's worker threads to finish.
	 */
	private static final long POLL_MILLIS = 50;

	/**
	 * The JFrame for the GUI.
	 */
	private CrawlFrame myCrawlFrame;

	/**
	 * The PageRetriever. Volatile: replaced on reset (EDT) and read from
	 * crawler threads.
	 */
	private volatile PageRetriever myRetriever;

	/**
	 * The PageParser.
	 */
	private volatile PageParser myParser;

	/**
	 * The PageAnalyzer.
	 */
	private volatile PageAnalyzer myAnalyzer;

	/**
	 * A copy of the URLs from the PageParser.
	 */
	private List<String> myURLsCopy;

	/**
	 * Holds the time (System.nanoTime()) recorded when the user clicks the start button.
	 */
	long startTime;

	/**
	 * Holds the time (System.nanoTime()) recorded when the crawler is finished.
	 */
	long endTime;

	/**
	 * Formatter for displaying the crawler's results to two decimal places.
	 */
	final DecimalFormat myDF = new DecimalFormat("#.##");

	/**
	 * Holds the web crawler's result data for displaying.
	 */
	private String results = "";

	/**
	 * Represents the state of the single-threaded web crawler, running or
	 * stopped. Volatile: written by the Stop button (EDT) and read by the
	 * crawler thread.
	 */
	private volatile boolean myIsRunning = true;


	/**
	 * Constructs the controller, creating the model/view objects and
	 * wiring each GUI button to its listener.
	 */
	protected CrawlControl() {
		myCrawlFrame = new CrawlFrame();
		myRetriever = new PageRetriever();
		myParser = new PageParser();
		myAnalyzer = new PageAnalyzer();

		myURLsCopy = new ArrayList<>();

		myCrawlFrame.getCrawlPanel().getStartButton().addActionListener(new StartButtonListener());
		myCrawlFrame.getCrawlPanel().getStopButton().addActionListener(new StopButtonListener());
		myCrawlFrame.getCrawlPanel().getPagesButton().addActionListener(new PagesButtonListener());
		myCrawlFrame.getCrawlPanel().getURLButton().addActionListener(new URLButtonListener());
		myCrawlFrame.getCrawlPanel().getResetButton().addActionListener(new ResetButtonListener());
		myCrawlFrame.getCrawlPanel().getResultButton().addActionListener(new ResultButtonListener());
	}

	/**
	 * Gathers the result data (page count, averages, per-keyword hit
	 * statistics and timings) into a report and displays it in the GUI.
	 * Safe to call from a crawler thread: the GUI update is marshalled
	 * onto the Event Dispatch Thread.
	 */
	private void displayResults() {

		final StringBuilder sb = new StringBuilder(); //the result output String
		final int visited = myRetriever.getNumberVisited();

		sb.append("Pages Retrieved: " + visited);
		sb.append("\nAverage words per page: " + myDF.format(myAnalyzer.getAvgWordsPerPage()));
		sb.append("\nAverage URLs per page: " + myParser.getAvgURLs());
		sb.append("\n\nKeyword	Avg.hits per page	Total hits\n");

		Map<String, Integer> keyStats = myAnalyzer.getKeyStats();
		for (Map.Entry<String, Integer> entry : keyStats.entrySet()) {
			int totalHits = entry.getValue();
			// Guard against division by zero (NaN output) when no pages were retrieved.
			double avgHits = visited == 0 ? 0.0 : (double) totalHits / visited;
			sb.append(" " + entry.getKey() + "	 " + myDF.format(avgHits) + "		 " + totalHits + "\n");
		}

		sb.append("\nAvg. parse time per page: " + myDF.format((myParser.getAvgTimePerPage())/1000000000.0) + " sec");
		sb.append("\nTotal running time: " + myDF.format((endTime - startTime)/1000000000.0) + " sec");

		results = sb.toString();
		setOutputOnEDT(results);
	}

	/**
	 * Parses the user-entered comma-separated list of keywords into
	 * single Strings.
	 * 
	 * @param theKeys the raw comma-separated keyword text
	 * @return the individual keywords with surrounding whitespace removed
	 *         (a fixed-size list backed by the split array)
	 */
	private List<String> parseKeysString(String theKeys) {
		return Arrays.asList(theKeys.split("\\s*,\\s*"));
	}

	/**
	 * Posts a text update to the output area on the Swing Event Dispatch
	 * Thread; Swing components must only be touched from the EDT, and the
	 * crawlers run on background threads.
	 * 
	 * @param theText the text to display
	 */
	private void setOutputOnEDT(final String theText) {
		SwingUtilities.invokeLater(new Runnable() {
			@Override
			public void run() {
				myCrawlFrame.getCrawlPanel().setOutputText(theText);
			}
		});
	}

	/**
	 * Programmatically presses the Stop button on the Event Dispatch
	 * Thread; used by the crawler threads to enforce the timeout.
	 */
	private void clickStopOnEDT() {
		SwingUtilities.invokeLater(new Runnable() {
			@Override
			public void run() {
				myCrawlFrame.getCrawlPanel().getStopButton().doClick();
			}
		});
	}

	/**
	 * A Runnable that performs the whole crawl (retrieve, parse, analyze)
	 * on a single background thread.
	 * @author nunnt
	 */
	private class SingleThreadedCrawler implements Runnable {

		@Override
		public void run() {

			// get the user input
			String url = myCrawlFrame.getCrawlPanel().getURLField().getText();
			String keys = myCrawlFrame.getCrawlPanel().getKeysField().getText();

			if (myRetriever.validateURL(url)) {

				myParser.addURL(url);
				myAnalyzer.loadKeyWords(parseKeysString(keys));

				while (true) {

					for (String pageURL : myParser.getURLs()) {
						if (!myRetriever.getDocList().containsKey(pageURL)) {
							myRetriever.retrievePage(pageURL);
						}
						if (!myIsRunning) {
							break;
						}
						if (System.nanoTime() - startTime > TIMEOUT_NANOS) { // stop program at 18 seconds
							clickStopOnEDT();
						}
					}

					for (Map.Entry<String, Document> entry : myRetriever.getDocList().entrySet()) {
						myParser.parsePage(entry.getValue());
					}
					if (!myIsRunning) {
						break;
					}
				}

				myAnalyzer.analyzePage(myParser.getHTMLMap()); //start the analysis for URLs and keywords
				endTime = System.nanoTime(); //program is done, get time
				displayResults();

			} else {
				// Bad URL: report it and stop. (Previously the code fell
				// through to displayResults() here, overwriting this
				// message with an empty report.)
				setOutputOnEDT("Bad URL!\nPlease try again.");
			}
		}
	}

	/**
	 * A Runnable that runs the retriever, parser and analyzer on separate
	 * worker threads and waits for them to finish or time out.
	 * @author nunnt
	 */
	private class MultiThreadedCrawler implements Runnable {

		@Override
		public void run() {

			// get the user input
			String url = myCrawlFrame.getCrawlPanel().getURLField().getText();
			String keys = myCrawlFrame.getCrawlPanel().getKeysField().getText();

			if (myRetriever.validateURL(url)) {

				myParser.addURL(url);
				myAnalyzer.loadKeyWords(parseKeysString(keys));

				// Wire the workers together before starting any of them.
				myRetriever.setParser(myParser);
				myParser.setRetriever(myRetriever);
				myAnalyzer.setParser(myParser);

				Thread retrieverThread = new Thread(myRetriever);
				Thread parserThread = new Thread(myParser);
				Thread analyzerThread = new Thread(myAnalyzer);

				retrieverThread.start();
				analyzerThread.start();
				parserThread.start();

				// Poll until any worker stops, sleeping between polls
				// instead of busy-spinning a CPU core.
				try {
					while (myRetriever.isRunning() && myParser.isRunning() && myAnalyzer.isRunning()) {

						if (System.nanoTime() - startTime > TIMEOUT_NANOS) { // stop program at 18 seconds
							clickStopOnEDT();
						}
						Thread.sleep(POLL_MILLIS);
					}
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt(); // restore interrupt status
				}

				endTime = System.nanoTime(); //program is done, get time
				displayResults();

			} else {
				setOutputOnEDT("Bad URL!\nPlease try again.");
			}
		}
	}

	/**
	 * Listener for the "Unleash the Crawler" button. Starts the selected
	 * crawler on a background thread so the EDT stays responsive.
	 * Runs on the EDT, so direct Swing calls are safe here.
	 * 
	 * @author nunnt
	 */
	public class StartButtonListener implements ActionListener {

		@Override
		public void actionPerformed(ActionEvent arg0) {
			startTime = System.nanoTime(); //start the clock

			// check which thread version is selected and start the appropriate crawler
			final Runnable crawler;
			if (myCrawlFrame.getCrawlPanel().isSingleSelected()) {
				crawler = new SingleThreadedCrawler();
			} else {
				crawler = new MultiThreadedCrawler();
			}
			new Thread(crawler).start();

			myCrawlFrame.getCrawlPanel().getStartButton().setEnabled(false);
			myCrawlFrame.getCrawlPanel().getStopButton().setEnabled(true);
			myCrawlFrame.getCrawlPanel().getStopButton().setBackground(Color.RED);
			myCrawlFrame.getCrawlPanel().getStopButton().setForeground(Color.YELLOW);

			myCrawlFrame.getCrawlPanel().setOutputText("" +
					"\n_______________________________________________________________" +
					"\n\nPRESS THE \"Stop the Crawler\" BUTTON WHEN YOU WANT THE WEB CRAWLER TO STOP\n" +
					"\n_______________________________________________________________" +
					"\n\nThe program will time out after 18 seconds.");
		}
	}

	/**
	 * Listener for the JButton that stops the web crawler. Signals both
	 * the single-threaded flag and the multi-threaded workers, then
	 * enables the result-viewing buttons.
	 * 
	 * @author nunnt
	 *
	 */
	public class StopButtonListener implements ActionListener {

		@Override
		public void actionPerformed(ActionEvent e) {

			//stop single thread
			myIsRunning = false;

			//stop multi-thread
			myRetriever.stopCrawler();
			myParser.stopCrawler();
			myAnalyzer.stopCrawler();

			myCrawlFrame.getCrawlPanel().getStopButton().setEnabled(false);
			myCrawlFrame.getCrawlPanel().getStopButton().setBackground(null);
			myCrawlFrame.getCrawlPanel().getPagesButton().setEnabled(true);
			myCrawlFrame.getCrawlPanel().getURLButton().setEnabled(true);
			myCrawlFrame.getCrawlPanel().getResetButton().setEnabled(true);
			myCrawlFrame.getCrawlPanel().getResultButton().setEnabled(true);
		}
	}

	/**
	 * Listener for the "Show URLs" button. Gets the list of URLs
	 * that have been parsed and displays them.
	 * 
	 * @author Thomas
	 */
	public class URLButtonListener implements ActionListener {

		@Override
		public void actionPerformed(ActionEvent arg0) {

			// Defensive copy: keep a snapshot so that clearing myURLsCopy
			// on reset cannot mutate the parser's internal list.
			myURLsCopy = new ArrayList<>(myParser.getURLs());
			int count = 1;
			StringBuilder sb = new StringBuilder();
			sb.append("Unique URLs parsed from the crawled pages:\n\n");
			for (String s : myURLsCopy) {
				sb.append(count + ". " + s + "\n");
				count++;
			}
			myCrawlFrame.getCrawlPanel().setOutputText(sb.toString());
		}
	}

	/**
	 * Listener for the "Show Results" button. Displays the crawler's results.
	 * 
	 * @author Thomas
	 */
	public class ResultButtonListener implements ActionListener {

		@Override
		public void actionPerformed(ActionEvent arg0) {
			myCrawlFrame.getCrawlPanel().setOutputText(results);
		}
	}

	/**
	 * Listener for the "Pages Processed" button. Displays the list of
	 * pages the retriever has fetched.
	 * 
	 * @author Thomas
	 */
	public class PagesButtonListener implements ActionListener {

		@Override
		public void actionPerformed(ActionEvent arg0) {

			int count = 1;
			StringBuilder sb = new StringBuilder();
			sb.append("Pages retrieved:\n\n");

			for (Map.Entry<String, Document> entry : myRetriever.getDocList().entrySet()) {
				sb.append(count + ". " + entry.getKey() + "\n");
				count++;
			}
			myCrawlFrame.getCrawlPanel().setOutputText(sb.toString());
		}
	}

	/**
	 * Listener for the "Reset Crawler" button. Resets all the fields and text areas
	 * and replaces the model objects with fresh instances.
	 * 
	 * @author Thomas
	 */
	public class ResetButtonListener implements ActionListener {

		@Override
		public void actionPerformed(ActionEvent arg0) {
			myURLsCopy.clear();
			myRetriever.clear();
			myParser.clear();
			myAnalyzer.clear();
			results = "";

			myIsRunning = true;

			myRetriever = new PageRetriever();
			myParser = new PageParser();
			myAnalyzer = new PageAnalyzer();

			myCrawlFrame.getCrawlPanel().setOutputText("");
			myCrawlFrame.getCrawlPanel().getStartButton().setEnabled(true);
			myCrawlFrame.getCrawlPanel().getStopButton().setEnabled(false);
			myCrawlFrame.getCrawlPanel().getPagesButton().setEnabled(false);
			myCrawlFrame.getCrawlPanel().getURLButton().setEnabled(false);
			myCrawlFrame.getCrawlPanel().getResetButton().setEnabled(false);
			myCrawlFrame.getCrawlPanel().getResultButton().setEnabled(false);
		}
	}
}
