package model;

import java.awt.Cursor;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.DefaultListModel;
import javax.swing.JLabel;
import javax.swing.JOptionPane;

import view.SearchTab;

/**
 * Webcrawler component. This is a very basic crawler, and the algorithms were
 * to a large extent gathered from and based on the chapter
 * "Crawling the Web with Java" from the book "The Art of Java".
 * 
 * @author Audun
 * 
 */
public class WebCrawler {
	// Polled by the crawl loop and flipped from the Swing event thread when
	// the user presses "Stop"; volatile so the write is visible to the
	// background crawler thread.
	private volatile boolean running;
	private SearchTab searchTab;
	private ReadWriter readWriter;
	private String websiteToCrawl;

	// Matches <a href="..."> links. Compiled once since it is applied to
	// every downloaded page. Note: inside the character class the '|' is a
	// literal, so the capture also stops at a pipe (kept from the original
	// pattern to preserve behaviour).
	private static final Pattern LINK_PATTERN = Pattern.compile(
			"<a\\s+href\\s*=\\s*\"?(.*?)[\"|>]", Pattern.CASE_INSENSITIVE);

	/**
	 * Creates a crawler bound to the given GUI tab. The start URL is
	 * currently fixed to the NLM website.
	 *
	 * @param searchTab GUI tab supplying the search controls and result list
	 */
	public WebCrawler(SearchTab searchTab) {
		this.searchTab = searchTab;
		websiteToCrawl = "http://www.nlm.nih.gov/";
	}

	/**
	 * Handles search requests. If a crawl is already running this acts as a
	 * stop request; otherwise the GUI input (max search size, log file path,
	 * search terms) is validated and, if everything checks out, a crawl is
	 * started.
	 */
	public void handleSearchRequest() {
		if (isRunning()) {
			// Button doubles as "Stop" while a crawl is in progress.
			setRunning(false);
			return;
		}

		ArrayList<String> errorList = new ArrayList<String>();
		int searchSize = 0;
		String maxSize = ((String) searchTab.getSearchSizeComboBox()
				.getSelectedItem()).trim();

		if (maxSize.length() > 0) {
			try {
				searchSize = Integer.parseInt(maxSize);
			} catch (NumberFormatException ignored) {
				// Non-numeric input leaves searchSize at 0 and is reported
				// by the check below.
			}
			if (searchSize < 1) {
				errorList.add("FAIL format for max search size");
			}
		}
		String file = System.getProperty("user.dir")
				+ System.getProperty("file.separator") + "crawler.log";
		String logFile = file.trim();
		if (logFile.length() < 1) {
			errorList.add("Can't find logfile");
		}

		String search = searchTab.getSearchField().getText().trim();
		if (search.length() < 1) {
			errorList.add("Can't find search input");
		}

		if (!errorList.isEmpty()) {
			// Show all validation errors in one dialog, one per line.
			StringBuilder failList = new StringBuilder();
			for (int i = 0; i < errorList.size(); i++) {
				if (i > 0) {
					failList.append("\n");
				}
				failList.append(errorList.get(i));
			}
			displayFails(failList.toString());
			return;
		}

		websiteToCrawl = removeWWW(websiteToCrawl);
		search(logFile, websiteToCrawl, searchSize, search);
	}

	/**
	 * Further handles a search request by disabling / enabling GUI controls
	 * around a background thread that performs the actual crawl.
	 *
	 * NOTE(review): the Swing components below are touched from the worker
	 * thread; strictly these updates should be dispatched via
	 * SwingUtilities.invokeLater. Kept as-is to preserve existing behaviour.
	 *
	 * @param logFile      path of the log file matches are appended to
	 * @param startUrl     URL the crawl starts from
	 * @param maxUrls      maximum number of pages to visit
	 * @param searchString whitespace-separated search terms
	 */
	public void search(final String logFile, final String startUrl,
			final int maxUrls, final String searchString) {
		Thread thread = new Thread(new Runnable() {

			public void run() {
				// Changes gui to busy
				searchTab.setCursor(Cursor
						.getPredefinedCursor(Cursor.WAIT_CURSOR));
				searchTab.getSearchSizeComboBox().setEnabled(false);
				searchTab.getSearchField().setEnabled(false);
				searchTab.getSearchButton().setText("Stop");

				// Use the startUrl parameter (previously ignored in favour
				// of the field; both carry the same value when called from
				// handleSearchRequest).
				updateStatus(startUrl, 0, 0, maxUrls);
				readWriter = new ReadWriter(logFile);
				readWriter.matchLogFile();
				running = true;

				boolean caseSensitive = searchTab.getCaseSensitive()
						.isSelected();
				crawl(startUrl, maxUrls, searchString, caseSensitive);

				running = false;
				readWriter.close();

				// Changes gui to ready
				searchTab.setCursor(Cursor.getDefaultCursor());
				searchTab.getSearchSizeComboBox().setEnabled(true);
				searchTab.getSearchField().setEnabled(true);
				searchTab.getSearchButton().setText("Search");

				if (searchTab.getModel().isEmpty()) {
					JOptionPane.showMessageDialog(searchTab,
							"0 Results found for your search.",
							"No results found", JOptionPane.WARNING_MESSAGE);
				}
			}
		});
		thread.start();
	}

	/**
	 * Shows a popup window with one or more errors.
	 *
	 * @param message the error text, possibly several lines
	 */
	public void displayFails(String message) {
		JOptionPane.showMessageDialog(searchTab, message, "Error",
				JOptionPane.ERROR_MESSAGE);
	}

	/**
	 * Adds a search match to the GUI result list and to the log file.
	 * NOTE(review): called from the crawler thread; DefaultListModel updates
	 * should strictly happen on the EDT.
	 *
	 * @param url URL of the page that matched the search
	 */
	private void addMatch(String url) {
		DefaultListModel model = (DefaultListModel) searchTab.getModel();
		model.addElement(url);
		readWriter.printToLog(url);
	}

	/**
	 * Checks if the URL is a proper, crawlable URL. Only plain "http://"
	 * URLs are accepted (https is deliberately excluded, as in the original
	 * design).
	 *
	 * @param url the URL string to validate
	 * @return the parsed URL, or null if the string is not a valid http URL
	 */
	public URL checkIfProperURL(String url) {
		if (!url.toLowerCase().startsWith("http://")) {
			return null;
		}
		try {
			return new URL(url);
		} catch (Exception e) {
			// Malformed URL: signal "not a proper URL" to the caller.
			return null;
		}
	}

	/**
	 * Checks whether the host's robots.txt (cached per host in the GUI's
	 * disallow-list cache) permits crawling the given URL.
	 *
	 * @param urlToCheck URL about to be crawled; must not be null
	 * @return true if crawling is permitted, or if robots.txt is unreachable
	 */
	private boolean isRobotAllowed(URL urlToCheck) {
		String host = urlToCheck.getHost().toLowerCase();
		@SuppressWarnings("unchecked")
		ArrayList<String> disallowList = (ArrayList<String>) searchTab
				.getDisallowListCache().get(host);

		if (disallowList == null) {
			disallowList = new ArrayList<String>();
			try {
				URL robotsFileUrl = new URL("http://" + host + "/robots.txt");
				// robots.txt is specified as UTF-8; don't rely on the
				// platform default charset.
				BufferedReader reader = new BufferedReader(
						new InputStreamReader(robotsFileUrl.openStream(),
								"UTF-8"));
				try {
					String line;
					while ((line = reader.readLine()) != null) {
						if (line.indexOf("Disallow:") == 0) {
							String disallowPath = line.substring("Disallow:"
									.length());
							// Strip trailing "# ..." comments.
							int commentIndex = disallowPath.indexOf("#");
							if (commentIndex != -1) {
								disallowPath = disallowPath.substring(0,
										commentIndex);
							}
							disallowPath = disallowPath.trim();
							// An empty "Disallow:" means everything is
							// allowed; storing "" would match every path via
							// startsWith and wrongly block the whole host.
							if (disallowPath.length() > 0) {
								disallowList.add(disallowPath);
							}
						}
					}
				} finally {
					reader.close();
				}
				searchTab.getDisallowListCache().put(host, disallowList);
			} catch (Exception e) {
				// No readable robots.txt: assume crawling is allowed.
				return true;
			}
		}

		String file = urlToCheck.getFile();
		for (int i = 0; i < disallowList.size(); i++) {
			String disallow = disallowList.get(i);
			if (file.startsWith(disallow)) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Downloads the page at the given URL.
	 *
	 * @param pageUrl page to download
	 * @return the page contents, or null if the download failed
	 */
	private String downloader(URL pageUrl) {
		BufferedReader reader = null;
		try {
			reader = new BufferedReader(new InputStreamReader(
					pageUrl.openStream(), "UTF-8"));
			String line;
			StringBuilder pageBuffer = new StringBuilder();
			while ((line = reader.readLine()) != null) {
				// Keep the line break so that tokens on adjacent lines are
				// not fused together when the text is searched.
				pageBuffer.append(line).append('\n');
			}
			return pageBuffer.toString();
		} catch (Exception e) {
			// Unreachable or unreadable page: treat as "no content".
			return null;
		} finally {
			if (reader != null) {
				try {
					reader.close();
				} catch (Exception ignored) {
					// Nothing sensible to do if close itself fails.
				}
			}
		}
	}

	/**
	 * Removes the leading "www." from a URL's host, e.g.
	 * "http://www.example.com/x" becomes "http://example.com/x".
	 *
	 * @param url the URL to normalise
	 * @return the URL without a leading "www.", or the input unchanged
	 */
	public String removeWWW(String url) {
		int index = url.indexOf("://www.");
		if (index != -1) {
			// Keep "://" (3 chars) and drop "www." (4 chars).
			return url.substring(0, index + 3) + url.substring(index + 7);
		}
		return url;
	}

	/**
	 * Extracts crawlable links from a downloaded page: resolves relative
	 * URLs against the page URL, strips anchors and "www.", and filters out
	 * mailto/javascript links, invalid URLs and already-crawled pages.
	 *
	 * @param pageUrl      URL the page was downloaded from (for resolving
	 *                     relative links)
	 * @param pageContents raw HTML of the page
	 * @param crawledList  URLs already visited, to be skipped
	 * @return list of new, verified link URLs found on the page
	 */
	private ArrayList<String> retrieveLinks(URL pageUrl, String pageContents,
			HashSet<String> crawledList) {
		Matcher m = LINK_PATTERN.matcher(pageContents);
		// Create list of link matches.
		ArrayList<String> linkList = new ArrayList<String>();
		while (m.find()) {
			String link = m.group(1).trim();
			// Skip empty links.
			if (link.length() < 1) {
				continue;
			}
			// Skip links that are just page anchors.
			if (link.charAt(0) == '#') {
				continue;
			}
			// Skip mailto links.
			if (link.indexOf("mailto:") != -1) {
				continue;
			}
			// Skip JavaScript links.
			if (link.toLowerCase().indexOf("javascript") != -1) {
				continue;
			}
			// Prefix absolute and relative URLs if necessary.
			if (link.indexOf("://") == -1) {
				if (link.charAt(0) == '/') {
					// Host-absolute URL: prepend scheme and host.
					link = "http://" + pageUrl.getHost() + link;
				} else {
					// Relative URL: resolve against the page's directory.
					String file = pageUrl.getFile();
					if (file.indexOf('/') == -1) {
						link = "http://" + pageUrl.getHost() + "/" + link;
					} else {
						String path = file.substring(0,
								file.lastIndexOf('/') + 1);
						link = "http://" + pageUrl.getHost() + path + link;
					}
				}
			}
			// Remove anchors from link.
			int index = link.indexOf('#');
			if (index != -1) {
				link = link.substring(0, index);
			}
			// Remove leading "www" from URL's host if present.
			link = removeWWW(link);
			// Verify link and skip if invalid.
			URL verifiedLink = checkIfProperURL(link);
			if (verifiedLink == null) {
				continue;
			}
			// Skip link if it has already been crawled.
			if (crawledList.contains(link)) {
				continue;
			}
			// Add link to list.
			linkList.add(link);
		}
		return linkList;
	}

	/**
	 * Updates the GUI status label with search progress information.
	 *
	 * @param crawling URL currently being crawled
	 * @param crawled  number of links visited so far
	 * @param toCrawl  number of unexplored links queued
	 * @param maxUrls  maximum number of links for this search
	 */
	public void updateStatus(String crawling, int crawled, int toCrawl,
			int maxUrls) {
		JLabel statusLabel = searchTab.getStatusLabel();
		// Use the maxUrls value that was validated when the crawl started
		// instead of re-reading (and re-parsing) the combo box from this
		// thread; both carry the same number.
		statusLabel.setText("Crawling website: " + crawling + ", " + crawled
				+ " of " + maxUrls + " links visited"
				+ ", Unexplored links: " + toCrawl);
	}

	/**
	 * Determines whether every whitespace-separated term of the search
	 * string occurs in the given page contents.
	 *
	 * @param pageContents  text to search in
	 * @param searchString  whitespace-separated search terms
	 * @param caseSensitive whether matching is case sensitive
	 * @return true if every term occurs in the page
	 */
	private boolean searchStringMatches(String pageContents,
			String searchString, boolean caseSensitive) {
		// For case-insensitive search, lowercase the page once up front.
		String searchContents = pageContents;
		if (!caseSensitive) {
			searchContents = pageContents.toLowerCase();
		}
		// Split search string into individual terms and require all of them.
		String[] terms = searchString.split("[\\s]+");
		for (int i = 0; i < terms.length; i++) {
			String term = caseSensitive ? terms[i] : terms[i].toLowerCase();
			if (searchContents.indexOf(term) == -1) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Performs the actual crawl: a breadth-first walk starting from the
	 * given URL, downloading each page, queuing its links and recording a
	 * match whenever a page contains the search string. Stops when the
	 * queue is empty, maxUrls pages have been visited, or {@code running}
	 * is cleared by the Stop button.
	 *
	 * @param website       start URL
	 * @param maxUrls       maximum number of pages to visit, or -1 for no
	 *                      limit
	 * @param searchString  whitespace-separated search terms
	 * @param caseSensitive whether matching is case sensitive
	 */
	public void crawl(String website, int maxUrls, String searchString,
			boolean caseSensitive) {
		// Set up crawl lists; LinkedHashSet preserves discovery (FIFO) order.
		HashSet<String> crawledList = new HashSet<String>();
		LinkedHashSet<String> toCrawlList = new LinkedHashSet<String>();
		toCrawlList.add(website);

		while (running && toCrawlList.size() > 0) {
			// Stop once the max URL count has been reached, if specified.
			if (maxUrls != -1 && crawledList.size() == maxUrls) {
				break;
			}
			// Take the oldest URL off the to-crawl list.
			String url = toCrawlList.iterator().next();
			toCrawlList.remove(url);
			// Convert string url to URL object.
			URL verifiedUrl = checkIfProperURL(url);
			// Skip malformed URLs; without this check an invalid entry
			// would cause a NullPointerException inside isRobotAllowed().
			if (verifiedUrl == null) {
				continue;
			}
			// Skip URL if robots are not allowed to access it.
			if (!isRobotAllowed(verifiedUrl)) {
				continue;
			}
			updateStatus(url, crawledList.size(), toCrawlList.size(), maxUrls);

			// Add page to the crawled list.
			crawledList.add(url);
			// Download the page at the given URL.
			String pageContents = downloader(verifiedUrl);
			// If the page was downloaded successfully, retrieve all its
			// links and then see if it contains the search string.
			if (pageContents != null && pageContents.length() > 0) {
				ArrayList<String> links = retrieveLinks(verifiedUrl,
						pageContents, crawledList);
				toCrawlList.addAll(links);
				if (searchStringMatches(pageContents, searchString,
						caseSensitive)) {
					addMatch(url);
				}
			}
			updateStatus(url, crawledList.size(), toCrawlList.size(), maxUrls);
		}
	}

	/** @return true while a crawl is in progress */
	public boolean isRunning() {
		return running;
	}

	/** Starts/stops the crawl loop; setting false makes crawl() exit. */
	public void setRunning(boolean crawling) {
		this.running = crawling;
	}

	public SearchTab getSearchTab() {
		return searchTab;
	}

	public void setSearchTab(SearchTab searchTab) {
		this.searchTab = searchTab;
	}
}
