package org.nanhill.gecko;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Multithreaded web spider. start() launches the configured number of worker
 * threads, each running the {@link #run()} loop: pull a URL from the shared
 * download queue, fetch and parse it, and queue newly discovered links until
 * the queue drains or {@link #stop()} is called. State can be periodically
 * checkpointed to disk and restored with {@link #readCheckpoint()}.
 */
public class Spider implements Runnable {
	private static final Log logger = LogFactory.getLog(Spider.class);

	/** Config for the spider (thread count, depth limit, checkpoint settings). */
	private SpiderConfig config;

	/**
	 * Download queue. Thread safety: To access the queue, first synchronize on
	 * it.
	 */
	private DownloadQueue queue;

	/**
	 * Set of URLs downloaded or scheduled, so we don't download a URL more than
	 * once. Thread safety: To access the set, first synchronize on it.
	 */
	private Set<URL> urlsDownloadedOrScheduled;

	/**
	 * Set of URLs currently being downloaded by Spider threads. Checkpointed
	 * alongside the queue so that URLs in flight when a checkpoint is taken are
	 * re-queued on restore. Thread safety: To access the set, first synchronize
	 * on it.
	 */
	private Set<URLToDownload> urlsDownloading;

	/**
	 * Number of downloads currently taking place. Thread safety: To read or
	 * modify this value, first synchronize on the download queue.
	 */
	private int downloadsInProgress;

	/** Whether the spider should quit. */
	private volatile boolean quit;

	/**
	 * Count of running Spider threads. Atomic because it is incremented by
	 * start() and decremented concurrently by each worker thread as it exits
	 * (the previous plain int was mutated without synchronization).
	 */
	private final AtomicInteger running = new AtomicInteger(0);

	/** Time we last checkpointed, in milliseconds since the epoch. */
	private long lastCheckpoint;

	/**
	 * Create a spider seeded with the configured start location at depth 0.
	 *
	 * @param config spider configuration; also handed to the download queue
	 */
	public Spider(SpiderConfig config) {
		this.config = config;
		queue = new DownloadQueue(config);
		queue.queueURL(new URLToDownload(config.getStartLocation(), 0));
		urlsDownloadedOrScheduled = new HashSet<URL>();
		// Record the start URL as scheduled so a page that links back to it
		// does not cause it to be fetched a second time.
		urlsDownloadedOrScheduled.add(config.getStartLocation());
		urlsDownloading = new HashSet<URLToDownload>();
		downloadsInProgress = 0;
		lastCheckpoint = 0;
	}

	/** Start the configured number of Spider worker threads. */
	public void start() {
		quit = false;
		running.set(0);

		for (int i = 0; i < config.getSpiderThreads(); i++) {
			if (logger.isInfoEnabled()) {
				logger.info("Starting Spider thread");
			}
			Thread t = new Thread(this, "Spider-Thread-" + (i + 1));
			// Count the thread before starting it, so a worker that exits
			// immediately cannot decrement the count below zero.
			running.incrementAndGet();
			t.start();
		}
	}

	/** Ask all Spider threads to stop after their current download. */
	public void stop() {
		quit = true;
	}

	/**
	 * @return true while at least one Spider thread is still running.
	 */
	public boolean isRunning() {
		// Was "running == 0", which reported the exact opposite of the truth
		// (true only once every thread had stopped).
		return running.get() > 0;
	}

	/**
	 * Write a checkpoint if checkpointing is enabled and the configured
	 * interval has elapsed since the last one. Double-checked around the queue
	 * lock so that only one thread writes per interval.
	 */
	private void checkpointIfNeeded() {
		if (config.getCheckpointInterval() == 0) {
			return; // checkpointing disabled
		}

		if (System.currentTimeMillis() - lastCheckpoint > config
				.getCheckpointInterval()) {
			synchronized (queue) {
				// Re-check: another thread may have checkpointed while we
				// waited for the lock.
				if (System.currentTimeMillis() - lastCheckpoint > config
						.getCheckpointInterval()) {
					writeCheckpoint();
					lastCheckpoint = System.currentTimeMillis();
				}
			}
		}
	}

	/**
	 * Serialize the queue and the in-flight URL set to the configured
	 * checkpoint file. Called with the queue lock held (see
	 * checkpointIfNeeded). IO problems are logged, not propagated.
	 */
	private void writeCheckpoint() {
		if (logger.isDebugEnabled()) {
			logger.debug("writeCheckpoint()");
		}
		ObjectOutputStream oos = null;
		try {
			oos = new ObjectOutputStream(new FileOutputStream(config
					.getCheckpointFile(), false));
			oos.writeObject(queue);
			// Hold the set's lock while it is serialized: worker threads add
			// and remove entries concurrently, and serialization iterates.
			synchronized (urlsDownloading) {
				oos.writeObject(urlsDownloading);
			}
		} catch (IOException ioe) {
			if (logger.isWarnEnabled()) {
				logger.warn("IO Exception attempting checkpoint: "
						+ ioe.getMessage(), ioe);
			}
		} finally {
			// Close in finally so a failed write no longer leaks the stream.
			if (oos != null) {
				try {
					oos.close();
				} catch (IOException ignored) {
					// best-effort close; write failures are logged above
				}
			}
		}
	}

	/**
	 * Restore the queue and in-flight set from the checkpoint file. URLs that
	 * were mid-download when the checkpoint was taken never completed, so they
	 * are put back on the queue. Failures are logged and leave the spider in
	 * its constructed state.
	 */
	@SuppressWarnings("unchecked")
	public void readCheckpoint() {
		ObjectInputStream ois = null;
		try {
			// Read from the same file writeCheckpoint() writes to. The path
			// was previously hard-coded to "spider.checkpoint", which broke
			// restoring from any custom checkpoint location.
			ois = new ObjectInputStream(new FileInputStream(config
					.getCheckpointFile()));
			queue = (DownloadQueue) ois.readObject();
			urlsDownloading = (Set<URLToDownload>) ois.readObject();
			queue.queueURLs(urlsDownloading);
			urlsDownloading.clear();
		} catch (Exception e) {
			if (logger.isErrorEnabled()) {
				logger.error("Caught exception reading checkpoint: "
						+ e.getMessage(), e);
			}
		} finally {
			// The original never closed the streams; do so here.
			if (ois != null) {
				try {
					ois.close();
				} catch (IOException ignored) {
					// nothing useful to do on a failed close of a read stream
				}
			}
		}
	}

	/**
	 * Worker loop: pull URLs from the queue, download and parse them, and
	 * queue newly discovered links until the queue drains (with no downloads
	 * in flight anywhere) or stop() is called.
	 */
	public void run() {
		HTMLParser htmlParser = new HTMLParser(config);
		URLGetter urlGetter = new URLGetter(config);

		try {
			while ((queueSize() > 0 || getDownloadsInProgress() > 0) && !quit) {
				checkpointIfNeeded();
				if (queueSize() == 0) {
					if (getDownloadsInProgress() > 0) {
						// Another thread's download may yield new URLs; wait
						// before re-checking whether this thread should stop.
						try {
							Thread.sleep(config.getQueueCheckInterval());
						} catch (InterruptedException ignored) {
							// poll interval cut short; the loop re-checks state
						}
						continue;
					}
					break; // queue empty and nothing in flight: we are done
				}
				URLToDownload nextURL;
				synchronized (queue) {
					nextURL = queue.getNextInQueue();
					if (nextURL == null) {
						// Another thread emptied the queue between checks.
						continue;
					}
					downloadsInProgress++;
				}
				processURL(nextURL, urlGetter, htmlParser);
			}
		} finally {
			logger.info("Spider thread stopping");
			running.decrementAndGet();
		}
	}

	/**
	 * Download one URL, track it as in-flight for checkpointing, and queue the
	 * new URLs it references. Always removes the URL from the in-flight set
	 * and decrements downloadsInProgress, even if the download throws — the
	 * original never decremented on failure, wedging the other threads'
	 * "wait for downloads" loop forever.
	 */
	private void processURL(URLToDownload nextURL, URLGetter urlGetter,
			HTMLParser htmlParser) {
		synchronized (urlsDownloading) {
			urlsDownloading.add(nextURL);
		}
		int newDepth = nextURL.getDepth() + 1;
		int maxDepth = config.getMaxDepth();
		try {
			List<URL> newURLs = filterURLs(downloadURL(nextURL, urlGetter,
					htmlParser));

			List<URLToDownload> u2dsToQueue = new ArrayList<URLToDownload>();
			for (URL u : newURLs) {
				// Schedule if not yet seen and the new depth is within the
				// configured maximum (0 means unlimited depth).
				synchronized (urlsDownloadedOrScheduled) {
					if (!urlsDownloadedOrScheduled.contains(u)
							&& (maxDepth == 0 || newDepth <= maxDepth)) {
						u2dsToQueue.add(new URLToDownload(u, nextURL.getURL(),
								newDepth));
						urlsDownloadedOrScheduled.add(u);
					}
				}
			}
			synchronized (queue) {
				queue.queueURLs(u2dsToQueue);
			}
		} finally {
			// Remove from the in-flight set only after the download finished.
			// The original removed it *before* downloading, so a checkpoint
			// taken mid-download would silently lose the URL on restore.
			synchronized (urlsDownloading) {
				urlsDownloading.remove(nextURL);
			}
			synchronized (queue) {
				downloadsInProgress--;
			}
		}
	}

	/**
	 * Read the number of in-flight downloads under the queue lock, per the
	 * field's documented locking protocol (the original read it unlocked).
	 */
	private int getDownloadsInProgress() {
		synchronized (queue) {
			return downloadsInProgress;
		}
	}

	/**
	 * Get the size of the download queue in a thread-safe manner.
	 */
	private int queueSize() {
		synchronized (queue) {
			return queue.size();
		}
	}

	/**
	 * Get a URL, write it to disk if new, and return new URLs that are
	 * referenced from it.
	 *
	 * @return a List of URL objects; empty for images, unsupported content
	 *         types, or failed downloads
	 */
	@SuppressWarnings("unchecked")
	private List<URL> downloadURL(URLToDownload url, URLGetter urlGetter,
			HTMLParser htmlParser) {
		if (logger.isDebugEnabled()) {
			logger.debug("downloadURL(" + url + ")");
		}

		URLObject obj = new URLObject(url.getURL(), config);
		// Fetch when the object is not on disk yet, or when the config asks
		// for this content type to be refreshed. (Collapses the original
		// three duplicated log-and-fetch branches.)
		boolean fetch;
		if (!obj.existsOnDisk()) {
			fetch = true;
		} else if (config.refreshHTMLs() && (obj.isHTML() || obj.isXML())) {
			fetch = true;
		} else if (config.refreshImages() && obj.isImage()) {
			fetch = true;
		} else {
			fetch = false;
		}
		if (fetch) {
			if (logger.isInfoEnabled()) {
				logger.info("Q: [" + queue + "] " + url);
			}
			obj = urlGetter.getURL(url);
		}

		if (obj == null) {
			return new ArrayList<URL>(); // download failed
		}

		if (!obj.existsOnDisk()) {
			obj.writeToFile();
		}

		if (obj.isHTML() || obj.isXML()) {
			return htmlParser.parseLinksInDocument(url.getURL(), obj
					.getStringContent());
		} else if (obj.isImage()) {
			return new ArrayList<URL>(); // images contain no links
		} else {
			logger.warn("Unsupported content type received: "
					+ obj.getContentType());
			logger.info("URL was " + url);
			return new ArrayList<URL>();
		}
	}

	/**
	 * Drop URLs that are already downloaded/scheduled or that do not contain
	 * the configured match string.
	 */
	private List<URL> filterURLs(List<URL> urls) {
		String match = config.getURLMatch();
		List<URL> retVal = new ArrayList<URL>();

		synchronized (urlsDownloadedOrScheduled) {
			for (URL u : urls) {
				if (urlsDownloadedOrScheduled.contains(u)) {
					continue;
				}

				// Substring match against the external form of the URL.
				if (u.toExternalForm().indexOf(match) != -1) {
					retVal.add(u);
				}
			}
		}
		return retVal;
	}

}
