package indexer;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.List;
import java.util.Vector;

public class Spider extends Thread {
	/** Shared work queue of URLs still to be crawled (Vector => synchronized methods). */
	public static List<String> GlobalURLsToVisit = new Vector<String>();
	/** Shared record of URLs already claimed by some spider. */
	public static List<String> GlobalVisitedURLs = new Vector<String>();

	/**
	 * Reference to this spider's own thread. Cleared to null by stopSpider()
	 * so run() can observe the stop request; volatile for cross-thread
	 * visibility of that write.
	 */
	private volatile Thread thread;

	/** Links this spider has already followed during the current crawl. */
	private List<String> visitedLinks = new Vector<String>();

	/**
	 * Creates and starts the spider thread.
	 *
	 * BUG FIX: the original allocated a *separate* bare {@code new Thread()}
	 * and started it. A bare Thread's run() is a no-op, so Spider.run() never
	 * executed. Since Spider already extends Thread, the spider itself must be
	 * named and started.
	 *
	 * @param spiderIndex index used only to build the thread name.
	 */
	public Spider(int spiderIndex) {
		thread = this;
		setName("[Spider " + spiderIndex + "]");
		start();
	}

	/**
	 * Requests this spider to stop: clears the thread reference (observed by
	 * run()) and interrupts the thread so blocking operations abort promptly.
	 */
	public void stopSpider() {
		Thread tmpThread = thread;
		thread = null;

		if (tmpThread != null) {
			tmpThread.interrupt();
		}
	}

	@Override
	public void run() {
		if (thread == null) {
			return; // stopped before started.
		}
		try {
			// all the run() method's code goes here
			// do some work

			Thread.yield(); // let another thread have some time, perhaps to
							// stop this one.
			if (Thread.currentThread().isInterrupted()) {
				throw new InterruptedException("Stopped by ifInterruptedStop()");
			}
			// do some more work
		} catch (InterruptedException e) {
			// BUG FIX: the original swallowed Throwable silently. Only
			// InterruptedException can escape the try block; restore the
			// interrupt flag so any owner of this thread can observe it.
			Thread.currentThread().interrupt();
		}
	}

	// / <summary>
	// / Takes a single Uri (Url) and returns the catalog that is generated
	// / by following all the links from that point.
	// / </summary>
	// / <remarks>
	// / This is the MAIN method of the indexing system.
	// / </remarks>
	public void BuildCatalog(Object threadID) {
		try {
			URI startPageUri = null;

			// Lock order is GlobalURLsToVisit, then GlobalVisitedURLs. Any
			// other code taking both locks must use the same order, or the
			// spiders can deadlock.
			synchronized (GlobalURLsToVisit) {
				synchronized (GlobalVisitedURLs) {
					if (GlobalURLsToVisit.isEmpty()) {
						stopSpider();
						return;
					}
					// Mark the URL visited *before* releasing the locks so no
					// other spider can claim the same URL.
					GlobalVisitedURLs.add(GlobalURLsToVisit.get(0));
					startPageUri = new URI(GlobalURLsToVisit.remove(0));
				}
			}

			if (startPageUri != null) {
				resetLists();

				ProcessUri(startPageUri, 0);
			}
		} catch (Exception e) {
			// ProgressEvent(new ProgressEventArgs(e));
		}
	}

	/** Clears this spider's per-crawl record of followed links. */
	private void resetLists() {
		visitedLinks = new Vector<String>();
	}

	// / <summary>
	// / GETS THE FIRST DOCUMENT, AND STARTS THE SPIDER!
	// / RECURSIVE CALL: downloads uri, parses it, then recurses into each of
	// / its local links until Common.RecursionLimit is reached or a global
	// / stop is requested.
	// / </summary>
	protected int ProcessUri(URI uri, int level) {
		// Depth cap and cooperative global shutdown check.
		if (level > Common.RecursionLimit /* || _localPagesCount > 10000 */
				|| CrawlingManager.ShouldStopThreads) {
			return Common.RecursionLimit;
		}

		String url = uri.toString();

		if (!visitedLinks.contains(url)) {
			visitedLinks.add(url);

			// Download may return null (unsupported MIME type or I/O failure).
			Document downloadDocument = Download(uri);
			if (null != downloadDocument) {
				downloadDocument.Parse();
			}

			// ### Loop through the 'local' links in the document and parse
			// each of them recursively ###
			// getLocalLinks() is null when the Robots meta forbids following.
			if (null != downloadDocument
					&& null != downloadDocument.getLocalLinks()) {
				for (String link : downloadDocument.getLocalLinks()) {
					try {
						// Resolve the (possibly relative) link against the
						// page it came from, then recurse one level deeper.
						ProcessUri(new URL(downloadDocument.getUri().toURL(),
								link).toURI(), level + 1);
					} catch (Exception ex) {
						// Malformed link: skip it and keep crawling.
						// ProgressEvent(new ProgressEventArgs(new
						// Exception(" new Uri(" + downloadDocument.Uri + ", " +
						// link + ") invalid : ", ex)));
					}
				}
			} // process local links
		} // robot allowed and not visited
		return level;
	}// ProcessUri

	// / <summary>
	// / Attempts to download the Uri and (based on its MimeType) use the
	// / DocumentFactory to get a Document subclass object that is able to
	// / parse the downloaded data. Returns null on failure or when no parser
	// / exists for the content type.
	// / </summary>
	private Document Download(URI uri) {
		URL url;
		try {
			url = uri.toURL();
		} catch (MalformedURLException e1) {
			e1.printStackTrace();
			return null;
		}
		Document htmldoc = null;

		// BUG FIX: the original built a throwaway anonymous HttpURLConnection
		// subclass here that was immediately overwritten below — dead code.
		HttpURLConnection connection = null;

		try {
			connection = (HttpURLConnection) url.openConnection();
			connection.setRequestMethod("GET");
			connection.setReadTimeout(3000);
			connection.setInstanceFollowRedirects(true);
			// BUG FIX: setDefaultUseCaches(true) changes the JVM-wide default
			// for every future URLConnection; setUseCaches only affects this
			// connection, which is what was intended.
			connection.setUseCaches(true);

			htmldoc = DocumentFactory.New(uri, connection.getContentType());
			// BUG FIX: DocumentFactory.New may return null (callers of
			// Download null-check its result) — guard against the NPE.
			if (null != htmldoc) {
				htmldoc.GetResponse(connection);
			}
		} catch (IOException e) {
			// Covers MalformedURLException too (it is an IOException subclass).
			System.err.println("Couldn't establish connection!");
		} finally {
			// BUG FIX: openConnection() may have thrown before assignment.
			if (connection != null) {
				connection.disconnect();
			}
		}

		return htmldoc;
	}
}
