package traverser;

/**
 * TrieTraverser.java
 *
 * Extension of BFS traverser with two differences: 1) Uses a trie instead of a
 * queue to keep track of visited pages 2) Keeps track of search order and
 * prints it at the end
 *
 * Author: Dan Roberts
 * Last Modified: 10/10/11
 */

public class TrieTraverser extends BasicTraverser {

	/** Pages already visited; stores each page with its parent so the search path can be rebuilt. */
	protected WikiTrie visited;
	/** BFS frontier; tracks which parent page each queued link was discovered from. */
	protected ParentTrackQueue<String> toVisit;

	/**
	 * Creates a traverser that searches from {@code startPage} toward
	 * {@code endPage}.
	 *
	 * @param startPage URL of the page to start the search from
	 * @param endPage   URL of the target page
	 */
	public TrieTraverser(String startPage, String endPage) {
		super(startPage, endPage);
		toVisit = new ParentTrackQueue<String>();
		visited = new WikiTrie();
	}

	/**
	 * Prints the path followed to get from start to end, in reverse order
	 * (target first), followed by the path length. Prints an error instead
	 * if {@link #traverse()} has not yet stored the target in {@code visited}.
	 */
	public void printPath() {
		if (!visited.contains(endPage)) {
			System.err.println("Error: traverse not yet run!");
			return;
		}

		System.out.println("Reverse search path:");
		Stack<String> path = visited.getSearchPath(endPage);
		int i = 0;
		for (String str : path) {
			System.out.println(i + ": " + str);
			i++;
		}
		System.out.println("Search path length: " + path.count());
	}

	/**
	 * Returns the search path from start to end as recorded by the trie.
	 *
	 * @return the path as a stack (target on top), or {@code null} if
	 *         {@link #traverse()} has not yet found the target
	 */
	public Stack<String> getPath() {
		if (!visited.contains(endPage)) {
			System.err.println("Error: traverse not yet run!");
			return null;
		}
		return visited.getSearchPath(endPage);
	}

	/**
	 * Traverses wikipedia with a BFS, starting at {@code startPage} and
	 * attempting to find {@code endPage}, printing progress as it goes.
	 *
	 * @return the number of pages accessed while finding the target
	 *         (including the target itself), or -1 if the search exhausted
	 *         the queue without finding it
	 */
	public int traverse() {
		return traverse(false);
	}

	/**
	 * Traverses wikipedia with a BFS, starting at {@code startPage} and
	 * attempting to find {@code endPage}.
	 *
	 * @param quiet if true, suppress per-page progress output
	 * @return the number of pages accessed while finding the target
	 *         (including the target itself), or -1 if the search exhausted
	 *         the queue without finding it
	 */
	public int traverse(boolean quiet) {
		int count = 0;
		if (endPage.equals(startPage)) {
			// Bug fix: record the trivial path so a later printPath()/getPath()
			// does not report "traverse not yet run!". A normal run also stores
			// the start page with a null parent (via addParent(null) below), so
			// this matches the existing WikiTrie usage.
			visited.add(endPage, null);
			return count;
		}
		count++; // The first page counts
		toVisit.addParent(null);
		toVisit.push(startPage);

		// Idea: pop a page off the queue and add it to the visited list,
		// get all of the links from that page. If the target page
		// is among them, then stop; otherwise, add all the links
		// to the queue and repeat.
		String currentPage = "";
		while ((currentPage = toVisit.pop()) != null) {
			if (visited.contains(currentPage)) {
				continue;
			}

			// currentParent() is assumed to yield the parent the popped page
			// was pushed under (null for the start page).
			visited.add(currentPage, toVisit.currentParent());
			CacheLinkProcessor lp = new CacheLinkProcessor(currentPage);
			lp.removeDuplicates();
			lp.knuthShuffle();

			if (!quiet) {
				System.out.println(count + ": Visiting page " + currentPage + " - "
						+ lp.count() + " links.");
			}

			if (lp.count() <= 0) {
				// Bug fix: a dead-end page was still accessed, so count it;
				// without this increment the next visited page would be printed
				// with a duplicate counter value and the final total would be low.
				count++;
				continue;
			}

			toVisit.addParent(currentPage);
			for (String link : lp.getLinks()) {
				if (link.equals(endPage)) {
					visited.add(endPage, currentPage);
					count++; // The target page counts
					if (!quiet) {
						System.out.println(count + ": Found target page " + endPage + ".");
					}
					return count;
				}
				toVisit.push(link);
			}
			count++;
		}

		// If we get here, either we have a bug or wikipedia has a closed loop.
		System.out.println("Looks like we exhausted wikipedia.");

		return -1;
	}

	/**
	 * Test client: traverse from Robot to EMIEW (what's an emiew? good
	 * question!), or between the two pages named on the command line.
	 */
	public static void main(String[] args) {
		String start = "http://en.wikipedia.org/wiki/Robot";
		String end = "http://en.wikipedia.org/wiki/EMIEW";
		if (args.length >= 2) {
			start = "http://en.wikipedia.org/wiki/" + args[0];
			end = "http://en.wikipedia.org/wiki/" + args[1];
		}

		TrieTraverser test = new TrieTraverser(start, end);
		System.out.println("Found target in " + test.traverse() + " steps.");
		test.printMetrics();
		test.printPath();
	}

}
