package traverser;

/**
 * BasicTraverser.java
 *
 * Proof-of-concept client for SIGART's wikicrawler project
 *
 * Traverses between two arbitrary wikipedia pages using a naive breadth-first graph search.
 *
 * Author: Dan Roberts
 * Modified: 9/19/11
 */

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;

public class BasicTraverser implements Traverser {

	// Endpoints of the traversal. Both should be complete wikipedia URLs,
	// e.g. http://en.wikipedia.org/wiki/Robot
	protected String startPage = "";
	protected String endPage = "";

	// BFS frontier: pages queued for visiting.
	protected Queue<String> toVisit = null;

	// Pages we have already visited. A HashSet gives O(1) membership
	// checks; the previous ArrayList made contains() O(n), which turned
	// the whole traversal quadratic as the visited list grew.
	private Set<String> visited = null;

	/**
	 * Constructs a traverser between two pages.
	 *
	 * @param startPage complete wikipedia URL to start from
	 * @param endPage   complete wikipedia URL to search for
	 */
	public BasicTraverser(String startPage, String endPage) {
		this.startPage = startPage;
		this.endPage = endPage;

		toVisit = new Queue<String>();
		toVisit.push(startPage);

		visited = new HashSet<String>();
	}

	/** Prints the link processor's accumulated metrics to stdout. */
	public void printMetrics() {
		CacheLinkProcessor.printMetrics();
	}

	/**
	 * Traverses wikipedia breadth-first, starting at startPage and
	 * attempting to find endPage.
	 *
	 * Repeatedly pops a page off the queue, marks it visited, and scans its
	 * links: if the target is among them the search stops; otherwise every
	 * link is queued and the loop repeats.
	 *
	 * @return the number of pages accessed up to and including the target;
	 *         0 if startPage equals endPage; -1 if the queue empties
	 *         without finding the target
	 */
	@Override
	public int traverse() {
		int count = 0;
		if (endPage.equals(startPage)) {
			return count;
		}
		count++; // The first page counts

		String currentPage = "";
		while ((currentPage = toVisit.pop()) != null) {
			// Links are pushed without de-duplication, so the same page can
			// be popped more than once; skip anything already processed.
			if (visited.contains(currentPage)) {
				System.out.println("Visited contains " + currentPage);
				continue;
			}
			visited.add(currentPage);
			// NOTE(review): the meaning of the second constructor argument
			// (123456) is not visible from this file -- presumably a cache
			// size or seed; confirm against CacheLinkProcessor.
			CacheLinkProcessor lp = new CacheLinkProcessor(currentPage, 123456);
			lp.removeDuplicates();
			lp.knuthShuffle();
			System.out.println(count + ": Visiting page " + currentPage + " - "
					+ lp.count() + " links.");
			for (String link : lp.getLinks()) {
				if (link.equals(endPage)) {
					count++; // The target page counts
					System.out.println(count + ": Found target page " + endPage
							+ ".");
					return count;
				}
				toVisit.push(link);
			}
			count++;
		}
		// Queue exhausted without finding the target: either we have a bug
		// or the reachable portion of wikipedia is a closed component.
		System.out.println("Looks like we exhausted wikipedia.");
		return -1;
	}

	/**
	 * Test client: traverses from Robot to EMIEW by default, or between the
	 * two page names supplied on the command line (what's an EMIEW? good
	 * question!).
	 */
	public static void main(String[] args) {
		String start = "http://en.wikipedia.org/wiki/Robot";
		String end = "http://en.wikipedia.org/wiki/EMIEW";
		if (args.length >= 2) {
			start = "http://en.wikipedia.org/wiki/" + args[0];
			end = "http://en.wikipedia.org/wiki/" + args[1];
		}
		BasicTraverser test = new BasicTraverser(start, end);
		System.out.println("Found target in " + test.traverse() + " steps.");
		test.printMetrics();
	}

}
