package traverser;

/**
 * BreadthFirstLastTraversal.java
 * 
 * Proof-of-concept client for SIGART's wikicrawler project.
 * 
 * Traverses between two arbitrary wikipedia pages using a bidirectional
 * breadth-first graph search (expanding from both the start page and the
 * target page simultaneously).
 * 
 * Modified: 10/02/11
 */

// WARNING: this implementation has known correctness issues and does not
// reliably find a path between two pages; use with caution.

/**
 * Bidirectional breadth-first traversal between two Wikipedia pages:
 * searches forward from {@code startPage} and backward from {@code endPage}
 * in alternation, stopping when the two frontiers meet or the forward search
 * reaches the target directly.
 *
 * NOTE(review): this class is known to misbehave; the suspected defects are
 * flagged inline in {@link #traverse()}.
 */
public class BreadthFirstLastTraversal implements Traverser {

	// Information about the traverse

	// Full wikipedia URL the forward search starts from.
	private String startPage = "";

	// Full wikipedia URL the backward search starts from (i.e. the target).
	private String endPage = "";
	// Queue of pages to visit from the start (forward frontier).
	// ParentTrackQueue appears to associate each pushed entry with the
	// "current parent" set via addParent() — TODO confirm against
	// ParentTrackQueue's contract.
	private ParentTrackQueue<String> front_toVisit = null;
	// Queue of pages to visit from the target (backward frontier).
	private ParentTrackQueue<String> back_toVisit = null;

	// Pages already visited by the forward search.
	private WikiTrie front_visited = null;
	// Pages already visited by the backward search.
	private WikiTrie back_visited = null;
	// Pages reached by the backward search whose link from the parent is
	// one-way only (the page does not link back to its parent); treated as
	// dead ends and never expanded again.
	private WikiTrie back_uniLinks = null;

	// Constructor
	// startPage and endPage should be complete URLs for wikipedia,
	// e.g. http://en.wikipedia.org/wiki/Robot
	public BreadthFirstLastTraversal(String startPage, String endPage) {
		this.startPage = startPage;
		this.endPage = endPage;

		front_toVisit = new ParentTrackQueue<String>();
		back_toVisit = new ParentTrackQueue<String>();

		// Seed each frontier with its root page; the roots have no parent.
		front_toVisit.addParent(null);
		front_toVisit.push(startPage);

		back_toVisit.addParent(null);
		back_toVisit.push(endPage);

		front_visited = new WikiTrie();
		back_visited = new WikiTrie();
		back_uniLinks = new WikiTrie();
	}

	// Print global fetch/cache statistics accumulated by LinkProcessor.
	public void printMetrics() {
		LinkProcessor.printMetrics();
	}

	// Traverse wikipedia, starting at startPage and attempting to find endPage,
	// using a BFS.
	// returns the number of pages accessed prior to finding the target
	// (front + back counts combined), or -1 if both frontiers were exhausted.
	@Override
	public int traverse() {
		int front_count = 0;
		int back_count = 0;
		// Trivial case: start and target are the same page.
		if (endPage.equals(startPage)) {
			return 0;
		}
		front_count++; // The first page counts
		back_count++;

		// Idea: pop a page off the queue and add it to the visited list,
		// get all of the links from that page. If the target page
		// is among them, then stop; otherwise, add all the links
		// to the queue and repeat.
		String front_currentPage = "";
		String back_currentPage = "";

		// NOTE(review): both pops happen in the loop condition, so the loop
		// terminates as soon as EITHER queue is empty, even if the other
		// still has unexplored pages. Also, every `continue` below discards
		// the page(s) already popped here without processing them — in
		// particular, a front-side `continue` silently drops the popped
		// back page. This is the most likely cause of the "not working
		// correctly" warning at the top of the file.
		while ((front_currentPage = front_toVisit.pop()) != null
				&& (back_currentPage = back_toVisit.pop()) != null) {
			// Visit the page on the front
			if (!front_visited.contains(front_currentPage)) {
				// Record the page together with the parent it was reached
				// from, so a path can be reconstructed later.
				front_visited.add(front_currentPage,
						front_toVisit.currentParent());

				// Scrape the links from the page and preprocess them
				CacheLinkProcessor front_lp = new CacheLinkProcessor(
						front_currentPage);
				front_lp.removeDuplicates();
				front_lp.knuthShuffle();

				System.out.println(front_count + ": Front Visiting page "
						+ front_currentPage + " - " + front_lp.count()
						+ " links.");

				// All links found below are children of the current page.
				front_toVisit.addParent(front_currentPage);
				// Check each outgoing link for a direct hit on the target,
				// or an intersection with the backward search's visited set.
				for (String link : front_lp.getLinks()) {
					if (link.equals(endPage)) {
						front_count++; // count the found target page itself
						System.out.println(front_count + back_count
								+ ": Found target page " + endPage + ".");
						return front_count + back_count;
					} else if (back_visited.contains(link)) {
						// The frontiers met: this link was already reached
						// by the backward search.
						// NOTE(review): only the backward half of the path
						// is printed here; the forward half (via
						// front_visited) is not reported — TODO confirm
						// whether that is intentional.
						System.out.println("Found an intersection at " + link);
						System.out.println("Intersection path:");
						for (String str : back_visited.getSearchPath(link)) {
							System.out.println(str);
						}
						front_count++;
						return back_count + front_count;
					}
					front_toVisit.push(link);
				}
				front_count++;

			} else {
				// NOTE(review): this `continue` also discards the back page
				// popped in the loop condition above — see the loop comment.
				System.out.println("Visited contains " + front_currentPage);
				continue;
			}

			// System.out.println("Backsolve");

			// Visit a page from the back
			if (!back_visited.contains(back_currentPage)
					&& !back_uniLinks.contains(back_currentPage)) {
				// Scrape and process links from the page
				CacheLinkProcessor back_lp = new CacheLinkProcessor(
						back_currentPage);
				back_lp.removeDuplicates();
				back_lp.knuthShuffle();

				// Check if the link we followed to get here is bidirectional
				// (i.e. this page links back to the parent we came from);
				// only bidirectional links are usable as forward-path edges.
				boolean biDirectional = false;
				for (String link : back_lp.getLinks()) {
					if (link.equals(back_toVisit.currentParent())) {
						biDirectional = true;
					}
				}

				// One-way link (and not the root target page): mark as a
				// dead end and skip expansion.
				if (!back_currentPage.equals(endPage) && !biDirectional) {
					back_uniLinks.add(back_currentPage, null);
					continue;
				}

				back_visited
						.add(back_currentPage, back_toVisit.currentParent());

				System.out.println(back_count + ": Target Visiting page "
						+ back_currentPage + " - " + back_lp.count()
						+ " links.");

				back_toVisit.addParent(back_currentPage);

				// Check whether the current back page itself has already
				// been seen or queued by the forward search (an
				// intersection of the two frontiers).
				if (front_visited.contains(back_currentPage)
						|| front_toVisit.contains(back_currentPage)) {
					System.out.println("Found an intersection at "
							+ back_currentPage);
					System.out.println("Intersection path:");
					for (String str : back_visited
							.getSearchPath(back_currentPage)) {
						System.out.println(str);
					}
					front_count++;
					return back_count + front_count;
				}

				// Enqueue all outgoing links for further backward expansion.
				for (String link : back_lp.getLinks()) {
					back_toVisit.push(link);
				}
				back_count++;
			} else {
				System.out.println("Visited contains " + back_currentPage);
				continue;
			}

		}
		// If we get here, either we have a bug or wikipedia has a closed loop.
		System.out.println("Looks like we exhausted wikipedia.");
		return -1;
	}

	// Test client: traverse from Robot to EMIEW (what's an emiew? good
	// question!)
	// Optionally takes two page names as args, which are appended to the
	// standard wikipedia URL prefix.
	public static void main(String[] args) {
		String start = "http://en.wikipedia.org/wiki/Robot";
		String end = "http://en.wikipedia.org/wiki/EMIEW";
		if (args.length >= 2) {
			start = "http://en.wikipedia.org/wiki/" + args[0];
			end = "http://en.wikipedia.org/wiki/" + args[1];
		}
		BreadthFirstLastTraversal test = new BreadthFirstLastTraversal(start,
				end);
		System.out.println("Found target in " + test.traverse() + " steps.");
		test.printMetrics();
	}
}
