package traverser;

import java.util.concurrent.Semaphore;

/**
 * Coordinates a priority-driven traversal from a start Wikipedia page toward an
 * end page, farming link-priority computation out to a fixed pool of
 * {@link WorkerThread}s. The master pops the best candidate page from
 * {@code toVisit}, hands each of its links to an idle worker (which calls back
 * into {@link #computePriority(String)}), and stops once some worker observes a
 * direct link to the end page.
 *
 * <p>Locking discipline: callers synchronize on {@code toVisit} and
 * {@code visited} before touching them; {@code traverseComplete} is accessed
 * only through the synchronized accessors; {@code idle_workers} counts workers
 * available to accept a link.
 */
public class MasterThread extends TrieTraverser {

	// Pages still to be visited, ordered by priority (shared with workers; lock on it).
	public PriorityParentQueue toVisit;
	// Scores keywords of candidate links against the end page.
	public KeyTracker tracker;
	// Pages already seen, each recorded with its parent (lock on it before use).
	public WikiTrie visited;
	// Set when a worker finds a direct link to endPage; guarded by this object's monitor.
	public boolean traverseComplete = false;

	// Suppress per-page console output when true.
	public static boolean quiet = false;
	// Fraction of a page's links that must be examined before the master may
	// abandon the page for a higher-priority one in the queue.
	public static double view_threshold = 0.0;
	// Selects the KeyTracker ranking scheme (passed to its constructor).
	public static boolean new_ranks = true;

	private WorkerThread [] workers;
	private final static int POOL_SIZE = 12;
	// Counts idle workers; acquired before seeding a link, released by the worker when done.
	public Semaphore idle_workers = new Semaphore(POOL_SIZE);

	/**
	 * Builds the traversal state and the worker pool (threads are created here
	 * but not started until {@link #traverse()}).
	 *
	 * @param startPage URL of the page to start from
	 * @param endPage   URL of the page to search for
	 */
	public MasterThread(String startPage, String endPage) {
		super(startPage, endPage);
		toVisit = new PriorityParentQueue();
		tracker = new KeyTracker(endPage, new_ranks);
		visited = new WikiTrie();

		// Create the thread pool
		workers = new WorkerThread[POOL_SIZE];
		for (int i = 0; i < POOL_SIZE; i++) {
			workers[i] = new WorkerThread();
		}

		// All workers report back to this master instance.
		WorkerThread.master = this;
	}

	/**
	 * Computes the keyword priority of a page: the sum of the tracker scores of
	 * every keyword in every outgoing link, normalized by the link count so
	 * that large pages are not disproportionately attractive. As a side effect,
	 * if the page links directly to the end page, records that edge in
	 * {@code visited} and marks the traversal complete.
	 *
	 * <p>Called concurrently by worker threads; shared state is guarded here.
	 *
	 * @param page URL of the page to score
	 * @return the normalized priority (0 for a page with no links)
	 */
	public int computePriority(String page) {

		// Visit a page, check if it's the last link, and compute its priority
		CacheLinkProcessor lp = new CacheLinkProcessor(page);
		// Note: duplicates are important, and order doesn't matter.
		int priority = 0;
		int numLinks = 0;

		for (String link : lp.getLinks()) {
			if (link.equals(endPage)) { // We're done
				synchronized (visited) {
					visited.add(endPage, page);
				}
				setTraverseComplete();
			}
			numLinks++;

			for (String key : KeyTracker.splitAndReduce(link)) {
				priority += tracker.priorityOf(key);
			}
		}
		// Normalize priority, so that large pages aren't disproportionately
		// more attractive. Scale by 100 first so integer division keeps some
		// granularity.
		priority *= 100;
		if (numLinks != 0)
			priority /= numLinks;
		else
			priority = 0;

		if (!quiet) {
			System.out.println(CacheLinkProcessor.shortName(page) + "\t\tpriority " + priority);
		}
		return priority;
	}

	// Marks the traversal finished; synchronized so the flag write is visible
	// to the master thread polling checkTraverseComplete().
	private synchronized void setTraverseComplete () {
		traverseComplete = true;
	}

	// Synchronized read paired with setTraverseComplete() for visibility.
	private synchronized boolean checkTraverseComplete() {
		return traverseComplete;
	}

	/**
	 * Prints the reverse search path from the end page back to the start page,
	 * or an error if the traversal has not completed.
	 */
	public void printPath() {
		synchronized (visited) {
			if (!visited.contains(endPage)) {
				System.err.println("Error: traverse not yet run!");
				return;
			}

			System.out.println("Reverse search path:");
			Stack<String> path = visited.getSearchPath(endPage);
			int i = 0;
			for (String str : path) {
				System.out.println(i + ": " + str);
				i++;
			}
			System.out.println("Search path length: " + path.count());
		}
	}

	/**
	 * @return the length of the found search path, or -1 if the traversal has
	 *         not completed.
	 */
	public int getPathLength() {
		// Guard visited like printPath() does; workers may still be mutating it.
		synchronized (visited) {
			if (!visited.contains(endPage)) {
				System.err.println("Error: traverse not yet run!");
				return -1;
			}
			return visited.getSearchPath(endPage).count();
		}
	}

	// Signals every worker to stop and blocks until all have exited.
	private void drainThreads() {
		for (int i = 0; i < workers.length; i++) {
			workers[i].running = false;
		}
		for (int i = 0; i < workers.length; i++) {
			try {
				workers[i].join();
			} catch (InterruptedException e) {
				// Restore the interrupt flag before bailing out.
				Thread.currentThread().interrupt();
				e.printStackTrace();
				System.exit(1);
			}
		}
	}

	/**
	 * Runs the traversal from {@code startPage} toward {@code endPage}.
	 *
	 * @return the number of pages visited on success, 0 if MAX_VISITS was
	 *         exceeded, or -1 if the queue ran dry without finding the target.
	 */
	public int traverse () {
		if (!quiet) {
			System.out.println("Start page: " + startPage);
			System.out.println("End   page: " + endPage);
			System.out.println("Traversing...");
		}

		// Start worker threads
		for (int i = 0; i < POOL_SIZE; i++) {
			workers[i].start();
		}

		int count = 0; // Count of visited pages
		if (endPage.equals(startPage)) {
			return count;
		}

		count++; // The first page counts

		visited.add(startPage, null);
		toVisit.add(startPage, null, computePriority(startPage));


		while (toVisit.hasNext()) {
			// Get a page and its priority from the queue
			int currentPriority;
			String currentPage;
			synchronized (toVisit) {
				currentPriority = toVisit.topPriority();
				currentPage = toVisit.pop();
			}

			CacheLinkProcessor lp = new CacheLinkProcessor(currentPage);
			lp.removeDuplicates();
			lp.knuthShuffle();

			if (!quiet) {
				System.out.println(count + ": Examining " + currentPage
						+ ", priority " + currentPriority + ", " + lp.count()
						+ " unique links");
			}

			String [] links = lp.getLinks();
			// BUGFIX: the cast must cover the whole product. The old form
			// "(int) view_threshold * links.length" truncated the threshold to
			// 0 first, so min_views was always 0 for any threshold < 1.0.
			int min_views = (int) (view_threshold * links.length);
			for (int j = 0; j < links.length; j++) {
				if (count > MAX_VISITS) {
					drainThreads();
					System.err.println("Visited " + MAX_VISITS + " pages without success.  Aborting traversal...");
					return 0;
				}


				// Don't even look at pages we've already been to.
				synchronized (visited) {
					if (visited.contains(links[j])) {
						continue;
					}
					visited.add(links[j], currentPage);
				}
				count++;

				// Wait for a worker to go idle
				try {
					idle_workers.acquire();
				} catch (InterruptedException e) {
					// Restore the interrupt flag before bailing out.
					Thread.currentThread().interrupt();
					e.printStackTrace();
					System.exit(1);
				}

				// First, check if we're done, or if we should move on
				if (checkTraverseComplete()) {
					drainThreads();
					return count;
				}
				// Abandon this page early only after min_views links, and only
				// if the queue now holds something better. Lock toVisit: the
				// workers mutate it concurrently.
				boolean foundBetter;
				synchronized (toVisit) {
					foundBetter = toVisit.topPriority() > currentPriority;
				}
				if (j > min_views && foundBetter) {
					idle_workers.release();
					break;
				}

				// Find the idle worker
				for (int i = 0; i < workers.length; i++) {
					synchronized (workers[i]) {
						if (workers[i].idle) {
							workers[i].seedLink(links[j], currentPage);
							break;
						}
					}
				}
			}

			// If we are here, then either currentPage was exhausted, or we
			// found a higher priority page
			boolean higherPriorityExists;
			synchronized (toVisit) {
				higherPriorityExists = toVisit.topPriority() > currentPriority;
			}
			if (higherPriorityExists) {
				if (!quiet) {
					System.out.println("Found higher priority");
				}
				// If currentPage is not exhausted, then we might want to come
				// back to it later.
				synchronized (toVisit) {
					toVisit.add(currentPage, visited.parentOf(currentPage),
							currentPriority);
				}
			} else {
				if (!quiet) {
					System.out.println("Exhausted page");
				}
			}


			// Make sure we have another page to visit: spin until every worker
			// has finished its current link (workers may still be adding to
			// toVisit).
			for (int i = 0; i < workers.length; i++) {
				while (!workers[i].idle) {
					Thread.yield();
				}
			}


		}

		// If we get here, either we have a bug or wikipedia has a closed loop.
		System.out.println("Looks like we exhausted wikipedia.");

		return -1;
	}

	/**
	 * Benchmark driver: runs the same start/end traversal once per
	 * view_threshold setting and reports steps, metrics, path, and wall time.
	 */
	public static void main(String[] args) {
		String start, end;

//		start = FrequencyCollector.getRandomPage();
//		end = FrequencyCollector.getRandomPage();

		start = "http://en.wikipedia.org/wiki/Photograph";
		end = "http://en.wikipedia.org/wiki/Apple";

		double [] percents = {0, .1, .2, .3, .4, .5};

		for (int i = 0; i < percents.length; i++) {
			MasterThread.view_threshold = percents[i];

			long startTime = System.currentTimeMillis();
			MasterThread test = new MasterThread (start, end);

			System.out.println("Found target in " + test.traverse() + " steps.");

			long endTime = System.currentTimeMillis();
			test.printMetrics();
			test.printPath();
			System.out.println("Total time: " + (endTime - startTime));
		}
	}

}
