package com.gash.scrape;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.Writer;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import com.gash.scrape.data.Link;
import com.gash.scrape.data.Page;

public class RunScrape {
	// Sink for scraped page records, one page per line.
	private static Writer output;
	// Lower-cased URLs already scraped, used to avoid revisiting pages.
	private static Set<String> pages;

	/**
	 * Scrapes the English-Wikipedia MongoDB article and a bounded number of
	 * pages reachable from it, writing one page record per line.
	 *
	 * @param args args[0] = max links to follow per page,
	 *             args[1] = max recursion depth,
	 *             args[2] = output file path
	 * @throws Exception on scrape or I/O failure
	 */
	public static void main(String[] args) throws Exception {
		// Guard against missing arguments before indexing args[2].
		if (args.length < 3) {
			System.err.println("Usage: RunScrape <linksPerPage> <depth> <outputFile>");
			return;
		}

		RunScrape r = new RunScrape();
		pages = new HashSet<String>();

		// try-with-resources guarantees the writer is flushed and closed even
		// if scraping throws part-way through (previously it leaked on error).
		try (Writer out = new BufferedWriter(new FileWriter(args[2]))) {
			output = out;
			r.scrapePages(Integer.parseInt(args[0]), Integer.parseInt(args[1]));
		}
	}

	/**
	 * Scrapes the seed page and recursively traverses its outgoing links.
	 *
	 * @param linksPerPage maximum links to follow from each page
	 * @param depth        maximum recursion depth from the seed
	 * @throws Exception on scrape or I/O failure
	 */
	private void scrapePages(int linksPerPage, int depth) throws Exception {
		File swf = new File("resources/stopwords-long.txt");
		Scraper s = new Scraper(swf);

		// Seed page: every other page is reached from here.
		Page pg = s.scrape("http://en.wikipedia.org/wiki/MongoDB");
		// Locale.ROOT keeps URL case-folding locale-independent
		// (default-locale toLowerCase misbehaves under e.g. Turkish locales).
		pages.add(pg.getUrl().toLowerCase(Locale.ROOT));
		output.write(pg.toString() + "\n");

		traverse(pg, s, linksPerPage, depth);
	}

	/**
	 * Depth-first traversal: follows up to {@code linksPerPage} not-yet-seen
	 * Wikipedia article links from {@code pg}, writing each scraped page and
	 * recursing with {@code depth - 1}.
	 *
	 * @param pg           page whose links are followed (no-op when null)
	 * @param scraper      scraper used to fetch and parse pages
	 * @param linksPerPage maximum links to follow from each page
	 * @param depth        remaining recursion depth
	 * @throws Exception on scrape or I/O failure
	 */
	private void traverse(Page pg, Scraper scraper, int linksPerPage, int depth) throws Exception {
		// Stop conditions: depth exhausted (<= also guards against a negative
		// depth argument, which would otherwise recurse forever) or no page.
		if (depth <= 0 || pg == null)
			return;

		int linksRemaining = linksPerPage;

		for (Link link : pg.getLinks().values()) {
			// Check BEFORE scraping: the old post-write check meant each level
			// scraped and wrote linksPerPage + 1 pages instead of linksPerPage.
			if (linksRemaining == 0)
				break;

			String url = link.getUrl();

			// Skip empty links and self/anchor links back into this page.
			if (url == null || url.startsWith(pg.getUrl()))
				continue;

			// Limit to English-Wikipedia article pages; a ':' past the scheme's
			// own colon (index >= 5) marks namespace pages like "File:"/"Talk:".
			if (url.indexOf("en.wikipedia.org") == -1 || url.indexOf(':', 5) != -1)
				continue;

			// Don't scrape pages we already have.
			if (pages.contains(url.toLowerCase(Locale.ROOT)))
				continue;

			Page apg = scraper.scrape(url);

			// Skip nonexistent or unparseable pages; apg is now null-guarded
			// (pg was guarded above but apg previously was not).
			if (apg == null || apg.getTitle() == null)
				continue;

			pages.add(apg.getUrl().toLowerCase(Locale.ROOT));
			output.write(apg.toString() + "\n");
			linksRemaining--;

			traverse(apg, scraper, linksPerPage, depth - 1);
		}
	}

}
