package crawler;

import java.util.List;
import java.util.HashMap;
import org.htmlparser.util.ParserException;
import work.Job;

/**
 * The IndexPageJob returns a list of all links to news
 * stories on a page of the Proquest news database
 * @author Kevin
 */
public class IndexPageJob implements Job {

	/**
	 * Fetches one index page of the Proquest news database and extracts the
	 * links to individual news stories on it.
	 *
	 * @param parameters the zero-based page number, as an {@code Integer}
	 *                   (Proquest pages hold 10 results each)
	 * @param shared     shared job state; must contain {@code "browser"}
	 *                   (a {@link VirtualBrowser}) and {@code "url"}
	 *                   (the base query URL as a {@code String})
	 * @return a {@code List<String>} of story links on the page, or
	 *         {@code null} if the HTTP request or HTML parse failed
	 *         (failure is logged to stderr; an empty list means the page
	 *         simply contained no links)
	 */
	@Override
	public Object run(Object parameters, HashMap<String, Object> shared) {
		VirtualBrowser browser = (VirtualBrowser) shared.get("browser");
		String url = (String) shared.get("url");
		int page = (Integer) parameters;
		// Proquest paginates 10 results per page via the firstIndex parameter.
		url += "&firstIndex=" + (page * 10);
		System.out.println("Accessing:" + url);

		List<String> links = null;
		try {
			String body = browser.request(false, url, null, null);
			links = NewsCrawler.getLinksFromIndex(body);
			System.out.println("Success(" + links.size() + "):" + url);
		} catch (ParserException e) {
			// Include the exception's own message so failures are diagnosable,
			// not just which URL failed.
			System.err.println("Fail(Parser):" + url + " - " + e.getMessage());
		} catch (IllegalArgumentException e) {
			System.err.println("Fail(Http):" + url + " - " + e.getMessage());
		}
		// null deliberately signals failure to the caller.
		return links;
	}

}
