package example.norecursive.crawler;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;

/**
 * Sequential mock-up crawler: visits each URL in {@link #urls} one after the
 * other, simulating slow work with {@link #consumeTime()}. Serves as the
 * non-concurrent baseline for the parallel crawler variants in this package.
 */
public class MockUpCrawlerSequencial {

	private final List<String> urls = Arrays.asList("http://www.mysite.com",
			"http://www.yoursite.com");

	/**
	 * Crawls every configured URL sequentially, printing a start and a
	 * completion message for each one.
	 *
	 * @throws InterruptedException
	 *             if the thread is interrupted while simulating work
	 * @throws ExecutionException
	 *             kept in the signature for parity with the concurrent
	 *             crawler variants; never thrown here
	 */
	public void doCrawl() throws InterruptedException, ExecutionException {

		// Runs crawling tasks one at a time
		for (final String url : urls) {

			// Your crawling magic goes here:
			System.out.println("Crawling something in " + url);
			consumeTime();

			// Mock result: always success.
			boolean status = true;

			// BUG FIX: previously printed urls.get(count) with count reset to
			// 0 on every iteration, so the FIRST url was always reported.
			// Report the url that was actually crawled.
			System.out.println("Crawling of " + url
					+ " finished with status " + status);
		}
	}

	public static void main(String[] args) throws InterruptedException,
			ExecutionException {
		// BUG FIX: previously instantiated MockUpCrawler (a different class),
		// so this sequential demo never ran its own implementation.
		MockUpCrawlerSequencial crawler = new MockUpCrawlerSequencial();
		crawler.doCrawl();
	}

	/**
	 * simulates time consuming work (about 10 seconds: 100 sleeps of 100 ms)
	 * 
	 * @throws InterruptedException
	 *             in case calling thread is interrupted due to task being
	 *             canceled
	 */
	private void consumeTime() throws InterruptedException {
		double temp = 0;
		for (int i = 0; i < 100; i++) {
			Thread.sleep(100); // Do not catch InterruptedException, it is what
								// enables task cancelaton
			temp = Math.cos(Math.random());
		}
		System.out.println(temp);
	}

}
