package tools.webkit;

import java.util.ArrayList;
import java.util.List;

/**
 * A simple multi-threaded web crawler: worker threads pull URLs from a
 * shared queue and collect the downloaded pages.
 * @author Lanjun
 */
public class Crawler {
	/** Work queue of URLs still to be fetched; consumed destructively, guarded by getOneURL(). */
	private List<String> urls = null;
	/** Pages downloaded so far; ArrayList is not thread-safe, guarded by add(). */
	private List<Webpage> webpages;
	/** Number of worker threads to spawn; always at least 1. */
	private int numOfThreads;
	/** Browser identity handed to the downloader. */
	private static final String BROWSER = "IE8.0";

	/**
	 * Removes and returns the next URL from the work queue, or {@code null}
	 * when the queue is empty. Synchronized so two workers never receive
	 * the same URL.
	 */
	private synchronized String getOneURL() {
		if (urls.isEmpty()) {
			return null;
		}
		// remove(0) reads and dequeues the head in a single call
		return urls.remove(0);
	}

	/**
	 * Appends a downloaded page to the shared result list.
	 * Synchronized because the backing ArrayList is not thread-safe.
	 */
	private synchronized void add(Webpage webpage) {
		webpages.add(webpage);
	}

	/** Worker thread: drains the shared URL queue, downloading one page at a time. */
	class CrawlThread extends Thread {
		@Override
		public void run() {
			DownloadURL d = new DownloadURL();
			String url;
			while ((url = getOneURL()) != null) {
				System.out.println("Thread " + this.getId() + " downloading: \t" + url);
				add(d.downURL(url, BROWSER));
				try {
					sleep(200); // throttle a little between requests
				} catch (InterruptedException e) {
					// Restore the interrupt flag and stop this worker:
					// an interrupt is a cancellation request, not an error.
					Thread.currentThread().interrupt();
					break;
				}
			}
		}
	}

	/**
	 * Creates a crawler.
	 * @param numOfThreads number of worker threads; values below 1 are clamped to 1
	 */
	public Crawler(int numOfThreads) {
		this.numOfThreads = Math.max(1, numOfThreads);
	}

	/**
	 * Downloads every URL in {@code urls} using {@code numOfThreads} worker
	 * threads and blocks until all workers finish.
	 *
	 * @param urls URLs to fetch; the caller's list is not modified
	 * @return the downloaded pages (order depends on thread scheduling)
	 */
	public List<Webpage> crawl(List<String> urls) {
		webpages = new ArrayList<Webpage>();
		// Defensive copy: the workers consume the queue destructively, and
		// the caller's list must not be emptied as a side effect.
		this.urls = new ArrayList<String>(urls);

		List<Thread> threadList = new ArrayList<Thread>();
		for (int i = 0; i < numOfThreads; i++) {
			Thread t = new CrawlThread();
			t.start();
			threadList.add(t);
		}

		// Wait for every worker; never return while downloads are in flight.
		boolean interrupted = false;
		for (Thread t : threadList) {
			while (true) {
				try {
					t.join();
					break;
				} catch (InterruptedException e) {
					// Remember the interrupt but keep waiting so the result
					// list is complete; the flag is restored below.
					interrupted = true;
				}
			}
		}
		if (interrupted) {
			Thread.currentThread().interrupt();
		}

		return webpages;
	}

	/** Small demo: crawls a handful of pages with two workers and prints them. */
	public static void main(String[] args) {
		List<String> urls = new ArrayList<>();
		urls.add("http://www.cuhk.edu.hk");
		urls.add("http://www.se.cuhk.edu.hk");
		urls.add("http://www.se.cuhk.edu.hk/~ljzhou");
		urls.add("http://www.se.cuhk.edu.hk/~zywei");
		urls.add("http://www.cs.hit.edu.cn");
		Crawler cr = new Crawler(2);
		List<Webpage> w = cr.crawl(urls);
		for (Webpage p : w) {
			System.out.println(p.url);
			System.out.println(p.content);
		}
	}
}
