package cn.jc.spider.demo1;

import java.util.Set;

/**
 * Demo breadth-first web crawler restricted to the Android Market site.
 * Depends on project helpers {@code LinkQueue} (visited/unvisited URL
 * queues), {@code LinkFilter} (URL predicate) and {@code HtmlParserTool}
 * (link extraction).
 */
public class MyCrawler {

	/**
	 * Seeds the unvisited-URL queue with the starting URLs.
	 *
	 * @param seeds initial URLs to begin crawling from
	 */
	private void initCrawlerWithSeeds(String[] seeds) {
		for (String url : seeds) {
			LinkQueue.addUnVisitedUrl(url);
		}
	}

	/**
	 * Crawls breadth-first from the given seeds, visiting only URLs that
	 * start with {@code https://market.android.com/}, until the unvisited
	 * queue is empty or 100000 URLs have been visited.
	 *
	 * @param seeds initial URLs to begin crawling from
	 */
	public void crawling(String[] seeds) {
		// Accepts only URLs rooted at the market site (visit filter).
		LinkFilter visitFilter = new LinkFilter() {
			public boolean accept(String url) {
				return url.startsWith("https://market.android.com/");
			}
		};
		// Accepts URLs containing the market prefix anywhere (parse filter);
		// this also catches redirect-style URLs that embed the target URL.
		LinkFilter parseFilter = new LinkFilter() {
			public boolean accept(String url) {
				return url.indexOf("https://market.android.com/") > -1;
			}
		};
		initCrawlerWithSeeds(seeds);
		while (!LinkQueue.unVisitedUrlIsEmpty()
				&& LinkQueue.getVisitedUrlNum() <= 100000) {
			System.out.println("visited url num:" + LinkQueue.getVisitedUrlNum());
			String visitUrl = (String) LinkQueue.unVisitedUrlDeQueue();
			// BUG FIX: the original used '&&' here, which (a) evaluated
			// accept(null) when the queue returned null (NPE risk, since the
			// filter calls startsWith on the url) and (b) never skipped
			// non-null URLs rejected by the filter. Skip when the URL is
			// null OR rejected.
			if (visitUrl == null || !visitFilter.accept(visitUrl)) {
				continue;
			}
			System.out.println(visitUrl);

			// Mark visited before parsing so a parse failure cannot cause
			// this URL to be re-queued and retried forever.
			LinkQueue.addVisitedUrl(visitUrl);
			try {
				Set<String> links = HtmlParserTool.extracLinks(visitUrl, parseFilter);
				for (String link : links) {
					LinkQueue.addUnVisitedUrl(link);
				}
			} catch (Throwable e) {
				// Don't let one bad page kill the crawl, but do report it
				// (the original printed the filter object and discarded the
				// exception, which hid the failure cause entirely).
				System.out.println("failed to extract links from: " + visitUrl);
				e.printStackTrace();
			}
		}
	}

	public static void main(String[] args) {
		MyCrawler crawler = new MyCrawler();
		crawler.crawling(new String[] { "https://market.android.com/" });
	}

}
