package com.clustertech.crawler.service;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;

import com.clustertech.crawler.analyzer.ClusterAnalyzer;
import com.clustertech.crawler.analyzer.SinaNewsAnalyzer;
import com.clustertech.crawler.domain.NewsFeed;
import com.clustertech.crawler.utils.SinaNewsExtractor;

/**
 * Crawler service that downloads a seed page, extracts the article links it
 * contains, and converts each linked page into a {@link NewsFeed} using the
 * configured {@link ClusterAnalyzer}.
 *
 * <p>Not thread-safe: {@code seed} and {@code analyzer} are mutable state
 * configured via setters before calling {@link #crawl()}.
 */
public class CrawlServiceImpl implements ClusterService {
	// Class-level logger: the original created a new Logger on every crawl()
	// call; a single static final instance is the standard JUL idiom.
	private static final Logger LOGGER = Logger.getLogger(CrawlServiceImpl.class.getName());

	private ClusterAnalyzer analyzer;
	private String seed;

	/**
	 * Crawls the seed page, extracts all linked URLs, downloads each link and
	 * parses it into a {@link NewsFeed} via the configured analyzer.
	 *
	 * @return one {@code NewsFeed} per link found on the seed page; empty when
	 *         the seed page yields no links
	 */
	public List<NewsFeed> crawl() {
		LOGGER.info("Crawling " + seed + "...");
		String html = SinaNewsExtractor.crawlPage(seed);
		Map<String, String> links = SinaNewsExtractor.getURLs(html);
		int linkNum = links.size();
		LOGGER.info("Total: " + linkNum + " pages...");
		int current = 0;
		// Presize: the number of results is known up front.
		List<NewsFeed> newsFeeds = new ArrayList<NewsFeed>(linkNum);
		// Iterate values directly — the original walked keySet() and then did
		// links.get(key) for every key, a redundant second map lookup.
		for (String link : links.values()) {
			current++;
			LOGGER.info("Crawling #" + current + "/" + linkNum + " page...");
			html = SinaNewsExtractor.crawlPage(link);
			newsFeeds.add(analyzer.getNewsFeed(html));
		}
		return newsFeeds;
	}

	/** Demo entry point: crawls the Sina world-news section once. */
	public static void main(String[] args) {
		CrawlServiceImpl service = new CrawlServiceImpl();
		service.setSeed("http://news.sina.com.cn/world/");
		service.setAnalyzer(new SinaNewsAnalyzer());
		service.crawl();
	}

	public ClusterAnalyzer getAnalyzer() {
		return analyzer;
	}

	public void setAnalyzer(ClusterAnalyzer analyzer) {
		this.analyzer = analyzer;
	}

	public String getSeed() {
		return seed;
	}

	public void setSeed(String seed) {
		this.seed = seed;
	}
}
