package com.dh.blog.sitemap;

import java.util.HashMap;
import java.util.Map;

import com.gj.web.crawler.Crawler;
import com.gj.web.crawler.parse.CrawlHTMLParser;
import com.gj.web.crawler.parse.ResultModel;
import com.gj.web.crawler.pool.CrawlerThreadPool;
import com.gj.web.crawler.pool.CrawlerThreadPoolImpl;
import com.gj.web.crawler.pool.basic.URL;

/**
 * Crawler parser callback that prints the URL of every parsed page.
 *
 * <p>Also bootstraps (via {@link #main}) a single-crawler thread pool rooted
 * at a local blog instance.
 */
public class SitemapParser extends CrawlHTMLParser {

	// NOTE(review): never read by this class — presumably the callback was meant
	// to record crawled URLs into the sitemap; confirm intended use.
	private Sitemap sitemap = SitemapImpl.getInstance();

	/**
	 * Invoked by the crawler framework for each parsed page; prints the page
	 * URL stored under the result model's "_url" key to stdout.
	 *
	 * @param result the parse result for one crawled page
	 */
	@Override
	public void callback(ResultModel result) {
		String pageUrl = result.getString("_url");
		System.out.println(pageUrl);
	}

	/**
	 * Configures one crawler keyed "blog" that allows and parses every http
	 * URL, registers it with the shared thread pool, opens the pool, and
	 * seeds it with the local blog root URL.
	 */
	public static void main(String[] args) {
		// "[:]" is a one-character regex class matching ':' — the pattern
		// accepts any http URL.
		String anyHttpUrl = "http[:]//.*";

		Crawler blogCrawler = new Crawler();
		blogCrawler.getAllowURL().add(anyHttpUrl);
		blogCrawler.getParseURL().add(anyHttpUrl);
		blogCrawler.setParser(new SitemapParser());

		Map<String, Crawler> registry = new HashMap<String, Crawler>();
		registry.put("blog", blogCrawler);

		CrawlerThreadPool pool = CrawlerThreadPoolImpl.getInstance();
		pool.setCrawlers(registry);
		pool.open();
		pool.execute(new URL("blog", "http://localhost:8080/iblog"));
	}
}
