package com.zongtui.crawler.dcSimpleCrawler;

import java.util.ArrayList;
import java.util.List;

import com.zongtui.fourinone.MigrantWorker;
import com.zongtui.fourinone.WareHouse;
import com.zongtui.webcrawler.sourceer.ResultItems;
import com.zongtui.webcrawler.sourceer.Spider;
import com.zongtui.webcrawler.sourceer.crawler.BaiduBaikePageProcessor;

public class CrawlerWorker extends MigrantWorker
{
	/**
	 * Crawl task executed when this worker receives a control command.
	 *
	 * <p>Reads the key {@code "word"} from the incoming warehouse, runs one
	 * single-URL download and one batch download against Baidu Baike via a
	 * {@link Spider}, prints the results, and returns a new warehouse echoing
	 * the word back under the same key.
	 *
	 * @param inhouse incoming warehouse; expected to contain a String under "word"
	 * @return a warehouse with key "word" mapped to {@code word + " world!"}
	 */
	public WareHouse doTask(WareHouse inhouse)
	{
		String word = inhouse.getString("word");
		System.out.println(word+" 得到控制命令，开始爬取…….");

		Spider spider = Spider.create(new BaiduBaikePageProcessor()).thread(2);
		try {
			// single download
			// NOTE(review): search terms below are hard-coded demo values and
			// ignore the received `word` — confirm whether they should use it.
			String urlTemplate = "http://baike.baidu.com/search/word?word=%s&pic=1&sug=1&enc=utf8";
			ResultItems resultItems = spider.<ResultItems>get(String.format(urlTemplate, "水力发电"));
			System.out.println(resultItems);

			// multi download
			List<String> list = new ArrayList<String>();
			list.add(String.format(urlTemplate,"风力发电"));
			list.add(String.format(urlTemplate,"太阳能"));
			list.add(String.format(urlTemplate,"地热发电"));
			// NOTE(review): same URL added twice — confirm intentional (duplicate-handling demo?)
			list.add(String.format(urlTemplate,"地热发电"));
			List<ResultItems> resultItemses = spider.<ResultItems>getAll(list);
			for (ResultItems resultItemse : resultItemses) {
				System.out.println(resultItemse.getAll());
			}
		} finally {
			// Fix: always release the spider, even when a download throws,
			// so its worker threads are not leaked.
			spider.close();
		}

		return new WareHouse("word", word + " world!");
	}

	/**
	 * Registers this worker under the name "CrawlerWorker" and blocks waiting
	 * for tasks from the fourinone framework.
	 */
	public static void main(String[] args)
	{
		CrawlerWorker mw = new CrawlerWorker();
		mw.waitWorking("CrawlerWorker");
	}
}