package tech.abing.spider.task.crawler;

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

import tech.abing.spider.CrawTaskBuilder;
import tech.abing.spider.CrawlManager;
import tech.abing.spider.CrawlTask;
import tech.abing.spider.component.DynamicEntrance;
import tech.abing.spider.component.listener.TaskLifeListener;
import tech.abing.spider.downloader.impl.DefaultPageDownloader;
import tech.abing.spider.request.PageRequest;
import tech.abing.spider.request.PageRequest.PageEncoding;
import tech.abing.spider.request.StartContext;
import tech.abing.spider.task.pageprocessor.Five8Processor1;
import tech.abing.spider.task.pipeline.Five8Pipeline;
import tech.abing.spider.util.SqlUtil;

public class Five8Spider {

	private static final Logger logger = Logger.getLogger(Five8Spider.class);

	/** Shared database helper; created lazily, closed when the crawl finishes. */
	private static SqlUtil sqlUtil = null;

	/**
	 * Lazily creates (and caches) the shared {@link SqlUtil} instance.
	 * Used from both the task-start listener and {@code loadStartContext()},
	 * so seed loading works even if the framework invokes it before onStart.
	 * NOTE(review): assumes framework callbacks are single-threaded — confirm.
	 *
	 * @return the ready-to-use database helper, never {@code null}
	 */
	private static SqlUtil ensureSqlUtil() {
		if (sqlUtil == null) {
			sqlUtil = SqlUtil.getInstance();
			logger.info("数据库连接创建成功！");
		}
		return sqlUtil;
	}

	public static void main(String[] args) {

		CrawTaskBuilder builder = CrawlManager.getInstance()
				.prepareCrawlTask("采集58招聘信息", DefaultPageDownloader.class)
				.useThread(10) // download with 10 threads (comment previously said "two")
				.useDynamicEntrance(DynamicEntranceImpl.class)
				.usePipeline(Five8Pipeline.class)
				.usePageRetryCount(3)
				.setPolitenessDelay(500) // 500 ms between requests, be polite to 58.com
				.useTaskLifeListener(new TaskLifeListener() {

					@Override
					public void onStart(CrawlTask task) {
						// Make sure the DB connection exists before pages are processed.
						ensureSqlUtil();
					}

					@Override
					public void onFinished(CrawlTask task) {
						if (sqlUtil != null) {
							sqlUtil.closeConn();
							// Drop the reference so a later start cannot reuse a closed connection.
							sqlUtil = null;
							logger.info("数据库连接关闭成功！");
						}

						task.getPipeline().destory();
					}
				})
				.usePageEncoding(PageEncoding.UTF8);
		CrawlTask spider = builder.build();
		CrawlManager.getInstance().start(spider);

	}

	/**
	 * Supplies the seed requests, one per job category. For each category the
	 * most recent publication date already stored in {@code t_five8} is attached
	 * as a request parameter so the page processor can stop at pages it has
	 * already seen (incremental crawl).
	 */
	public static final class DynamicEntranceImpl extends DynamicEntrance {

		@Override
		public List<StartContext> loadStartContext() {
			StartContext context = new StartContext();

			// Insertion-ordered mapping: category name -> 58.com listing URL.
			Map<String, String> urlMap = new LinkedHashMap<String, String>();
			// 软件开发/编程 = software development / programming
			urlMap.put("软件开发/编程", "http://sz.58.com/jzsoftware/?utm_source=market&spm=b-31580022738699-me-f-824.bdpz_biaoti&PGTID=114016233188856191565014936&ClickID=1");
			// 网站建设 = web site construction
			urlMap.put("网站建设", "http://sz.58.com/jisuanjiwl/?utm_source=market&spm=b-31580022738699-me-f-824.bdpz_biaoti&PGTID=198017221188856190262074261&ClickID=1");

			String sql = "select max(pubDate) as maxPubDate from t_five8 where category = ?";

			// Guard against this being called before the onStart listener ran
			// (the original dereferenced the possibly-null static directly).
			SqlUtil db = ensureSqlUtil();

			for (Map.Entry<String, String> entry : urlMap.entrySet()) {
				String category = entry.getKey();

				PageRequest req = context.createPageRequest(entry.getValue(), Five8Processor1.class, 0);
				req.putParams("category", category);

				Map<String, Object> row = db.searchForConn(sql, category);
				if (row != null && !row.isEmpty() && row.get("maxPubDate") != null) {
					req.putParams("maxPubDate", row.get("maxPubDate").toString());
				}

				context.injectSeed(req);
			}

			return Arrays.asList(context);
		}

		@Override
		public boolean continueLoad() {
			// One-shot crawl: do not reload seeds after the first pass.
			// (Original comment said "循环采集"/loop crawling, which contradicted the code.)
			return false;
		}
	}

}
