package tech.abing.spider.task.crawler;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;

import tech.abing.spider.CrawTaskBuilder;
import tech.abing.spider.CrawlManager;
import tech.abing.spider.CrawlTask;
import tech.abing.spider.component.DynamicEntrance;
import tech.abing.spider.component.listener.TaskLifeListener;
import tech.abing.spider.downloader.impl.DefaultPageDownloader;
import tech.abing.spider.request.Cookie;
import tech.abing.spider.request.PageRequest;
import tech.abing.spider.request.PageRequest.PageEncoding;
import tech.abing.spider.request.StartContext;
import tech.abing.spider.task.pageprocessor.RateProccessor1;
import tech.abing.spider.task.pipeline.RatePipeline;
import tech.abing.spider.util.SqlUtil;

/**
 * Spider that crawls shop rating pages. Work items (shop_id, rate_link) are
 * pulled one at a time from {@code tbbase.tb_task_rate} by the dynamic
 * entrance, and results are persisted through {@link RatePipeline}.
 */
public class RateSpider {

	private static Logger logger = Logger.getLogger(RateSpider.class);

	// Shared DB helper; lazily opened in onStart, closed in onFinished.
	private static SqlUtil sqlUtil = null;
	// NOTE(review): bound as a parameter in loadStartContext() although the SQL
	// there has no '?' placeholder, and never assigned anywhere visible — it is
	// always null here. Confirm whether it is set elsewhere or is dead state.
	private static String catchDate = null;

	/**
	 * Starts the crawl with defaults: 1 thread, 3 retries, 500 ms politeness
	 * delay between requests.
	 */
	public void crawl(){
		this.crawl(1, 3, 500);
	}

	/**
	 * Builds and starts the rate-crawling task.
	 *
	 * @param threads         number of crawler worker threads
	 * @param retryCount      how many times a failed page fetch is retried
	 * @param politenessDelay minimum delay between requests, in milliseconds
	 */
	public void crawl(int threads, int retryCount, int politenessDelay){
		Set<Cookie> cookies = new HashSet<Cookie>();
		cookies.add(new Cookie("cookie2", "1c05f5867338bfd5e9b6e5db67fa0bcb"));

		CrawTaskBuilder builder = CrawlManager.getInstance()
				.prepareCrawlTask("根据类目关键字获取店铺", DefaultPageDownloader.class)
				// BUG FIX: was hard-coded to useThread(1), silently ignoring
				// the threads parameter passed by the caller.
				.useThread(threads)
				.useDynamicEntrance(DynamicEntranceImpl.class).useCookie(cookies)
				.usePageRetryCount(retryCount)
				.setPolitenessDelay(politenessDelay)
				.usePipeline(RatePipeline.class)
				.useTaskLifeListener(new TaskLifeListener() {

					@Override
					public void onStart(CrawlTask task) {
						if(sqlUtil == null){
							// Lazily create the database connection on first start.
							sqlUtil = SqlUtil.getInstance();
							logger.info("数据库连接创建成功！");
						}
					}

					@Override
					public void onFinished(CrawlTask task) {
						// Flush/tear down the pipeline, then close the shared
						// DB connection opened in onStart.
						task.getPipeline().destory();
						sqlUtil.closeConn();
						logger.info("数据库连接关闭成功！");
					}
				})
				.usePageEncoding(PageEncoding.UTF8);

		CrawlTask spider = builder.build();

		// NOTE(review): the unit of setTimeout is not visible from here —
		// confirm whether 10 means seconds or milliseconds.
		spider.getDownloader().setTimeout(10);
		CrawlManager.getInstance().start(spider);
	}


	/**
	 * Dynamic entrance that claims one pending row from
	 * {@code tbbase.tb_task_rate}: the row is deleted (to mark it taken) and,
	 * on successful deletion, a seed request for its rate_link is injected.
	 */
	public static final class DynamicEntranceImpl extends DynamicEntrance {

		@Override
		public List<StartContext> loadStartContext() {

			StartContext context = new StartContext();

			// Fetch one pending task; delete it so no other worker claims it.
			String sql = "select shop_id,rate_link from tbbase.tb_task_rate limit 1";
			String delSql = "delete from tbbase.tb_task_rate where shop_id=?";

			Map<String, Object> map = sqlUtil.searchForConn(sql,catchDate);
			if(map != null && !map.isEmpty()){

				String shopId = map.get("shop_id").toString();
				String rateLink = map.get("rate_link").toString();


				int cnt = sqlUtil.deleteForConn(delSql,shopId);
				if(cnt > 0){
					// Delete succeeded: this worker owns the task, so seed it.
					PageRequest req = context.createPageRequest(rateLink, RateProccessor1.class, 0);
					req.putParams("shopId", shopId);
					req.putParams("rateLink", rateLink);
					context.injectSeed(req);
				}
			}
			return Arrays.asList(context);
		}

		// Always keep loading: the entrance is polled repeatedly so the spider
		// keeps draining the task table.
		@Override
		public boolean continueLoad() {
			return true;
		}

	}
}
