package com.bbz.crawler.core.proxyip.processor;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.bbz.crawler.core.commen.model.CrawlerProxyIp;
import com.bbz.crawler.core.proxyip.dataprocess.Ip3366DataProcess;
import com.bbz.crawler.core.proxyip.dataprocess.XiCiDaiLiDataProcess;
import com.bbz.crawler.core.proxyip.pipeline.IpProxyPoolPipeline;
import com.bbz.crawler.core.util.HttpClientDownloader;

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.SimpleProxyProvider;
import us.codecraft.webmagic.scheduler.BloomFilterDuplicateRemover;
import us.codecraft.webmagic.scheduler.QueueScheduler;

/**
 * 爬取代理IP地址
 * 
 * @author binbin.a.zhang
 *
 */
@Component
public class IpProxyPoolProcessor implements PageProcessor {

	@Autowired
	IpProxyPoolPipeline ipProxyPoolPipeline;

	/**
	 * Crawl configuration: 3 retries, UTF-8, 50 s delay between requests
	 * (NOTE(review): 50000 ms looks like it may be a typo for 5000 — confirm),
	 * and a desktop Chrome User-Agent.
	 *
	 * Bug fix: the original UA string was wrapped in escaped quotes, so the
	 * header actually sent started and ended with a literal '"' character.
	 */
	private Site site = Site.me().setRetryTimes(3).setSleepTime(50000).setCharset("utf-8").setUserAgent(
			"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36");

	/**
	 * Core extraction logic: pulls proxy-IP rows from the current listing page
	 * and queues the "next page" link(s) for further crawling.
	 *
	 * @param page the downloaded page to extract from
	 */
	@Override
	public void process(Page page) {
		// Parse proxy-IP entries out of the xicidaili listing table.
		List<CrawlerProxyIp> list = new XiCiDaiLiDataProcess().dataProcess(page);
		// Link(s) to the next result page; all() returns an empty list (never
		// null) when no match, so the original "url == null" check was dead code.
		List<String> nextPageLinks = page.getHtml().xpath("//a[@class='next_page']/@href").all();

		if (list == null || list.isEmpty()) {
			// Nothing extracted from this page — don't pass it to the pipeline.
			page.setSkip(true);
		} else {
			page.putField("url", nextPageLinks);
			page.putField("proxyIpList", list);
		}
		// Queue follow-up pages; a no-op when the list is empty (last page).
		page.addTargetRequests(nextPageLinks);
	}

	@Override
	public Site getSite() {
		return site;
	}

	/**
	 * Starts the crawl from the first xicidaili "nn" listing page with 5
	 * worker threads, Bloom-filter URL de-duplication, and a downloader that
	 * routes through a fixed upstream proxy.
	 */
	public void run() {
		HttpClientDownloader httpClientDownloader = new HttpClientDownloader();
		httpClientDownloader.setProxyProvider(SimpleProxyProvider.from(new Proxy("116.1.214.79", 80)));
		// Use "this" (the Spring-managed bean) rather than a fresh instance so
		// autowired fields stay populated on the processor the spider runs.
		Spider.create(this)
				// Start crawling from http://www.xicidaili.com/nn/1
				.addUrl("http://www.xicidaili.com/nn/1")
				// Persist extracted proxies through the pipeline bean.
				.addPipeline(ipProxyPoolPipeline)
				// 5 crawler threads.
				.thread(5)
				// Bug fix: pass the proxy-configured downloader built above —
				// the original created it and then used a fresh, unconfigured one.
				.setDownloader(httpClientDownloader)
				// URL de-duplication via Bloom filter (capacity 10M).
				.setScheduler(new QueueScheduler().setDuplicateRemover(new BloomFilterDuplicateRemover(10000000)))
				// Blocks until the crawl finishes.
				.run();
	}

}
