package org.nerve.sprider.crawler4j;

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.crawler.WebCrawler;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;
import org.nerve.sprider.common.StringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

/**
 * Thin wrapper around crawler4j that configures a {@link CrawlController} from
 * {@code /crawler4j.properties}, seeds it with URLs, and runs a crawl.
 *
 * <p>Usage: construct, optionally set depth/seeds/proxy/crawler class via the
 * fluent setters, then call {@link #start()} (blocking).
 *
 * org.nerve.study.sprider.crawler4j
 * Created by zengxm on 2016/3/17 0017.
 */
public class CrawlWorker {
	static final Logger logger = LoggerFactory.getLogger(CrawlWorker.class);

	/** Default number of concurrent crawler threads (overridable via the {@code crawlers} property). */
	private static final int DEFAULT_CRAWLER_COUNT = 7;

	// Settings loaded from /crawler4j.properties (storage, proxy.*, crawlers).
	private Properties properties;
	// Maximum crawl depth passed to CrawlConfig.setMaxDepthOfCrawling.
	private int deep = 1;
	// Seed URLs; lazily created by addSeeds().
	private List<String> seeds;
	private CrawlController controller;
	private CrawlConfig config;
	// 是否使用代理 — whether to route requests through the proxy configured in crawler4j.properties.
	private boolean useProxy;
	// Crawler implementation handed to CrawlController.start().
	private Class<? extends WebCrawler> crawlerCls = CommonCrawler.class;

	/**
	 * Loads settings from {@code /crawler4j.properties} on the classpath.
	 * A missing or unreadable file is logged and leaves the properties empty.
	 */
	public CrawlWorker(){
		properties = new Properties();
		// try-with-resources: the original leaked this stream and NPE'd if the resource was absent.
		try (InputStream in = this.getClass().getResourceAsStream("/crawler4j.properties")) {
			if (in == null) {
				logger.warn("crawler4j.properties not found on classpath; using empty configuration");
			} else {
				properties.load(in);
			}
		} catch (IOException e) {
			logger.error("Failed to load crawler4j.properties", e);
		}
	}

	/**
	 * Convenience constructor.
	 *
	 * @param deep     maximum crawl depth
	 * @param seedUrls initial seed URLs
	 */
	public CrawlWorker(int deep, String... seedUrls){
		this();
		this.deep = deep;
		addSeeds(seedUrls);
	}

	/**
	 * Appends seed URLs, creating the backing list on first use.
	 * A {@code null} array is ignored.
	 *
	 * @param seedUrls URLs to add as crawl starting points
	 */
	public void addSeeds(String... seedUrls){
		if (seedUrls != null) {
			if (seeds == null)
				seeds = new ArrayList<>();
			Collections.addAll(seeds, seedUrls);
		}
	}

	public Class<? extends WebCrawler> getCrawlerCls() {
		return crawlerCls;
	}

	public CrawlWorker setCrawlerCls(Class<? extends WebCrawler> crawlerCls) {
		this.crawlerCls = crawlerCls;
		return this;
	}

	/**
	 * Builds the crawl configuration (unless one was injected via
	 * {@link #setConfig(CrawlConfig)}), seeds the controller, and runs the
	 * crawl. Blocks until crawling is finished.
	 *
	 * @throws IllegalStateException if no seed URLs have been configured
	 * @throws Exception             propagated from crawler4j setup/start
	 */
	public void start() throws Exception {
		if (controller != null && !controller.isFinished()) {
			logger.info("上次的任务还没有完成，无法开始新任务！");
			return;
		}
		// Fail fast with a clear message instead of an NPE in buildCustomData().
		if (seeds == null || seeds.isEmpty()) {
			throw new IllegalStateException("No seed URLs configured; call addSeeds() first");
		}

		// Thread count is configurable via the 'crawlers' property; defaults to 7 as before.
		int numberOfCrawlers = Integer.parseInt(
				properties.getProperty("crawlers", String.valueOf(DEFAULT_CRAWLER_COUNT)));

		if (config == null) {
			config = new CrawlConfig();
			config.setCrawlStorageFolder(properties.getProperty("storage"));
			config.setMaxConnectionsPerHost(2);
			config.setMaxDepthOfCrawling(deep);
			config.setPolitenessDelay(500); // 500 ms between requests to the same host

			if (useProxy) {
				config.setProxyHost(properties.getProperty("proxy.host"));
				config.setProxyPort(Integer.parseInt(properties.getProperty("proxy.port", "80")));
				config.setProxyUsername(properties.getProperty("proxy.user"));
				config.setProxyPassword(properties.getProperty("proxy.password"));
			}
		}

		/*
		 * Instantiate the controller for this crawl.
		 */
		PageFetcher pageFetcher = new PageFetcher(config);
		RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
		RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
		controller = new CrawlController(config, pageFetcher, robotstxtServer);

		buildCustomData();

		/*
		 * For each crawl, you need to add some seed urls. These are the first
		 * URLs that are fetched and then the crawler starts following links
		 * which are found in these pages
		 */
		for (String s : seeds)
			controller.addSeed(s);

		/*
		 * Start the crawl. This is a blocking operation, meaning that your code
		 * will reach the line after this only when crawling is finished.
		 */
		controller.start(crawlerCls, numberOfCrawlers);

		System.out.println("END!");
	}

	/**
	 * Derives the domain of each seed URL and attaches the array to the
	 * controller as custom data (read back by the crawler implementation).
	 */
	private void buildCustomData(){
		String[] domains = new String[seeds.size()];
		for (int i = 0; i < domains.length; i++)
			domains[i] = StringUtil.getDomain(seeds.get(i));

		controller.setCustomData(domains);
	}

	public boolean isUseProxy() {
		return useProxy;
	}

	public CrawlWorker setUseProxy(boolean useProxy) {
		this.useProxy = useProxy;
		return this;
	}

	public int getDeep() {
		return deep;
	}

	public CrawlWorker setDeep(int deep) {
		this.deep = deep;
		return this;
	}

	public List<String> getSeeds() {
		return seeds;
	}

	public CrawlWorker setSeeds(List<String> seeds) {
		this.seeds = seeds;
		return this;
	}

	public CrawlController getController() {
		return controller;
	}

	public CrawlWorker setController(CrawlController controller) {
		this.controller = controller;
		return this;
	}

	public CrawlConfig getConfig() {
		return config;
	}

	public CrawlWorker setConfig(CrawlConfig config) {
		this.config = config;
		return this;
	}
}
