package xyz.yuanjilu.base;

import java.util.Iterator;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import xyz.yuanjilu.dao.WebpageinfoDao;
import xyz.yuanjilu.entity.WebpageinfoEntity;
import xyz.yuanjilu.service.WebpageinfoService;
import xyz.yuanjilu.utils.Encrypt;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;
import us.codecraft.webmagic.pipeline.JsonFilePipeline;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.processor.PageProcessor;

@Component
public abstract class AbstractPageProcessor implements PageProcessor, AntAction {

    /** Per-subclass logger so log lines carry the concrete processor's class name. */
    private final Logger logger = LoggerFactory.getLogger(getClass());

    /** Crawl politeness settings: no retries, 10s pause between requests, 10s timeout. */
    private final Site site = Site.me()
            .setRetryTimes(0)
            .setSleepTime(10 * 1000) // fetch once every ten seconds
            .setTimeOut(10000)
            .setUserAgent("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36");

    /** Entry URL for this crawler, supplied by the subclass via {@link #initTargetUrl()}. */
    private final String targetUrl;

    private Spider spider = null;

    @Autowired
    private WebpageinfoService webpageinfoService;

    /** All Pipeline beans in the context; each one is attached to the spider on start. */
    @Autowired
    private List<Pipeline> pipelines;

    public AbstractPageProcessor() {
        super();
        // NOTE(review): calling the overridable initTargetUrl() from the constructor is
        // fragile — subclass fields are not yet initialized at this point. Kept for
        // compatibility; implementations must not depend on their own instance state.
        this.targetUrl = this.initTargetUrl();
        // Create a default spider up front so status()/stop() never see a null reference.
        spider = Spider.create(this).addUrl(this.targetUrl);
    }

    /**
     * Subclasses must supply the entry URL this crawler starts from.
     *
     * @return the target URL to crawl
     */
    public abstract String initTargetUrl();

    @Override
    public Site getSite() {
        return site;
    }

    /**
     * Template method: records this processor's id on the page, delegates the actual
     * scraping to {@link #doProcess(Page)}, then removes any follow-up URLs that have
     * already been crawled (their MD5 id exists in storage) so they are not re-fetched.
     *
     * @param page the page just downloaded by the spider
     */
    @Override
    public void process(Page page) {
        page.putField(Constant.ANT_BEAN_KEY_ID, this.getClass().getSimpleName());
        this.doProcess(page);
        List<Request> targetRequests = page.getTargetRequests();
        if (CollectionUtils.isNotEmpty(targetRequests)) {
            // Iterator.remove() is the safe way to drop elements while iterating.
            for (Iterator<Request> it = targetRequests.iterator(); it.hasNext();) {
                String url = it.next().getUrl();
                String id = Encrypt.md5(url + ""); // "+ \"\"" guards against a null url
                WebpageinfoEntity queryObject = webpageinfoService.queryObject(id);
                if (queryObject != null) {
                    it.remove();
                    logger.info("id={}的信息已经爬取过，不再重新爬取", id);
                }
            }
        }
    }

    /**
     * Subclasses implement the concrete scraping rules here; the parent class handles
     * the shared bookkeeping (processor id field, de-duplication of target requests).
     *
     * @param page the page to extract data and follow-up links from
     */
    public abstract void doProcess(Page page);

    /** Starts the crawler asynchronously with 5 threads; a no-op if already running. */
    public void start() {
        if (Constant.SPIDER_STATUS_START.equals(this.status())) {
            return; // already running — don't spawn a second instance
        }
        logger.info("启动爬虫：{};url:{}", this.getClass(), this.targetUrl);
        // Re-create the spider on every real start: a stopped Spider cannot be reused.
        spider = Spider.create(this).addUrl(this.targetUrl);
        spider.thread(5);
        for (Pipeline pipeline : pipelines) {
            spider.addPipeline(pipeline);
        }
        spider.runAsync();
    }

    /** Stops the current crawler (if running) and starts a fresh instance. */
    @Override
    public void restart() {
        logger.info("重启爬虫：{};url:{}", this.getClass(), this.targetUrl);
        this.stop();
        this.start();
    }

    /** Stops the current spider instance. */
    @Override
    public void stop() {
        logger.info("关闭爬虫：{};url:{}", this.getClass(), this.targetUrl);
        spider.stop();
    }

    /**
     * Returns the current spider status as a string (compared against
     * {@code Constant.SPIDER_STATUS_START} by {@link #start()}).
     */
    @Override
    public String status() {
        return spider.getStatus().toString();
    }

}
