package com.clark.crawler.novel.processor;

import com.clark.crawler.novel.NovelUtil;
import com.clark.crawler.novel.pipeline.ChapterPipeline;
import com.clark.crawler.novel.property.NovelProperties;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;

import java.util.List;

/**
 * @author code4crafter@gmail.com <br>
 * @since 0.5.1
 */
public class NovelPageProcessor implements PageProcessor {

    /** Number of worker threads used by the chapter spider. */
    private static final int CHAPTER_THREAD_COUNT = 5;

    /** Site config: retry failed downloads up to 3 times, no delay between requests. */
    private final Site site = Site.me().setRetryTimes(3).setSleepTime(0);

    /**
     * Extracts the chapter URL list from the novel's index page, then starts a
     * second (blocking) spider that crawls every chapter through
     * {@link ChapterPipeline}.
     *
     * @param page the downloaded index page provided by WebMagic
     */
    @Override
    public void process(Page page) {
        List<String> urlList = page.getHtml().xpath(NovelProperties.chapterUrlListReg).links().all();

        // In the dev environment, only crawl the first n chapters.
        if (NovelProperties.dev && urlList.size() > NovelProperties.devCrawlerCnt) {
            urlList = urlList.subList(0, NovelProperties.devCrawlerCnt);
        }

        // Spider.addUrl is varargs — queue the whole batch in one call.
        Spider spider = Spider.create(new ChapterPageProcessor())
                .addUrl(urlList.toArray(new String[0]));

        // NOTE(review): presumably records the chapter ordering so the pipeline
        // can assemble chapters in sequence — confirm against NovelUtil.
        NovelUtil.init(urlList);

        // Blocks until all chapter pages have been processed.
        spider.addPipeline(new ChapterPipeline()).thread(CHAPTER_THREAD_COUNT).run();
    }

    @Override
    public Site getSite() {
        return site;
    }
}
