package cn.sunxyz.spider.sample.zhihu;

import cn.sunxyz.spider.Spider;
import cn.sunxyz.spider.downloader.JSoupDownloader;
import cn.sunxyz.spider.downloader.ProxyDownloader;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import us.codecraft.xsoup.Xsoup;

import java.util.List;
import java.util.stream.Stream;

/**
 * Spider Tester — demo crawls against ZhiHu question pages.
 *
 * @since 2018/6/14
 * @version 1.0
 */
public class SpiderZhiHuDemo {

    // SLF4J convention: one static final logger per class, not per instance.
    private static final Logger logger = LoggerFactory.getLogger(SpiderZhiHuDemo.class);

    /** Base URL every ZhiHu question page hangs off. */
    private static final String QUESTION_URL = "https://www.zhihu.com/question/";
    /** First question id of the demo crawl range. */
    private static final int START_QUESTION_ID = 28629300;
    /** Number of consecutive question ids the stream demos crawl. */
    private static final long QUESTION_COUNT = 60;

    /**
     * Crawls a self-defined range of question URLs and logs follower count,
     * view count and answer text extracted from each page.
     */
    public void initStreamDemo() {
        new Spider() {
            {
                downloader = new ProxyDownloader(new JSoupDownloader());
                pipeline = (title, page) -> {
                    Document document = page.getDocument();
                    // Both counters share the same CSS class; first = followers, last = views.
                    // Guard against pages where the selector matches nothing (layout change,
                    // error page) instead of crashing the pipeline with an NPE.
                    Elements counters = document.select(".NumberBoard-itemValue");
                    String flower = textOf(counters.first());
                    String watch = textOf(counters.last());
                    List<String> answer = Xsoup.compile("//div[@class='List-item']/allText()").evaluate(document).list();
                    logger.info("url: {} , title : {} , 关注者: {} , 被浏览 : {} , answer : {}", page.getRequest(), title, flower, watch, answer);
                };
            }
        }.init(questionUrls()).start();
    }

    /**
     * Crawls outward from a single seed URL, following only links that match
     * the configured extraction pattern.
     */
    public void initSeedDemo() {
        String seed = QUESTION_URL + START_QUESTION_ID;
        new Spider() {
            {
                downloader = new ProxyDownloader(new JSoupDownloader());
                extractPattern = "https://www.zhihu.com/question/.*"; // restrict the crawl to question pages
            }
        }.init(seed).start();
    }

    /**
     * Minimal variant of {@link #initStreamDemo()}: logs only URL and title.
     */
    public void easyStreamDemo() {
        new Spider() {
            {
                downloader = new ProxyDownloader(new JSoupDownloader());
                pipeline = (title, page) -> logger.info("url: {} , title : {} ", page.getRequest(), title);
            }
        }.init(questionUrls()).start();
    }

    /** Builds the parallel stream of question URLs shared by both stream demos. */
    private static Stream<String> questionUrls() {
        return Stream.iterate(START_QUESTION_ID, i -> i + 1)
                .limit(QUESTION_COUNT)
                .map(id -> QUESTION_URL + id)
                .parallel();
    }

    /** Null-safe {@code Element.text()}: returns "" when the selector matched nothing. */
    private static String textOf(Element element) {
        return element == null ? "" : element.text();
    }
}
