package com.hc.api.controller;


import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;

import java.util.Iterator;

/**
 * Created by yx on 2017/9/12.
 */
/**
 * Breadth-first crawler for HFUT news list/article pages that tracks each
 * page's crawl depth by propagating a "depth" entry through datum metadata.
 */
public class NewsCrawler extends BreadthCrawler {

    /** Metadata key under which the crawl depth is stored on each datum. */
    private static final String DEPTH_KEY = "depth";

    /**
     * @param crawlPath directory where the crawler's Berkeley DB state is kept
     * @param autoParse whether links matching the regex rules are auto-extracted
     */
    public NewsCrawler(String crawlPath, boolean autoParse) {
        super(crawlPath, autoParse);
    }

    /** Logs each visited page together with its recorded crawl depth. */
    @Override
    public void visit(Page page, CrawlDatums crawlDatums) {
        System.out.println("visiting:" + page.url() + "\tdepth=" + page.meta(DEPTH_KEY));
    }

    /**
     * After a page is parsed, stamps every newly discovered link with the
     * parent page's depth + 1. Pages with no depth metadata (seeds added
     * without one) are treated as depth 1.
     */
    @Override
    protected void afterParse(Page page, CrawlDatums next) {
        String depthMeta = page.meta(DEPTH_KEY);
        // parseInt avoids the box/unbox round-trip of Integer.valueOf(..).intValue()
        int depth = (depthMeta == null) ? 1 : Integer.parseInt(depthMeta);
        int childDepth = depth + 1;
        for (CrawlDatum datum : next) {
            datum.meta(DEPTH_KEY, String.valueOf(childDepth));
        }
    }

    public static void main(String[] args) throws Exception {
        NewsCrawler crawler = new NewsCrawler("depth_crawler", true);

        // Seed the first five news list pages, all starting at depth 1.
        for (int i = 1; i <= 5; ++i) {
            crawler.addSeed(new CrawlDatum("http://news.hfut.edu.cn/list-1-" + i + ".html").meta(DEPTH_KEY, "1"));
        }

        // Follow article pages; rules starting with '-' are exclusion filters
        // (skip images and fragment-only URLs) per WebCollector's regex convention.
        crawler.addRegex("http://news.hfut.edu.cn/show-.*html");
        crawler.addRegex("-.*\\.(jpg|png|gif).*");
        crawler.addRegex("-.*#.*");
        crawler.setTopN(5);
        crawler.start(3);
    }
}
