package com.asiainfo.zqx;

import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.regex.Pattern;

/**
 * Crawler for NetEase (163.com) media-account ("网易号") news pages,
 * built on WebCollector's Berkeley-DB-backed {@link BreadthCrawler}.
 *
 * @author jhr
 * @since 2022/7/15
 */
public class WangyiHao extends BreadthCrawler {

    /**
     * Berkeley DB folder backing the crawler's URL history.
     * Different tasks must not share the same crawlPath; two crawlers running
     * in parallel on the same path will corrupt each other's state.
     */
    private static final String crawlPath = "/Users/jhr/data/db/wangyihao";

    /** Seed URL the crawl starts from. */
    private String seedurl = "";
    /** Regex string identifying article detail pages. */
    private String regurl = "";
    /** Pre-compiled form of {@code regurl}; compiled once instead of on every URL check. */
    private final Pattern articlePattern;

    /**
     * Builds a Berkeley-DB-backed breadth-first crawler.
     *
     * @param seed       seed URL the crawl starts from
     * @param regularUrl regex that article-page URLs must match
     */
    public WangyiHao(String seed, String regularUrl) {
        // autoParse = false: new URLs are discovered manually in visit(),
        // not by the framework's automatic regex detection.
        super(crawlPath, false);
        seedurl = seed;
        regurl = regularUrl;
        // Compile once; Pattern.matches()/String.matches() would recompile
        // the pattern for every visited URL and every <a> element.
        articlePattern = Pattern.compile(regularUrl);
        // int overload, consistent with the "depth" meta added in visit().
        addSeed(new CrawlDatum(seedurl).meta("depth", 2));
        this.addRegex(regularUrl);
        setThreads(2);
    }

    /**
     * Visits a fetched page. Article pages (URL matches the configured regex)
     * have their title/time/author/body extracted and printed; any other page
     * is scanned for article links, which are queued for crawling.
     */
    @Override
    public void visit(Page page, CrawlDatums next) {
        System.out.println("page.url()" + page.url());
        if (articlePattern.matcher(page.url()).matches()) {
            // SimpleDateFormat is not thread-safe; keep the instance local to the call
            // since visit() runs on multiple crawler threads.
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            // Title
            String title = page.select("div.post_main>h1.post_title").text();
            // Publish time
            String time = page.select("div.post_info").text();
            // Author: first() returns null when the selector matches nothing,
            // so guard against the NPE that .first().text() would throw.
            Element writerLink = page.select("div.post_info>a").first();
            String writer = writerLink == null ? "" : writerLink.text();
            // Body text
            String text = page.select("div.post_body").text();
            System.out.println("标题" + title);
            try {
                System.out.println("时间" + sdf.parse(time));
            } catch (ParseException e) {
                // Preserve the cause and include the offending value for diagnosis.
                throw new RuntimeException("Cannot parse publish time: " + time, e);
            }
            System.out.println("作者" + writer);
            System.out.println("正文" + text);
            System.out.println("来源：网易新闻号");
            System.out.println("URL：" + page.url());
        } else {
            // Hub page: queue every link whose absolute URL looks like an article.
            Elements elements = page.select("a");
            System.out.println("elements.size()" + elements.size());
            for (Element element : elements) {
                String href = element.attr("abs:href");
                if (articlePattern.matcher(href).matches()) {
                    next.add(new CrawlDatum(href).meta("depth", 2));
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Dots escaped so the regex matches a literal "." rather than any
        // character (the original ".*.html" would also match e.g. "fooXhtml").
        WangyiHao wangyiHao = new WangyiHao(
                "https://www.163.com/dy/media/T1614331166577.html",
                "https://www\\.163\\.com/dy/article/.*\\.html");
        wangyiHao.start(2);
    }
}
