package org.jeecg.crawler.special;

import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.rocks.BreadthCrawler;
import lombok.SneakyThrows;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.select.Elements;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Crawls article links from the Legal Daily (法制日报) e-paper at
 * epaper.legaldaily.com.cn, collecting article URL -> headline pairs.
 *
 * @author hu
 */

public class FaZhiNewsCrawler extends BreadthCrawler {

    /** Date segment of the paper's URL path, e.g. {@code 20240105}. */
    private static final DateTimeFormatter URL_DATE_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd");

    /** Collected article URL -> headline text. */
    private Map<String, String> urlMap = new HashMap<>();

    /**
     * Creates the crawler seeded with today's front page of the e-paper.
     *
     * @param crawlPath directory where the crawler persists its state
     * @param autoParse whether the framework auto-extracts links from pages
     */
    public FaZhiNewsCrawler(String crawlPath, boolean autoParse) {
        super(crawlPath, autoParse);
        /* start page: today's front page */
        this.addSeed(getUrl());
        setThreads(20);
        getConf().setTopN(100);
    }

    /**
     * Returns today's date formatted as {@code yyyyMMdd}, the date segment
     * used in the e-paper's URLs.
     *
     * @return today's date as an 8-digit string, e.g. {@code "20240105"}
     */
    public static String getCommon() {
        // DateTimeFormatter zero-pads month and day, replacing the previous
        // four-branch manual padding logic.
        return LocalDateTime.now().format(URL_DATE_FORMAT);
    }

    /**
     * Builds the URL of today's front page (page 01) of the e-paper.
     *
     * @return absolute URL of today's front page
     */
    public static String getUrl() {
        return "http://epaper.legaldaily.com.cn/fzrb/content/" + getCommon() + "/Page01TB.htm";
    }

    /**
     * Visits a fetched page, extracting article links whose anchors carry the
     * {@code atitle} class, and records URL -> headline in {@link #urlMap}.
     *
     * @param page the fetched page
     * @param next datums to crawl next (unused; seeds only crawl one level here)
     */
    @Override
    public void visit(Page page, CrawlDatums next) {
        // Article links on the index page are <a class="atitle"> inside table cells.
        Elements anchors = page.select("td a[class=atitle]");
        for (Element anchor : anchors) {
            // Read the anchor's own href once. The previous version looped over
            // every child node and re-read the parent's href, processing the
            // same link once per child node. jsoup returns "" (never null) for
            // a missing attribute, so no null check is needed.
            String href = anchor.attr("href");
            // Keep only real article pages; headlines on this paper contain a colon.
            if (href.contains(".htm") && anchor.text().contains(":")) {
                String url = "http://epaper.legaldaily.com.cn/fzrb/content/" + getCommon() + "/" + href;
                urlMap.put(url, anchor.text());
                System.out.println(url);
                System.out.println(anchor.text());
            }
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(getUrl());
        FaZhiNewsCrawler crawler = new FaZhiNewsCrawler("crawl", true);
        /* start crawl with depth of 2 */
        crawler.start(2);
    }

    /** @return collected article URL -> headline pairs */
    public Map<String, String> getUrlMap() {
        return urlMap;
    }

    /** @param urlMap replacement URL -> headline map */
    public void setUrlMap(Map<String, String> urlMap) {
        this.urlMap = urlMap;
    }

}
