package org.jeecg.crawler.node;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.rocks.BreadthCrawler;
import lombok.SneakyThrows;
import org.jeecg.modules.crawlerpaper.entity.CrawlerInfo;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.select.Elements;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Breadth-first crawler that visits newspaper listing pages (e.g. on
 * {@code xhby.net} sites), extracts headline links, downloads each
 * article's HTML, and collects the results as {@code CrawlerInfo} records.
 *
 * @author hu
 */
public class DetailAutoNewsCrawler extends BreadthCrawler {

    /** Site root prepended to relative article paths, e.g. {@code "http://njcb.xhby.net/pc"}. */
    private String baseUrl;

    /** Human-readable name of the news source, copied onto every result. */
    private String sourceName;

    /** Publication-date string attached to every crawled article. */
    private String articleTime;

    /** Maps each seed (listing-page) URL to the layout/section label for that page. */
    private Map<String, String> urlMap;

    /**
     * Articles collected by {@link #visit(Page, CrawlDatums)}. Wrapped in a
     * synchronized list because the crawler runs with many worker threads
     * (see {@code setThreads(49)} in the constructor), all of which append here.
     */
    public List<CrawlerInfo> crawlerInfoList = Collections.synchronizedList(new ArrayList<>());

    /**
     * Creates a crawler seeded with every key of {@code urlMap}.
     *
     * @param crawlPath   path of the directory which maintains information of this crawler
     * @param autoParse   if true, BreadthCrawler will auto-extract links matching regex rules
     * @param urlMap      seed URL -&gt; layout label; each key is added as a crawl seed
     * @param baseUrl     site root used to build absolute article URLs
     * @param sourceName  name of the news source recorded on each result
     * @param articleTime date string recorded on each result
     */
    public DetailAutoNewsCrawler(String crawlPath, boolean autoParse, Map<String, String> urlMap,
                                 String baseUrl, String sourceName, String articleTime) {
        super(crawlPath, autoParse);
        // Every listing page in the map is a starting point for the crawl.
        for (String seed : urlMap.keySet()) {
            this.addSeed(seed);
        }
        this.baseUrl = baseUrl;
        this.sourceName = sourceName;
        this.articleTime = articleTime;
        this.urlMap = urlMap;
        setThreads(49);
        getConf().setTopN(100);
    }

    /**
     * Parses one listing page: for each headline, builds a {@link CrawlerInfo}
     * with the article's name, URL, id, and downloaded HTML content.
     */
    @SneakyThrows
    @Override
    public void visit(Page page, CrawlDatums next) {
        // Headline nodes on the listing page.
        Elements headlines = page.select("div[class=newslist] ul li h3");
        // Site prefix between "http://" and the first dot,
        // e.g. "njcb" from "http://njcb.xhby.net/...": used to build article ids.
        String baseId = baseUrl.substring(baseUrl.indexOf("http://") + 7, baseUrl.indexOf("."));

        for (Element headline : headlines) {
            CrawlerInfo crawlerInfo = new CrawlerInfo();
            crawlerInfo.setSourceName(sourceName);
            crawlerInfo.setScoureUrl(page.url());
            crawlerInfo.setArticleName(headline.text());
            crawlerInfo.setArticleTime(this.articleTime);
            crawlerInfo.setLayout(urlMap.get(page.url()));

            for (Node node : headline.childNodes()) {
                String href = node.attributes().get("href");
                // Jsoup returns "" (not null) for a missing attribute, so the null
                // check alone is insufficient. Also guard against links missing the
                // expected "/con/.../content_NNN" path segments; without these checks
                // indexOf() returns -1 and substring() throws.
                if (href != null && href.contains("/con/") && href.contains("content_")) {
                    String relativePath = href.substring(href.indexOf("/con/"));
                    String contentUrl = baseUrl + relativePath;
                    crawlerInfo.setArticleUrl(contentUrl);
                    // Id = site prefix + text after "content_", e.g. "njcb944464.html".
                    crawlerInfo.setId(baseId + relativePath.substring(relativePath.indexOf("content_") + 8));
                    crawlerInfo.setArticleContent(getHtmlByUrl(contentUrl));
                }
            }
            crawlerInfoList.add(crawlerInfo);
        }
    }

    /** Ad-hoc demo of the baseId extraction; not used by the crawler itself. */
    public static void main(String[] args) throws Exception {
        String baseUrl = "http://njcb.xhby.net/pc/con/202107/07/content_944464.html";
        String baseId = baseUrl.substring(baseUrl.indexOf("http://") + 7, baseUrl.indexOf("."));
        System.out.println(baseId);
    }

    /**
     * Downloads the resource at {@code contentUrl} and returns its body with
     * line terminators stripped (lines are concatenated directly).
     *
     * @param contentUrl absolute URL to fetch
     * @return the response body as a single string, without newlines
     * @throws Exception on any I/O failure
     */
    public static String getHtmlByUrl(String contentUrl) throws Exception {
        URL url = new URL(contentUrl);
        StringBuilder content = new StringBuilder();
        // try-with-resources: the original leaked the reader on every call.
        // NOTE(review): this decodes with the platform default charset, as the
        // original did — confirm the target site's encoding (UTF-8 vs GBK)
        // before pinning an explicit charset.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(url.openStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                content.append(line);
            }
        }
        return content.toString();
    }

    public String getBaseUrl() {
        return baseUrl;
    }

    public void setBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
    }

    public String getSourceName() {
        return sourceName;
    }

    public void setSourceName(String sourceName) {
        this.sourceName = sourceName;
    }

    public String getArticleTime() {
        return articleTime;
    }

    public void setArticleTime(String articleTime) {
        this.articleTime = articleTime;
    }

    public Map<String, String> getUrlMap() {
        return urlMap;
    }

    public void setUrlMap(Map<String, String> urlMap) {
        this.urlMap = urlMap;
    }

    public List<CrawlerInfo> getCrawlerInfoList() {
        return crawlerInfoList;
    }

    public void setCrawlerInfoList(List<CrawlerInfo> crawlerInfoList) {
        this.crawlerInfoList = crawlerInfoList;
    }
}
