package com.xwtec.crawler.service.impl;

import com.xwtec.crawler.entity.Crawler;
import com.xwtec.crawler.mapper.CrawlerMapper;
import com.xwtec.crawler.service.CrawlerService;
import com.xwtec.crawler.util.DateUtil;
import com.xwtec.crawler.util.SensitivewordFilter;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.util.*;

@Service
public class CrawlerServiceImpl implements CrawlerService {

    private static final Logger log = LoggerFactory.getLogger(CrawlerServiceImpl.class);

    /** Binary / non-HTML file extensions that should never be fetched as pages. */
    private static final Set<String> SKIPPED_EXTENSIONS = new HashSet<>(Arrays.asList(
            "apk", "jpg", "jpeg", "png", "gif", "xls", "pdf", "zip", "rar"));

    /** User-Agent sent with every page fetch so sites serve a normal desktop page. */
    private static final String USER_AGENT =
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.108 Safari/537.36";

    /** Connect/read timeout for a single page fetch, in milliseconds. */
    private static final int FETCH_TIMEOUT_MS = 10_000;

    /** Maximum length of a URL stored in the detection table's curl column. */
    private static final int MAX_STORED_URL_LENGTH = 2000;

    @Autowired
    CrawlerMapper crawlerMapper;

    // Shared filter instance; assumed safe for concurrent read-only lookups — TODO confirm
    // SensitivewordFilter's thread-safety before running crawls in parallel.
    final SensitivewordFilter filter = new SensitivewordFilter();

    /**
     * Scans the page text carried in {@code map} for sensitive words and logs any hits.
     *
     * @param map       result of {@link #getWordAndUrl(String)}; only the "words" entry
     *                  (full page text, may be absent when the fetch was skipped/failed) is read
     * @param insertUrl URL to record alongside a detection (used by the disabled DB insert)
     * @param level     crawl depth to record alongside a detection
     */
    @Override
    public void checkSensitiveWords(Map<String, Object> map, String insertUrl, int level) {
        String words = (String) map.get("words");
        if (words == null) {
            // Page was skipped or the fetch failed — nothing to scan.
            return;
        }
        Set<String> hits = filter.getSensitiveWord(words, 1);
        if (!hits.isEmpty()) {
            log.info("页面中包含敏感词的个数为：{}。包含：{}", hits.size(), hits);
            // TODO(review): persisting the detection is intentionally disabled in the
            // original code. When re-enabled, build a fresh per-call map (do NOT share a
            // static one across threads) with:
            //   curl   = insertUrl truncated to MAX_STORED_URL_LENGTH chars
            //   ctime  = DateUtil.getDateTimeFormat(new Date())
            //   clevel = String.valueOf(level)
            //   cword  = hits.toString()
            // and call crawlerMapper.insertCrawlerResult(paramMap).
        }
    }

    /**
     * @return all URLs already recorded by the crawler.
     */
    @Override
    public List<String> queryUrls() {
        return crawlerMapper.queryUrls();
    }

    /**
     * @return all sensitive-word entries known to the database.
     */
    @Override
    public List<String> querySensitive() {
        return crawlerMapper.querySensitive();
    }

    /**
     * @return all seed pages (entry URLs) that the crawler should start from.
     */
    public List<Crawler> queryFirstUrls() {
        return crawlerMapper.queryFirstUrls();
    }

    /**
     * Fetches {@code furl} and extracts the page's full text plus every outbound link.
     *
     * @param furl absolute URL to fetch
     * @return a map with keys "words" (page text), "urls" (set of href values) and
     *         "url" (the input URL); empty when the URL is skipped or the fetch fails
     */
    @Override
    public Map<String, Object> getWordAndUrl(String furl) {
        Map<String, Object> result = new HashMap<>();
        log.info("current url: {}", furl);
        try {
            if (isCrawlablePage(furl)) {
                // NOTE(review): TLS certificate validation is disabled and redirects are
                // not followed, matching the original crawler's behaviour. Disabling
                // certificate validation is a security trade-off — revisit if this ever
                // crawls untrusted networks.
                Document doc = Jsoup.connect(furl)
                        .ignoreContentType(true)
                        .header("User-Agent", USER_AGENT)
                        .timeout(FETCH_TIMEOUT_MS)
                        .validateTLSCertificates(false)
                        .maxBodySize(0)
                        .followRedirects(false)
                        .get();

                Set<String> urlSet = new HashSet<>();
                for (Element link : doc.select("a[href]")) {
                    urlSet.add(link.attr("href").trim());
                }

                // Guard against a document with no <html> element (e.g. non-HTML payload);
                // the original code would NPE here, which is not in the catch list.
                Element html = doc.select("html").first();
                if (html != null) {
                    result.put("words", html.text());
                }
                result.put("urls", urlSet);
                result.put("url", furl);
            }
        } catch (IOException | IllegalArgumentException e) {
            log.error("failed to fetch {}", furl, e);
        }
        return result;
    }

    /**
     * Decides whether a URL looks like an HTML page worth fetching: must be an
     * http(s) URL, must not be the malformed "http:/v" form, and must not end in a
     * known binary extension (checked case-insensitively).
     */
    private boolean isCrawlablePage(String furl) {
        if (furl == null || !furl.startsWith("http") || furl.startsWith("http:/v")) {
            return false;
        }
        // Case-insensitive so ".JPG"/".PDF" etc. are also skipped.
        String lower = furl.toLowerCase(Locale.ROOT);
        for (String ext : SKIPPED_EXTENSIONS) {
            if (lower.endsWith(ext)) {
                return false;
            }
        }
        return true;
    }

    /** Unused placeholder entry point; retained only for ad-hoc manual testing. */
    public static void main(String[] args) {

    }
}
