package com.zh.SogouSpider;

import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;
import com.google.gson.JsonObject;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.jsoup.select.Selector;

import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Stream;


/**
 * Breadth-first crawler that downloads every Sogou pinyin dictionary (.scel) file,
 * organised on disk by category under {@code /home/zh/搜狗词库/}.
 *
 * <p>Crawl flow (driven by WebCollector datum types):
 * "category" (the /dict index) → one "page" per category (with pagination via the
 * 下一页 link) → one "download" per dictionary entry, whose bytes are saved to disk.
 */
public class SpiderApplication extends BreadthCrawler {

    /** Site root; relative links scraped from pages are resolved against it. */
    private final String baseUrl = "https://pinyin.sogou.com";

    /**
     * Captures the category name between full-width quotes, e.g. 词库分类“城市信息” → 城市信息.
     * Compiled once instead of on every visited page (the original recompiled it per page
     * and also pre-checked with Pattern.matches, doing the match twice).
     */
    private static final Pattern CATE_PATTERN = Pattern.compile(".*“(.*)”.*");

    /**
     * @param crawlPath directory WebCollector uses to persist crawl state
     * @param autoParse whether WebCollector auto-extracts links; this crawler adds
     *                  its links manually, so callers pass {@code false}
     */
    public SpiderApplication(String crawlPath, boolean autoParse) {
        super(crawlPath, autoParse);
        // Single seed: the dictionary category index page.
        this.addSeedAndReturn(baseUrl + "/dict").type("category");
    }

    /**
     * Required override from BreadthCrawler; unused because all handling is done in
     * the type-specific @MatchType methods below.
     */
    @Override
    public void visit(Page page, CrawlDatums crawlDatums) {

    }

    /**
     * Parses the top-level category index and queues one "page" datum per category.
     */
    @MatchType(types = "category")
    public void visitCategory(Page page, CrawlDatums next){
        if (!page.matchType("category")) {
            return;
        }
        if (page.code() == 301 || page.code() == 302){
            // Follow redirects, preserving the datum type and metadata.
            next.addAndReturn(page.location()).type("category").meta(page.copyMeta());
            return;
        }
        for (Element element : page.select("div.dict_category_list")) {
            Element first = element.select("div.dict_category_list_title>a").first();
            if (first == null) {
                continue; // layout changed / malformed entry — original would NPE here
            }
            next.addAndReturn(baseUrl + first.attr("href")).type("page");
        }
    }

    /**
     * Parses one category listing page: extracts the category name from the header,
     * queues a "download" datum per dictionary entry (carrying category + name as
     * metadata), and queues the next listing page if a 下一页 link exists.
     */
    @MatchType(types = "page")
    public void visitPage(Page page, CrawlDatums next){
        if (!page.matchType("page")) {
            return;
        }
        if (page.code() == 301 || page.code() == 302){
            next.addAndReturn(page.location()).type("page").meta(page.copyMeta());
            return;
        }
        // Category name shown in the page header, e.g. 词库分类“城市信息”.
        String category = null;
        Elements cateTitle = page.select("div.cate_title");
        if (!cateTitle.isEmpty()){ // jsoup select() never returns null; the old null check was dead
            Matcher matcher = CATE_PATTERN.matcher(cateTitle.text());
            if (matcher.find()){
                category = matcher.group(1);
            }
        }
        for (Element element : page.select("div.dict_detail_block")) {
            Element titleElement = element.select("div.detail_title>a").first();
            if (titleElement == null) {
                continue; // entry without a title link — skip instead of NPE
            }
            String name = titleElement.text();
            String url = element.select("div.dict_dl_btn>a").attr("href");
            next.addAndReturn(url).type("download").meta("category", category).meta("name", name);
        }

        // Queue the "next page" (下一页) pagination link, if present.
        page.select("div#dict_page_list>ul>li>span>a").stream()
                .filter(item -> item.text().equals("下一页"))
                .findFirst()
                .ifPresent(element -> next.addAndReturn(baseUrl + element.attr("href")).type("page"));
    }


    /**
     * Downloads one .scel dictionary file to {@code /home/zh/搜狗词库/<category>/<name>.scel},
     * using the category/name metadata attached in {@link #visitPage}.
     *
     * @throws IOException if the download or the file write fails
     */
    @MatchType(types = "download")
    public void visitDownload(Page page, CrawlDatums next) throws IOException {

        if (!page.matchType("download")){
            return;
        }
        if (page.code() == 301 || page.code() == 302){
            next.addAndReturn(page.location()).type("download").meta(page.copyMeta());
            return;
        }
        String category = page.meta("category");
        // NOTE(review): assumes the "name" meta is always set by visitPage — a download
        // datum queued without it would NPE below; confirm against WebCollector's meta API.
        String name = page.meta("name");
        if (name.contains("/")){
            // '/' in a dictionary name would otherwise be treated as a path separator.
            name = name.replace('/', '_');
        }
        System.out.println("category：" + category + ",name：" + name);

        // Ensure the per-category target directory exists.
        File directory = new File("/home/zh/搜狗词库/" + category);
        if (!directory.exists()){
            directory.mkdirs();
        }

        String fileName = directory + "/" + name + ".scel";
        File file = new File(fileName);
        if (file.exists()){
            file.delete(); // overwrite stale copies from earlier runs
        }
        URL url = new URL(page.url());
        URLConnection conn = url.openConnection();
        // try-with-resources: the original leaked both streams on every download.
        try (InputStream inStream = conn.getInputStream();
             FileOutputStream fs = new FileOutputStream(fileName)) {
            byte[] buffer = new byte[8192];
            int byteread;
            while ((byteread = inStream.read(buffer)) != -1) {
                fs.write(buffer, 0, byteread);
            }
        }
    }


    /**
     * Entry point: crawls with a 5-second politeness interval and a generous depth cap.
     */
    public static void main(String[] args) throws Exception {
        SpiderApplication crawler = new SpiderApplication("crawl", false);

        // 5 s between requests to avoid hammering the site.
        crawler.getConf().setExecuteInterval(5000);

        crawler.getConf().set("title_prefix","PREFIX_");
        crawler.getConf().set("content_length_limit", 20);

        /*start crawl with depth of 4*/
        crawler.start(100000);
    }
}
