package cn.edu.hfut.dmic;/*
 * Copyright (C) 2015 hu
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 */


import cn.edu.hfut.dmic.util.FileDownload;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;
import cn.hutool.http.HttpUtil;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.*;

/**
 * Demo crawler that records each page's depth in the traversal tree.
 * The traversal itself is still plain breadth-first — "depth" here is
 * only bookkeeping, propagated through the MetaData feature introduced
 * in WebCollector 2.20: each page parsed from a depth-x page is tagged
 * with depth x+1.
 *
 * @author hu
 */
public class DemoDepthCrawler2 extends BreadthCrawler {

    /** Base URL of the target site. NOTE(review): currently unused by the crawl logic. */
    private String url = "https://www.okfree.men/";

    /** File that extracted sound-file URLs are appended to, one per line. */
    private static final String OUTPUT_PATH = "F:\\wow.txt";

    /**
     * Creates a crawler seeded with a caller-supplied set of URLs.
     *
     * @param crawlPath directory where the crawl state (Berkeley DB) is stored
     * @param autoParse whether links should be auto-extracted from fetched pages
     * @param list      seed URLs to start crawling from
     */
    public DemoDepthCrawler2(String crawlPath, boolean autoParse, HashSet<String> list) {
        super(crawlPath, autoParse);
        // Add all seeds; "false" means they are normal seeds, not forced re-fetches.
        addSeed(list, false);
        /* Regex rules only filter links that the crawler extracts automatically.
           Manually added links (seeds, or links pushed into "next" inside visit)
           are NOT subject to regex filtering. */
        // Leading '-' marks a negative rule: do not follow image resources.
        addRegex("-.*\\.(jpg|png|gif).*");
        setAutoParse(true);
        // Negative rule: do not follow links containing a "#" fragment.
        addRegex("-.*#.*");
    }

    /**
     * Creates a crawler with a single hard-coded seed page and 10 worker threads.
     *
     * @param crawlPath directory where the crawl state (Berkeley DB) is stored
     * @param autoParse whether links should be auto-extracted from fetched pages
     */
    public DemoDepthCrawler2(String crawlPath, boolean autoParse) {
        super(crawlPath, autoParse);
        addSeed("https://www.okfree.men/file-2267.html");
        /* Regex rules only filter auto-extracted links; seeds and manually
           added links bypass them (see the other constructor). */
        // Negative rule: do not follow image resources.
        addRegex("-.*\\.(jpg|png|gif).*");
        setThreads(10);
        // Negative rule: do not follow links containing a "#" fragment.
        addRegex("-.*#.*");
    }

    /**
     * Extracts the embedded player's {@code soundFile} URL and the page title
     * from a detail page, appends the URL to {@link #OUTPUT_PATH}, and logs
     * the visit together with its depth meta.
     *
     * @param page the fetched page
     * @param next collector for follow-up crawl tasks (unused here)
     * @throws Exception on I/O failure while writing the output file
     */
    @Override
    public void visit(Page page, CrawlDatums next) throws Exception {
        Document doc = page.doc();
        // Drill down to the 3rd <script> inside the first file_item block;
        // it embeds the audio-player configuration containing soundFile:"...".
        Elements script = doc.select("div.circle_box")
                .select("div.layout_box")
                .select("div.l")
                .select("div.file_item").eq(0)
                .select("script").eq(2);
        String scriptText = script.toString();
        if (!scriptText.contains("soundFile:")) {
            // Not a detail page with an embedded player (e.g. a listing page);
            // the original code would have thrown ArrayIndexOutOfBoundsException here.
            System.out.println("visiting:" + page.url() + "\tdepth=" + page.meta("depth"));
            return;
        }
        // Player config is of the form {... soundFile:"<url>", ...}.
        String urls = scriptText.split("soundFile:")[1].split(",")[0].replace("\"", "");

        String title = doc.select("div.layout_box")
                .select("div.l")
                .select("div.file_box")
                .select("h3#file_tit").text();
        System.out.println(title);

        File file = new File(OUTPUT_PATH);
        file.getParentFile().mkdirs();
        file.createNewFile();
        // try-with-resources: the original never closed the writer, leaking a
        // file handle on every visited page.
        try (OutputStreamWriter otp = new OutputStreamWriter(
                new FileOutputStream(OUTPUT_PATH, true), StandardCharsets.UTF_8)) {
            System.out.println(urls);
            otp.write(urls);
            otp.write("\r\n");
            otp.flush();
        }
        System.out.println("visiting:" + page.url() + "\tdepth=" + page.meta("depth"));
    }

    /**
     * Propagates depth information: tasks parsed from a page at depth x are
     * tagged with depth x+1 via the MetaData mechanism.
     *
     * @param page the page that was just parsed
     * @param next follow-up tasks extracted from {@code page}
     */
    @Override
    protected void afterParse(Page page, CrawlDatums next) {
        int depth = 1;
        try {
            depth = page.metaAsInt("depth");
        } catch (Exception ignored) {
            // Seeds added without a "depth" meta land here; treat them as depth 1
            // so the crawl keeps working instead of failing.
        }
        next.meta("depth", depth + 1);
    }

    /**
     * Entry point: crawls one breadth level starting from the hard-coded seed,
     * keeping at most 5 extracted links per page (topN).
     */
    public static void main(String[] args) throws Exception {
        DemoDepthCrawler2 crawler = new DemoDepthCrawler2("depth_craw", true);
        crawler.getConf().setTopN(5);
        crawler.start(1);
    }
}
