package cn.edu.hfut.dmic;/*
 * Copyright (C) 2015 hu
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 */


import cn.edu.hfut.dmic.util.FileDownload;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatum;
import cn.edu.hfut.dmic.webcollector.model.CrawlDatums;
import cn.edu.hfut.dmic.webcollector.model.Page;
import cn.edu.hfut.dmic.webcollector.plugin.berkeley.BreadthCrawler;
import cn.edu.hfut.dmic.webcollector.util.ExceptionUtils;
import cn.hutool.http.HttpUtil;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.io.File;
import java.net.SocketTimeoutException;
import java.util.*;

/**
 * Note: this tutorial has nothing to do with depth-first traversal.
 * Some crawl jobs want depth information attached to each page, i.e. the
 * level of the page within the traversal tree. The MetaData feature
 * introduced in version 2.20 makes this easy to implement.
 *
 * @author hu
 */
public class DemoDepthCrawler extends BreadthCrawler {

    /** Base URL prepended to the relative hrefs scraped from the result tables. */
    private final String url = "https://www.okfree.men/";

    /** Links collected from "color4" rows; only accumulated here, not consumed in this class. */
    private List<String> list = new LinkedList<String>();
    /** De-duplicated union of all collected links (static so a follow-up crawler can reuse it). */
    private static HashSet<String> set = new HashSet<String>();

    /**
     * Seeds the crawler with the first four search-result pages (each tagged
     * with depth=1) and installs the URL filter rules.
     *
     * @param crawlPath directory used by BreadthCrawler to persist crawl state
     * @param autoParse whether links are auto-extracted using the regex rules below
     * @throws InterruptedException if the pause between seed additions is interrupted
     */
    public DemoDepthCrawler(String crawlPath, boolean autoParse) throws InterruptedException {
        super(crawlPath, autoParse);

        for (int i = 1; i < 5; i++) {
            addSeed(new CrawlDatum("https://www.okfree.men/search.php?action=search&word=%E6%9D%8E%E8%8E%8E&scope=all&&pg=" + i)
                    .meta("depth", 1));
            // Brief pause between seed additions to avoid hammering the server.
            Thread.sleep(100);
        }
        /* Regex rules only filter links that the crawler auto-extracts; links added
           manually by the user — seeds, or links pushed into `next` inside visit() —
           bypass these filters. */
        /* Auto-crawl links like "http://news.hfut.edu.cn/show-xxxxxx.html" */
//        addRegex("https://www.okfree.men/file-*.html");
        /* Do not crawl jpg|png|gif resources. */
        addRegex("-.*\\.(jpg|png|gif).*");
//        setAutoParse(true);
        /* Do not crawl links containing "#". */
        addRegex("-.*#.*");
    }

    /**
     * Scrapes each row of the result table, taking the first link of any
     * "color1" and "color4" row, and records entries of the form
     * {@code url + href + "asmr" + linkText} in the shared set.
     *
     * @param page the fetched page (parsed DOM available via page.doc())
     * @param next queue of follow-up crawl tasks (unused here; auto-parse handles links)
     */
    @Override
    public void visit(Page page, CrawlDatums next) throws Exception {
        Document doc = page.doc();
        Elements rows = doc.select("div.layout_box").select("table.td_line").select("tr");
        for (Element row : rows) {
            Elements color1Link = row.select("tr.color1").select("td").select("a[href]").eq(0);
            Elements color4Link = row.select("tr.color4").select("td").select("a[href]").eq(0);
            if (!color1Link.isEmpty()) {
                String href = color1Link.attr("href");
                String text = color1Link.text();
                set.add(url + href + "asmr" + text);
            }
            if (!color4Link.isEmpty()) {
                String href = color4Link.attr("href");
                String text = color4Link.text();
                list.add(url + href + "asmr" + text);
                set.add(url + href + "asmr" + text);
            }
        }
        System.out.println("visiting:" + page.url() + "\tdepth=" + page.meta("depth"));
    }

    /**
     * Propagates depth metadata: tasks parsed from a page at depth x are
     * tagged with depth x+1.
     */
    @Override
    protected void afterParse(Page page, CrawlDatums next) {
        int depth = 1;
        // If the seed was added without depth metadata, fall back to the default
        // above instead of failing.
        try {
            depth = page.metaAsInt("depth");
        } catch (Exception ignored) {
            // Missing or non-numeric "depth" meta; keep the default of 1.
        }
        depth++;
        next.meta("depth", depth);
    }

    /**
     * Logs into the site, then runs the crawler for one depth level.
     *
     * SECURITY NOTE(review): the login credentials below are hard-coded in
     * source. Move them to configuration / environment variables and rotate
     * the exposed password before publishing this code.
     */
    public static void main(String[] args) throws Exception {
        Map<String, Object> loginForm = new HashMap<String, Object>();

        loginForm.put("action", "login");
        loginForm.put("task", "login");
        loginForm.put("ref", "https://www.okfree.men/");
        loginForm.put("formhash", "0af1aa15");
        loginForm.put("username", "canger");
        loginForm.put("password", "a4298675");
        // HttpUtil.post(String, Map) already returns String — no cast needed.
        String post = HttpUtil.post("https://www.okfree.men/account.php", loginForm);
        System.out.println("登录" + post);
        DemoDepthCrawler crawler = new DemoDepthCrawler("depth_cral", true);
        crawler.start(1);
//        DemoDepthCrawler2 depth_crawler2 = new DemoDepthCrawler2("depth_crawler2", true, set);
//        depth_crawler2.getConf().setTopN(5);
//        depth_crawler2.start(1);
    }

}
