package com.shishuo.cms.crawler;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @author Li Zhao
 * @since 2016/5/27.
 */
public class Crawler {

    /**
     * Scans raw HTML and prints every opening tag whose {@code class} attribute
     * contains the given class name, together with its immediate text content
     * and the tag that follows it.
     *
     * @param docs  raw HTML text to scan
     * @param clazz CSS class name to look for (matched literally, not as a regex)
     */
    public static void findByClass(String docs, String clazz) {
        // Fix: the original used "[\w-]" (exactly ONE word character) for sibling
        // class names, so any neighboring class longer than one character — e.g.
        // class="foo bar" while searching for "bar" — never matched. "[\w-]+" fixes
        // that. Pattern.quote() keeps regex metacharacters in clazz from being
        // interpreted as pattern syntax.
        Pattern pattern = Pattern.compile(
                "<[^<]+class\\s*=\\s*['\"]\\s*([\\w-]+\\s+)*"
                        + Pattern.quote(clazz)
                        + "(\\s+[\\w-]+)*\\s*['\"][^>]*>[^<]*<[^>]+>");
        Matcher matcher = pattern.matcher(docs);
        while (matcher.find()) {
            System.out.println(matcher.group());
        }
    }

    /**
     * Entry point: fetches the Baidu hot-search list and prints the HTML of the
     * first result's resolved target page.
     *
     * @throws IOException if either network fetch fails
     */
    public static void main(String[] args) throws IOException {
        // Note: dropped the never-thrown ParserConfigurationException/SAXException
        // from the throws clause — nothing here does SAX/DOM parsing.
        String[] hotUrls = getHotUrls();
        // Guard: an empty hot list would otherwise throw ArrayIndexOutOfBoundsException.
        if (hotUrls.length > 0) {
            println(getRealUrl(hotUrls[0]));
        }
    }

    /**
     * Fetches the page at the given URL and prints its full HTML to stdout.
     *
     * @param url absolute URL of the page to fetch
     * @throws IOException if the page cannot be retrieved
     */
    public static void println(String url) throws IOException {
        System.out.println(Jsoup.connect(url).get().html());
    }

    /**
     * Scrapes the Baidu top-buzz page and returns the {@code href} of every
     * {@code .list-title} entry (presumably one redirect URL per hot topic —
     * verify against the live page markup).
     *
     * @return array of hrefs, possibly empty, never {@code null}
     * @throws IOException if the buzz page cannot be retrieved
     */
    public static String[] getHotUrls() throws IOException {
        Document doc = Jsoup.connect("http://top.baidu.com/buzz?b=42&fr=topindex").get();
        Elements titles = doc.select(".list-title");
        String[] urls = new String[titles.size()];
        for (int i = 0; i < titles.size(); i++) {
            urls[i] = titles.get(i).attr("href");
        }
        return urls;
    }

    /**
     * Follows a Baidu search/redirect URL and returns the href of the first
     * link inside the first {@code .t} element of the fetched page.
     *
     * @param baiduUrl Baidu redirect URL taken from {@link #getHotUrls()}
     * @return the target href of the first result
     * @throws IOException if the page cannot be fetched or contains no
     *                     {@code .t} element (previously this surfaced as a raw
     *                     IndexOutOfBoundsException)
     */
    static String getRealUrl(String baiduUrl) throws IOException {
        Document doc = Jsoup.connect(baiduUrl).get();
        Elements results = doc.select(".t");
        if (results.isEmpty()) {
            throw new IOException("No search result element (.t) found at " + baiduUrl);
        }
        return results.first().select("a").attr("href");
    }

}
