package com.zhb.blog.util;

import com.zhb.blog.model.Tag;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

/**
 * Crawls OSChina blog search results for a tag via a proxied Jsoup connection,
 * caching the discovered (title, url) pairs on disk under {@code urls/<tag>.txt}
 * so repeated lookups do not burn the proxy IP with extra requests.
 */
public class JsoupReptileUtil {

    private static final Logger LOG = Logger.getLogger(JsoupReptileUtil.class.getName());

    /** Directory holding one cache file per tag. */
    private static final String CACHE_DIR = "urls";

    /** Separator between title and url on each cache-file line. */
    private static final String SEPARATOR = "--";

    /**
     * Returns a map of blog title -> url for the first tag in {@code tags}.
     * <p>
     * Only the first tag is crawled so the proxy IP is not invalidated by too many
     * page requests. A disk cache ({@code urls/<tag>.txt}) is consulted first; on a
     * cache miss the OSChina search page is crawled and non-empty results are cached.
     *
     * @param tags tags to search for; may be null or empty (returns an empty map)
     * @return title -> url map, never null; empty when nothing was found or the crawl failed
     */
    public static Map<String, String> searchBlogs(List<Tag> tags) {
        Map<String, String> urls = new HashMap<>();
        // Guard: nothing to search for.
        if (tags == null || tags.isEmpty()) {
            return urls;
        }
        Tag tag = tags.get(0);

        // Cache hit: read "title--url" lines from the tag's file and return them.
        Path cacheFile = Paths.get(CACHE_DIR, tag.getName() + ".txt");
        if (Files.exists(cacheFile)) {
            try (BufferedReader reader = Files.newBufferedReader(cacheFile, StandardCharsets.UTF_8)) {
                String entry;
                while ((entry = reader.readLine()) != null) {
                    String[] split = entry.split(SEPARATOR);
                    if (split.length != 2) {
                        continue; // skip malformed lines instead of failing the whole read
                    }
                    urls.put(split[0], split[1]);
                }
                return urls;
            } catch (IOException e) {
                // Unreadable cache: fall through and crawl instead.
                LOG.log(Level.WARNING, "Failed to read cache file " + cacheFile, e);
                urls.clear();
            }
        }

        // Cache miss: crawl the search page through the proxy.
        Connection conn = getJsoupConnection(
                "https://www.oschina.net/search?scope=all&q=", "60.169.248.114", 36323, tag.getName());
        try {
            Document doc = conn.method(Connection.Method.GET).get();
            parseSearchResults(doc, urls);
        } catch (IOException e) {
            LOG.log(Level.WARNING, "Failed to crawl blogs for tag " + tag.getName(), e);
        }

        // Persist only non-empty results so a failed crawl is not cached as "no results".
        if (!urls.isEmpty()) {
            try {
                writeToFile(tag.getName(), urls);
            } catch (IOException e) {
                LOG.log(Level.WARNING, "Failed to write cache for tag " + tag.getName(), e);
            }
        }
        return urls;
    }

    /**
     * Extracts (title, href) pairs from the OSChina search-result DOM into {@code urls}.
     * Returns silently (adding nothing) when the expected layout is missing, rather
     * than throwing NPE the way unchecked {@code first()} chains would.
     */
    private static void parseSearchResults(Document doc, Map<String, String> urls) {
        Element body = doc.body();
        Element searchList = body.getElementsByClass("search-list-wrap").first();
        if (searchList == null) {
            return; // layout changed or empty result page
        }
        // NOTE(review): getElementsByClass matches a single class name; this multi-word
        // argument likely never matches and may belong in a CSS selector — confirm.
        Element container = searchList.getElementsByClass("ui basic segment article-list").first();
        if (container == null) {
            return;
        }
        for (Element blog : container.getElementsByClass("item")) {
            // getElementsByClass never returns null, so test emptiness via first().
            Element header = blog.getElementsByClass("content").first();
            if (header == null) {
                continue;
            }
            Elements aTag = header.getElementsByTag("a");
            if (aTag.isEmpty()) {
                continue;
            }
            // Elements.attr reads the attribute from the first matching element.
            String href = aTag.attr("href");
            String title = aTag.attr("title");
            if (!title.isEmpty() && !href.isEmpty()) {
                urls.put(title, href);
            }
        }
    }

    /**
     * Writes the title -> url map to {@code urls/<fileName>.txt}, one
     * "title--url" pair per line, creating the cache directory if needed.
     *
     * @throws IOException if the directory or file cannot be written
     */
    private static void writeToFile(String fileName, Map<String, String> urls) throws IOException {
        Path dir = Paths.get(CACHE_DIR);
        Files.createDirectories(dir); // plain FileWriter fails when the directory is missing
        Path file = dir.resolve(fileName + ".txt");
        try (BufferedWriter writer = Files.newBufferedWriter(file, StandardCharsets.UTF_8)) {
            for (Map.Entry<String, String> entry : urls.entrySet()) {
                writer.write(entry.getKey() + SEPARATOR + entry.getValue());
                writer.newLine();
            }
        }
    }

    /** Manual smoke test: crawls blogs tagged "spring" and prints title/url pairs. */
    public static void main(String[] args) throws IOException {
        List<Tag> tags = new ArrayList<>();
        Tag tag = new Tag();
        tag.setName("spring");
        tags.add(tag);
        Map<String, String> urls = searchBlogs(tags);
        for (Map.Entry<String, String> entry : urls.entrySet()) {
            System.out.println(entry.getKey() + " " + entry.getValue());
        }
    }

    /**
     * Builds a Jsoup connection for {@code url + keyword} routed through the given
     * HTTP proxy, with browser-like headers to reduce the chance of being blocked.
     *
     * @param url     search-page URL prefix (keyword is appended verbatim)
     * @param ip      proxy host
     * @param port    proxy port
     * @param keyword search keyword appended to the URL
     * @return a configured, not-yet-executed connection
     */
    public static Connection getJsoupConnection(String url, String ip, int port, String keyword) {
        Map<String, String> header = new HashMap<>();
        header.put("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
        header.put("Accept-Language", "zh-CN,zh;q=0.8");
        header.put("Cache-Control", "max-age=0");
        header.put("Connection", "keep-alive");
        // NOTE(review): this Host header names xicidaili.com while the request targets
        // oschina.net — looks copied from proxy-list crawling code; confirm it is intended.
        header.put("Host", "www.xicidaili.com");
        header.put("If-None-Match", "W/\"f9ed210d1bd9f787ddddffceef37c9bc\"");
        header.put("User-Agent", "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0");
        header.put("Upgrade-Insecure-Requests", "1");

        String userAgent = "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko";
        return Jsoup.connect(url + keyword)
                .proxy(ip, port)
                .userAgent(userAgent)
                .headers(header)
                .referrer("https://www.baidu.com/");
    }

}
