package org.example.demo;

import com.alibaba.fastjson.JSONObject;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.util.EntityUtils;
import org.example.utils.CookieUtils;
import org.example.utils.CrackJavaScript;
import org.example.utils.HttpUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Demo crawler for the CNVD vulnerability listing: pages through the list,
 * follows every detail link and prints each detail-table row as "label: value",
 * solving the site's JavaScript anti-crawler challenge (HTTP 521) on the way.
 *
 * @Author lyz
 * @Date 2025/3/26 11:19
 */
public class CrawlerDemo {
    /** Listing URL; {@code max=20} is the page size, the per-page offset is appended. */
    public static String requestUrl = "https://www.cnvd.org.cn/flaw/typeResult?typeId=27&max=20&offset=";

    /** Host prefix used to turn relative detail links into absolute URLs. */
    public static String prefixRequestUrl = "https://www.cnvd.org.cn";

    /** Number of listing pages to crawl (the site currently has about 169). */
    public static int totalPage = 1; //169

    /**
     * Offset step per page. Must match the {@code max} parameter in
     * {@link #requestUrl}: max=20 -&gt; 20, max=100 -&gt; 100.
     */
    public static int offset = 20;

    public static void main(String[] args) throws Exception {
        process();
    }

    /**
     * Crawls the paginated vulnerability list, follows every detail link found
     * in a page's first {@code <tbody>}, and prints each two-column row of the
     * detail table (CSS class {@code gg_detail}) as "label: value".
     *
     * @throws Exception if a request fails or the anti-crawler handshake
     *                   cannot be completed
     */
    public static void process() throws Exception {
        Map<String, String> headers = getHeader();
        int dataNumber = 0;
        for (int i = 0; i <= totalPage; i++) {
            // BUG FIX: the original did "offset = i * offset", which sets the
            // shared field to 0 on the first iteration (i == 0), so every page
            // request after that used offset=0. Compute a local value instead.
            int pageOffset = i * offset;
            String url = requestUrl + pageOffset;
            // Fetch one listing page.
            String pageHtml = sendRequest(url, headers);
            Document document = Jsoup.parse(pageHtml);
            Elements tbodyElements = document.getElementsByTag("tbody");
            if (tbodyElements.isEmpty()) {
                continue;
            }
            // Collect the detail-page links from the first table body.
            Element tbody = tbodyElements.get(0);
            System.out.println("获取A标签");
            List<String> detailsHref = new ArrayList<>();
            for (Element anchor : tbody.getElementsByTag("a")) {
                detailsHref.add(anchor.attr("href"));
            }
            for (String detailHref : detailsHref) {
                // Fetch and parse one detail page.
                String html = sendRequest(prefixRequestUrl + detailHref, headers);
                dataNumber++;
                document = Jsoup.parse(html);
                Elements tables = document.getElementsByClass("gg_detail");
                if (!tables.isEmpty()) {
                    // Print every "label: value" row of the detail table.
                    for (Element tr : tables.get(0).getElementsByTag("tr")) {
                        Elements tds = tr.getElementsByTag("td");
                        if (tds.size() == 2) {
                            System.out.println(tds.get(0).text() + ": " + tds.get(1).text());
                        }
                    }
                }
                // Throttle: pause 3 seconds between detail requests.
                Thread.sleep(3000);
            }
        }
        System.out.println("共爬取" + dataNumber + "条数据");
    }

    /**
     * Performs a GET request, solving the site's JavaScript anti-crawler
     * challenge: HTTP 521 responses carry obfuscated JavaScript that computes
     * the {@code __jsl_clearance} cookie. Retries until a 200 is returned.
     *
     * @param url     the URL to request
     * @param headers mutable request headers; the {@code Cookie} entry is
     *                updated in place while solving the challenge
     * @return the body of the first 200 response
     * @throws Exception if the request fails
     * @throws IllegalStateException on a status code other than 200 or 521
     */
    private static String sendRequest(String url, Map<String, String> headers) throws Exception {
        // Carries the __jsluid_s value across challenge rounds; the later
        // rounds do not re-send Set-Cookie, so the value from round one is kept.
        String jsluid = "";
        while (true) {
            HttpResponse response = HttpUtils.doGet(url, headers, null);
            // NOTE(review): no explicit charset — EntityUtils falls back to the
            // entity's declared/default charset; confirm UTF-8 is negotiated
            // for this site before relying on non-ASCII content.
            String responseBody = EntityUtils.toString(response.getEntity());
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == 200) {
                return responseBody;
            }
            if (statusCode != 521) {
                // BUG FIX: the original looped forever on any status other
                // than 200/521, hammering the server; fail fast instead.
                throw new IllegalStateException("Unexpected HTTP status " + statusCode + " for " + url);
            }
            String jslClearance;
            Header[] setCookieHeaders = response.getHeaders("Set-Cookie");
            if (setCookieHeaders != null && setCookieHeaders.length > 0) {
                // Round 1: the server hands out __jsluid_s via Set-Cookie plus
                // a JS snippet from which __jsl_clearance_s is computed.
                String cookie = setCookieHeaders[0].getValue();
                jsluid = "__jsluid_s=" + cookie.split(";")[0].split("=")[1];
                jslClearance = CrackJavaScript.getJslClearance(responseBody);
            } else if (responseBody.contains("document.cookie")) {
                // Round 2: a plain "document.cookie = ..." script.
                jslClearance = CrackJavaScript.getJslClearance(responseBody);
            } else {
                // Round 3: an obfuscated go({...}) payload, e.g.
                // <script>var _0x3c04=['wrMhGG0=',...]; the JSON argument must
                // be decoded to obtain the clearance cookie.
                String payload = responseBody.substring(responseBody.lastIndexOf("go(") + 3, responseBody.lastIndexOf(")"));
                JSONObject data = JSONObject.parseObject(payload);
                jslClearance = CookieUtils.go(data);
            }
            headers.put("Cookie", jsluid + ";" + jslClearance);
        }
    }

    /**
     * Builds the default request headers (browser-like User-Agent and an
     * initially empty {@code Cookie} slot that {@link #sendRequest} fills in).
     *
     * @return a mutable header map
     */
    private static Map<String, String> getHeader() {
        Map<String, String> headers = new HashMap<>();
        headers.put("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36");
        headers.put("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
        headers.put("Upgrade-Insecure-Requests", "1");
        headers.put("Cookie", "");
        return headers;
    }
}

