package com.me.spider;

import org.apache.commons.lang3.StringUtils;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.downloader.HttpClientDownloader;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.SimpleProxyProvider;
import us.codecraft.webmagic.selector.Html;
import us.codecraft.webmagic.selector.Selectable;

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

/**
 * 爬虫：http://www.cool18.com/bbs4/index.php（酷18）

 const aTags = document.querySelectorAll('#content-section > pre > font:nth-child(2) > b > font > a');
 let result = '';
 const startIndex = Math.max(aTags.length - 147, 0);
 // 从后向前遍历a标签
 for (let i = aTags.length - 1; i >= startIndex; i--) {
 const url = aTags[i].href;
 //const url = aTags[i].innerText;
 // 将链接添加到结果字符串，每个元素后增加换行符
 result += `"${url}",\n`;
 }

 console.log(result);
 */
public class WebMagicCool18 implements PageProcessor {

    /**
     * Crawls each hard-coded cool18 thread URL with a fresh single-threaded
     * spider and appends the extracted title + body text to a UTF-8 text file.
     */
    public static void main(String[] args) {
        String[] urls = {
                "http://www.cool18.com/bbs4/index.php?app=forum&act=threadview&tid=13993105",
        };

        String outputFilePath = "/crawl_results.txt";
        // try-with-resources guarantees the writer is flushed/closed even if a crawl fails
        // (replaces the hand-rolled finally block, which is equivalent but error-prone).
        try (BufferedWriter writer = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(outputFilePath, true), StandardCharsets.UTF_8))) {
            HttpClientDownloader downloader = new HttpClientDownloader();
            // NOTE(review): proxy host/port are hard-coded — confirm they match the deployment environment.
            downloader.setProxyProvider(SimpleProxyProvider.from(new Proxy("192.168.0.103", 8888)));
            for (int i = 0; i < urls.length; i++) {
                System.out.println("正在爬取第 " + (i + 1) + " 个链接: " + urls[i]);

                // One single-threaded spider per URL; run() blocks until the page is processed.
                Spider.create(new WebMagicCool18(writer))
                        .setDownloader(downloader)
                        .addUrl(urls[i])
                        .thread(1)
                        .run();
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /** Shared output writer; owned and closed by {@link #main(String[])}. */
    private final BufferedWriter writer;

    public WebMagicCool18(BufferedWriter writer) {
        this.writer = writer;
    }

    // Site config: 3-second timeout, 1 retry, 1-second delay between requests.
    // (Previous comment claimed "10 seconds" — the actual value is 3000 ms.)
    private final Site site = Site.me()
            .setTimeOut(3000)
            .setRetryTimes(1)
            .setSleepTime(1000);

    /**
     * Extracts the thread title and post body from the page and appends them
     * to the shared writer. Exits the JVM if either element is missing or the
     * write fails (crawler is a one-shot batch tool).
     */
    @Override
    public void process(Page page) {
        Html html = page.getHtml();
        StringBuilder sb = new StringBuilder();

        // Equivalent of document.querySelector('...').innerText for the title.
        String title = html.$("body > div.main-content > div.title-section > h1", "text").get();
        if (StringUtils.isBlank(title)) {
            System.err.println("错误：找不到 title");
            System.exit(1);
        }
        sb.append("\n\n\n\n").append(title);

        Selectable contentHtml = html.$("#content-section > pre");
        // BUG FIX: $() never returns null — it is get() that returns null on a
        // selector miss, which previously caused an NPE below instead of the
        // intended error path. Check the extracted value instead.
        String rawContent = contentHtml == null ? null : contentHtml.get();
        if (rawContent == null) {
            System.err.println("错误：找不到 text");
            System.exit(1);
        }
        String content = rawContent
                .replaceAll("<br\\s*/?>", "\n")  // <br> variants -> newline
                .replaceAll("</p>", "</p>\n")    // keep paragraph breaks
                .replaceAll("<[^>]+>", "")       // strip remaining HTML tags
                .replaceAll("\\n+", "\n")        // collapse repeated newlines
                .trim();
        sb.append("\n\n").append(content);
        System.out.println(title);
        try {
            writer.write(sb.toString());
            writer.flush();
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    @Override
    public Site getSite() {
        return site;
    }
}
