package com.zh.system.crawler;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.zh.system.crawler.Pipeline.BiliPipeline;
import com.zh.system.monitor.SystemMonitor;
import com.zh.web.domain.media.entity.Article;
import com.zh.web.domain.media.vo.OriginalArtVo;
import com.zh.web.domain.system.dto.MessageDto;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.amqp.core.AmqpTemplate;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.selector.Html;
import us.codecraft.webmagic.selector.Selectable;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Crawls Bilibili article list and detail pages with WebMagic, converts each
 * detail page into an {@link Article}, hands it to {@link BiliPipeline} (which
 * forwards via AMQP), and broadcasts progress to the system monitor.
 *
 * <p>NOTE(review): {@code nextPage}/{@code maxPage} are {@code static}, so all
 * instances share pagination state — acceptable for a singleton bean, but
 * verify if more than one crawler instance is ever created.
 *
 * @author nidie
 */
@Component
@Data
@Slf4j
public class BiliArticleCrawler implements PageProcessor, Crawler {

    /** Publish-time format on article pages, e.g. "2024年01月01日 12:00". Thread-safe; cached once. */
    private static final DateTimeFormatter PUBLISH_TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy年MM月dd日 HH:mm");

    /** WebMagic site settings: 3 retries per request, 100 ms delay between requests. */
    private Site site = Site.me()
            .setRetryTimes(3)
            .setSleepTime(100);
    private Spider spider;
    /** List-page entry URL; "&page=N" is appended for pagination. */
    private String startUrl;
    private Integer threadNum = 4;
    /** Articles published before this instant are skipped. */
    private LocalDateTime startTime = LocalDateTime.of(2024, 1, 1, 0, 0, 0);
    private final AmqpTemplate amqpTemplate;
    /** Next list page to request (shared across crawler threads). */
    private static final AtomicInteger nextPage = new AtomicInteger(1);
    /** Total list-page count, discovered once from the pagination buttons. */
    private static final AtomicInteger maxPage = new AtomicInteger(1);
    private MessageDto messageDto;

    /**
     * Routes each fetched page: pages containing {@code .article-item} nodes are
     * list pages, everything else is treated as an article detail page.
     */
    @Override
    public void process(Page page) {
        try {
            List<Selectable> articleItems = page.getHtml().css(".article-item").nodes();
            if (CollUtil.isNotEmpty(articleItems)) {
                processListPage(page, articleItems);
            } else {
                processDetailPage(page);
            }
        } catch (Exception e) {
            sendMonitorMessage("error", "爬取文章失败:" + e.getMessage());
            log.error("爬取文章失败", e);
        }
    }

    /** Queues every article link on a list page, then the next list page — or stops when done. */
    private void processListPage(Page page, List<Selectable> articleItems) {
        // Discover the total page count once, from the last pagination button.
        List<Selectable> pageNums = page.getHtml().xpath("//button[@class='pagination-btn']/text()").nodes();
        if (maxPage.get() == 1 && CollUtil.isNotEmpty(pageNums)) {
            String maxPageStr = pageNums.get(pageNums.size() - 1).get().trim();
            maxPage.set(Integer.parseInt(maxPageStr));
        }
        for (Selectable item : articleItems) {
            page.addTargetRequest(item.links().toString());
        }
        if (nextPage.get() < maxPage.get()) {
            page.addTargetRequest(startUrl + "&page=" + nextPage.incrementAndGet());
        } else {
            this.stop();
        }
    }

    /** Extracts an article from a detail page and publishes it; stale or empty pages are skipped. */
    private void processDetailPage(Page page) {
        Html html = page.getHtml();
        String time = html.xpath("//span[@class='publish-text']/text()").get();
        if (StrUtil.isEmpty(time)) {
            // Not an article page (or the layout changed): skip instead of NPE-ing into the catch-all.
            page.setSkip(true);
            return;
        }
        LocalDateTime dateTime = LocalDateTime.parse(time, PUBLISH_TIME_FORMAT);
        if (startTime != null && dateTime.isBefore(startTime)) {
            return; // Older than the configured cutoff.
        }
        String title = html.xpath("//h1[@class='title']/text()").get();
        String content = html.xpath("//div[@id='read-article-holder']").get();
        String author = html.xpath("//meta[@name='author']").get();
        String source = page.getUrl().get();
        author = Optional.ofNullable(author)
                .map(item -> Jsoup.parse(item).select("meta[name=author]").attr("content"))
                .orElse("无");
        content = rewriteLazyImages(content);
        // BUG FIX: the original called setSkip(true) here but fell through and still
        // built the article, stored it and broadcast a success message.
        if (StrUtil.isEmpty(content)) {
            page.setSkip(true);
            return;
        }
        OriginalArtVo originalVo = new OriginalArtVo();
        originalVo.setTitle(title);
        originalVo.setAuthor(author);
        originalVo.setCreateTime(dateTime);
        originalVo.setOriginalUrl(source);
        Article article = new Article();
        article.setTitle(title);
        article.setAuthor(author);
        article.setContent(content);
        article.setSource(source);
        article.setCreateTime(dateTime);
        article.setExtendContent(JSON.toJSONString(originalVo));
        page.putField("article", article);
        sendMonitorMessage("info", "爬取文章成功:" + article.getTitle());
    }

    /**
     * Converts lazy-loaded {@code data-src} image attributes into real,
     * protocol-qualified {@code src} attributes so the stored HTML renders.
     * Returns the input unchanged when it is null or contains no img tag.
     */
    private String rewriteLazyImages(String content) {
        return Optional.ofNullable(content)
                .filter(item -> item.contains("img"))
                .map(item -> {
                    Document contentDoc = Jsoup.parse(item);
                    for (Element img : contentDoc.select("img[data-src]")) {
                        img.attr("src", "https:" + img.attr("data-src"));
                        img.removeAttr("data-src");
                        // Was System.out.println — use the logger instead.
                        log.debug("rewrote lazy image: {}", img.outerHtml());
                    }
                    return contentDoc.html();
                })
                .orElse(content);
    }

    /** Builds a monitor message, remembers it in {@code messageDto} and broadcasts it to all clients. */
    private void sendMonitorMessage(String type, String content) {
        messageDto = MessageDto.builder()
                .sendTime(LocalDateTime.now())
                .type(type)
                .content(content)
                .id(System.currentTimeMillis())
                .build();
        SystemMonitor.sendMessageToAll(messageDto);
    }

    @Override
    public Site getSite() {
        return site;
    }

    /** Builds the WebMagic spider around this processor and the AMQP pipeline. */
    public void createSpider() {
        spider = Spider.create(this)
                .addUrl(startUrl)
                .addPipeline(new BiliPipeline(amqpTemplate))
                .thread(threadNum);
    }

    /**
     * Starts the crawler asynchronously; a no-op (with a warning) when already running.
     *
     * @return a human-readable status message
     */
    public String start() {
        try {
            if (spider == null) {
                createSpider();
            }
            if (Spider.Status.Running.equals(spider.getStatus())) {
                log.warn("爬虫任务启动成功,请勿重复启动");
                sendMonitorMessage("warn", "爬虫任务启动成功,请勿重复启动");
                return "爬虫任务启动成功,请勿重复启动";
            }
            spider.runAsync();
            // BUG FIX: the original built this success message but never broadcast it.
            sendMonitorMessage("info", "爬虫任务启动成功");
            log.info("爬虫任务启动成功");
            return "爬虫任务启动成功";
        } catch (Exception e) {
            // BUG FIX: the original built this error message but never broadcast it.
            sendMonitorMessage("error", "爬虫任务启动失败");
            log.error("爬虫任务启动失败", e);
            return "爬虫任务启动失败";
        }
    }

    /**
     * Stops the crawler and resets the shared pagination state; safe to call repeatedly.
     *
     * @return a human-readable status message
     */
    public synchronized String stop() {
        try {
            if (spider == null) {
                log.warn("爬虫任务未启动");
                sendMonitorMessage("warn", "爬虫任务未启动");
                return "爬虫任务未启动";
            }
            if (!Spider.Status.Stopped.equals(spider.getStatus())) {
                spider.stop();
                this.reset();
                log.info("爬虫任务成功停止");
                sendMonitorMessage("info", "爬虫任务成功停止");
                return "爬虫任务成功停止";
            }
            log.warn("爬虫任务成功停止,请勿重复操作");
            sendMonitorMessage("warn", "爬虫任务成功停止,请勿重复操作");
            return "爬虫任务成功停止,请勿重复操作";
        } catch (Exception e) {
            log.error("爬虫任务停止失败", e);
            sendMonitorMessage("error", "爬虫任务停止失败");
            return "爬虫任务停止失败";
        }
    }

    /**
     * Stops the spider if it is still running and restores the crawler to its
     * pristine state (page counters, URL, thread count, spider reference).
     *
     * <p>BUG FIX: the original wrapped this in a {@code maxRetry} while-loop that
     * could only ever execute once — the first pass nulls {@code spider}, which
     * ends the loop — so the dead retry logic is removed.
     */
    @Override
    public synchronized void reset() {
        if (spider == null) {
            return;
        }
        String uuid = spider.getUUID();
        if (Spider.Status.Running.equals(spider.getStatus())) {
            spider.stop();
        }
        nextPage.set(1);
        maxPage.set(1);
        this.setStartUrl(null);
        this.setThreadNum(4);
        this.setSpider(null);
        log.info("重置爬虫 {} 成功.", uuid);
    }
}
