package alp.starcode.sanping.crawler.impl;

import alp.starcode.sanping.common.BizConstant;
import alp.starcode.sanping.framework.database.mariadb.mybatis.entity.Information;
import cn.hutool.http.HttpUtil;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.time.DateUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.scheduler.QueueScheduler;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;

/**
 * Crawler for announcements published by the Ministry of Industry and
 * Information Technology of China (MIIT, www.miit.gov.cn).
 *
 * <p>{@link #run()} fetches the column index via its JSON API, collects the
 * article links, then drives a WebMagic {@link Spider} that parses each
 * article page and appends an {@link Information} record to
 * {@link BizConstant#SP_INFORMATION_LIST}.
 *
 * @author wujie
 * @date 2024/3/13 16:10
 */
public class CnGxbServiceImpl implements PageProcessor {

    // Column index API URL; the response JSON carries an HTML fragment (data.html) with the article links.
    static String mainUrl = "https://www.miit.gov.cn/api-gateway/jpaas-publish-server/front/page/build/unit?webId=8d828e408d90447786ddbe128d495e9e&pageId=f028c5e497004ea8bea78ce1fad64370&parseType=buildstatic&pageType=column&tagId=%E5%BD%93%E5%89%8D%E6%A0%8F%E7%9B%AE_list&tplSetId=209741b2109044b5b7695700b2bec37e&paramJson={%22pageNo%22:1,%22pageSize%22:%22100%22}";
    // Site domain, used to absolutize site-relative article/resource URLs.
    static String domain = "https://www.miit.gov.cn";
    // Article page URLs collected for the current run; reset at the start of run().
    static List<String> pageUrlList = new ArrayList<>();
    // Index of the next URL to schedule. Mutated from spider worker threads,
    // so every access in process() is guarded by the class lock.
    static int count = 0;

    private Site site;

    public CnGxbServiceImpl() {
        // Crawler settings (user agent, retry count, politeness delay) come from shared constants.
        site = Site.me()
                .setUserAgent(BizConstant.SP_USER_AGENT)
                .setRetryTimes(BizConstant.SP_RETRY_TIMES)
                .setSleepTime(BizConstant.SP_SLEEP_TIME);
    }

    @Override
    public void process(Page page) {
        // Extract the article content from the current page.
        addTargetRequest(page);
        // Chain the next article URL, if any. The spider runs with 5 worker
        // threads, so an unguarded `count++` could lose increments or
        // schedule the same URL twice; read-modify-write under the class lock.
        String next = null;
        synchronized (CnGxbServiceImpl.class) {
            count++;
            if (count < pageUrlList.size()) {
                next = pageUrlList.get(count);
            }
        }
        if (next != null) {
            page.addTargetRequest(next);
        }
    }

    /**
     * Parses one article page and appends an {@link Information} record to
     * {@link BizConstant#SP_INFORMATION_LIST}.
     *
     * <p>Pages that do not match the expected article layout (missing title,
     * timestamp, source, or body) are skipped instead of aborting the whole
     * crawl with a NullPointerException.
     *
     * @param page the downloaded article page
     * @throws RuntimeException if the publish timestamp cannot be parsed
     */
    public void addTargetRequest(Page page) {
        String title = page.getHtml().xpath("//h1/text()").get();
        String publishTime = page.getHtml().xpath("//span[@id='con_time']/text()").get();
        String dataSource = page.getHtml().xpath("//div[@class='cinfo center']/span[2]/text()").get();
        String content = page.getHtml().xpath("//div[@id='con_con']/html()").get();
        // Guard against off-layout pages before dereferencing any field.
        if (title == null || publishTime == null || dataSource == null || content == null) {
            return;
        }
        // Strip the Chinese field labels ("发布时间：" = publish time, "来源：" = source).
        publishTime = publishTime.replace("发布时间：", "");
        dataSource = dataSource.replace("来源：", "");
        // Absolutize site-relative image/resource URLs inside the article body.
        content = content.replaceAll("src=\"/gyhxxhb", "src=\"" + domain + "/gyhxxhb");

        Information information = new Information();
        information.setId(UUID.randomUUID().toString());
        information.setContent(content);
        information.setCreateUserName(BizConstant.SP_CREATE_BY);
        information.setDataSource(dataSource);
        information.setInfoType("通告");
        information.setIndustryType("消费品");
        try {
            Date date = DateUtils.parseDate(publishTime, "yyyy-MM-dd HH:mm");
            information.setPublishTime(date.getTime());
        } catch (ParseException e) {
            // Include the offending value and preserve the cause so the bad
            // timestamp format is traceable from the stack trace.
            throw new RuntimeException("Unparseable publish time: " + publishTime, e);
        }
        information.setRegion("全国");
        information.setTitle(title);
        BizConstant.SP_INFORMATION_LIST.add(information);
    }

    @Override
    public Site getSite() {
        return site;
    }

    /**
     * Entry point: fetches the column index, collects article URLs, and
     * crawls each article with a 5-thread spider.
     */
    public static void run() {
        // Reset ALL shared state, not just the result list — otherwise a
        // second invocation re-crawls stale URLs and resumes from a leftover
        // index.
        BizConstant.SP_INFORMATION_LIST = new ArrayList<>();
        pageUrlList = new ArrayList<>();
        count = 0;

        // The index endpoint returns JSON whose data.html field carries the link-list markup.
        String result = HttpUtil.get(mainUrl);
        JSONObject resultJson = JSONObject.parseObject(result);
        String htmlStr = resultJson.getJSONObject("data").getString("html");

        // Extract the article links from the HTML fragment with Jsoup.
        Document doc = Jsoup.parse(htmlStr);
        Elements links = doc.select("a");
        for (Element link : links) {
            String url = link.attr("href");
            // Skip anchors and javascript: pseudo-links — they are not crawlable pages.
            if (url == null || url.isEmpty() || url.startsWith("#") || url.startsWith("javascript:")) {
                continue;
            }
            if (url.startsWith("/gyhxxhb")) {
                url = domain + url;
            }
            pageUrlList.add(url);
        }

        if (!pageUrlList.isEmpty()) {
            // Seed the spider with the first URL; subsequent URLs are chained
            // from process() one at a time.
            Spider spider = Spider.create(new CnGxbServiceImpl())
                    .addUrl(pageUrlList.get(count))
                    .thread(5)
                    .setScheduler(new QueueScheduler());

            spider.run();
        }
    }

    public static void main(String[] args) {
        CnGxbServiceImpl.run();
    }

}
