package com.chance.cc.crawler.development.scripts.autohome.carSeriesArticle;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;

public class AutoHomeCarSeriesArticleCrawlerScript extends CrawlerCommonScript {
    private Logger log = LoggerFactory.getLogger(AutoHomeCarSeriesArticleCrawlerScript.class);// logger
    private static final String DOMAIN = "autohome";
    private static final String SITE = "carSeriesArticle";
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";

    //    e.g. https://car.autohome.com.cn/javascript/NewSpecCompare.js
    private static final String HomeRegulars = "https://car.autohome.com.cn/javascript/NewSpecCompare.js";// dynamic spec JS (car-series id source)
    //    e.g. https://www.autohome.com.cn/135
    private static final String CarSeriesRegulars = "https://www.autohome.com.cn/\\d*";// car-series page url
    //    e.g. https://www.autohome.com.cn/135/0/0-0-1-0/
    private static final String ArticleRegulars = "https://www.autohome.com.cn/\\d*/0/0-0-\\d*-0/";// article-list page url
    //    e.g. https://www.autohome.com.cn/tech/202108/1189443-all.html
    //    e.g. https://www.autohome.com.cn/fastmessage/detail/1194256.html#pvareaid=6849223
    private static final String ContentRegulars = "https://www.autohome.com.cn/\\S*/\\d*/\\d*-all.html";// article detail url (full-text form)
    private static final String ContentTwoRegulars = "https://www.autohome.com.cn/fastmessage/detail/\\d*.html#pvareaid=\\d*";// article detail url (fastmessage form)
    //    e.g. https://www.autohome.com.cn/ExpertBlog/editor_7517.html#pvareaid=3311687
    private static final String AuthorRegulars = "https://www.autohome.com.cn/ExpertBlog/editor_\\d*.html";// author page url
    //    e.g. https://www.autohome.com.cn/ashx/editorblog/ajaxfollow.ashx?typeid=3&userid=15630263
    private static final String FollowRegulars = "https://www.autohome.com.cn/ashx/editorblog/ajaxfollow.ashx\\?typeid=3&userid=\\d*";// follower-count api url
    //    e.g. https://reply.autohome.com.cn/api/comments/show.json?count=50&page=1&id=1185889&appid=1
    private static final String CommentRegulars = "https://reply.autohome.com.cn/api/comments/show.json\\?count=50&page=\\d*&id=\\d*&appid=1";// comment api url
    //    e.g. https://i.autohome.com.cn/64774731
    private static final String UserRegulars = "https://i.autohome.com.cn/\\d*";// commenting-user page url

    /** @return the crawler domain identifier handled by this script. */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers every URL pattern this script knows how to parse,
     * in the same dispatch order used by {@code parseLinks}.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                HomeRegulars, CarSeriesRegulars, ArticleRegulars,
                ContentRegulars, ContentTwoRegulars, AuthorRegulars,
                FollowRegulars, CommentRegulars, UserRegulars,
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Accepts only request records whose business "site" tag matches this
     * script's site ({@code carSeriesArticle}).
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        String siteTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        return SITE.equals(siteTag);
    }

    /**
     * Pre-request hook: scans the support records and, when the keyword source
     * page is among them (its url contains "keys"), copies the series keywords
     * onto the main request record. Produces no extra records itself, so the
     * returned list is always empty.
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        for (CrawlerRequestRecord support : supportSourceRecords) {
            if (support.getHttpRequest().getUrl().contains("keys")) {
                this.initKeyWord(crawlerRequestRecord, support);
            }
        }
        return new ArrayList<>();
    }

    /**
     * Extracts the per-series keywords from the support page's JSON payload
     * and stores them on the request record under the "keys" custom tag.
     * Logs and returns without tagging when the payload's msg is not "success".
     */
    private void initKeyWord(CrawlerRequestRecord crawlerRequestRecord, CrawlerRequestRecord supportSourceRecord) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }
        List<String> keys = new ArrayList<>();
        for (String data : json.jsonPath($_type + ".content").all()) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            keys.add(jsonObject.getString("keyword"));
        }
        if (!keys.isEmpty()) { // only tag when at least one keyword was found
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("keys", keys);
        }
    }

    /**
     * Dispatches a downloaded page to the parser matching its URL pattern.
     * Failed downloads (non-200 or unsuccessful) that are not 404 are
     * re-queued via the request-again record and excluded from washing.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>(); // FIX: was raw ArrayList
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // Verify the page downloaded correctly before parsing.
        if (page.getStatusCode() != 200 || (!page.isDownloadSuccess())) {
            // FIX: original format string used a literal "{null}" so the URL
            // was never substituted; use a proper "{}" placeholder.
            log.error("download page url == {} error status is {}", requestUrl, page.getStatusCode());
            if (page.getStatusCode() != 404) { // anything but 404: schedule a retry
                this.requestAgainCrawlerRecord(parsedLinks, crawlerRecord);
                crawlerRecord.setNeedWashPage(false);
                return parsedLinks;
            }
        }
        if (requestUrl.matches(HomeRegulars)) {
            homeRequest(crawlerRecord, page, parsedLinks);// dynamic car-series list
        } else if (requestUrl.matches(CarSeriesRegulars)) {
            carSeriesRequest(crawlerRecord, page, parsedLinks);// car-series page
        } else if (requestUrl.matches(ArticleRegulars)) {
            articleRequest(crawlerRecord, page, parsedLinks);// article-list page
        } else if (requestUrl.matches(AuthorRegulars)) {
            authorRequest(crawlerRecord, page, parsedLinks);// author page
        } else if (requestUrl.matches(FollowRegulars)) {
            followsRequest(crawlerRecord, page, parsedLinks);// follower-count api
        } else if (requestUrl.matches(ContentRegulars) || requestUrl.matches(ContentTwoRegulars)) {
            contentRequest(crawlerRecord, page, parsedLinks);// article detail page
        }
        return parsedLinks;
    }

////    解析动态车系
//    private void homeRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
//        Map<String, String> carMap = new HashMap<>();
//        String rawText = page.getRawText();//获取解析url后的字符串
//        String json = rawText.substring(21, rawText.length() - 3);//获取json
//        List<String> number = new Json(json).jsonPath($_type + "..I").all();
//        List<String> carName = new Json(json).jsonPath($_type + "..N").all();
//        for (int i = 0; i < number.size(); i++) {
//            carMap.put(carName.get(i), number.get(i));
//        }
//        KVTag keys = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("keys");
//        List<String> name = (List<String>) keys.getVal();
//        ArrayList<String> list = new ArrayList<>();
//        for (int i = 0; i < name.size(); i++) {//
//            boolean discontinued = name.get(i).contains("停售");//直接进入停售页面
//            String nameNumber = carMap.get(name.get(i));//品牌转换为id数字
//            if (nameNumber == null) {
//                list.add(name.get(i));
//                log.error("没有这个车系（进口，停售等加上字段 例：开瑞K50EV(停售)）" + name.get(i));
//            } else {
//                if (discontinued) {
//                    String discontinuedUrl = "https://www.autohome.com.cn/" + nameNumber + "/sale.html";//得到车系页面
//                    CrawlerRequestRecord discontinuedRecord = CrawlerRequestRecord.builder()//解析停售
//                            .itemPageRequest(crawlerRecord)
//                            .recordKey(discontinuedUrl)
//                            .httpUrl(discontinuedUrl)
//                            .releaseTime(System.currentTimeMillis())
//                            .resultLabelTag(article)
//                            .copyBizTags()
//                            .build();
//                    parsedLinks.add(discontinuedRecord);
//                } else {
//                    String url = "https://www.autohome.com.cn/" + nameNumber;//得到车系页面
//                    CrawlerRequestRecord record = CrawlerRequestRecord.builder()
//                            .itemPageRequest(crawlerRecord)
//                            .httpUrl(url)
//                            .releaseTime(Long.valueOf(System.currentTimeMillis()))
//                            .copyBizTags()
//                            .build();
//                    parsedLinks.add(record);
//                }
//            }
//        }
//    }

    /**
     * Expands the "keys" biz tag (car-series ids collected by initKeyWord)
     * into one series-page request per id. Does nothing when the tag is absent.
     */
    private void homeRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        if (crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagListVal("keys") != null) {
            KVTag keys = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("keys");
            @SuppressWarnings("unchecked") // tag is written by initKeyWord as List<String>
            List<String> names = (List<String>) keys.getVal();
            for (String name : names) { // empty list simply produces no requests
                String url = "https://www.autohome.com.cn/" + name;// car-series page url
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(url)
                        .releaseTime(System.currentTimeMillis()) // FIX: dropped redundant Long.valueOf boxing
                        .copyBizTags()
                        .copyResultTags()
                        .copyRequestTags()
                        .build();
                parsedLinks.add(record);
            }
        }
    }

    // Parses a car-series page: extracts series and brand identity from the
    // breadcrumb, stores both as biz-tag maps, then queues the first
    // article-list page for the series.
    private void carSeriesRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String brandName = page.getHtml().xpath("//div[@class=\"container athm-crumb\"]/a[2]/text()").get();// brand name
        String brandUrl = "https:" + page.getHtml().xpath("//div[@class=\"container athm-crumb\"]/a[2]/@href").get();// brand url
        String brandId = Pattern.compile("[^0-9]").matcher(brandUrl).replaceAll("").trim();// brand id (all digits of brand url)
        String carSeriesUrl = page.getRequest().getUrl();// series url
        String carSeriesName = page.getHtml().xpath("//h1/a").get();// series name
        String carSeriesId = Pattern.compile("[^0-9]").matcher(carSeriesUrl).replaceAll("").trim();// series id (all digits of series url)
        HashMap<String, String> seriesMap = new HashMap<>();
        seriesMap.put(AutoVMCommonField.Field_Series_name, carSeriesName);// series name
        seriesMap.put(AutoVMCommonField.Field_Series_url, carSeriesUrl);// series url
        seriesMap.put(AutoVMCommonField.Field_Series_id, carSeriesId);// series id
        crawlerRecord.tagsCreator().bizTags().addCustomKV(AutoVMCommonField.Tag_Field_Series, seriesMap);// series tag
        HashMap<String, String> brandMap = new HashMap<>();
        brandMap.put(AutoVMCommonField.Field_Brand_name, brandName);// brand name
        brandMap.put(AutoVMCommonField.Field_Brand_url, brandUrl);// brand url
        brandMap.put(AutoVMCommonField.Field_Brand_id, brandId);// brand id
        crawlerRecord.tagsCreator().bizTags().addCustomKV(AutoVMCommonField.Tag_Field_Brand, brandMap);// brand tag
        // e.g. https://www.autohome.com.cn/135/0/0-0-1-0/#pvareaid=3454442
        String articleId = Pattern.compile("[^0-9]").matcher(carSeriesUrl).replaceAll("").trim();// digits of the series url (same as carSeriesId)
        String articleUrl = "https://www.autohome.com.cn/" + articleId + "/0/0-0-1-0/";// first article-list page url
        CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()// request for the article-list page
                .itemPageRequest(crawlerRecord)
                .recordKey(articleUrl)
                .httpUrl(articleUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        articleRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Tag_Site_Info, articleUrl);
        parsedLinks.add(articleRecord);
    }

    /**
     * Parses an article-list page. Queues the next list page (if any), then
     * for each article entry queues either the article detail request (no
     * author link) or the author-page request (author link present); both
     * carry the comment/view counts, article url, author name and publish
     * date as biz tags.
     */
    private void articleRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        // Next page of the list.
        String nextStr = page.getHtml().xpath("//a[./text()=\"下一页\"]/@href").get();
        if (StringUtils.isNotEmpty(nextStr)) { // FIX: was `nextStr != ""` (reference compare)
            String nextPageUrl = "https:" + nextStr;
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parsedLinks.add(turnPageRequest);
        }

        // One <li> node per article entry.
        List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"cont-info\"]/ul/li").nodes();
        for (Selectable node : nodes) {
            String comments = node.xpath(".//p[@class=\"name-tx\"]/span[4]/text()").get();// comment count
            String views = node.xpath(".//p[@class=\"name-tx\"]/span[3]/text()").get();// view count
            String authorName = node.xpath(".//p[@class=\"name-tx\"]/span[1]//text()").get();// author name
            if (authorName == null) {
                authorName = node.xpath(".//p[@class=\"name-tx\"]/span[1]/text()").get();// fallback xpath
            }
            String articleUrlStr = node.xpath("./div/a/@href").get();// relative article url
            String articleUrl = null;
            // FIX: original condition was `articleUrlStr != null || articleUrlStr != ""`,
            // which is always true — the fastmessage fallback branch was
            // unreachable, and a null href produced ".../null" urls and later
            // an NPE on split() outside any try/catch.
            if (StringUtils.isNotEmpty(articleUrlStr)) {
                articleUrl = "https://www.autohome.com.cn" + articleUrlStr;
            } else {
                // e.g. https://www.autohome.com.cn/fastmessage/detail/1194256.html#pvareaid=6849223
                String articleStr = node.xpath(".//p[.text()=\"阅读全文\"]/@href").get();
                if (articleStr != null) {
                    articleUrl = "https://www.autohome.com.cn" + articleStr;// fastmessage detail url
                }
            }

            if (articleUrl != null) {
                if (!articleUrl.matches(ContentTwoRegulars) && StringUtils.isNotEmpty(articleUrlStr)) {
                    // Rewrite the detail url into its "-all.html" full-text form.
                    String[] split = articleUrlStr.split("/");
                    String[] articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");// digit groups of the url
                    articleUrl = "https://www.autohome.com.cn/" + split[1] + "/" + articleId[0] + "/" + articleId[1] + "-all.html";
                }
                String pubTime = node.xpath(".//p[@class=\"name-tx\"]/span[2]/text()").get();// publish date (yyyy-MM-dd)
                if (StringUtils.isBlank(pubTime)) {
                    continue; // cannot schedule without a publish date
                }
                String authorUrl = node.xpath(".//p[@class=\"name-tx\"]/span[1]/a/@href").get();// author page url
                if (authorUrl == null) {// no author page: go straight to the article detail
                    try {
                        CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()
                                .itemPageRequest(crawlerRecord)
                                .recordKey(articleUrl)
                                .httpUrl(articleUrl)
                                .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                                .copyBizTags()
                                .copyResultTags()
                                .needWashed(true)
                                .build();
                        articleRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_I_Comments, comments);
                        articleRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_I_Views, views);
                        articleRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Urls, articleUrl);
                        articleRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Author, authorName);
                        articleRecord.tagsCreator().bizTags().addCustomKV("pubTime", pubTime);
                        parsedLinks.add(articleRecord);
                    } catch (Exception e) {
                        log.error(e.getMessage(), e);
                    }
                } else {// author page exists: visit it first, article url travels along as a tag
                    // FIX: removed a redundant second xpath query and an
                    // always-true `authorStr != ""` reference compare.
                    String fullAuthorUrl = "https:" + authorUrl;
                    try {
                        CrawlerRequestRecord authorRecord = CrawlerRequestRecord.builder()
                                .itemPageRequest(crawlerRecord)
                                .recordKey(fullAuthorUrl)
                                .httpUrl(fullAuthorUrl)
                                .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                                .copyResultTags()
                                .copyBizTags()
                                .needWashed(true)
                                .build();
                        authorRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_I_Comments, comments);
                        authorRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_I_Views, views);
                        authorRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Urls, articleUrl);
                        authorRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Author, authorName);
                        authorRecord.tagsCreator().bizTags().addCustomKV("pubTime", pubTime);
                        parsedLinks.add(authorRecord);
                    } catch (Exception e) {
                        log.error(e.getMessage(), e);
                    }
                }
            }
        }
    }

    /**
     * Parses an author (ExpertBlog) page: extracts the follow-api user id and
     * queues the follower-count api request with the referer header the api
     * requires.
     */
    private void authorRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        try {
            String id = page.getHtml().xpath("//a[@class=\"btn eblog-btn-follow follow-btn__default\"]/@data-user-follow").get();// follow-api user id
            // FIX: was `id != ""` — a reference compare that is true for any
            // non-interned string; use an empty-string check instead.
            if (StringUtils.isNotEmpty(id)) {
                // e.g. https://www.autohome.com.cn/ashx/editorblog/ajaxfollow.ashx?typeid=3&userid=15630263
                String followsUrl = "https://www.autohome.com.cn/ashx/editorblog/ajaxfollow.ashx?typeid=3&userid=" + id;// follower-count url
                CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()// request for the follower count
                        .itemPageRequest(crawlerRecord)
                        .recordKey(followsUrl)
                        .httpUrl(followsUrl)
                        .releaseTime(System.currentTimeMillis())
                        .httpHead("referer", "https://www.autohome.com.cn/ExpertBlog/editor_5270.html")
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(articleRecord);
            }
        } catch (Exception e) {
            log.error("页面可能不存在" + e.getMessage(), e);
        }
    }


    /**
     * Parses the follower-count api response, then re-queues the originating
     * article url (read from the biz tags) with the follower count attached.
     * Falls back to "0" when the api returned no result.
     */
    private void followsRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String follows = "0";
        Selectable selectable = page.getJson().jsonPath($_type + "..result");
        // FIX: original tested `selectable != null || "".equals(selectable)` —
        // the second clause compares a Selectable against a String and is
        // always false — then re-ran the json path. Also keep the "0" default
        // instead of overwriting it with null when the result is absent.
        if (selectable != null && selectable.get() != null) {
            follows = selectable.get();
        }
        String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);// article url
        if (articleUrl != null) {
            try {
                CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()// request the article detail page
                        .itemPageRequest(crawlerRecord)
                        .recordKey(articleUrl)
                        .httpUrl(articleUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .copyResultTags()
                        .needWashed(true)
                        .build();
                articleRecord.tagsCreator().bizTags().addCustomKV(AutoVMCommonField.Field_Author_Follows, follows);
                parsedLinks.add(articleRecord);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Parses an article detail page: when the record carries the expected
     * comment tag, fans out one comment-api request per 50-comment page,
     * propagating the article url and the comment-anchor url as biz tags and
     * copying the filter info from the "comment_record_filter_info" tag.
     *
     * @return the shared parsedLinks list with the comment requests appended
     */
    private List<CrawlerRequestRecord> contentRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String articleUrl = page.getRequest().getUrl();// article url
        String urlComment = "http:" + page.getHtml().xpath("//a[@id=\"reply-all-btn3\"]/@href").get();// comment-anchor url
        String articleId = null;
        if (articleUrl.matches(ContentRegulars)) {
            articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[1];// second digit group of the url
        }
        if (articleUrl.matches(ContentTwoRegulars)) {
            articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0];// first digit group of the url
        }
        // FIX: guard the comment-count parse — the tag may be missing or
        // non-numeric, and Integer.parseInt(null) throws; default to 0 so the
        // article simply gets no comment requests instead of crashing.
        int comments = 0;
        String commentsVal = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_I_Comments);
        if (StringUtils.isNotBlank(commentsVal)) {
            try {
                comments = Integer.parseInt(commentsVal.trim());
            } catch (NumberFormatException e) {
                log.error("invalid comment count [{}] for {}", commentsVal, articleUrl);
            }
        }
        int commentsPage = (comments + 50 - 1) / 50; // ceil(comments / 50) pages
        String commentTags = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("commentTags");
        if ("comment_Tags".equals(commentTags)) {
            if (commentsPage != 0) {
                KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
                for (int i = 1; i <= commentsPage; i++) {
                    // e.g. https://reply.autohome.com.cn/api/comments/show.json?count=50&page=1&id=1188992&appid=1
                    if (articleId != null) {
                        String commentUrl = "https://reply.autohome.com.cn/api/comments/show.json?count=50&page=" + i + "&id=" + articleId + "&appid=1";// comment api page i
                        try {
                            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()// request one comment page
                                    .itemPageRequest(crawlerRecord)
                                    .recordKey(commentUrl)
                                    .httpUrl(commentUrl)
                                    .notFilterRecord()
                                    .releaseTime(System.currentTimeMillis())
                                    .copyBizTags()
                                    .copyResultTags()
                                    .needWashed(true)
                                    .build();
                            commentRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Urls, articleUrl);
                            commentRecord.tagsCreator().bizTags().addCustomKV("urlComment", urlComment);
                            commentRecord.setFilter(filterInfoRecord.getFilter());
                            commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                            parsedLinks.add(commentRecord);
                        } catch (Exception e) {
                            log.error(e.getMessage(), e);
                        }
                    }
                }
            }
        }
        return parsedLinks;
    }

    /**
     * Routes a downloaded page to the wash routines matching its URL pattern
     * and the result data types tagged on the record.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        boolean isContentPage = url.matches(ContentRegulars) || url.matches(ContentTwoRegulars);
        if (isContentPage || url.matches(AuthorRegulars)) {// article interaction counts
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                crawlerDataList.addAll(articleInteractionWash(crawlerRecord, page));
            }
        }
        if (isContentPage) {// article body + article link record
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {
                crawlerDataList.addAll(articleWash(crawlerRecord, page));
                crawlerDataList.addAll(articleListWash(crawlerRecord, page));
            }
        }
        if (url.matches(CommentRegulars)) {// comment api response
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {
                crawlerDataList.addAll(commentWash(crawlerRecord, page));// comments
                crawlerDataList.addAll(replyWash(crawlerRecord, page));// replies to comments
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                crawlerDataList.addAll(commentInteractionWash(crawlerRecord, page));// comment interaction counts
            }
        }
        return crawlerDataList;
    }

    /**
     * Emits a lightweight "article link" CrawlerData record into the redis
     * pipeline for link tracking/de-duplication of a washed article page.
     * Clears the record's previously accumulated tags first.
     */
    public List<CrawlerData> articleListWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        try {
            crawlerRecord.getTags().clear();// drop previously accumulated tags
            String articleUrl = page.getRequest().getUrl();// article url
            String releaseTime = page.getHtml().xpath("//span[@class=\"time\"]").get().trim();// publish time text
            // FIX: parse the date once and reuse (was parsed twice).
            long releaseMillis = DateUtils.parseDate(releaseTime, "yyyy年MM月dd日 HH:mm").getTime();
            String[] articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");// digit groups of the url
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[0] + articleId[1]))
                    .resultLabelTag(article) // FIX: use the statically-imported constant instead of valueOf("article")
                    .url(articleUrl)
                    .releaseTime(releaseMillis)
                    .addContentKV("releaseTimeToLong", releaseMillis + "")
                    .addContentKV("articleUrl", articleUrl)
                    .addContentKV(AICCommonField.Field_Author, "文章")
                    .flowInPipelineTag("redis")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }


    // Washes the article interaction counts (views/comments, carried over as
    // biz tags by articleRequest) into a kafka-pipeline CrawlerData record,
    // parented to the article data id.
    private List<CrawlerData> articleInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        String comments = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_I_Comments);// comment count
        String views = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_I_Views);// view count
        String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);// article url
        String releaseTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("pubTime");// publish date (yyyy-MM-dd)
        // e.g. https://www.autohome.com.cn/tuning/202108/1188992.html
        String[] articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");// digit groups of the url
        try {

            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), interaction.enumVal(), articleId[0] + articleId[1]))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[0] + articleId[1]))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                    .url(articleUrl)
                    .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd").getTime())
                    .addContentKV(AICCommonField.Field_I_Views, views)
                    .addContentKV(AICCommonField.Field_I_Comments, comments)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }


    /**
     * Washes an article page into a single {@link CrawlerData} result.
     * <p>
     * Extracts title, author metadata (from the record's biz tags), release time,
     * topic tags, body text and images from the HTML. When the page carries no
     * {@code <img>} tags, falls back to the inline {@code var FastImgList=...}
     * JavaScript blob embedded in the raw page text.
     *
     * @param crawlerRecord request record carrying author name / follower count in its biz tags
     * @param page          the downloaded article page
     * @return a one-element list with the washed article, or an empty list if parsing failed
     */
    public List<CrawlerData> articleWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        try {
            String articleUrl = page.getRequest().getUrl(); // article url
            String title = page.getHtml().xpath("//h1/text()").get(); // headline
            if (title == null) {
                title = "无标题"; // placeholder for pages without a headline
            }
            String follows = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AutoVMCommonField.Field_Author_Follows); // follower count
            String authorName = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Author); // author name (comment was mislabeled as follower count)
            String adminUrl = page.getHtml().xpath("//a[@class=\"pic\"]/@href").get(); // author profile url
            if (adminUrl == null) {
                adminUrl = "没有id0"; // sentinel: the digit extraction below then yields author id "0"
            }
            Pattern nonDigit = Pattern.compile("[^0-9]"); // reused for both id extractions below
            String authorId = nonDigit.matcher(adminUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0]; // first digit run = author id
            String releaseTime = page.getHtml().xpath("//span[@class=\"time\"]").get().trim(); // publish-time text
            List<String> tag = page.getHtml().xpath("//div[@class=\"marks\"]/a").all(); // topic tags
            List<String> texts = page.getHtml().xpath("//div[@id=\"articleContent\"]//text()").all(); // body text fragments
            List<String> picture = page.getHtml().xpath("//div[@id=\"articleContent\"]//img/@src").all(); // image urls
            if (picture.isEmpty()) {
                // Fallback: images live in an inline JS variable in the raw page.
                // BUG FIX: String.split takes a REGEX; the delimiter contains
                // metacharacters ('$', '(', ')', '.') and could never match as a
                // pattern, so the tail of the JS leaked into pictureJson. Quote it
                // so it is matched literally.
                String pictureJson = page.getRawText()
                        .split("var FastImgList=")[1]
                        .split(Pattern.quote($_type + "('#pic').picShow"))[0]
                        .trim();
                picture = new Json(pictureJson).jsonPath($_type + "..img").all();
            }
            StringBuilder conents = new StringBuilder(); // join body text; StringBuilder: no concurrent access here
            for (String text : texts) {
                conents.append(text).append(" ");
            }
            StringBuilder pictures = new StringBuilder(); // join image urls
            for (String text : picture) {
                // NOTE(review): "\0x1" is NUL + literal "x1" — looks like it may
                // have been meant as "\u0001"; kept byte-identical, confirm downstream.
                pictures.append(text).append("\0x1");
            }
            StringBuilder tags = new StringBuilder(); // join topic tags
            for (String text : tag) {
                tags.append(text).append(" ");
            }

            // Digit runs of the url form the article id; assumes at least two runs
            // (fewer throws here and is logged by the catch below).
            String[] articleId = nonDigit.matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[0] + articleId[1]))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("article"))
                    .url(articleUrl)
                    .releaseTime(DateUtils.parseDate(releaseTime, "yyyy年MM月dd日 HH:mm").getTime())
                    .addContentKV(AICCommonField.Field_Content, conents.toString().trim())
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .addContentKV(AICCommonField.Field_Images, pictures.toString().trim())
                    .addContentKV(AutoVMCommonField.Tag_Field_Topic_Type, tags.toString().trim())
                    .addContentKV(AutoVMCommonField.Field_Author_Follows, follows)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes the JSON comment list of an article page into {@link CrawlerData} items.
     * <p>
     * A page without a JSON body yields an empty list. A failure on one comment
     * is logged and does not abort the remaining comments.
     *
     * @param crawlerRecord request record whose biz tags carry the parent article url
     * @param page          the downloaded comment-list JSON page
     * @return one CrawlerData per parseable comment (possibly empty)
     */
    private Collection<? extends CrawlerData> commentWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        if (page.getJson() == null) {
            return crawlerArticleDataList;
        }
        // Loop-invariant: the parent article url/id come from the record's biz
        // tags, not from individual nodes — compute them once, not per comment.
        String articleUrl;
        String[] articleId;
        try {
            articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls); // article url
            articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" "); // digit runs of the url
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            return crawlerArticleDataList;
        }
        List<Selectable> nodes = page.getJson().jsonPath($_type + ".commentlist[*]").nodes();
        for (Selectable node : nodes) {
            try {
                Json nodeJson = new Json(node.get()); // parse each node once instead of once per field
                String content = nodeJson.jsonPath(".RContent").get(); // comment text
                String timeStr = nodeJson.jsonPath(".RReplyDate").get(); // reply time; its digit run is used as epoch
                Long time = Long.valueOf(Pattern.compile("[^0-9]").matcher(timeStr).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0]);
                String user = nodeJson.jsonPath(".RMemberName").get(); // commenter name
                String userId = nodeJson.jsonPath(".RMemberId").get(); // commenter id
                String commentId = nodeJson.jsonPath(".ReplyId").get(); // comment id

                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[0] + articleId[1]))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                        .url(articleUrl)
                        .releaseTime(time)
                        .addContentKV(AICCommonField.Field_Content, content)
                        .addContentKV(AICCommonField.Field_Author, user)
                        .addContentKV(AICCommonField.Field_Author_Id, userId)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes per-comment interaction metrics (up-vote counts) into {@link CrawlerData} items.
     * <p>
     * Guards against a missing JSON body the same way {@code commentWash} does;
     * previously a null {@code page.getJson()} threw an NPE out of this method.
     *
     * @param crawlerRecord request record whose biz tags carry the parent article url
     * @param page          the downloaded comment-list JSON page
     * @return one CrawlerData per parseable comment (possibly empty)
     */
    private Collection<? extends CrawlerData> commentInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        if (page.getJson() == null) {
            // ROBUSTNESS FIX: null guard added for consistency with commentWash.
            return crawlerArticleDataList;
        }
        List<Selectable> nodes = page.getJson().jsonPath($_type + ".commentlist[*]").nodes(); // all comments
        for (Selectable node : nodes) {
            try {
                Json nodeJson = new Json(node.get()); // parse each node once instead of once per field
                String like = nodeJson.jsonPath(".RUp").get(); // up-vote count
                String timeStr = nodeJson.jsonPath(".RReplyDate").get(); // reply time; its digit run is used as epoch
                Long time = Long.valueOf(Pattern.compile("[^0-9]").matcher(timeStr).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0]);
                String userId = nodeJson.jsonPath(".RMemberId").get(); // commenter id
                String commentId = nodeJson.jsonPath(".ReplyId").get(); // comment id
                String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls); // article url

                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), interaction.enumVal(), commentId))
                        // NOTE(review): parentId is built from the MEMBER id, while
                        // commentWash keys comments by ReplyId — confirm this is intended.
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), userId))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                        .url(articleUrl)
                        .releaseTime(time)
                        .addContentKV(AICCommonField.Field_I_Likes, like)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes quoted replies (comment-on-comment) into {@link CrawlerData} items.
     * <p>
     * Only nodes carrying a {@code Quote} (original comment) are emitted; the
     * content/author/author-id fields each combine the reply with the quoted
     * original. Adds the same null-JSON guard as {@code commentWash}.
     *
     * @param crawlerRecord request record whose biz tags carry the parent article url
     * @param page          the downloaded comment-list JSON page
     * @return one CrawlerData per parseable reply (possibly empty)
     */
    private Collection<? extends CrawlerData> replyWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        if (page.getJson() == null) {
            // ROBUSTNESS FIX: null guard added for consistency with commentWash.
            return crawlerArticleDataList;
        }
        List<Selectable> nodes = page.getJson().jsonPath($_type + ".commentlist[*]").nodes();
        for (Selectable node : nodes) {
            Json nodeJson = new Json(node.get()); // parse each node once instead of once per field
            String userId2 = nodeJson.jsonPath(".Quote.RMemberId").get(); // quoted author id; null = plain comment, skip
            if (userId2 != null) {
                try {
                    String content1 = nodeJson.jsonPath(".RContent").get(); // reply text
                    String timeStr = nodeJson.jsonPath(".RReplyDate").get(); // reply time; its digit run is used as epoch
                    Long time = Long.valueOf(Pattern.compile("[^0-9]").matcher(timeStr).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0]);
                    String user1 = nodeJson.jsonPath(".RMemberName").get(); // replying user
                    String userId1 = nodeJson.jsonPath(".RMemberId").get(); // replying user id
                    String commentId1 = nodeJson.jsonPath(".ReplyId").get(); // reply id
                    String commentId2 = nodeJson.jsonPath(".Quote.ReplyId").get(); // quoted comment id
                    String commentId = commentId1 + commentId2; // composite id: reply + original
                    String content2 = nodeJson.jsonPath(".Quote.RContent").get(); // quoted comment text
                    String user2 = nodeJson.jsonPath(".Quote.RMemberName").get(); // quoted comment author
                    // "回复：" / "原评论：" prefixes are part of the emitted data format.
                    String content = "回复：" + content1 + "原评论：" + content2;
                    String user = "回复：" + user1 + "原评论：" + user2;
                    String userId = "回复：" + userId1 + "原评论：" + userId2;
                    String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls); // article url
                    String[] articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" "); // digit runs of the url

                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[0] + articleId[1]))
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                            .url(articleUrl)
                            .releaseTime(time)
                            .addContentKV(AICCommonField.Field_Content, content)
                            .addContentKV(AICCommonField.Field_Author, user)
                            .addContentKV(AICCommonField.Field_Author_Id, userId)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    crawlerArticleDataList.add(crawlerData);
                } catch (Exception e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
        return crawlerArticleDataList;
    }

    // Required override of the CrawlerCommonScript lifecycle hook.
    // No-op: this script performs no work after a record executes.
    @Override
    public void afterExecute(CrawlerRecordContext context) {
    }

    /**
     * Re-enqueues a failed download by rebuilding the request record with the
     * same URL and adding it to {@code crawlerRequestRecords}. The retry count is
     * carried in the {@code REQUEST_AGAIN_TAG} biz tag; after 30 attempts the
     * record is dropped with an error log.
     *
     * @param crawlerRequestRecords output list the rebuilt record is appended to
     * @param crawlerRecord         the record whose download failed
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags bizTags = crawlerRecord.tagsCreator().bizTags();
        int retryCount = 0;
        if (bizTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            retryCount = (int) bizTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (retryCount >= 30) {
                log.error("autohome download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return; // give up: retry budget exhausted
            }
        }

        String retryUrl = crawlerRecord.getHttpRequest().getUrl();
        // Absence of the "turn_page_item_request" label means this was a
        // turn-page request; otherwise rebuild it as an item-page request.
        boolean turnPage = crawlerRecord.tagsCreator().requestTags().getCategoryTag()
                .getLabelTags().get("turn_page_item_request") == null;
        CrawlerRequestRecord retryRecord;
        if (turnPage) {
            retryRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(retryUrl)
                    .recordKey(crawlerRecord.getRecordKey() + retryCount)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            retryRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(retryUrl)
                    .recordKey(crawlerRecord.getRecordKey() + retryCount)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Carry the processing flags and HTTP context of the failed record over,
        // then bump the retry counter on the new record before queueing it.
        retryRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        retryRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        retryRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        retryRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        retryRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++retryCount);
        crawlerRequestRecords.add(retryRecord);
    }


}
