package com.chance.cc.crawler.development.scripts.wangyiCar;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;

public class WangYiNewsArticleCrawlerScript extends CrawlerCommonScript {
    // SLF4J convention: one static final logger per class (was a per-instance field).
    private static final Logger log = LoggerFactory.getLogger(WangYiNewsArticleCrawlerScript.class);
    private static final String DOMAIN = "wangyi";
    private static final String SITE = "newsArticle";
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";

    // Seed script listing car-series keywords, e.g.:
    //   https://car.autohome.com.cn/javascript/NewSpecCompare.js?20131010
    private static final String HomeRegulars = "https://car.autohome.com.cn/javascript/NewSpecCompare.js\\?20131010";

    // Baidu site search restricted to news.163.com (article list page), e.g.:
    //   https://www.baidu.com/s?si=news.163.com&ct=2097152&wd=喜力啤酒&pn=0
    private static final String NewsRegulars = "https://www.baidu.com/s\\?si=news.163.com&ct=2097152&wd=\\S*&pn=\\d*";

    // Baidu redirect link from a result page to the real article, e.g.:
    //   https://www.baidu.com/link?url=...&wd=&eqid=...
    // NOTE(review): the pattern is http:// while the sample above is https:// — confirm the
    // hrefs extracted from result pages really use http, otherwise interactionRequest never fires.
    private static final String ArticleRegulars = "http://www.baidu.com/link\\?url=\\S*";

    // Article detail pages on 163.com subdomains, e.g.:
    //   https://liaoning.news.163.com/21/0707/13/GEAAURRJ04229BRN.html
    //   https://www.163.com/news/article/6QBTR5OD00014JB5.html
    //   https://fj.news.163.com/21/0430/11/G8R2J5C804419AC3.html
    //   http://news.163.com/14/0616/09/9UROQ4NG00014Q4P.html#from=relevant#xwwzy_35_bottomnewskwd
    private static final String ContentThreeRegulars = "https://\\S*.163.com/\\S*/\\S*/\\S*.html\\S*";

    // More article detail page shapes, e.g.:
    //   http://dy.163.com/article/GIR5BBDG05278022.html#wzlbpc
    //   https://auto.163.com/21/0401/16/G6GRT36K0008856R.html#wzlbpc
    private static final String ContentRegulars = "http://dy.163.com/article/\\S*.html\\S*";
    private static final String ContentTwoRegulars = "https://auto.163.com/\\d*/\\d*/\\d*/\\S*.html\\S*";

    // Comment-API thread metadata (interaction counts), e.g.:
    //   https://comment.api.163.com/api/v1/products/a2869674571f77b5a0867c3d71db5856/threads/GKVE5QA00008856V?ibc=jssdk
    private static final String ContentInteractionRegulars = "https://comment.api.163.com/api/v1/products/a2869674571f77b5a0867c3d71db5856/threads/\\S*\\?ibc=jssdk";//文章互动量详情链接

    // Comment list pages, 30 comments per page, e.g.:
    //   https://comment.api.163.com/api/v1/products/.../threads/GCRVBIJP0008856R/comments/newList?limit=30&offset=70
    private static final String CommentRegulars = "https://comment.api.163.com/api/v1/products/\\S*/threads/\\S*/comments/newList\\?limit=30&offset=\\d*";//评论链接

    /** Crawler domain key used to route request records to this script. */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /** Registers every URL pattern this script knows how to parse and wash. */
    @Override
    public void initUrlRegulars() {
        String[] patterns = {
                HomeRegulars,
                NewsRegulars,
                ArticleRegulars,
                ContentRegulars,
                ContentTwoRegulars,
                ContentThreeRegulars,
                CommentRegulars,
                ContentInteractionRegulars,
        };
        for (String pattern : patterns) {
            addUrlRegular(pattern);
        }
    }

    /** Accepts only records whose "site" business tag matches this script's site. */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return SITE.equals(
                crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site"));
    }

    /**
     * Pre-request hook: pulls the car-series keyword list out of any support record
     * whose URL contains "keys" and stores it on the main request's business tags.
     * Always returns an empty list — no extra links are scheduled here.
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> result = new ArrayList<>();
        for (CrawlerRequestRecord support : supportSourceRecords) {
            if (support.getHttpRequest().getUrl().contains("keys")) {
                this.initKeyWord(crawlerRequestRecord, support);
            }
        }
        return result;
    }

    /**
     * Extracts the car-series keywords from the support page's JSON payload and attaches
     * them to the main request as the "keys" business tag. Bails out with an error log
     * when the page did not report a "success" message.
     */
    private void initKeyWord(CrawlerRequestRecord crawlerRequestRecord, CrawlerRequestRecord supportSourceRecord) {
        HttpPage page = supportSourceRecord.getInternalDownloadPage();
        Json json = page.getJson();
        if (!"success".equals(json.jsonPath($_type + ".msg").get())) {
            log.error("keyword page [{}] download error!", page.getRequest().getUrl());
            return;
        }
        List<String> keywords = new ArrayList<>();
        for (String entry : json.jsonPath($_type + ".content").all()) {
            keywords.add(JSONObject.parseObject(entry).getString("keyword"));
        }
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("keys", keywords);
    }

    /**
     * Routes a downloaded page to the matching link parser based on its URL pattern.
     * Pages with a non-200 status (other than 404) are scheduled for re-download and
     * not washed; a 404 falls through so any partial content can still be parsed.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();

        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            // Fixed format string: "{null}" is not an SLF4J placeholder, so the status
            // code argument was silently dropped from the original log line.
            log.error("download page url == {} error status is {}", requestUrl, page.getStatusCode());
            if (page.getStatusCode() != 404) {
                // Anything except "not found" is considered transient: retry the download.
                this.requestAgainCrawlerRecord(parsedLinks, crawlerRecord);
                crawlerRecord.setNeedWashPage(false);
                return parsedLinks;
            }
        }
        if (requestUrl.matches(HomeRegulars)) {
            homeRequest(crawlerRecord, page, parsedLinks);          // seed: expand car-series keywords
        } else if (requestUrl.matches(NewsRegulars)) {
            articleRequest(crawlerRecord, page, parsedLinks);       // Baidu result list -> article links
        } else if (requestUrl.matches(ArticleRegulars)) {
            interactionRequest(crawlerRecord, page, parsedLinks);   // redirect page -> article + interaction
        } else if (requestUrl.matches(ContentInteractionRegulars) || requestUrl.matches(CommentRegulars)) {
            commentRequest(crawlerRecord, page, parsedLinks);       // comment API paging
        }
        return parsedLinks;
    }

    /**
     * Expands the "keys" tag (one entry per car series, set in initKeyWord) into one
     * Baidu site-search request per keyword, restricted to news.163.com, starting at page 0.
     */
    private void homeRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        KVTag keys = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("keys");
        @SuppressWarnings("unchecked") // the tag is populated with a List<String> in initKeyWord
        List<String> names = (List<String>) keys.getVal();
        for (String name : names) { // for-each replaces the original indexed loop
            String url = "https://www.baidu.com/s?wd=" + name + "&si=news.163.com&ct=2097152&pn=0";
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis()) // auto-boxing; explicit Long.valueOf was redundant
                    .copyBizTags()
                    .copyResultTags()
                    .httpHead("User-Agent","Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4542.2 Safari/537.36")
                    .build();
            parsedLinks.add(record);
        }
    }

    /**
     * Parses a Baidu result-list page: schedules the next result page (pn advanced by 10)
     * and one item request per article hit. Release time is parsed from the snippet's
     * "X年Y月Z日" date, zero-padding single-digit month/day.
     */
    private void articleRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
//        Next page: same keyword, offset advanced by 10.
        String url = page.getRequest().getUrl();
        if (url.matches(NewsRegulars)){
            // Replacing "wd=" and "&pn=" with "/" leaves the keyword and offset as the last two segments.
            String[] next = url.replaceAll("wd=", "/").replaceAll("&pn=", "/").split("/");
            String nextPageUrl = "https://www.baidu.com/s?si=news.163.com&ct=2097152&wd="+next[next.length-2]+"&pn="+(Integer.parseInt(next[next.length-1])+10);
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .httpHead("User-Agent","Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4542.2 Safari/537.36")
                    .build();
            parsedLinks.add(turnPageRequest);
        }

//        Detail entries: one item request per search hit.
        List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"result c-container new-pmd\"]").nodes();//all result blocks
        for (Selectable node : nodes) {
            try {
                String articleUrl = node.xpath(".//h3[@class=\"t\"]/a/@href").get();//article URL (Baidu redirect link)
                if (StringUtils.isBlank(articleUrl)) {
                    continue;
                }
                String timeStr = node.xpath(".//span[@class=\"newTimeFactor_before_abs c-color-gray2 m\"]/text()").get();
                if (StringUtils.isBlank(timeStr)) {
                    continue;
                }
                // "X年Y月Z日" -> "X-Y-Z" with zero-padded month/day so yyyy-MM-dd parses.
                String[] split = timeStr.replaceAll("年", "-").replaceAll("月", "-").replaceAll("日", "-").trim().split("-");
                if (split[1].length()==1){
                    split[1]=  "0"+split[1];
                }
                if (split[2].length()==1){
                    split[2]=  "0"+split[2];
                }
                timeStr = split[0] + "-" + split[1] + "-" + split[2];
                long time = DateUtils.parseDate(timeStr.replaceAll("&nbsp;", "").trim(), "yyyy-MM-dd").getTime();
                CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()//item request for the article
                        .itemPageRequest(crawlerRecord)
                        .recordKey(articleUrl)
                        .httpUrl(articleUrl)
                        .releaseTime(time)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(articleRecord);

            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }



    /**
     * Handles a Baidu redirect page: resolves the real article URL from the canonical
     * link, stores it on the business tags, and schedules both the article detail page
     * and its comment-API interaction endpoint.
     */
    private void interactionRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String articleUrl = page.getHtml().xpath("//link[@rel=\"canonical\"]/@href").get();
        // Skip pages without a canonical link and "related news" footer links.
        if (StringUtils.isBlank(articleUrl)||articleUrl.contains("xwwzy_35_bottomnewskwd")) {
            return;
        }

//        e.g. canonical: http://news.163.com/14/0616/09/9UROQ4NG00014Q4P.html#from=relevant#xwwzy_35_bottomnewskwd
        //     redirect:  http://www.baidu.com/link?url=...
        crawlerRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Urls, articleUrl);
        // NOTE: ".html" is a regex here (the dot matches any char); article id is the last path segment.
        String[] split = articleUrl.replaceAll(".html", "/").trim().split("/");
        String articleId = split[split.length - 1];
        String interactionUrl = "https://comment.api.163.com/api/v1/products/a2869674571f77b5a0867c3d71db5856/threads/" + articleId + "?ibc=jssdk";

        CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()//the article detail page
                .itemPageRequest(crawlerRecord)
                .recordKey(articleUrl)
                .httpUrl(articleUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .needWashed(true)
                .build();
        parsedLinks.add(articleRecord);

        CrawlerRequestRecord interactionRecord = CrawlerRequestRecord.builder()//the interaction-count endpoint
                .itemPageRequest(crawlerRecord)
                .recordKey(interactionUrl)
                .httpUrl(interactionUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .needWashed(true)
                .build();
        parsedLinks.add(interactionRecord);

}

    /**
     * Handles comment-API responses. For the thread-metadata endpoint it schedules the
     * first comment page when the thread has any comments; for a comment-list page it
     * advances the offset by 30 and schedules the next page while comments remain.
     * The two branches previously duplicated the record-building code; it now lives in
     * {@link #addCommentPageRecord}.
     */
    private void commentRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
        // NOTE: ".html" is a regex (dot matches any char); article id is the last path segment.
        String[] split = articleUrl.replaceAll(".html", "/").trim().split("/");
        String articleId = split[split.length - 1];

        if (page.getRequest().getUrl().matches(ContentInteractionRegulars)) {
            // Thread metadata page: total comment count comes from the "tcount" field.
            String comments = page.getJson().jsonPath($_type + "..tcount").get();
            if (Integer.parseInt(comments) > 0) {
                String nextPageUrl = "https://comment.api.163.com/api/v1/products/a2869674571f77b5a0867c3d71db5856/threads/" + articleId + "/comments/newList?limit=30&offset=0";
                addCommentPageRecord(crawlerRecord, parsedLinks, nextPageUrl, comments);
            }
        } else {
            // Comment-list page: current offset is the last number in the URL; advance by 30.
            String comments = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_I_Comments);
            String url = page.getRequest().getUrl();
            String[] nextId = Pattern.compile("[^0-9]").matcher(url).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");
            int num = Integer.parseInt(nextId[nextId.length - 1]) + 30;
            if (Integer.parseInt(comments) - num > 0) {
                String nextPageUrl = "https://comment.api.163.com/api/v1/products/a2869674571f77b5a0867c3d71db5856/threads/" + articleId + "/comments/newList?limit=30&offset=" + num;
                addCommentPageRecord(crawlerRecord, parsedLinks, nextPageUrl, comments);
            }
        }
    }

    /** Builds one comment-page request carrying over the comment count and filter info tags. */
    private void addCommentPageRecord(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> parsedLinks, String nextPageUrl, String comments) {
        KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
        CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .recordKey(nextPageUrl)
                .httpUrl(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .copyBizTags()
                .copyResultTags()
                .needWashed(true)
                .build();
        commentRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_I_Comments, comments);
        commentRecord.setFilter(filterInfoRecord.getFilter());
        commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
        parsedLinks.add(commentRecord);
    }

    /**
     * Washes a downloaded page into structured results, dispatched by URL pattern and
     * by the result data types the record asked for.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();

        if (url.matches(ContentInteractionRegulars)
                && crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            results.addAll(articleInteractionWash(crawlerRecord, page)); // article interaction counts
        }
        if (url.matches(ContentRegulars) || url.matches(ContentTwoRegulars) || url.matches(ContentThreeRegulars)) {
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {
                results.addAll(articleWash(crawlerRecord, page));     // full article
                results.addAll(articleListWash(crawlerRecord, page)); // article link record
            }
        }
        if (url.matches(CommentRegulars)) {
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {
                results.addAll(commentWash(crawlerRecord, page)); // comments
                results.addAll(replyWash(crawlerRecord, page));   // replies to comments
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                results.addAll(commentInteractionWash(crawlerRecord, page)); // comment interaction counts
            }
        }
        return results;
    }

    /**
     * Emits a lightweight "article link" record for an article detail page — URL, id and
     * release time only — routed to the redis pipeline. Any parse failure is logged and
     * yields an empty list.
     */
    public List<CrawlerData> articleListWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("清洗文章链接");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        try {
            String articleUrl = page.getRequest().getUrl();
            // First two space-separated tokens of post_info form "yyyy-MM-dd HH:mm:ss"
            // (full-width spaces normalized first).
            String releaseTimeStr = page.getHtml().xpath("//div[@class=\"post_info\"]/text()").get().trim();
            String[] s = releaseTimeStr.trim().replaceAll("　", " ").split(" ");
            String releaseTime = s[0] + " " + s[1];
            // Parse once and reuse; the original parsed the same string twice.
            long releaseTimeMillis = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
            // NOTE: ".html" is a regex (dot matches any char); id is the second-to-last path segment.
            String[] split = articleUrl.replaceAll(".html", "/").trim().split("/");
            String articleId = split[split.length - 2];
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId))
                    .resultLabelTag(article)
                    .url(articleUrl)
                    .releaseTime(releaseTimeMillis)
                    .addContentKV("releaseTimeToLong", String.valueOf(releaseTimeMillis))
                    .addContentKV("articleUrl", articleUrl)
                    .addContentKV(AICCommonField.Field_Author, "文章")
                    .flowInPipelineTag("redis")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }

    //  Washes article interaction counts (comment count, views) from the thread-metadata JSON.
    private List<CrawlerData> articleInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("文章互动量清洗");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        // NOTE(review): "tcount" feeds the comment count (consistent with commentRequest) and
        // "cmtCount" feeds views — verify against the API that these two aren't swapped.
        String comments = page.getJson().jsonPath($_type + "..tcount").get();
        String views = page.getJson().jsonPath($_type + "..cmtCount").get();
        String pubTime = page.getJson().jsonPath($_type + "..createTime").get();
        if (StringUtils.isBlank(pubTime)){
            // No publish time in the payload: nothing usable, return empty.
            return crawlerArticleDataList;
        }
        String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
        try {
            // NOTE: ".html" is a regex (dot matches any char); article id is the last path segment.
            String[] split = articleUrl.replaceAll(".html", "/").trim().split("/");
            String articleId = split[split.length - 1];
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, interaction.enumVal(), articleId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId))
                    .resultLabelTag(interaction)
                    .url(articleUrl)
                    .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm:ss").getTime())
                    .addContentKV(AICCommonField.Field_I_Comments, comments)
                    .addContentKV(AICCommonField.Field_I_Views, views)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }

    // Washes an article detail page into a full article record (title, author, body text, images).
    public List<CrawlerData> articleWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("解析文章");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        try {
            String articleUrl = page.getRequest().getUrl();//article url
            String title = page.getHtml().xpath("//h1/text()").get();//title
//            String follows = page.getHtml().xpath("//span[@class=\"post_wemedia_info2\"]/a/em").get();//author follower count
            String authorIdStr = page.getHtml().xpath("//div[@class=\"post_info\"]/a/@href").get();//author profile href
            String[] ai = null;
            String authorId = null;
            if (authorIdStr != null) {
                // Author id = last path segment of the profile URL (".html" stripped; note it's a regex).
                ai = authorIdStr.replaceAll(".html", "").trim().split("/");
                if (ai != null) {
                    authorId = ai[ai.length - 1];
                }
            }
            String releaseTimeStr = page.getHtml().xpath("//div[@class=\"post_info\"]/text()").get().trim();//release time text
            // First two space-separated tokens form "yyyy-MM-dd HH:mm:ss" (full-width spaces normalized).
            String[] s = releaseTimeStr.trim().replaceAll("　", " ").split(" ");
            String releaseTime = s[0] + " " + s[1];
            String authorName = page.getHtml().xpath("//div[@class=\"post_info\"]/a/text()").get().trim();//author name
            // NOTE(review): the null check below is dead — .trim() above would already have thrown
            // NPE if get() returned null (the outer catch absorbs it).
            if (authorName==null||authorName.contains("举报")){
                // NOTE(review): falls back to the 4th token of post_info — confirm this index holds
                // the source/author on pages where the author link is actually the "report" link.
                authorName=s[3];
                authorId =null;
            }
            List<String> texts = page.getHtml().xpath("//div[@class=\"post_body\"]//p/text()").all();//article paragraphs
            List<String> picture = page.getHtml().xpath("//div[@class=\"post_body\"]//p//img/@src").all();//image urls
            StringBuffer conents = new StringBuffer();//join paragraphs with spaces
            for (String text : texts) {
                conents.append(text).append(" ");
            }
            StringBuffer pictures = new StringBuffer();//join image urls
            for (String text : picture) {
                // NOTE(review): "\0x1" is a NUL char followed by the literal "x1" — if a control-char
                // separator was intended this may be a typo for "\u0001"; confirm what the downstream
                // consumer splits on before changing it.
                pictures.append(text).append("\0x1");
            }
            // NOTE: ".html" is a regex (dot matches any char); id is the second-to-last path segment.
            String[] split = articleUrl.replaceAll(".html", "/").trim().split("/");
            String articleId = split[split.length - 2];
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId))
                    .resultLabelTag(article)
                    .url(articleUrl)
                    .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                    .addContentKV(AICCommonField.Field_Content, conents.toString().trim())
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .addContentKV(AICCommonField.Field_Images, pictures.toString().trim())
//                    .addContentKV(AutoVMCommonField.Field_Author_Follows, follows)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }
    //  Comment wash: build one CrawlerData per comment node in the page's JSON payload.
    private Collection<? extends CrawlerData> commentWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("评论清洗");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        if (page.getJson() != null) {
            List<Selectable> nodes = page.getJson().jsonPath($_type + ".comments[*]").nodes();
            for (Selectable node : nodes) {
                try {
                    // Parse the node once instead of re-parsing the JSON for every field.
                    Json json = new Json(node.get());
                    String content = json.jsonPath(".content").get();//comment body
                    String releaseTime = json.jsonPath(".createTime").get();//comment time
                    String user = json.jsonPath(".nickname").get();//commenter nickname
                    String userId = json.jsonPath(".userId").get();//commenter id
                    String commentId = json.jsonPath(".commentId").get();//comment id

                    // Article url carried on the record's business tags.
                    String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
                    // FIX: use literal replace — in replaceAll, ".html" is a regex where '.' matches any char.
                    String[] split = articleUrl.replace(".html", "/").trim().split("/");
                    // NOTE(review): the article-wash method derives articleId from split.length - 2,
                    // while this uses split.length - 1; confirm which index matches the article dataId
                    // so the parentId link below actually resolves.
                    String articleId = split[split.length - 1];
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, comment.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId))
                            .resultLabelTag(comment)
                            .url(articleUrl)
                            .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                            .addContentKV(AICCommonField.Field_Content, content)
                            .addContentKV(AICCommonField.Field_Author, user)
                            .addContentKV(AICCommonField.Field_Author_Id, userId)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    crawlerArticleDataList.add(crawlerData);
                } catch (Exception e) {
                    // Skip the single bad comment node; keep processing the rest.
                    log.error(e.getMessage(), e);
                }
            }
        }
        return crawlerArticleDataList;
    }

    //  Comment interaction metrics: likes (vote) and dislikes (against) per comment.
    private Collection<? extends CrawlerData> commentInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("评论互动量");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        // FIX: guard against a missing JSON body (sibling commentWash already does this) to avoid an NPE.
        if (page.getJson() == null) {
            return crawlerArticleDataList;
        }
        List<Selectable> nodes = page.getJson().jsonPath($_type + ".comments[*]").nodes();//all comments
        for (Selectable node : nodes) {
            try {
                // Parse the node once instead of re-parsing the JSON for every field.
                Json json = new Json(node.get());
                String like = json.jsonPath(".vote").get();//up-votes
                String bad = json.jsonPath(".against").get();//down-votes
                String releaseTime = json.jsonPath(".createTime").get();//comment time
                String commentId = json.jsonPath(".commentId").get();//comment id
                String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);//article url

                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, interaction.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, comment.enumVal(), commentId))
                        .resultLabelTag(interaction)
                        .url(articleUrl)
                        .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                        .addContentKV(AICCommonField.Field_I_Likes, like)
                        .addContentKV(AICCommonField.Field_I_Dislikes, bad)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                // Skip the single bad comment node; keep processing the rest.
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }


    //  Comment replies: pair each reply with its original comment by walking the commentIds chains.
    private Collection<? extends CrawlerData> replyWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("评论回复");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        List<String> all = page.getJson().jsonPath(".commentIds[*]").all();//comment id chains
        if (all.size() > 0) {
            // NOTE(review): this loop stops before the last chain (i < size - 1) — confirm intentional.
            for (int i = 0; i < all.size() - 1; i++) {
                String commentIds = page.getJson().jsonPath($_type + ".commentIds[" + i + "]").get();//comma-joined chain of comment ids
                if (commentIds != null) {
                    String[] split = commentIds.trim().split(",");
                    if (split.length > 1) {//chain has more than one floor, i.e. at least one reply
                        List<Selectable> nodes = page.getJson().jsonPath($_type + ".comments[*]").nodes();//all comments
                        for (Selectable node : nodes) {
                            // Parse each node once instead of re-parsing per field.
                            Json originalJson = new Json(node.get());
                            String commentId2 = originalJson.jsonPath(".commentId").get();//original comment id
                            for (int j = 0; j <= split.length - 2; j++) {
                                // FIX: compare string content with equals(); '==' compared object
                                // references of freshly parsed strings and never matched.
                                if (commentId2 != null && commentId2.equals(split[j])) {//original comment
                                    String userId2 = originalJson.jsonPath(".userId").get();//original commenter id
                                    String content2 = originalJson.jsonPath(".content").get();//original comment body
                                    String user2 = originalJson.jsonPath(".nickname").get();//original commenter nickname
                                    String lou2 = j + "";//floor number of the original comment
                                    List<Selectable> nnodes = page.getJson().jsonPath($_type + ".comments[*]").nodes();//all comments
                                    for (Selectable nnode : nnodes) {
                                        Json replyJson = new Json(nnode.get());
                                        String commentId1 = replyJson.jsonPath(".commentId").get();//reply comment id
                                        // FIX: equals() instead of '==' (see above).
                                        if (commentId1 != null && commentId1.equals(split[j + 1])) {//the reply
                                            // FIX: read the reply's userId from the reply node (was taken from the original node).
                                            String userId1 = replyJson.jsonPath(".userId").get();//reply commenter id
                                            String content1 = replyJson.jsonPath(".content").get();//reply body
                                            String releaseTime = replyJson.jsonPath(".createTime").get();//reply time
                                            String user1 = replyJson.jsonPath(".nickname").get();//reply commenter nickname
                                            String lou1 = j + 1 + "";//floor number of the reply

                                            String commentId = commentId1 + commentId2;
                                            String content = "回复：" + content1 + " 原评论：" + content2;
                                            String user = "回复：" + user1 + " 原评论：" + user2;
                                            String userId = "回复：" + userId1 + " 原评论：" + userId2;
                                            String floor = "回复楼数：" + lou1 + " 原评论楼数：" + lou2;
                                            String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);//article url
                                            // FIX: literal replace — ".html" as a regex lets '.' match any character.
                                            String[] splits = articleUrl.replace(".html", "/").trim().split("/");
                                            String articleId = splits[splits.length - 2];
                                            try {
                                                CrawlerData crawlerData = CrawlerData.builder()
                                                        .data(crawlerRecord, page)
                                                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, comment.enumVal(), commentId))
                                                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId))
                                                        .resultLabelTag(comment)
                                                        .url(articleUrl)
                                                        .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                                                        .addContentKV(AICCommonField.Field_Floor, floor)
                                                        .addContentKV(AICCommonField.Field_Content, content)
                                                        .addContentKV(AICCommonField.Field_Author, user)
                                                        .addContentKV(AICCommonField.Field_Author_Id, userId)
                                                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                                        .flowInPipelineTag("kafka")
                                                        .build();
                                                crawlerData.setFilterPipelineResult(true);
                                                crawlerArticleDataList.add(crawlerData);
                                            } catch (Exception e) {
                                                // Skip the single bad pair; keep processing the rest.
                                                log.error(e.getMessage(), e);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return crawlerArticleDataList;
    }

    /**
     * Post-execution hook from the crawler framework; intentionally a no-op for this script.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {
    }

    /*
     * Re-download: enqueue a retry request for a failed record.
     * Retry count is tracked via the REQUEST_AGAIN_TAG business tag, capped at 30 attempts.
     * */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 30) {// retry cap
                // FIX: the message wrongly said "autohome" — this script crawls the wangyi domain.
                log.error("{} download page the number of retries exceeds the limit" +
                        ",request url {}", DOMAIN, crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // Hoisted so the duplicate User-Agent literal appears once.
        String userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4542.2 Safari/537.36";
        CrawlerRequestRecord crawlerRequestRecord = null;
        // A "turn_page_item_request" label distinguishes item-page retries from turn-page retries.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)// unique key per retry attempt
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .httpHead("User-Agent", userAgent)
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)// unique key per retry attempt
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .httpHead("User-Agent", userAgent)
                    .build();
        }

        // Carry over the original request's processing flags, headers and extras.
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        // Bump the retry counter on the new record.
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }
}
