package com.chance.cc.crawler.development.scripts.tenxun.qqCarSeries;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


import java.util.*;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;

public class TengXunCarSeriesArticleCrawlerScript extends CrawlerCommonScript {
    private Logger log = LoggerFactory.getLogger(TengXunCarSeriesArticleCrawlerScript.class);// logger
    private static final String DOMAIN = "tenxun";// crawler domain key ("tenxun" = Tencent)
    private static final String SITE = "carSeriesArticle";// site tag this script handles (checked in crawlerCheck)
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";// biz-tag key holding the retry counter

    // Seed page, e.g. https://car.autohome.com.cn/javascript/NewSpecCompare.js?20131010
    private static final String HomeRegulars = "https://car.autohome.com.cn/javascript/NewSpecCompare.js\\?20131010";// seed link (spec-compare JS)
    // Series page, e.g. https://auto.qq.com/car_serial/news.shtml?serial_id=2072
    private static final String CarSeriesRegulars = "https://auto.qq.com/car_serial/news.shtml\\?serial_id=\\d*";// car-series link
    // Article feed (JSONP), e.g.
    // https://ait.auto.qq.com/v12/cardata/serial/feed?callback=news_list&column=&next_id=1589510062&serial_id=2072&_t=1630665832466&sign=52ebbc0bd1a06f47e06e1257c79c29e638e748bb
    private static final String ArticleRegulars = "https://ait.auto.qq.com/v12/cardata/serial/feed\\?callback=news_list&column=&next_id=\\S*&serial_id=\\d*&_t=\\d*&sign=\\S*";// article feed link
    // Article detail, e.g. http://view.inews.qq.com/a/20200623A0F3JX00
    private static final String ContentRegulars = "http://view.inews.qq.com/a/\\S*";// article detail link
    // Comment list, e.g. https://coral.qq.com/article/5560214768/comment/v2
    private static final String CommentRegulars = "https://coral.qq.com/article/\\S*/comment/v2";// comment link


    @Override
    public String domain() {
        // Identifier the framework uses to route records to this script.
        return DOMAIN;
    }

    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script can handle, in matching order.
        for (String regular : Arrays.asList(
                HomeRegulars, CarSeriesRegulars, ArticleRegulars, ContentRegulars, CommentRegulars)) {
            addUrlRegular(regular);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Accept only records whose "site" biz tag matches this script's site.
        return SITE.equals(
                crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site"));
    }

    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        // Find the support record whose URL contains "keys" and copy its keyword
        // list onto the main request (see initKeyWord). No links are produced here.
        List<CrawlerRecord> prepareLinks = new ArrayList<>();
        for (CrawlerRequestRecord support : supportSourceRecords) {
            if (support.getHttpRequest().getUrl().contains("keys")) {
                this.initKeyWord(crawlerRequestRecord, support);
            }
        }
        return prepareLinks; // always empty; link generation happens in parseLinks
    }

    // Extract the per-series keys ("keyword" field of each content entry) from the
    // support page's JSON and stash them on the request under the "keys" biz tag.
    private void initKeyWord(CrawlerRequestRecord crawlerRequestRecord, CrawlerRequestRecord supportSourceRecord) {
        HttpPage supportPage = supportSourceRecord.getInternalDownloadPage();
        Json json = supportPage.getJson();
        if (!"success".equals(json.jsonPath($_type + ".msg").get())) {
            log.error("keyword page [{}] download error!", supportPage.getRequest().getUrl());
            return;
        }
        List<String> keys = new ArrayList<>();
        for (String entry : json.jsonPath($_type + ".content").all()) {
            keys.add(JSONObject.parseObject(entry).getString("keyword"));
        }
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("keys", keys);
    }

    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // Dispatch a downloaded page to the matching parser; failed downloads are re-queued.
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // Check whether the page downloaded successfully.
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            // BUG FIX: the format string contained a literal "{null}", so the URL filled
            // the only real "{}" and the status code was silently dropped from the log.
            log.error("download page url {} error, status is {}", requestUrl, page.getStatusCode());
            if (page.getStatusCode() != 404) { // re-queue for download unless the page is gone
                this.requestAgainCrawlerRecord(parsedLinks, crawlerRecord);
                crawlerRecord.setNeedWashPage(false);
                return parsedLinks;
            }
        }
        if (requestUrl.matches(HomeRegulars)) {
            homeRequest(crawlerRecord, page, parsedLinks);      // seed page -> series requests
        } else if (requestUrl.matches(CarSeriesRegulars)) {
            carSeriesRequest(crawlerRecord, page, parsedLinks); // series page -> article feed request
        } else if (requestUrl.matches(ArticleRegulars)) {
            articleRequest(crawlerRecord, page, parsedLinks);   // article feed -> detail requests + next page
        } else if (requestUrl.matches(ContentRegulars)) {
            commentRequest(crawlerRecord, page, parsedLinks);   // article detail -> comment-list request
        }
        return parsedLinks;
    }

    // Build one car-series page request per serial id collected by initKeyWord.
    private void homeRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        KVTag keysTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("keys");
        @SuppressWarnings("unchecked") // "keys" is always written as List<String> in initKeyWord
        List<String> serialIds = (List<String>) keysTag.getVal();
        for (String serialId : serialIds) {
            String seriesUrl = "https://auto.qq.com/car_serial/news.shtml?serial_id=" + serialId;
            parsedLinks.add(CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(seriesUrl)
                    .releaseTime(Long.valueOf(System.currentTimeMillis()))
                    .copyBizTags()
                    .copyResultTags()
                    .build());
        }
    }

    // Derive the signed JSONP article-feed URL from the series page URL and queue it.
    private void carSeriesRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String carSeriesUrl = page.getRequest().getUrl();
        String articleId = carSeriesUrl.replaceAll("[^0-9]", "").trim(); // digits in the URL == serial_id
        String time = String.valueOf(System.currentTimeMillis()).substring(0, 10); // epoch seconds
        String sign = sign(articleId, "", time); // first page has no next_id
        String articleUrl = "https://ait.auto.qq.com/v12/cardata/serial/feed?callback=news_list&column=&next_id=&serial_id=" + articleId + "&_t=" + time + "&sign=" + sign;

        CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .recordKey(articleUrl)
                .httpUrl(articleUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        // Carry the feed URL and serial id along for the feed parser.
        articleRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Tag_Site_Info, articleUrl);
        articleRecord.tagsCreator().bizTags().addCustomKV("articleId", articleId);
        parsedLinks.add(articleRecord);
    }

    // Parse the article-feed JSONP response: queue the next page plus one detail
    // request per article in the list.
    private void articleRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String articleId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        String time = String.valueOf(System.currentTimeMillis()).substring(0, 10); // epoch seconds
        // Strip the JSONP wrapper: "news_list(" prefix (10 chars) and the trailing ")".
        String json = page.getRawText().substring(10, page.getRawText().length() - 1);
        String nextId = new Json(json).jsonPath($_type + "..next_id").get();
        String sign = sign(articleId, nextId, time);
        // Next page. BUG FIX: the original condition (nextId != null || nextId != "")
        // was always true (and compared strings with !=), so a missing/empty next_id
        // still produced a bogus turn-page request.
        if (StringUtils.isNotEmpty(nextId)) {
            String nextPageUrl = "https://ait.auto.qq.com/v12/cardata/serial/feed?callback=news_list&column=&next_id=" + nextId + "&serial_id=" + articleId + "&_t=" + time + "&sign=" + sign;
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parsedLinks.add(turnPageRequest);
        }

        // Detail pages: one request per article node in the feed.
        List<Selectable> nodes = new Json(json).jsonPath($_type + "..list[*]").nodes();
        for (Selectable node : nodes) {
            String authorName = new Json(node.get()).jsonPath(".source_name").get(); // author
            String title = new Json(node.get()).jsonPath(".title").get();            // title
            try {
                String pubTime = new Json(node.get()).jsonPath(".create_at").get();      // publish date
                String articleUrl = new Json(node.get()).jsonPath(".source_url").get();  // e.g. http://view.inews.qq.com/a/20200623A0F3JX00
                if (pubTime != null && articleUrl != null) {
                    CrawlerRequestRecord contentRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRecord)
                            .recordKey(articleUrl)
                            .httpUrl(articleUrl)
                            .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                            .copyBizTags()
                            .copyResultTags()
                            .needWashed(true)
                            .build();
                    // Stash metadata the wash stage needs on the detail request.
                    contentRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Urls, articleUrl);
                    contentRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Title, title);
                    contentRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Author, authorName);
                    contentRecord.tagsCreator().bizTags().addCustomKV("pubTime", pubTime);
                    parsedLinks.add(contentRecord);
                }
            } catch (Exception e) {
                log.error(e.getMessage(), e); // one bad node must not abort the whole feed
            }
        }
    }


    // From an article detail page, locate the comment id and queue the coral.qq.com
    // comment-list request, restoring the comment filter info saved on the biz tags.
    private void commentRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        String commentId = null;
        try {
            commentId = page.getHtml().xpath("//div[@id=\"commentData\"]/@targetid").get();
            if (commentId == null) {
                // Fall back to the "cid" inside the embedded window.initData JSON blob.
                String trim = page.getRawText().split("<script>window.initData = ")[1].split(";</script><script>")[0].trim();
                commentId = new Json(trim).jsonPath($_type + "..cid").get();
            }
        } catch (Exception e) {
            // BUG FIX: the throwable was passed as a format argument, which lost the
            // stack trace; pass it last so SLF4J logs it properly.
            log.error("评论id为空，页面可能不存在{}", page.getRequest().getUrl(), e);
            return;
        }
        //https://coral.qq.com/article/4104356503/comment/v2
        try {
            // BUG FIX: the original condition (commentId != null || commentId != "")
            // was always true and used reference comparison on strings.
            if (StringUtils.isNotEmpty(commentId)) {
                String commentUrl = "https://coral.qq.com/article/" + commentId + "/comment/v2";
                KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
                CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .recordKey(commentUrl)
                        .httpUrl(commentUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .copyResultTags()
                        .needWashed(true)
                        .build();
                // Carry the saved filter settings onto the comment request.
                commentRecord.setFilter(filterInfoRecord.getFilter());
                commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                parsedLinks.add(commentRecord);
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }


    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // Route a downloaded page to the matching wash routines based on its URL and result tags.
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(ContentRegulars)) { // article detail page
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {
                crawlerDataList.addAll(articleWash(crawlerRecord, page)); // article content
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {
                // NOTE(review): same condition as above — presumably a different data
                // type was intended to gate the link wash; confirm before changing.
                crawlerDataList.addAll(articleListWash(crawlerRecord, page)); // article link record
            }
        }
        if (url.matches(CommentRegulars)) { // comment list page
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {
                crawlerDataList.addAll(commentWash(crawlerRecord, page)); // comments
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                crawlerDataList.addAll(commentInteractionWash(crawlerRecord, page)); // per-comment like counts
            }
        }
        return crawlerDataList;
    }

    // Wash an article page into a lightweight "link" record (redis pipeline):
    // just the URL and release time, no content.
    public List<CrawlerData> articleListWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("采集链接");
        List<CrawlerData> results = new ArrayList<>();
        try {
            long time;
            String dateText = page.getHtml().xpath("//span[@class=\"date\"]/text()").get();
            if (dateText != null) {
                time = DateUtils.parseDate(dateText, "yyyy-MM-dd").getTime();
            } else {
                // Fall back to the "pubtime" inside the window.initData JSON blob.
                try {
                    String initData = page.getRawText().split("<script>window.initData = ")[1].split(";</script><script>")[0].trim();
                    String pubtime = new Json(initData).jsonPath($_type + "..pubtime").get();
                    time = DateUtils.parseDate(pubtime, "yyyy-MM-dd HH:mm:ss").getTime();
                } catch (Exception e) {
                    log.error("{}时间为空，页面可能不存在{}", e, page.getRequest().getUrl());
                    return results;
                }
            }
            String articleUrl = page.getRequest().getUrl();
            String[] urlParts = articleUrl.split("/"); // last path segment is the article key
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), urlParts[urlParts.length - 1]))
                    .resultLabelTag(article)
                    .url(articleUrl)
                    .releaseTime(time)
                    .addContentKV("releaseTimeToLong", String.valueOf(time))
                    .addContentKV("articleUrl", articleUrl)
                    .addContentKV(AICCommonField.Field_Author, "文章")
                    .flowInPipelineTag("redis")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            results.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return results;
    }

    // Wash an article detail page into a full CrawlerData record (kafka pipeline):
    // content text, title, author, author id and image URLs. Handles both the
    // server-rendered layout and the JS-rendered (window.initData) layout.
    public List<CrawlerData> articleWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("采集文章");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        try {
            // The author channel id is embedded in the raw page as `chlid":"<id>",`.
            String[] authorStr = page.getRawText().split("chlid\":\"");
            String authorId = null;
            if (authorStr.length > 1) {
                authorId = authorStr[1].split("\",")[0];
            }

            String releaseTime = page.getHtml().xpath("//span[@class=\"date\"]/text()").get();
            long time = 0;
            String conents = null;       // article body text
            List<String> picture = null; // image URLs
            if (releaseTime != null) {
                // Server-rendered layout: date span + <section class="article">.
                time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd").getTime();
                List<String> paragraphs = page.getHtml().xpath("//section[@class=\"article\"]//p/text()").all();
                picture = page.getHtml().xpath("//section[@class=\"article\"]//img/@src").all();
                StringBuilder body = new StringBuilder(); // no shared access; StringBuilder over StringBuffer
                for (String text : paragraphs) {
                    body.append(text).append(" ");
                }
                conents = body.toString();
            } else {
                // JS-rendered layout: everything lives in the window.initData JSON blob.
                try {
                    String trim = page.getRawText().split("<script>window.initData = ")[1].split(";</script><script>")[0].trim();
                    releaseTime = new Json(trim).jsonPath($_type + "..pubtime").get();
                    time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                    conents = new Json(trim).jsonPath($_type + "..cnt_html").get();
                    picture = new Json(trim).jsonPath($_type + "..imgurl641.imgurl").all();
                } catch (Exception e) {
                    log.error("{}时间为空，页面可能不存在{}", e, page.getRequest().getUrl());
                    // BUG FIX: used to return null, which made washPage's addAll throw NPE.
                    return crawlerArticleDataList;
                }
            }
            String articleUrl = page.getRequest().getUrl();
            String authorName = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Author);
            String title = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Title);

            // NOTE(review): "\0x1" is a NUL char followed by "x1" — presumably "\u0001"
            // was intended as the separator; confirm with downstream consumers before changing.
            StringBuilder pictures = new StringBuilder();
            for (String text : picture) {
                pictures.append(text).append("\0x1");
            }

            String[] articleId = articleUrl.split("/"); // last path segment is the article key
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId[articleId.length - 1]))
                    .resultLabelTag(article)
                    .url(articleUrl)
                    .releaseTime(time)
                    .addContentKV(AICCommonField.Field_Content, conents.replaceAll("<P>", "").replaceAll("</P>", "").trim())
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .addContentKV(AICCommonField.Field_Images, pictures.toString().trim())
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }

    // Wash the coral.qq.com comment JSON into CrawlerData records (kafka pipeline),
    // one per original comment, parented to the article record.
    private Collection<? extends CrawlerData> commentWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("采集评论");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        if (page.getJson() == null) {
            return crawlerArticleDataList;
        }
        // Hoisted out of the per-comment loop (was O(comments * users)): build a
        // userid -> nick lookup once. BUG FIX: the original compared userId == uuserId
        // by reference, so the author's nick was (almost) never resolved.
        Map<String, String> nickByUserId = new HashMap<>();
        for (Selectable userNode : page.getJson().jsonPath($_type + "..userList[*]").nodes()) {
            String uid = new Json(userNode.get()).jsonPath(".userid").get();
            if (uid != null) {
                nickByUserId.put(uid, new Json(userNode.get()).jsonPath(".nick").get());
            }
        }
        List<Selectable> nodes = page.getJson().jsonPath($_type + "..oriCommList[*]").nodes();
        for (Selectable node : nodes) {
            try { // one malformed comment must not abort the whole wash
                String content = new Json(node.get()).jsonPath(".content").get();
                String timeStr = new Json(node.get()).jsonPath(".time").get(); // epoch seconds
                long time = Long.parseLong(timeStr) * 1000;                    // -> milliseconds
                String userId = new Json(node.get()).jsonPath(".userid").get();
                String commentId = new Json(node.get()).jsonPath(".id").get();
                String user = userId == null ? null : nickByUserId.get(userId);
                String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
                String[] articleId = articleUrl.split("/"); // last path segment is the article key
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, comment.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId[articleId.length - 1]))
                        .url(articleUrl)
                        .releaseTime(time)
                        .addContentKV(AICCommonField.Field_Content, content)
                        .addContentKV(AICCommonField.Field_Author, user)
                        .addContentKV(AICCommonField.Field_Author_Id, userId)
                        .resultLabelTag(comment)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }

    // Wash per-comment interaction counts (likes) into CrawlerData records,
    // parented to the corresponding comment record.
    private Collection<? extends CrawlerData> commentInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("采集评论互动量");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        List<Selectable> nodes = page.getJson().jsonPath($_type + "..oriCommList[*]").nodes();
        for (Selectable node : nodes) {
            try { // one malformed node must not abort the whole wash
                String like = new Json(node.get()).jsonPath(".up").get();      // like count
                String timeStr = new Json(node.get()).jsonPath(".time").get(); // epoch seconds
                // BUG FIX (consistency with commentWash): ".time" is epoch seconds, but
                // releaseTime is milliseconds everywhere else in this script.
                long time = Long.parseLong(timeStr) * 1000;
                String commentId = new Json(node.get()).jsonPath(".id").get();
                String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, interaction.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, comment.enumVal(), commentId))
                        .resultLabelTag(interaction)
                        .url(articleUrl)
                        .releaseTime(time)
                        .addContentKV(AICCommonField.Field_I_Likes, like)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }

//    //  评论回复
//    private Collection<? extends CrawlerData> replyWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
//        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
//        List<Selectable> nodes = page.getJson().jsonPath($_type + ".commentlist[*]").nodes();
//        for (Selectable node : nodes) {
//            String userId2 = new Json(node.get()).jsonPath(".Quote.RMemberId").get();//原评论用户id
//            if (userId2 != null) {
//                try {
//                    String content1 = new Json(node.get()).jsonPath(".RContent").get();//回复内容
//                    String timeStr = new Json(node.get()).jsonPath(".RReplyDate").get();//回复时间
//                    Long time = Long.valueOf(Pattern.compile("[^0-9]").matcher(timeStr).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0]);
//                    String user1 = new Json(node.get()).jsonPath(".RMemberName").get();//回复用户
//                    String userId1 = new Json(node.get()).jsonPath(".RMemberId").get();//回复用户id
//                    String commentId1 = new Json(node.get()).jsonPath(".ReplyId").get();//回复评论id
//                    String commentId2 = new Json(node.get()).jsonPath(".Quote.ReplyId").get();//原评论id
//                    String commentId = commentId1 + commentId2;
//                    String content2 = new Json(node.get()).jsonPath(".Quote.RContent").get();//原评论内容
//                    String user2 = new Json(node.get()).jsonPath(".Quote.RMemberName").get();//原评论用户
//                    String content = "回复：" + content1 + "原评论：" + content2;
//                    String user = "回复：" + user1 + "原评论：" + user2;
//                    String userId = "回复：" + userId1 + "原评论：" + userId2;
//                    String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);//文章url
//                    String[] articleId = Pattern.compile("[^0-9]").matcher(articleUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");//url的关键字
//
//                    CrawlerData crawlerData = CrawlerData.builder()
//                            .data(crawlerRecord, page)
//                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, comment.enumVal(), commentId))
//                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), SITE, article.enumVal(), articleId[0] + articleId[1]))
//                            .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
//                            .url(articleUrl)
//                            .releaseTime(time)
//                            .addContentKV(AICCommonField.Field_Content, content)
//                            .addContentKV(AICCommonField.Field_Author, user)
//                            .addContentKV(AICCommonField.Field_Author_Id, userId)
//                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
//                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
//                            .flowInPipelineTag("kafka")
//                            .build();
//                    crawlerData.setFilterPipelineResult(true);
//                    crawlerArticleDataList.add(crawlerData);
//                } catch (Exception e) {
//                    log.error(e.getMessage(), e);
//                }
//            }
//        }
//        return crawlerArticleDataList;
//    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally a no-op: this script needs no post-execution cleanup.
    }

    /*
     * Re-queue a failed download, at most 10 times. The retry counter is carried
     * on the record's biz tags under REQUEST_AGAIN_TAG.
     * */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 10) {
                // BUG FIX: message said "pcauto" (copy-paste from another script); log our domain.
                log.error("{} download page the number of retries exceeds the limit" +
                        ", request url {}", DOMAIN, crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        CrawlerRequestRecord crawlerRequestRecord;
        // Preserve the request kind of the original record via its
        // "turn_page_item_request" label tag.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count) // unique key per attempt, avoids dedup
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Carry over the page-handling flags, headers and extras, then bump the counter.
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }


    // SHA-1 signature for the article-feed API: parameters are joined in fixed
    // order with '@' separators plus a static salt, then hashed. A null nextId is
    // rendered as the literal "null", matching plain string concatenation.
    public String sign(String id, String nextId, String time) {
        StringBuilder payload = new StringBuilder();
        payload.append("_t=").append(time)
                .append("@callback=news_list")
                .append("@column=")
                .append("@next_id=").append(nextId)
                .append("@salt=t*e&n^c%e%n#t@a(u*t&o")
                .append("@serial_id=").append(id);
        return DigestUtils.sha1Hex(payload.toString());
    }

}
