package com.chance.cc.crawler.development.scripts.xiaohongshu;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Domain_Result_Json;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
public class XHSCommentCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XHSCommentCrawlerScript.class);

    /** Site identifier used in data ids, biz tags and the dedup filter key. */
    public static final String site= "xhs-comment";
    public static final String site_biz= "search";

    /** Placeholder start url — only used to route records into this script (no real request). */
    public static final String commentStartUrl = "https://start/fe_api/burdock/weixin/v2/notes/start/comments/start";

    /** Matches any mini-program comment API url (first page and paged variants). */
    public static final String wxappArticleCommentUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/notes/\\S*/comments\\S*";
    /** Matches only the first comment page (no endId parameter). */
    public static final String wxappArticleCommentUrlRegular1 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/\\S*/comments?pageSize=10";
//    public static final String wxappArticleCommentUrlFormat1 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/%s/comments?pageSize=10";
    /** Next-page url template: %s = noteId, %s = endId (last comment id of the previous page). */
    public static final String wxappArticleCommentUrlFormat2 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/%s/comments?pageSize=10&endId=%s";

    // Auxiliary endpoints fetched only to imitate a real mini-program browsing flow.
    public static final String wxappArticleBannerUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/banner?noteId=%s&platform=weixin";
    public static final String wxappArticleRelatedUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/related?openId=%s&pageSize=8&page=1&noteType=1&needGifCover=true";
    public static final String wxappArticleActivityBannerUrlFormat = "https://pages.xiaohongshu.com/data/sns/mp_activity_banner?sid=%s";

    // volatile: this queue is lazily initialized with double-checked locking in prepareRequest();
    // without volatile, another thread may observe the reference before the queue is fully constructed.
    private volatile LinkedBlockingQueue<String> authorInfos;
    // final: the monitor object must never be reassigned.
    private final Object lock = new Object();

    // Keys into the per-account oauth-info json map.
    public static final String headerReferer = "referer";
    public static final String headerAuthorization = "authorization";
    public static final String headerDeviceFingerprint = "device-fingerprint";
    public static final String authorOpenId = "openId";
    public static final String authorSid = "sid";
    public static final String authorType = "type"; // account type ("comment" accounts are used here)

    /**
     * Entry point for link extraction. Retries failed downloads and delegates
     * successful comment-API responses to {@link #parseArticleCommentLinks}.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> newRequests = new ArrayList<>();

        if (!page.isDownloadSuccess()) {
            // Download failed: re-enqueue the same request and skip washing this page.
            logger.error("xhs comment request download has error, status code {},error info [{}] ,will retry",
                    page.getStatusCode(), page.getRawText());
            addCrawlerRecords(newRequests, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
            return newRequests;
        }

        if (isUrlMatch(page.getRequest().getUrl(), wxappArticleCommentUrlRegular)) {
            parseArticleCommentLinks(newRequests, crawlerRecord, page);
        }
        return newRequests;
    }

    /**
     * Parses one comment-list API response: emits camouflage requests (banner,
     * related notes, activity banner, commenter avatars) and, when the page has
     * comments, the next comment page keyed by the last comment id (endId).
     * On API error or exception the request is re-enqueued for retry.
     */
    private void parseArticleCommentLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        try {
            Json noteCommentPage = new Json(page.getRawText());
            // Simulate a real browsing flow to avoid triggering the slider captcha.
//            String url = "https://servicewechat.com/ci.xiaohongshu.com/2b8b5668-64ea-471a-86e2-969e9883f43f";
//            internalDownloadCommon(crawlerRequestRecords,crawlerRecord,url);

            if (isReturn(noteCommentPage,page)){
                // Unrecoverable API error: tag the record so washPage only syncs comment counters.
                crawlerRecord.tagsCreator().bizTags().addCustomKV("isFilter",true);
            } else {
                // NOTE(review): code may be null if the response has no ".code" — confirm; a null here NPEs below.
                String code = noteCommentPage.jsonPath($_type + ".code").get();
                if (!code.equals("0")){
                    // Recoverable failure (e.g. Spam / expired login / ban): retry with the current auth info logged.
                    logger.info("xhs article comment request url {},error content {},thread local user author info {}"
                            ,page.getRequest().getUrl(),page.getRawText(),getLocalAuthInfo());
                    addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
                    crawlerRecord.setNeedWashPage(false);
                    return;
                }

                String dataId = noteCommentPage.jsonPath($_type + ".data.targetNoteId").get();

                // Simulate a real operation flow to avoid the slider captcha.
                internalDownloadBanner(crawlerRequestRecords,crawlerRecord,dataId);
                internalDownloadRelated(crawlerRequestRecords,crawlerRecord,dataId);
                internalDownloadActivityBanner(crawlerRequestRecords,crawlerRecord,dataId);

                String endId = null;

                // NOTE(review): this path omits the $_type prefix used by every other jsonPath call — confirm intentional.
                List<String> noteComments = noteCommentPage.jsonPath(".data.comments.*").all();
                List<String> images = new ArrayList<>();
                if (noteComments != null && noteComments.size() > 0 ) {
                    for (String comment : noteComments) {
                        Json commentJson = new Json(comment);
                        // endId ends up as the id of the LAST comment on the page — used as the paging cursor.
                        endId = commentJson.jsonPath($_type + ".id").get();
                        String image = commentJson.jsonPath($_type + ".user.image").get();
                        images.add(image);
                    }

                    // Fetch commenter avatars as part of the camouflage flow.
                    for (String image : images) {
                        internalDownloadCommon(crawlerRequestRecords,crawlerRecord,image);
                    }
                }


                if (StringUtils.isNotBlank(endId)){

                    // Build the next comment page request from the paging cursor.
                    String noteUrl = String.format(wxappArticleCommentUrlFormat2, dataId,endId);

                    CrawlerRequestRecord crawlerArticleRequestRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRecord)
                            .httpUrl(noteUrl)
                            .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                            .releaseTime(System.currentTimeMillis())
                            .copyBizTags()
                            .needWashed(true)
                            .build();
                    // The parent article's result-json tag must not leak onto follow-up pages.
                    if (crawlerArticleRequestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
                        crawlerArticleRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
                    }
                    if (isUrlMatch(page.getRequest().getUrl(),wxappArticleCommentUrlRegular1)){
                        // Do not dedup-filter the second page: highly-liked comments may sit on the first
                        // page ahead of the newest comments.
                        crawlerArticleRequestRecord.tagsCreator().requestTags().removeRequestType(filter);
                    }
                    crawlerRequestRecords.add(crawlerArticleRequestRecord);
                }
            }

        } catch (Exception e) {
            logger.error("xhs article comment download error,header oauthor info {},thread local author info {},request url: {},page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization),getLocalAuthInfo(),page.getRequest().getUrl(),page.getRawText(),e.getMessage());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Decides whether the response should be dropped (filtered) instead of retried.
     *
     * @return true when the API reports a non-zero code whose message is NOT one of the
     *         known recoverable failures ("Spam", expired login, banned account);
     *         false for code 0 or for recoverable failures (those are retried upstream).
     */
    private boolean isReturn(Json articleResultJson,HttpPage page){

        String code = articleResultJson.jsonPath($_type + ".code").get();
        if (!"0".equals(code)){
            String msg = articleResultJson.jsonPath($_type + ".msg").get();
            // Null-check first: the original msg.contains("封号") threw NPE when the response had no ".msg".
            // A missing message is treated as an unknown error and filtered.
            if(!"Spam".equals(msg) && !"登录已过期".equals(msg) && (msg == null || !msg.contains("封号"))){
                logger.warn("xhs note url {} download error: {}",page.getRequest().getUrl(),msg);
                return true;
            }
        }
        return false;
    }

    /** Queues the note banner endpoint as a signed camouflage download for the given note id. */
    private void internalDownloadBanner(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        internalDownload(crawlerRequestRecords, crawlerRecord, String.format(wxappArticleBannerUrlFormat, dataId));
    }

    /** Queues the "related notes" endpoint, parameterized with the current account's openId. */
    private void internalDownloadRelated(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        Map oauthInfoMap = new Json(fillThreadLocalAuthorInfo()).toObject(Map.class);
        String openId = oauthInfoMap.get(authorOpenId).toString();
        internalDownload(crawlerRequestRecords, crawlerRecord, String.format(wxappArticleRelatedUrlFormat, dataId, openId));
    }

    /**
     * Queues the activity-banner endpoint (keyed by the account's sid) as an unsigned
     * internal download; unlike the other helpers this endpoint needs no x-sign header.
     */
    private void internalDownloadActivityBanner(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        Map oauthInfoMap = new Json(fillThreadLocalAuthorInfo()).toObject(Map.class);
        String bannerUrl = String.format(wxappArticleActivityBannerUrlFormat, oauthInfoMap.get(authorSid).toString());
        CrawlerRequestRecord bannerRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(bannerUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(bannerRecord);
    }

    /**
     * Queues a plain camouflage download (e.g. a commenter avatar). Blank urls are
     * ignored; the response body is not converted to html.
     */
    private void internalDownloadCommon(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url) throws NoSuchAlgorithmException {
        if (StringUtils.isBlank(url)) {
            return;
        }
        CrawlerRequestRecord downloadRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        downloadRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        crawlerRequestRecords.add(downloadRecord);
    }

    /**
     * Queues a signed internal download: the x-sign header is "X" + md5(urlPath + "WSUDD"),
     * the mini-program API signature over the path starting at "/fe_api/".
     * Assumes the url contains "/fe_api/" (all callers pass such urls).
     */
    private void internalDownload(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url) throws NoSuchAlgorithmException {
        String signedPath = url.substring(url.indexOf("/fe_api/"));
        String xSign = "X" + md5(signedPath + "WSUDD");

        CrawlerRequestRecord downloadRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                .httpHead("x-sign", xSign)
                .httpHead("content-type", "application/json")
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(downloadRecord);
    }

    /**
     * Extracts CrawlerData from a downloaded page. A record tagged "isFilter"
     * (set when the API returned an unrecoverable error) only re-emits the parent
     * article with its lastComments counter synced; otherwise comment-API pages
     * are washed into individual comment records.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();

        boolean filtered = crawlerRecord.tagsCreator().bizTags().hasKVTag("isFilter");
        if (filtered) {
            // Parent note id — cumulative: with the filter tag present, just set
            // lastComments to the current comment count.
            String resultJson = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(Tag_Field_Domain_Result_Json);
            CrawlerData crawlerData = JSON.parseObject(resultJson, CrawlerData.class);
            Map content = JSON.parseObject(crawlerData.getCrawlerContent(), Map.class);
            content.put("lastComments", content.get("currentComments").toString());
            crawlerData.setCrawlerContent(JSON.toJSONString(content));
            results.add(crawlerData);
        } else if (isUrlMatch(page.getRequest().getUrl(), wxappArticleCommentUrlRegular)) {
            washComment(results, crawlerRecord, page);
        }
        return results;
    }

    /**
     * Washes one comment-API page: emits a CrawlerData per comment and per
     * sub-comment, then (when the parent article's result json rode along on the
     * record's tags) re-emits the article with both comment counters updated.
     */
    private void washComment(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord, HttpPage page){
        Json commentsJson = new Json(page.getRawText());
        List<String> comments = commentsJson.jsonPath($_type + ".data.comments").all();
        String commentsTotal = commentsJson.jsonPath($_type + ".data.commentsTotal").get();
        if (comments != null && comments.size() > 0){
            for (String comment : comments) {
                Json commentJson = new Json(comment);
                String targetNoteId = commentJson.jsonPath($_type + ".targetNoteId").get();
                List<String> subComments = commentJson.jsonPath($_type + ".subComments").all();
                // Top-level comment first, then its replies — all parented to the same note.
                processCommentRecord(crawlerDatas,crawlerRecord,page,commentJson,targetNoteId);
                if (subComments != null && subComments.size() > 0){
                    for (String subComment : subComments) {
                        processCommentRecord(crawlerDatas,crawlerRecord,page,new Json(subComment),targetNoteId);
                    }
                }
            }
        }

        // Parent note id — cumulative: update the article's comment counters.
        if(crawlerRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
            String resultJson = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(Tag_Field_Domain_Result_Json);
            CrawlerData crawlerData = JSON.parseObject(resultJson, CrawlerData.class);
            Map map = JSON.parseObject(crawlerData.getCrawlerContent(), Map.class);
            // Both counters take the API's total for this note.
            map.put("currentComments",commentsTotal);
            map.put("lastComments",commentsTotal);
            crawlerData.setCrawlerContent(JSON.toJSONString(map));
            crawlerDatas.add(crawlerData);
        }

    }

    /**
     * Converts a single comment (or sub-comment) json into a comment CrawlerData,
     * parented to the article identified by noteId. A release-time parse failure
     * is logged and the comment is skipped.
     *
     * Cleanup: removed the long commented-out "interaction data" block and the
     * locals it alone consumed (author, authorId, likes, content) — they were dead code.
     */
    private void processCommentRecord(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord,
                                      HttpPage page,Json commentJson,String noteId){

        try {
            String releaseTime = commentJson.jsonPath($_type + ".time").get();
            String commentId = commentJson.jsonPath($_type + ".id").get();
            String articleUrl = "https://www.xiaohongshu.com/discovery/item/" + noteId;

            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, CrawlerEnum.CrawlerDataType.comment.enumVal(), commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), noteId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(commentJson.get())
                    .url(articleUrl)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .requestLabelTag(result)
                    .requestLabelTag(filter)
                    .isEndTimeFromSchedulerTime(false)
                    .flowInPipelineTag("article_comment")
                    .build();
            crawlerArticleData.setFilterPipelineResult(true);
            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
            // The parent article's result-json tag must not be copied onto comment data.
            if (crawlerArticleData.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
                crawlerArticleData.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
            }
            crawlerDatas.add(crawlerArticleData);
        } catch (ParseException e) {
            logger.error("xhs article comment wash data {} has error {}",commentJson.get(),e.getMessage());
        }
    }

    /** Business domain tag shared by all records this script produces. */
    @Override
    public String domain() {
        return "xhs";
    }

    /** Registers the url patterns this script handles. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(commentStartUrl); // start url — only routes records into this script, never fetched
        addUrlRegular(wxappArticleCommentUrlRegular); // WeChat mini-program article comment entry
    }

    /**
     * Accepts only records whose biz "site" tag matches this script's site
     * ("xhs-comment").
     *
     * @return true when the record belongs to this script
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Return the comparison directly instead of if/return-true/return-false.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    /** Intentionally empty: this script needs no post-execution cleanup. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Pre-download hook: lazily (re)fills the shared auth-account queue from the
     * userOauthInfos support source, throttles via downloadSleep, then binds a
     * thread-local account and signs the outgoing request's headers with it.
     *
     * NOTE(review): the double-checked locking below is only correct under the JMM
     * if the authorInfos field is declared volatile — verify the field declaration.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {

        if(supportSourceRecords != null && supportSourceRecords.size() > 0 ){
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord != null &&
                        supportSourceRecord.getHttpRequest().getUrl().contains("/crawler/oauth/api/v1/xhs/userOauthInfos")){

                    // Refill when the queue was never created OR has been drained.
                    if (authorInfos == null || authorInfos.size() < 1){
                        synchronized (lock){
                            if (authorInfos == null || authorInfos.size() < 1){
                                authorInfos = new LinkedBlockingQueue<>();
                                initAuthorInfos(supportSourceRecord); // load initial auth info
                            }
                        }
                    }
                }
            }
        }

        downloadSleep();
        String authorInfo = fillThreadLocalAuthorInfo(); // bind auth info to this thread
        fillCurrentRecordRequestHeader(requestRecord,authorInfo);
        logger.info("xhs current thread local author info {}",authorInfo);
        requestRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Parses the oauth support-source response and enqueues every oauth info
     * whose "type" field equals "comment" into the shared authorInfos queue.
     * Expected response shape (inferred from the parsing below):
     * { status: "0", content: [ &lt;json list of {oauthInfo: {...}}&gt; ] }.
     * Any parsing failure is logged and leaves the queue partially filled.
     */
    private void initAuthorInfos(CrawlerRequestRecord supportSourceRecord){
        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            Json rawText = new Json(httpPage.getRawText());
            String status = rawText.jsonPath($_type + ".status").get();
            List<String> contents = rawText.jsonPath($_type + ".content").all();

            if ("0".equals(status) && contents != null && contents.size() > 0){

                List<Map> userOauthInfos = new Json(contents.get(0)).toList(Map.class);
                for (Map userOauthInfo : userOauthInfos) {
                    String oauthInfo = String.valueOf(userOauthInfo.get("oauthInfo"));
                    Map oauthInfoMap = new Json(oauthInfo).toObject(Map.class);
                    // Only accounts dedicated to comment crawling are usable here.
                    if ("comment".equals(oauthInfoMap.get(authorType).toString())){
                        authorInfos.put(oauthInfo);
                    }
                }
            }

        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Sleeps a random 4000–6000 ms between downloads to throttle request rate
     * (anti-captcha pacing). nextFloat() is in [0,1), so (f+2)*2000 is in [4000,6000).
     */
    private void downloadSleep(){

        Random rand = new Random();
        long sleepTime = (long)((rand.nextFloat()+2)*2000L);

        logger.info("xhs download sleep time {}",sleepTime);
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the owning executor can observe the interruption;
            // the original swallowed it after logging.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage());
        }
    }

    /**
     * Returns this thread's bound auth info, taking one from the shared queue
     * (waiting up to 10 s) on first use and caching it thread-locally.
     *
     * @return the auth-info json string; may be null/blank if the thread was interrupted
     * @throws RuntimeException when the queue yields nothing within the timeout
     */
    private String fillThreadLocalAuthorInfo(){
        String authorInfo = getLocalAuthInfo();
        if (StringUtils.isBlank(authorInfo)){
            try {
                authorInfo = authorInfos.poll(10, TimeUnit.SECONDS);
                if (StringUtils.isBlank(authorInfo)){
                    logger.error("Unable to obtain authentication information. The current task will not continue!");
                    throw new RuntimeException("Unable to obtain authentication information. The current task will not continue!");
                }
                updateLocalAuthInfo(authorInfo);
            } catch (InterruptedException e) {
                // Restore the interrupt flag (the original swallowed it); callers may still
                // receive a blank result in this case.
                Thread.currentThread().interrupt();
                logger.error(e.getMessage());
            }
        }
        return authorInfo;
    }

    /**
     * Configures comment dedup filtering on a request: a 7-day (7*24 h) date-range
     * filter OR a redis key filter named "filter-xhs-xhs-comment-queue".
     * NOTE(review): not referenced anywhere in this file — confirm whether it is
     * still needed before relying on it.
     */
    private void fillArticleCurrentRecord(CrawlerRequestRecord requestRecord){
        // Set the comment dedup reference time.
        requestRecord.setReleaseTime(System.currentTimeMillis());
        requestRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange);
        FilterInfo dateRangeFilterInfo = FilterUtils.dateRangeFilterInfo(7*24, null);
        FilterInfo keyFilterInfo =FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-","filter",domain(),site,"queue"));
        requestRecord.setFilterInfos(Arrays.asList(dateRangeFilterInfo,keyFilterInfo));
    }

    /**
     * Replaces the request's headers with a full mini-program header set built from
     * the bound account's auth info (referer, device-fingerprint, authorization) plus
     * the x-sign API signature ("X" + md5(path + "WSUDD") over the "/fe_api/..." path).
     *
     * NOTE(review): if MD5 is unavailable the request is sent WITHOUT x-sign (only
     * logged) — confirm the API rejects unsigned requests and whether that should fail fast.
     */
    private void fillCurrentRecordRequestHeader(CrawlerRequestRecord requestRecord,String authorInfo){
        Map authorInfoMap = new Json(authorInfo).toObject(Map.class);
        Map<String, String> headers = new HashMap<>();
        headers.put("charset", "utf-8");
        headers.put("Accept-Encoding", "gzip");
        headers.put("referer", authorInfoMap.get(headerReferer).toString());
        headers.put("device-fingerprint",authorInfoMap.get(headerDeviceFingerprint).toString());
        headers.put("authorization", authorInfoMap.get(headerAuthorization).toString());
        headers.put("content-type", "application/json");
        headers.put("User-Agent", "Mozilla/5.0 (Linux; Android 6.0; DIG-AL00 Build/HUAWEIDIG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/55.0.2883.91 Mobile Safari/537.36 MicroMessenger/7.0.3.1400(0x27000334) Process/appbrand0 NetType/WIFI Language/zh_CN");
        headers.put("Host", "www.xiaohongshu.com");
        headers.put("Connection", "Keep-Alive");

        // Sign the url path starting at "/fe_api/" (assumes the url contains it).
        String noteUrl = requestRecord.getHttpRequest().getUrl();
        String urlPath = noteUrl.substring(noteUrl.indexOf("/fe_api/"));
        try {
            String x_sign = "X" + md5(urlPath + "WSUDD");
            headers.put("x-sign",x_sign);
        } catch (NoSuchAlgorithmException e) {
            logger.error(e.getMessage());
        }

        // Note: setHeaders replaces any headers previously present on the request.
        requestRecord.getHttpRequest().setHeaders(headers);
    }

    // Empty placeholder entry point (no ad-hoc testing code currently present).
    public static void main(String[] args) {
    }

    /**
     * Re-enqueues the current request for retry as an unfiltered turn-page request,
     * copying its biz tags, http request, parse/wash flags and — when present —
     * its turnPageItem request type.
     *
     * NOTE(review): retries are unbounded here; a retry-count guard used to exist
     * (removed dead commented-out code) — confirm a limit is enforced elsewhere.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();

        // Preserve item-level turn-page semantics on the retried record.
        if(crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)){
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            crawlerRequestRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecords.add(crawlerRequestRecord);
    }


    /**
     * Converts a xiaohongshu display-time string to epoch milliseconds.
     * Relative forms ("刚刚" = just now, "N秒前" = seconds ago, "N分钟前" = minutes ago,
     * "N小时前" / "N小时N分钟前" = hours[/minutes] ago) are computed from now; day words
     * ("今天"/"昨天"/"前天" = today/yesterday/the day before) and partial dates
     * ("MM月dd日", "MM-dd") are rewritten to absolute strings and parsed against
     * every pattern in TimeForamtEnum.
     *
     * @param dataStr raw time text from the comment json
     * @return epoch milliseconds
     * @throws ParseException when no TimeForamtEnum pattern matches the (rewritten) string
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        String regEx="[^0-9]+"; // regex matching runs of non-digit chars; '+' groups consecutive non-digits
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){
            // "N seconds ago" — split(...)[0] is the leading number.
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){
            // "N minutes ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){
            // "N hours ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){
            // "H hours M minutes ago" — despite its name, secondNumber is the MINUTES part
            // (it is multiplied by 60*1000 below).
            int hourNumber = Integer.valueOf(pattern.split(dataStr)[0]);
            int secondNumber = Integer.valueOf(pattern.split(dataStr)[1]);
            long totalMillis = hourNumber*60*60*1000L + secondNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){
            // "today HH:mm" → "<today's date> HH:mm", parsed below.
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){
            // "yesterday ..." — computed as now minus a fixed 24 h.
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){
            // "the day before yesterday ..." — now minus 48 h.
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){
            // "MM月dd[日]" — prepend the current year.
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){
            // "MM-dd" — prepend the current year.
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Date/time patterns accepted by dateToTimestamp: Chinese (年/月/日, 点/分/秒),
     * dash, slash and dot styles, plus an RFC-822-like form.
     * (The historical "Foramt" spelling is kept so existing references stay valid.)
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        /** The java.text/commons-lang date pattern this constant represents. */
        private final String format;

        TimeForamtEnum(String format) {
            this.format = format;
        }

        /** Every pattern, in declaration order — fed to DateUtils.parseDateStrictly. */
        public static String[] allFormats() {
            return Arrays.stream(values())
                    .map(entry -> entry.format)
                    .toArray(String[]::new);
        }

        public String getFormat() {
            return format;
        }
    }


}
