package com.chance.cc.crawler.development.scripts.xiaohongshu;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.meta.core.bean.common.MetaResponse;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
/**
 * Crawler script for Xiaohongshu (XHS) user-note collections, accessed through the
 * WeChat mini-program burdock API. It walks a user's note list, fetches each note's
 * detail and (optionally) its comments, and schedules decoy requests (banners, related
 * notes, avatars) to mimic a real client session.
 */
public class XHSUserNotesCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XHSUserNotesCrawlerScript.class);

    // Logical site identifiers attached to every record/data item produced here.
    public static final String site= "user_notes";
    public static final String site_biz= "notes";

    // Entry URL; only used to route requests into this script (see initUrlRegulars).
    public static final String userNotesStartUrl = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/user/";

    // Regex patterns (…Regular) match incoming request URLs; format strings (…Format)
    // build the outgoing requests for the same endpoints.
    public static final String wxappArticleUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/note/\\S*/single_feed";
    public static final String wxappArticleUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/single_feed";
    public static final String wxappUserNotesUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/user/\\S*/notes\\?\\S*";
    public static final String wxappUserNotesUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/user/%s/notes?page=%d&page_size=15";

    public static final String wxappArticleCommentUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/notes/\\S*/comments\\S*";
    public static final String wxappArticleCommentUrlRegular1 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/\\S*/comments?pageSize=10";
    public static final String wxappArticleCommentUrlFormat1 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/%s/comments?pageSize=10";
    public static final String wxappArticleCommentUrlFormat2 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/%s/comments?pageSize=10&endId=%s";

    // Decoy endpoints fetched alongside real requests to mimic real client traffic.
    public static final String wxappArticleBannerUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/banner?noteId=%s&platform=weixin";
    public static final String wxappArticleRelatedUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/related?openId=%s&pageSize=8&page=1&noteType=1&needGifCover=true";
    public static final String wxappArticleActivityBannerUrlFormat = "https://pages.xiaohongshu.com/data/sns/mp_activity_banner?sid=%s";

    private LinkedBlockingQueue<String> authorInfos = new LinkedBlockingQueue<>();
    private Set<String> customKeywords;
    private final Object lock = new Object();
    private Object keyLock = new Object();

    // Keys used when reading/writing request headers and the auth-info JSON blob.
    public static final String headerReferer = "referer";
    public static final String headerAuthorization = "authorization";
    public static final String headerDeviceFingerprint = "device-fingerprint";
    public static final String authorOpenId = "openId";
    public static final String authorSid = "sid";
    public static final String authorType = "type"; // type of the auth info

    private static Proxy proxy = new Proxy();

    static {
        // Proxy configuration.
        // SECURITY NOTE(review): proxy credentials are hard-coded in source; they should
        // be moved to configuration/secret storage and rotated.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Routes a downloaded page to the matching link parser based on its request URL.
     * Failed downloads are re-queued for retry and skipped from the wash phase.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        List<CrawlerRequestRecord> newRecords = new ArrayList<>();

        if (!page.isDownloadSuccess()){
            logger.error("xhs user notes request download has error, status code {},error info [{}] ,will retry",
                    page.getStatusCode(),page.getRawText());
            addCrawlerRecords(newRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed, no page to wash
            return newRecords;
        }

        String requestUrl = page.getRequest().getUrl();
        if (isUrlMatch(requestUrl,wxappUserNotesUrlRegular)){
            parseUserNotesLinks(newRecords,crawlerRecord,page);
        }
        if (isUrlMatch(requestUrl,wxappArticleUrlRegular)){
            parseArticleLinks(newRecords,crawlerRecord,page);
        }
        if (isUrlMatch(requestUrl,wxappArticleCommentUrlRegular)){
            parseArticleCommentLinks(newRecords,crawlerRecord,page);
        }
        return newRecords;
    }

    /**
     * Parses a user note-list page. Schedules the next list page while items remain,
     * and one signed detail request per note, plus cover/avatar pre-fetches that
     * mimic a real mini-program client (to avoid triggering the slide captcha).
     */
    private void parseUserNotesLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        try {
            Json userNotesJson = new Json(page.getRawText());

            if (isReturn(userNotesJson,page)){
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            List<String> items = userNotesJson.jsonPath($_type + ".data").all();
            if (items == null || items.isEmpty()){
                // Fix: a null item list previously fell through to the for-loop below and
                // threw an NPE, which was caught and caused a spurious retry. An absent or
                // empty list means there is nothing further to schedule for this user.
                return;
            }

            // Schedule the next page of the user's note list.
            String uid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("uid");
            int nextPage = Integer.valueOf(crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("current_page")) + 1;
            String userNotesUrl = String.format(wxappUserNotesUrlFormat, uid,nextPage);

            CrawlerRequestRecord crawlerUserNotesRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(userNotesUrl)
                    .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            crawlerUserNotesRequestRecord.tagsCreator().bizTags().addCustomKV("current_page",nextPage);
            crawlerRequestRecords.add(crawlerUserNotesRequestRecord);

            // Parse the note list into detail requests.
            for (String item : items) {
                Json itemJson = new Json(item);
                String noteId = itemJson.jsonPath($_type + ".id").get();
                String image = itemJson.jsonPath($_type + ".cover.url").get();
                String image2 = itemJson.jsonPath($_type + ".user.image").get();
                String time = itemJson.jsonPath($_type + ".time").get();
                // Mimic a real browsing flow to avoid triggering the slide captcha.
                internalDownloadCommon(crawlerRequestRecords,crawlerRecord,image);
                internalDownloadCommon(crawlerRequestRecords,crawlerRecord,image2);
                String noteUrl = String.format(wxappArticleUrlFormat, noteId);
                // x-sign: "X" + md5(urlPath + "WSUDD") — the API's request signature scheme.
                String urlPath = noteUrl.substring(noteUrl.indexOf("/fe_api/"));
                String x_sign = "X" + md5(urlPath + "WSUDD");

                CrawlerRequestRecord crawlerArticleRequestRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(noteUrl)
                        .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                        .httpHead("x-sign",x_sign)
                        .releaseTime(dateToTimestamp(time))
                        .needWashed(true)
                        .copyBizTags()
                        .build();
                crawlerRequestRecords.add(crawlerArticleRequestRecord);
            }
        } catch (Exception e) {
            // Fix: pass the exception as the last argument so SLF4J records the stack trace.
            logger.error("xhs user notes list download error,header oauthor info {}, thread local author info {},request url: {}," +
                            "page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization),getLocalAuthInfo(),page.getRequest().getUrl(),page.getRawText(),e.getMessage(),e);
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Parses a note detail page. Schedules companion decoy requests (banner, related
     * notes, activity banner, author avatar) to mimic a real client, then decides
     * whether a comment-collection request should be created for this note.
     */
    private void parseArticleLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        try {
            Json articleResultJson = new Json(page.getRawText());

            if (isReturn(articleResultJson,page)){
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            String dataId = articleResultJson.jsonPath($_type + ".data.id").get();
            String image = articleResultJson.jsonPath($_type + ".data.user.image").get();

            // Mimic a real browsing flow to avoid triggering the slide captcha.
            internalDownloadBanner(crawlerRequestRecords,crawlerRecord,dataId);
            internalDownloadRelated(crawlerRequestRecords,crawlerRecord,dataId);
            internalDownloadActivityBanner(crawlerRequestRecords,crawlerRecord,dataId);
            internalDownloadCommon(crawlerRequestRecords,crawlerRecord,image);

            // Current total comment count of the note.
            String comments = articleResultJson.jsonPath($_type + ".data.comments").get();

            /* 1. Should comments be collected at all?  2. Compare with the last collected count? */
            boolean isCrawlerCmt = false;
            CategoryTag categoryTag = crawlerRecord.tagsCreator().scheduleTags().getCategoryTag();
            if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null) {
                isCrawlerCmt = true;
                if (!crawlerRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                    logger.error("xhs user note crawler comment need to filter information!");
                    isCrawlerCmt = false;
                }
            }
            if (crawlerRecord.tagsCreator().bizTags().hasKVTag("compare_last_comments")){
                String lastComments = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("lastComments");
                int lastCmtNum = Integer.parseInt(lastComments);
                int curCmtNum = Integer.parseInt(comments);
                if (lastCmtNum >= curCmtNum){
                    isCrawlerCmt = false;
                }else {
                    isCrawlerCmt = true;
                    crawlerRecord.tagsCreator().bizTags().addCustomKV("Is_Updated",true);
                }
            }
            if (isCrawlerCmt){
                /* Build the first comment-page request, inheriting the stored filter state. */
                // NOTE(review): the compare_last_comments branch can re-enable isCrawlerCmt even
                // when comment_record_filter_info is absent, making getKVTag return null here
                // and throwing an NPE into the retry path — confirm whether that is intended.
                KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
                String commentUrl = String.format(wxappArticleCommentUrlFormat1, dataId);
                CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(commentUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .needWashed(true)
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                commentRecord.setFilter(filterInfoRecord.getFilter());
                commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                crawlerRequestRecords.add(commentRecord);
            }

        } catch (Exception e) {
            // Fix: pass the exception as the last argument so SLF4J records the stack trace.
            logger.error("xhs article detail download error,header oauthor info {}, thread local author info {},request url: {}," +
                            "page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization),getLocalAuthInfo(),page.getRequest().getUrl(),page.getRawText(),e.getMessage(),e);
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Parses a comment-list page: schedules decoy requests, collects commenter avatars,
     * and — when the page yielded comments — schedules the next comment page keyed by
     * the last comment id (endId cursor pagination).
     */
    private void parseArticleCommentLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        try {
            Json noteCommentPage = new Json(page.getRawText());

            if (isReturn(noteCommentPage,page)){
                crawlerRecord.tagsCreator().bizTags().addCustomKV("isFilter",true);
            } else {
                String code = noteCommentPage.jsonPath($_type + ".code").get();
                // Fix: constant-first equals is null-safe (code may be absent) and is
                // consistent with the comparison style used in isReturn.
                if (!"0".equals(code)){
                    logger.info("xhs article comment request url {},error content {},thread local user author info {}"
                            ,page.getRequest().getUrl(),page.getRawText(),getLocalAuthInfo());
                    addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
                    crawlerRecord.setNeedWashPage(false);
                    return;
                }

                String dataId = noteCommentPage.jsonPath($_type + ".data.targetNoteId").get();

                // Mimic a real browsing flow to avoid triggering the slide captcha.
                internalDownloadBanner(crawlerRequestRecords,crawlerRecord,dataId);
                internalDownloadRelated(crawlerRequestRecords,crawlerRecord,dataId);
                internalDownloadActivityBanner(crawlerRequestRecords,crawlerRecord,dataId);

                // endId ends up holding the id of the LAST comment on this page; it is
                // the cursor for the next comment page.
                String endId = null;

                List<String> noteComments = noteCommentPage.jsonPath(".data.comments.*").all();
                List<String> images = new ArrayList<>();
                if (noteComments != null && noteComments.size() > 0 ) {
                    for (String comment : noteComments) {
                        Json commentJson = new Json(comment);
                        endId = commentJson.jsonPath($_type + ".id").get();
                        String image = commentJson.jsonPath($_type + ".user.image").get();
                        images.add(image);
                    }

                    for (String image : images) {
                        internalDownloadCommon(crawlerRequestRecords,crawlerRecord,image);
                    }
                }


                if (StringUtils.isNotBlank(endId)){

                    String noteUrl = String.format(wxappArticleCommentUrlFormat2, dataId,endId);

                    CrawlerRequestRecord crawlerArticleRequestRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRecord)
                            .httpUrl(noteUrl)
                            .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                            .releaseTime(System.currentTimeMillis())
                            .copyBizTags()
                            .needWashed(true)
                            .build();
                    if (crawlerArticleRequestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
                        crawlerArticleRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
                    }
                    if (isUrlMatch(page.getRequest().getUrl(),wxappArticleCommentUrlRegular1)){
                        // Do not dedupe the second page: highly-liked comments can be pinned to
                        // the first page rather than appearing in chronological order.
                        crawlerArticleRequestRecord.tagsCreator().requestTags().removeRequestType(filter);
                    }
                    crawlerRequestRecords.add(crawlerArticleRequestRecord);
                }
            }

        } catch (Exception e) {
            // Fix: pass the exception as the last argument so SLF4J records the stack trace.
            logger.error("xhs article comment download error,header oauthor info {},thread local author info {},request url: {},page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization),getLocalAuthInfo(),page.getRequest().getUrl(),page.getRawText(),e.getMessage(),e);
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Decides whether processing of this page should stop (returns true) because the
     * API reported a non-retryable business error. "Spam", expired-login and banned
     * responses deliberately fall through (return false) so the caller keeps going
     * and eventually retries with fresh credentials.
     */
    private boolean isReturn(Json articleResultJson,HttpPage page){

        String code = articleResultJson.jsonPath($_type + ".code").get();
        if (!"0".equals(code)){
            String msg = articleResultJson.jsonPath($_type + ".msg").get();
            // Fix: msg may be absent from the payload — the original msg.contains("封号")
            // threw an NPE here. A missing msg is treated as a non-retryable error.
            if (msg == null || (!"Spam".equals(msg) && !"登录已过期".equals(msg) && !msg.contains("封号"))){
                logger.warn("xhs user note url {} download error: {}",page.getRequest().getUrl(),msg);
                return true;
            }
        }
        return false;
    }

    /** Schedules a decoy fetch of the note's banner endpoint (signed internal download). */
    private void internalDownloadBanner(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        internalDownload(crawlerRequestRecords, crawlerRecord, String.format(wxappArticleBannerUrlFormat, dataId));
    }

    /** Schedules a decoy fetch of the note's "related" endpoint using the local auth openId. */
    private void internalDownloadRelated(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        Map authInfo = new Json(getLocalAuthInfo()).toObject(Map.class);
        String relatedUrl = String.format(wxappArticleRelatedUrlFormat, dataId, authInfo.get(authorOpenId).toString());
        internalDownload(crawlerRequestRecords, crawlerRecord, relatedUrl);
    }

    /**
     * Schedules a decoy fetch of the activity-banner page, keyed by the auth session sid.
     * Note: this endpoint is unsigned, so it is built directly rather than via internalDownload.
     */
    private void internalDownloadActivityBanner(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        Map authInfo = new Json(getLocalAuthInfo()).toObject(Map.class);
        String bannerUrl = String.format(wxappArticleActivityBannerUrlFormat, authInfo.get(authorSid).toString());
        CrawlerRequestRecord bannerRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(bannerUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(bannerRecord);
    }

    /**
     * Schedules a plain decoy fetch (image/avatar URL) with no signing headers;
     * blank URLs are silently skipped. Raw-text-to-HTML conversion is disabled
     * because the response body is binary/image data.
     */
    private void internalDownloadCommon(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url) throws NoSuchAlgorithmException {
        if (StringUtils.isBlank(url)) {
            return;
        }
        CrawlerRequestRecord decoyRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        decoyRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        crawlerRequestRecords.add(decoyRecord);
    }

    /**
     * Schedules a signed decoy fetch of a burdock API endpoint, copying the parent
     * request's headers and adding the x-sign signature ("X" + md5(path + "WSUDD")).
     */
    private void internalDownload(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url) throws NoSuchAlgorithmException {
        String apiPath = url.substring(url.indexOf("/fe_api/"));
        String signature = "X" + md5(apiPath + "WSUDD");

        CrawlerRequestRecord decoyRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                .httpHead("x-sign",signature)
                .httpHead("content-type","application/json")
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(decoyRecord);
    }

    /** Routes a downloaded page to the matching wash routine based on its request URL. */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        String requestUrl = page.getRequest().getUrl();
        if (isUrlMatch(requestUrl,wxappArticleUrlRegular)){
            washUserNotesArticle(results,crawlerRecord,page);
        }
        if (isUrlMatch(requestUrl,wxappArticleCommentUrlRegular)){
            washUserNotesComment(results,crawlerRecord,page);
        }
        return results;
    }

    /**
     * Washes a note detail page into CrawlerData records:
     * - the full article payload (raw JSON stored as content),
     * - an "article_accumulate" record carrying the running comment count,
     * - and, when the note was flagged as updated, an "article_updated" record
     *   used to refresh the comment count stored in the database.
     */
    private void washUserNotesArticle(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord, HttpPage page){
        Json articleJson = new Json(page.getRawText());
        // Only the fields actually consumed downstream are extracted; the complete
        // payload (user profile, interactions, etc.) is preserved via page.getRawText().
        String releaseTime = articleJson.jsonPath($_type + ".data.time").get();
        String contentId = articleJson.jsonPath($_type + ".data.id").get();
        String article_url = "https://www.xiaohongshu.com/discovery/item/"+contentId;
        String keyword = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(Tag_Field_Keword);
        // Cumulative comment count, fed to the accumulate/updated pipelines.
        String comments = articleJson.jsonPath($_type + ".data.comments").get();

        try {
            // Full article record.
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), contentId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(page.getRawText())
                    .url(article_url)
                    .resultLabelTag(article)
                    .flowInPipelineTag("article_result")
                    .flowInPipelineTag("file")
                    .flowInPipelineTag("console")
                    .isEndTimeFromSchedulerTime(false)
                    .build();
            crawlerArticleData.setFilterPipelineResult(true);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Keword,keyword);
            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
            crawlerDatas.add(crawlerArticleData);

            // Running comment count for the note (accumulated over crawls).
            CrawlerData crawlerArticleAccumulateData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), contentId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .addContentKV("comments",comments)
                    .addContentKV("articleId",contentId)
                    .addContentKV("releaseTime", String.valueOf(dateToTimestamp(releaseTime)))
                    .url(article_url)
                    .resultLabelTag(article)
                    .flowInPipelineTag("article_accumulate")
                    .build();
            crawlerArticleAccumulateData.setFilterPipelineResult(true);
            crawlerDatas.add(crawlerArticleAccumulateData);

            /* Refresh the comment count stored in the database when the note changed. */
            if (crawlerRecord.tagsCreator().bizTags().hasKVTag("Is_Updated")){
                CrawlerData crawlerArticleUpdatedData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), contentId))
                        .releaseTime(dateToTimestamp(releaseTime))
                        .addContentKV("comments",comments)
                        .addContentKV("articleId",contentId)
                        .addContentKV("releaseTime", String.valueOf(dateToTimestamp(releaseTime)))
                        .url(article_url)
                        .resultLabelTag(article)
                        .flowInPipelineTag("article_updated")
                        .build();
                crawlerArticleUpdatedData.setFilterPipelineResult(true);
                crawlerDatas.add(crawlerArticleUpdatedData);
            }

        } catch (Exception e) {
            // Fix: pass the exception as the last argument so SLF4J records the stack trace.
            logger.error("xhs article wash data {} has error {}",page.getRawText(),e.getMessage(),e);
        }
    }

    /**
     * Washes a comment-list page: each top-level comment and each of its
     * sub-comments is turned into one CrawlerData record, parented to the note.
     */
    private void washUserNotesComment(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord, HttpPage page){
        Json commentsJson = new Json(page.getRawText());
        List<String> topLevelComments = commentsJson.jsonPath($_type + ".data.comments").all();
        if (topLevelComments == null || topLevelComments.isEmpty()){
            return;
        }
        for (String rawComment : topLevelComments) {
            Json commentJson = new Json(rawComment);
            String targetNoteId = commentJson.jsonPath($_type + ".targetNoteId").get();
            List<String> replies = commentJson.jsonPath($_type + ".subComments").all();
            processCommentRecord(crawlerDatas,crawlerRecord,page,commentJson,targetNoteId);
            if (replies != null){
                for (String reply : replies) {
                    processCommentRecord(crawlerDatas,crawlerRecord,page,new Json(reply),targetNoteId);
                }
            }
        }
    }

    /**
     * Washes a single comment (or sub-comment) JSON node into a CrawlerData record,
     * keyed by the comment id and parented to the note's article record.
     */
    private void processCommentRecord(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord,
                                      HttpPage page,Json commentJson,String noteId){

        try {
            // Only the fields consumed below are extracted; the complete comment
            // payload is preserved via commentJson.get() as the record content.
            String releaseTime = commentJson.jsonPath($_type + ".time").get();
            String commentId = commentJson.jsonPath($_type + ".id").get();
            String article_url = "https://www.xiaohongshu.com/discovery/item/"+noteId;
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, CrawlerEnum.CrawlerDataType.comment.enumVal(), commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), noteId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(commentJson.get())
                    .url(article_url)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .requestLabelTag(result)
                    .requestLabelTag(filter)
                    .isEndTimeFromSchedulerTime(false)
                    .flowInPipelineTag("article_result")
                    .flowInPipelineTag("file")
                    .flowInPipelineTag("console")
                    .build();
            crawlerArticleData.setFilterPipelineResult(true);
            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
            // Drop the domain-result-json tag inherited from the request; it is not
            // meaningful on comment data records.
            if (crawlerArticleData.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
                crawlerArticleData.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
            }
            crawlerDatas.add(crawlerArticleData);
        } catch (ParseException e) {
            // Fix: pass the exception as the last argument so SLF4J records the stack trace.
            logger.error("xhs article comment wash data {} has error {}",commentJson.get(),e.getMessage(),e);
        }
    }

    @Override
    public String domain() {
        // Domain identifier used in record keys and tags for this script.
        final String domainKey = "xhs";
        return domainKey;
    }

    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script claims, in the original order:
        // the start url (entry point only, no real request semantics), the user
        // note-list API, the article detail API, and the article comment API.
        String[] regulars = {
                userNotesStartUrl,
                wxappUserNotesUrlRegular,
                wxappArticleUrlRegular,
                wxappArticleCommentUrlRegular
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Accept only records whose business "site" tag matches this script's site.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally a no-op: this script performs no post-execution work.
    }

    /**
     * Prepares the outgoing request before download: rewrites seed article
     * records (carried as JSON in the Tag_Field_Domain_Result_Json KV tag) into
     * concrete article-detail requests, consumes support records (oauth
     * credentials and per-keyword meta keys), throttles via a random sleep, and
     * fills the auth headers for the current request.
     *
     * @param requestRecord        the record about to be downloaded (mutated in place)
     * @param supportSourceRecords auxiliary records (oauth infos, meta keys); may be null
     * @return new seed records generated from the meta "keys" support record
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        HttpRequest httpRequest = requestRecord.getHttpRequest();
        /* Seed article detail: the tag payload carries articleId/releaseTime/comments as JSON. */
        if(requestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
            KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(Tag_Field_Domain_Result_Json);
            CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()),CrawlerDomainUrls.class);
            String url = crawlerDomainUrls.getUrl();
            // NOTE: the "url" field actually holds a JSON object, not a URL.
            JSONObject contentObj = JSONObject.parseObject(url);
            String articleId = contentObj.getString("articleId");
            String articleUrl = String.format(wxappArticleUrlFormat,articleId);
            Long releaseTime = contentObj.getLong("releaseTime");
            // Check the article's release time; skip parsing/washing when outside the configured date range.
            if(!isDateRange(requestRecord,releaseTime)){
                return allItemRecords;
            }
            String comments = contentObj.getString("comments");
            if (StringUtils.isNotBlank(comments)){
                requestRecord.tagsCreator().bizTags().addCustomKV("lastComments",comments);
            }
            requestRecord.setReleaseTime(releaseTime);
            requestRecord.setNeedParsedPage(true);
            requestRecord.setNeedWashPage(true);
            requestRecord.tagsCreator().resultTags().addResultDataType(article);
            requestRecord.tagsCreator().resultTags().addResultDataType(interaction);
            httpRequest.setUrl(articleUrl);
            requestRecord.setRecordKey(articleUrl);
            requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json); // remove the consumed seed payload
        }
        if(supportSourceRecords != null && supportSourceRecords.size() > 0 ){
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord.getHttpRequest().getUrl().contains("/crawler/oauth/api/v1/xhs/userOauthInfos")) {
                    authorInfos.clear();
                    initAuthorInfos(supportSourceRecord); // (re)load auth credentials
                }

                if (supportSourceRecord.getHttpRequest().getUrl().contains("/v1/meta/"+domain()+"/keys?site=")){
                    initAllCrawlerRecordByKeyword(requestRecord,supportSourceRecord,allItemRecords);
                }
            }
        }
        downloadSleep();
//        String authorInfo = fillThreadLocalAuthorInfo(); // fill thread-local auth info (superseded by pollOneAuthInfo)
        String authInfo = pollOneAuthInfo();
        try {
            fillCurrentRecordRequestHeader(requestRecord,authInfo);
        } catch (NoSuchAlgorithmException e) {
            logger.error(e.getMessage());
        }
        logger.info("xhs current thread local author info {}",authInfo);
        requestRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        return allItemRecords;
    }

    /**
     * Takes one auth info from the shared queue in round-robin fashion: each
     * entry is polled and immediately re-added so concurrent workers rotate
     * through all available credentials. Busy-waits until a credential appears.
     *
     * @return an auth-info JSON string, or "" when interrupted before one was obtained
     */
    private String pollOneAuthInfo() {
        String authInfo = "";
        try {
            synchronized (lock){
                authInfo = authorInfos.poll(3, TimeUnit.SECONDS);
                if (StringUtils.isBlank(authInfo)){
                    logger.error("Unable to obtain authentication information. The current task will not continue!");
                    // Wait until some other worker returns a credential to the queue.
                    while (authorInfos.size() == 0){
                        logger.error("xhs user notes crawler has poll null authInfo ,thread [{}] has been wait for 3000 ms",Thread.currentThread().getName());
                        Thread.sleep(3000);
                    }
                    authInfo = authorInfos.poll(3, TimeUnit.SECONDS);
                }
                if (StringUtils.isNotBlank(authInfo)){
                    /* Return the credential right after taking it so others can reuse it.
                       BUGFIX: previously only the first-poll branch re-added the entry;
                       the re-poll after the busy-wait consumed the credential without
                       returning it, leaking it out of rotation. */
                    authorInfos.add(authInfo);
                    updateLocalAuthInfo(authInfo);
                }
            }
        } catch (InterruptedException e){
            // Restore the interrupt flag so callers can observe the cancellation.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(),e);
        } catch (Exception e){
            logger.error(e.getMessage(),e);
        }
        return authInfo;
    }

    /**
     * Builds one seed request per keyword (uid) returned by the meta "keys"
     * support record; each seed targets page 1 of that user's note list.
     */
    private void initAllCrawlerRecordByKeyword(CrawlerRequestRecord requestRecord,
                                               CrawlerRequestRecord supportSourceRecord,
                                               List<CrawlerRecord> allItemRecords){
        try {
            HttpPage metaPage = supportSourceRecord.getInternalDownloadPage();
            MetaResponse response = JSON.parseObject(metaPage.getRawText(), MetaResponse.class);
            if (response.getStatus() != 0 || response.getContent() == null) {
                return; // non-zero status or empty payload: nothing to seed
            }
            List<String> items = (List<String>) response.getContent();
            for (String item : items) {
                CrawlerDomainKeys domainKeys = JSON.parseObject(item, CrawlerDomainKeys.class);
                String uid = domainKeys.getKeyword();

                CrawlerRequestRecord seed = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(String.format(wxappUserNotesUrlFormat, uid, 1))
                        .httpHeads(requestRecord.getHttpRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .notFilterRecord()
                        .build();
                seed.tagsCreator().bizTags().addKeywords(uid);
                seed.tagsCreator().bizTags().addCustomKV("uid", uid);
                seed.tagsCreator().bizTags().addCustomKV("current_page", 1);
                allItemRecords.add(seed);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Parses the oauth support record's response and loads every credential
     * tagged "userNotes" into the shared auth-info queue.
     */
    private void initAuthorInfos(CrawlerRequestRecord supportSourceRecord){
        try {
            HttpPage oauthPage = supportSourceRecord.getInternalDownloadPage();
            Json responseJson = new Json(oauthPage.getRawText());
            String status = responseJson.jsonPath($_type + ".status").get();
            List<String> contents = responseJson.jsonPath($_type + ".content").all();

            if (!"0".equals(status) || contents == null || contents.isEmpty()) {
                return; // non-success status or empty content: nothing to load
            }

            List<Map> oauthEntries = new Json(contents.get(0)).toList(Map.class);
            for (Map entry : oauthEntries) {
                String oauthInfo = String.valueOf(entry.get("oauthInfo"));
                Map oauthInfoMap = new Json(oauthInfo).toObject(Map.class);
                // Only credentials for the "userNotes" flow belong to this script.
                if ("userNotes".equals(oauthInfoMap.get(authorType).toString())) {
                    authorInfos.put(oauthInfo);
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Sleeps for a random 3000–5000 ms interval between downloads to throttle
     * request rate against the target site.
     */
    private void downloadSleep(){
        Random rand = new Random();
        // (nextFloat() + 1.5) is in [1.5, 2.5), so sleepTime is in [3000, 5000) ms.
        long sleepTime = (long)((rand.nextFloat()+1.5)*2000L);
        logger.info("xhs download sleep time {}",sleepTime);
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // BUGFIX: restore the interrupt flag instead of swallowing it, so
            // the enclosing task can stop promptly when cancelled.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Returns the auth info bound to the current thread, polling one from the
     * shared queue (and caching it thread-locally) when none is bound yet.
     * Currently unused (superseded by pollOneAuthInfo); kept for reference.
     *
     * @return the auth-info JSON string, or a blank value if the poll was interrupted
     * @throws RuntimeException when no credential can be obtained within 3 seconds
     */
    private String fillThreadLocalAuthorInfo(){
        String authorInfo = getLocalAuthInfo();
        if (StringUtils.isBlank(authorInfo)){
            try {
                authorInfo = authorInfos.poll(3, TimeUnit.SECONDS);
                if (StringUtils.isBlank(authorInfo)){
                    logger.error("Unable to obtain authentication information. The current task will not continue!");
                    throw new RuntimeException("Unable to obtain authentication information. The current task will not continue!");
                }
                updateLocalAuthInfo(authorInfo);
            } catch (InterruptedException e) {
                // BUGFIX: restore the interrupt flag rather than swallowing it;
                // the method then falls through and returns the blank value.
                Thread.currentThread().interrupt();
                logger.error(e.getMessage(), e);
            }
        }
        return authorInfo;
    }

    /**
     * Fills the request with the wxapp headers xiaohongshu expects: the
     * credential-specific referer / device-fingerprint / authorization values,
     * fixed UA/host headers, and the "x-sign" checksum computed as
     * "X" + md5(urlPath + "WSUDD") over the "/fe_api/..." part of the URL.
     * Also enables the proxy on the request's http config.
     *
     * @param requestRecord record whose http request is mutated in place
     * @param authorInfo    JSON auth-info string holding the credential fields
     * @throws NoSuchAlgorithmException if the MD5 digest is unavailable
     */
    private void fillCurrentRecordRequestHeader(CrawlerRequestRecord requestRecord,String authorInfo) throws NoSuchAlgorithmException {
        String urlPath = requestRecord.getHttpRequest().getUrl().substring(requestRecord.getHttpRequest().getUrl().indexOf("/fe_api/"));
        String x_sign = "X" + md5(urlPath + "WSUDD");
        Map authorInfoMap = new Json(authorInfo).toObject(Map.class);
        Map<String, String> headers = requestRecord.getHttpRequest().getHeaders();
        if (headers == null){
            headers = new HashMap<>();
            // BUGFIX: attach the fresh map to the request. Previously it was
            // only a local variable, so every header put below was silently
            // dropped whenever the request started without headers.
            // NOTE(review): assumes HttpRequest exposes setHeaders — confirm.
            requestRecord.getHttpRequest().setHeaders(headers);
        }
        headers.put("charset", "utf-8");
        headers.put("Accept-Encoding", "gzip");
        headers.put("referer", authorInfoMap.get(headerReferer).toString());
        headers.put("device-fingerprint",authorInfoMap.get(headerDeviceFingerprint).toString());
        headers.put("authorization", authorInfoMap.get(headerAuthorization).toString());
        headers.put("content-type", "application/json");
        headers.put("User-Agent", "Mozilla/5.0 (Linux; Android 6.0; DIG-AL00 Build/HUAWEIDIG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/55.0.2883.91 Mobile Safari/537.36 MicroMessenger/7.0.3.1400(0x27000334) Process/appbrand0 NetType/WIFI Language/zh_CN");
        headers.put("Host", "www.xiaohongshu.com");
        headers.put("Connection", "Keep-Alive");
        headers.put("x-sign",x_sign);
        // set proxy
        requestRecord.getHttpConfig().setUseProxy(true);
        requestRecord.getHttpConfig().setProxy(proxy);
    }

    public static void main(String[] args) {
        // Intentionally empty.
    }

    /**
     * Clones the given record as a turn-page request (copied biz tags, not
     * filtered) and appends it to the outgoing list, carrying over the
     * parse/wash flags and the turn-page-item request type when present.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){
        CrawlerRequestRecord clonedRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();

        if (crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)) {
            // Preserve the item-level turn-page marker on the clone.
            clonedRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            clonedRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        clonedRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        clonedRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecords.add(clonedRecord);
    }


    /**
     * Converts a Chinese relative or absolute date string (e.g. "刚刚",
     * "5分钟前", "昨天 12:30", "2021-01-08") into epoch milliseconds.
     *
     * @param dataStr raw date text scraped from the page
     * @return epoch milliseconds
     * @throws ParseException if the string matches none of the known formats
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        String regEx="[^0-9]+"; // splits out the digit runs of a relative-time string
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){
            return System.currentTimeMillis();
        // BUGFIX: "\d+" instead of "\d*" — a digit-less string such as "秒前"
        // previously passed the match and then crashed in Integer.valueOf with
        // an unchecked exception the caller never catches; now it falls through
        // to the strict parse below and raises the declared ParseException.
        } else if (Pattern.matches("\\d+秒前",dataStr)){
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d+分钟前",dataStr)){
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d+小时前",dataStr)){
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d+小时\\d+分钟前",dataStr)){
            int hourNumber = Integer.valueOf(pattern.split(dataStr)[0]);
            int minuteNumber = Integer.valueOf(pattern.split(dataStr)[1]); // second digit run is minutes
            long totalMillis = hourNumber*60*60*1000L + minuteNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){
            // Replace "今天"/"昨天"/"前天" with an absolute "yyyy年MM月dd日" prefix
            // so the strict multi-format parse at the bottom can handle it.
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){
            // Month-day only: assume the current year.
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Date patterns recognised by {@link #allFormats()}: the Chinese
     * "yyyy年MM月dd日" family plus dash, slash, and dot separated variants.
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),

        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        // Raw date-format pattern this constant represents.
        private final String pattern;

        TimeForamtEnum(String pattern) {
            this.pattern = pattern;
        }

        /** Returns every declared pattern string, in declaration order. */
        public static String[] allFormats() {
            return Arrays.stream(values())
                    .map(TimeForamtEnum::getFormat)
                    .toArray(String[]::new);
        }

        /** Returns the raw date-format pattern. */
        public String getFormat() {
            return pattern;
        }
    }

    /**
     * Checks whether the given release time falls inside the record's
     * configured date-range filter.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis; may be null
     * @return true when no date filter applies, or the time is inside the range
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        // No date-range filter configured: everything passes.
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true;
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                long[] dateAllowRange = filterInfo.getDateAllowRange();
                int hourFromNow = filterInfo.getHourFromNow();
                if (dateAllowRange != null) {
                    // Explicit [start, end] window takes precedence.
                    startTime = dateAllowRange[0];
                    endTime = dateAllowRange[1];
                } else if (hourFromNow != 0) {
                    // Rolling window covering the last N hours.
                    endTime = System.currentTimeMillis();
                    startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                }
            }
        }
        // BUGFIX: guard against a null release time (the caller passes
        // contentObj.getLong(...), which can be null) — previously this
        // unboxed to a NullPointerException; now it is treated as out-of-range.
        return startTime != null && endTime != null
                && releaseTimeToLong != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }


}
