package com.chance.cc.crawler.development.scripts.xiaohongshu;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPage;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPageItem;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Keword;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
public class XHSSearchCustomCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XHSSearchCustomCrawlerScript.class);

    // Site identifiers stamped onto every produced CrawlerData record (see washArticle / processCommentRecord).
    public static final String site= "search_custom";
    public static final String site_biz= "search_custom";

    // Entry URL whose only purpose is routing requests into this script; it is never actually downloaded.
    public static final String searchStartUrl = "https://start/api/sns/v10/search/notes";
    // Matches the app-side search API; matching records are expanded into per-note detail requests.
    public static final String appSearchUrlRegular = "https://\\S*/api/sns/v10/search/notes[\\s\\S]*";

    // WeChat mini-program note-detail endpoint: regex for routing, format string for building requests.
    public static final String wxappArticleUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/note/\\S*/single_feed";
    public static final String wxappArticleUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/single_feed";
    // Auxiliary endpoints fetched to mimic a real client session (banner / related notes / activity banner).
    public static final String wxappArticleBannerUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/banner?noteId=%s&platform=weixin";
    public static final String wxappArticleRelatedUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/related?openId=%s&pageSize=8&page=1&noteType=1&needGifCover=true";
    public static final String wxappArticleActivityBannerUrlFormat = "https://pages.xiaohongshu.com/data/sns/mp_activity_banner?sid=%s";

    // Comment-list endpoints: first page, then cursor-based paging via endId.
    public static final String wxappArticleCommentUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/notes/\\S*/comments\\S*";
    // NOTE(review): as a regex, the unescaped '?' below makes the preceding 's' optional rather than
    // matching a literal query string. This constant is not referenced in this chunk — confirm intent.
    public static final String wxappArticleCommentUrlRegular1 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/\\S*/comments?pageSize=10";
    public static final String wxappArticleCommentUrlFormat1 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/%s/comments?pageSize=10";
    public static final String wxappArticleCommentUrlFormat2 = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/notes/%s/comments?pageSize=10&endId=%s";

    // Pool of per-user oauth payloads (JSON strings) shared across worker threads; lazily
    // initialized in prepareRequest() via double-checked locking.
    // BUG FIX: declared volatile — without it the unsynchronized first check of the
    // double-checked init in prepareRequest() may observe a partially published queue.
    private volatile LinkedBlockingQueue<String> authorInfos;
    // BUG FIX: final — the monitor object must never be reassigned, or threads could
    // synchronize on different locks.
    private final Object lock = new Object();

    // Keys present in each user's oauthInfo JSON payload (consumed by
    // fillCurrentRecordRequestHeader / internalDownloadRelated / internalDownloadActivityBanner).
    public static final String headerReferer = "referer";
    public static final String headerAuthorization = "authorization";
    public static final String headerDeviceFingerprint = "device-fingerprint";
    public static final String authorOpenId = "openId";
    public static final String authorSid = "sid";

    // Shared outbound proxy, applied only to comment-page requests (fillCurrentRecordRequestProxy).
    private static Proxy proxy = new Proxy();

    static {
        // Proxy configuration (Abuyun dynamic HTTP proxy).
        // SECURITY NOTE(review): credentials are hard-coded in source; move them to external
        // configuration / a secret store and rotate these values.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    private static List<String> defaultImages =new ArrayList<>();
    static {
        defaultImages.add("https://img.xiaohongshu.com/avatar/5f2064eb080dcf0001cc574e.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5eae50b231af6300011333b7.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/6020f03f3ef6327de6eb8728.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/6013128bd4e15f3d0266ba5b.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/411309441983cdc4ce073ff8da73b5b6.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5fb86c356fc7700001fa6d13.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5fd03576d4d68a00015be3c0.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5da66dc30000000001009e60.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5c468493000000001000c2ea.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5c3b6996f6928f00016e6720.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5e7b0132c5569500012034f9.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5e1b4de900000000010060c3.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5d540937474e3500013269c4.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/603daa81ea0ea072103b986a.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5d0470dee51467000108a1fd.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5e78934a4d2eec00015914a2.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5f6d48b4e2dbee0001d0a80e.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/605019de66e26627726307b5.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/582d1d336a6a691a12808485.jpg@80w_80h_90q_1e_1c_1x.jpg");
        defaultImages.add("https://img.xiaohongshu.com/avatar/5be1311641f120000155db9e.jpg@80w_80h_90q_1e_1c_1x.jpg");
    }

    /**
     * Routes a downloaded page to the matching link parser based on its URL.
     * Failed downloads (other than the search entry) are re-queued for retry.
     *
     * @param crawlerRecord the request record that produced this page
     * @param page          the downloaded page
     * @return follow-up requests discovered on this page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();

        if (!isUrlMatch(crawlerRecord.getHttpRequest().getUrl(), appSearchUrlRegular) && !page.isDownloadSuccess()) {
            // BUG FIX: the URL argument for the {} placeholder was missing from the log call.
            logger.error("xhs request url {} download has error ,will retry", crawlerRecord.getHttpRequest().getUrl());
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed: nothing to wash
            return crawlerRequestRecords;
        }

        if (isUrlMatch(crawlerRecord.getHttpRequest().getUrl(), appSearchUrlRegular)) {
            parseSearchLinks(crawlerRequestRecords, crawlerRecord, page);
        } else if (isUrlMatch(page.getRequest().getUrl(), wxappArticleUrlRegular)) {
            parseArticleLinks(crawlerRequestRecords, crawlerRecord, page);
        } else if (isUrlMatch(page.getRequest().getUrl(), wxappArticleCommentUrlRegular)) {
            parseArticleCommentLinks(crawlerRequestRecords, crawlerRecord, page);
        }

        return crawlerRequestRecords;
    }

    /**
     * Turns one search-result record (carrying keyword/noteId/releaseTime biz tags) into a
     * signed note-detail request against the WeChat mini-program endpoint.
     */
    private void parseSearchLinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {

        String keyword = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("keyword");
        String noteId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("noteId");
        String releaseTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("releaseTime");
        logger.info("xhs custom noteId {},releaseTime {}", noteId, releaseTime);
        try {
            String noteUrl = String.format(wxappArticleUrlFormat, noteId);
            // Endpoint signing scheme: x-sign = "X" + md5(<path from /fe_api/> + "WSUDD").
            String urlPath = noteUrl.substring(noteUrl.indexOf("/fe_api/"));
            String x_sign = "X" + md5(urlPath + "WSUDD");

            CrawlerRequestRecord crawlerArticleRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(noteUrl)
                    .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                    .httpHead("x-sign", x_sign)
                    .releaseTime(dateToTimestamp(releaseTime))
                    .needWashed(true)
                    .copyBizTags()
                    .build();
            crawlerArticleRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Keword, keyword);
            crawlerRequestRecords.add(crawlerArticleRequestRecord);
        } catch (Exception e) {
            // BUG FIX: pass the exception to the logger so the stack trace is recorded instead of lost.
            logger.error("error record {}", JSON.toJSONString(crawlerRecord), e);
        }
    }

    /**
     * Parses a note-detail response: queues camouflage downloads (banner / related / activity
     * banner / author avatar) and, when the note has comments, a signed first-page comment request.
     * On any failure the record is re-queued for retry.
     */
    private void parseArticleLinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {

        try {
            Json articleResultJson = new Json(page.getRawText());

            // Non-retryable business error: skip washing and stop here.
            if (isReturn(articleResultJson, page)) {
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            String dataId = articleResultJson.jsonPath($_type + ".data.id").get();
            String image = articleResultJson.jsonPath($_type + ".data.user.image").get();

            // Mimic a real client session to avoid triggering the slider captcha.
            internalDownloadBanner(crawlerRequestRecords, crawlerRecord, dataId);
            internalDownloadRelated(crawlerRequestRecords, crawlerRecord, dataId);
            internalDownloadActivityBanner(crawlerRequestRecords, crawlerRecord, dataId);
            internalDownloadCommon(crawlerRequestRecords, crawlerRecord, image);

            // NOTE(review): every other jsonPath here is prefixed with $_type, but this one is not —
            // confirm whether ".data.comments" is intentional or a missing prefix.
            int comments = Integer.parseInt(articleResultJson.jsonPath(".data.comments").get());
            if (comments > 0) {
                String releaseTime = articleResultJson.jsonPath($_type + ".data.time").get();

                String noteUrl = String.format(wxappArticleCommentUrlFormat1, dataId);
                // Endpoint signing scheme: x-sign = "X" + md5(<path from /fe_api/> + "WSUDD").
                String urlPath = noteUrl.substring(noteUrl.indexOf("/fe_api/"));
                String x_sign = "X" + md5(urlPath + "WSUDD");

                CrawlerRequestRecord crawlerCommentRequestRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(noteUrl)
                        .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                        .httpHead("x-sign", x_sign)
                        .releaseTime(dateToTimestamp(releaseTime))
                        .needWashed(true)
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                crawlerRequestRecords.add(crawlerCommentRequestRecord);
            }
        } catch (Exception e) {
            // BUG FIX: the exception is now appended as the final argument so SLF4J logs the full
            // stack trace (previously only getMessage() was recorded).
            logger.error("xhs article detail download error,header oauthor info {}, thread local author info {},request url: {}," +
                            "page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization), getLocalAuthInfo(), page.getRequest().getUrl(), page.getRawText(), e.getMessage(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Decides whether processing should stop for a non-zero business code.
     * Returns {@code true} (stop, non-retryable) for any error message EXCEPT "Spam",
     * expired-login, or banned-account messages, which callers treat as retryable.
     */
    private boolean isReturn(Json articleResultJson, HttpPage page) {

        String code = articleResultJson.jsonPath($_type + ".code").get();
        if (!"0".equals(code)) {
            String msg = articleResultJson.jsonPath($_type + ".msg").get();
            // BUG FIX: msg.contains(...) threw an NPE when the response carried no msg field;
            // a missing message is now treated as a non-retryable (unknown) error.
            if (msg == null || (!"Spam".equals(msg) && !"登录已过期".equals(msg) && !msg.contains("封号"))) {
                logger.warn("xhs user note url {} download error: {}", page.getRequest().getUrl(), msg);
                return true;
            }
        }
        return false;
    }

    /**
     * Parses a comment-list response: emits camouflage downloads, avatar fetches for each
     * commenter, and a next-page comment request cursored on the last comment id.
     * Retryable errors (Spam / expired login / banned) re-queue the record.
     */
    private void parseArticleCommentLinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {

        try {
            Json noteCommentPage = new Json(page.getRawText());

            String code = noteCommentPage.jsonPath($_type + ".code").get();

            // Non-retryable business error: stop without washing.
            if (isReturn(noteCommentPage, page)) {
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            // Remaining non-zero codes (the retryable ones isReturn let through): re-queue and stop.
            // BUG FIX: constant-first equals avoids an NPE when the response has no "code" field.
            if (!"0".equals(code)) {
                logger.info("xhs article comment request url {},error content {},thread local user author info {}, ip proxy {}",
                        page.getRequest().getUrl(), page.getRawText(), getLocalAuthInfo(), crawlerRecord.getHttpConfig().getProxy());
                addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            String dataId = noteCommentPage.jsonPath($_type + ".data.targetNoteId").get();

            // Mimic a real client session to avoid triggering the slider captcha.
            internalDownloadBanner(crawlerRequestRecords, crawlerRecord, dataId);
            internalDownloadRelated(crawlerRequestRecords, crawlerRecord, dataId);
            internalDownloadActivityBanner(crawlerRequestRecords, crawlerRecord, dataId);

            String endId = null;

            List<String> noteComments = noteCommentPage.jsonPath(".data.comments.*").all();
            if (noteComments != null && noteComments.size() > 0) {
                List<String> images = new ArrayList<>();
                for (String comment : noteComments) {
                    Json commentJson = new Json(comment);
                    endId = commentJson.jsonPath($_type + ".id").get(); // last iteration wins: paging cursor
                    images.add(commentJson.jsonPath($_type + ".user.image").get());
                }

                // Fetch commenter avatars like a real client rendering the page would.
                for (String image : images) {
                    internalDownloadCommon(crawlerRequestRecords, crawlerRecord, image);
                }
            }

            if (StringUtils.isNotBlank(endId)) {
                String noteUrl = String.format(wxappArticleCommentUrlFormat2, dataId, endId);
                // Endpoint signing scheme: x-sign = "X" + md5(<path from /fe_api/> + "WSUDD").
                String urlPath = noteUrl.substring(noteUrl.indexOf("/fe_api/"));
                String x_sign = "X" + md5(urlPath + "WSUDD");

                CrawlerRequestRecord crawlerArticleRequestRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(noteUrl)
                        .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                        .httpHead("x-sign", x_sign)
                        .releaseTime(System.currentTimeMillis())
                        .needWashed(true)
                        .copyBizTags()
                        .notFilterRecord()
                        .build();

                crawlerRequestRecords.add(crawlerArticleRequestRecord);
            }

        } catch (Exception e) {
            // BUG FIX: the exception is appended so SLF4J logs the full stack trace.
            logger.error("xhs article comment download error,header oauthor info {},thread local author info {},request url: {},page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization), getLocalAuthInfo(), page.getRequest().getUrl(), page.getRawText(), e.getMessage(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /** Queues a signed internal download of the note-banner endpoint (session camouflage). */
    private void internalDownloadBanner(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, String dataId) throws NoSuchAlgorithmException {
        internalDownload(crawlerRequestRecords, crawlerRecord, String.format(wxappArticleBannerUrlFormat, dataId));
    }

    /** Queues a signed internal download of the "related notes" endpoint, using the current user's openId. */
    private void internalDownloadRelated(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, String dataId) throws NoSuchAlgorithmException {
        Map oauth = new Json(fillThreadLocalAuthorInfo()).toObject(Map.class);
        String openId = oauth.get(authorOpenId).toString();
        internalDownload(crawlerRequestRecords, crawlerRecord, String.format(wxappArticleRelatedUrlFormat, dataId, openId));
    }

    /**
     * Queues an internal download of the activity-banner endpoint using the user's sid; unlike the
     * other helpers it carries no x-sign header.
     * NOTE(review): the dataId parameter is unused here — presumably kept for signature symmetry
     * with the sibling helpers; confirm before removing.
     */
    private void internalDownloadActivityBanner(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, String dataId) throws NoSuchAlgorithmException {
        Map oauth = new Json(fillThreadLocalAuthorInfo()).toObject(Map.class);
        String bannerUrl = String.format(wxappArticleActivityBannerUrlFormat, oauth.get(authorSid).toString());
        CrawlerRequestRecord bannerRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(bannerUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(bannerRecord);
    }

    /** Queues a plain internal download of an arbitrary URL (e.g. an avatar image); blank URLs are skipped. */
    private void internalDownloadCommon(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, String url) throws NoSuchAlgorithmException {
        if (StringUtils.isBlank(url)) {
            return;
        }
        CrawlerRequestRecord downloadRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        // The raw response must not be wrapped into HTML.
        downloadRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        crawlerRequestRecords.add(downloadRecord);
    }

    /**
     * Queues a signed internal download: computes the x-sign header from the request's
     * /fe_api/ path ("X" + md5(path + "WSUDD")) and copies the parent record's headers.
     */
    private void internalDownload(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, String url) throws NoSuchAlgorithmException {
        String signedPath = url.substring(url.indexOf("/fe_api/"));
        String signature = "X" + md5(signedPath + "WSUDD");

        CrawlerRequestRecord downloadRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                .httpHead("x-sign", signature)
                .httpHead("content-type", "application/json")
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(downloadRecord);
    }

    /** Dispatches washing by response URL: note-detail pages vs. comment-list pages. */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        String responseUrl = page.getRequest().getUrl();
        if (isUrlMatch(responseUrl, wxappArticleUrlRegular)) {
            washArticle(results, crawlerRecord, page);
        } else if (isUrlMatch(responseUrl, wxappArticleCommentUrlRegular)) {
            washComment(results, crawlerRecord, page);
        }
        return results;
    }

    /**
     * Washes a note-detail JSON response into one article CrawlerData record; the raw response
     * body is stored verbatim as the record content.
     *
     * Dead code removed: the user/interaction field extractions (author, fans, likes, etc.) and
     * the commented-out interaction-record emission were never consumed.
     */
    private void washArticle(List<CrawlerData> crawlerDatas, CrawlerRequestRecord crawlerRecord, HttpPage page) {
        Json articleJson = new Json(page.getRawText());

        String releaseTime = articleJson.jsonPath($_type + ".data.time").get();
        String contentId = articleJson.jsonPath($_type + ".data.id").get();
        String article_url = "https://www.xiaohongshu.com/discovery/item/" + contentId;

        try {
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, article.enumVal(), contentId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(page.getRawText())
                    .url(article_url)
                    .resultLabelTag(article)
                    .isEndTimeFromSchedulerTime(false)
                    .copyBizTags()
                    .build();
            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
            crawlerDatas.add(crawlerArticleData);
        } catch (Exception e) {
            // BUG FIX: the exception is appended so SLF4J logs the full stack trace.
            logger.error("xhs article wash data {} has error {}", page.getRawText(), e.getMessage(), e);
        }
    }

    /**
     * Extracts every top-level comment and its sub-comments from a comment-list response and
     * converts each into a CrawlerData record via processCommentRecord.
     */
    private void washComment(List<CrawlerData> crawlerDatas, CrawlerRequestRecord crawlerRecord, HttpPage page) {
        Json payload = new Json(page.getRawText());
        List<String> topComments = payload.jsonPath($_type + ".data.comments").all();
        if (topComments == null || topComments.isEmpty()) {
            return;
        }
        for (String rawComment : topComments) {
            Json commentJson = new Json(rawComment);
            String targetNoteId = commentJson.jsonPath($_type + ".targetNoteId").get();
            List<String> replies = commentJson.jsonPath($_type + ".subComments").all();
            processCommentRecord(crawlerDatas, crawlerRecord, page, commentJson, targetNoteId);
            if (replies == null) {
                continue;
            }
            for (String reply : replies) {
                processCommentRecord(crawlerDatas, crawlerRecord, page, new Json(reply), targetNoteId);
            }
        }
    }

    /**
     * Converts one comment (or sub-comment) JSON object into a comment CrawlerData record,
     * parented to the article record of the note it belongs to.
     *
     * Dead code removed: unused author/likes extractions and the commented-out
     * interaction-record emission.
     *
     * @param noteId id of the note the comment belongs to
     */
    private void processCommentRecord(List<CrawlerData> crawlerDatas, CrawlerRequestRecord crawlerRecord,
                                      HttpPage page, Json commentJson, String noteId) {

        try {
            String releaseTime = commentJson.jsonPath($_type + ".time").get();
            String commentId = commentJson.jsonPath($_type + ".id").get();
            String article_url = "https://www.xiaohongshu.com/discovery/item/" + noteId;

            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.comment.enumVal(), commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, article.enumVal(), noteId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(commentJson.get())
                    .url(article_url)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .build();

            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
            crawlerDatas.add(crawlerArticleData);
        } catch (ParseException e) {
            // releaseTime failed to parse; BUG FIX: append the exception for the stack trace.
            logger.error("xhs article comment wash data {} has error {}", commentJson.get(), e.getMessage(), e);
        }
    }

    /** Domain identifier for all records produced by this script. */
    @Override
    public String domain() {
        return "xhs";
    }

    /** Registers the URL patterns this script handles. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(searchStartUrl); // start URL -- only routes into this script, never fetched
        addUrlRegular(appSearchUrlRegular); // app search API
        addUrlRegular(wxappArticleUrlRegular); // WeChat mini-program article entry
        addUrlRegular(wxappArticleCommentUrlRegular); // WeChat mini-program article-comment entry
    }

    /**
     * Accepts only records whose biz "site" tag matches this script's site identifier.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Idiom fix: direct boolean return instead of if/return-true/return-false.
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    /** No-op: this script performs no post-execution work. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Pre-flight hook: lazily initializes the shared oauth pool from the support-source
     * response, then configures the outgoing request — search requests get date-range
     * filtering only; all other requests get throttling, per-thread auth headers, and
     * (for comment pages) the outbound proxy.
     *
     * NOTE(review): the double-checked init below is only safe if the authorInfos field is
     * declared volatile — verify the field declaration.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {

        if(supportSourceRecords != null && supportSourceRecords.size() > 0 ){
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord.getHttpRequest().getUrl().contains("/crawler/oauth/api/v1/xhs/userOauthInfos")){
                    if (authorInfos == null || authorInfos.size() < 1){
                        synchronized (lock){
                            // Re-check under the lock so only one thread (re)builds the pool.
                            if (authorInfos == null || authorInfos.size() < 1){
                                authorInfos = new LinkedBlockingQueue<>();
                                initAuthorInfos(supportSourceRecord); // initialize authentication info
                            }
                        }
                    }
                }
            }
        }


        String requestUrl = requestRecord.getHttpRequest().getUrl();


        if (isUrlMatch(requestUrl,appSearchUrlRegular)){ //search video
            fillSearchCurrentRecord(requestRecord);
        } else {
            downloadSleep();
            requestRecord.setNeedWashPage(true);
            String authorInfo = fillThreadLocalAuthorInfo(); // fill authentication info
            fillCurrentRecordRequestHeader(requestRecord,authorInfo);
            logger.info("xhs current thread local author info {}",authorInfo);

           // Only comment pages are routed through the proxy.
           if (isUrlMatch(requestUrl,wxappArticleCommentUrlRegular)){
               fillCurrentRecordRequestProxy(requestRecord);
            }
        }
        requestRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Parses the user-oauth support response and enqueues every "oauthInfo" payload into the
     * shared pool. Expects status "0" with a non-empty content array; anything else is a no-op.
     * All failures are logged and swallowed (best-effort initialization).
     */
    private void initAuthorInfos(CrawlerRequestRecord supportSourceRecord) {
        try {
            Json responseBody = new Json(supportSourceRecord.getInternalDownloadPage().getRawText());
            String status = responseBody.jsonPath($_type + ".status").get();
            List<String> contents = responseBody.jsonPath($_type + ".content").all();

            if (!"0".equals(status) || contents == null || contents.isEmpty()) {
                return;
            }

            List<Map> userOauthInfos = new Json(contents.get(0)).toList(Map.class);
            for (Map userOauthInfo : userOauthInfos) {
                authorInfos.put(String.valueOf(userOauthInfo.get("oauthInfo")));
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Throttles downloads by sleeping a random 4000–6000 ms between requests.
     */
    private void downloadSleep(){
        Random rand = new Random();
        // nextFloat() in [0,1) -> sleep in [4000, 6000) ms
        long sleepTime = (long)((rand.nextFloat()+2)*2000L);
        logger.info("xhs download sleep time {}",sleepTime);
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt flag so callers/executors can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage());
        }
    }

    /**
     * Returns the oauth payload bound to the current thread, pulling one from the shared pool
     * (and caching it thread-locally) when none is bound yet.
     *
     * @return the oauth JSON payload, or {@code null} if the poll was interrupted
     * @throws RuntimeException when the pool yields nothing within 3 seconds
     */
    private String fillThreadLocalAuthorInfo(){
        String authorInfo = getLocalAuthInfo();
        if (StringUtils.isBlank(authorInfo)){
            try {
                authorInfo = authorInfos.poll(3, TimeUnit.SECONDS);
                if (StringUtils.isBlank(authorInfo)){
                    logger.error("Unable to obtain authentication information. The current task will not continue!");
                    throw new RuntimeException("Unable to obtain authentication information. The current task will not continue!");
                }
                updateLocalAuthInfo(authorInfo);
            } catch (InterruptedException e) {
                // BUG FIX: restore the interrupt flag so callers/executors can observe the interruption.
                Thread.currentThread().interrupt();
                logger.error(e.getMessage());
            }
        }
        return authorInfo;
    }

    /**
     * Configures a search record: no actual download, link parsing only, with a fixed
     * date-range filter applied to results.
     */
    private void fillSearchCurrentRecord(CrawlerRequestRecord requestRecord){
        requestRecord.setReleaseTime(System.currentTimeMillis());
        requestRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        long[] dateRange = new long[2];
        dateRange[0] = 1612627200000L; // 2021-02-07 00:00 CST (epoch millis; the previous "20190929" note was wrong)
        dateRange[1] = 1644163200000L; // 2022-02-07 00:00 CST
        requestRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(-1,dateRange));
        requestRecord.setDownload(false);
        requestRecord.setNeedWashPage(false);
        requestRecord.setNeedParsedPage(true);
    }

//    private void fillArticleCurrentRecord(CrawlerRequestRecord requestRecord){
//        // 设置 评论去重时间
//        requestRecord.setReleaseTime(System.currentTimeMillis());
//    }

    /**
     * Populates per-account HTTP headers (referer / device fingerprint /
     * authorization) from the JSON-encoded auth info, plus the fixed headers the
     * xiaohongshu app API expects. Headers are only filled when the request
     * already carries a non-empty header map.
     *
     * NOTE(review): assumes the auth-info JSON always contains the three header
     * keys — a missing key would NPE on toString(); confirm upstream guarantees.
     */
    private void fillCurrentRecordRequestHeader(CrawlerRequestRecord requestRecord, String authorInfo){
        Map<String, Object> authorInfoMap = new Json(authorInfo).toObject(Map.class);
        Map<String, String> headers = requestRecord.getHttpRequest().getHeaders();
        if (headers != null && !headers.isEmpty()){
            headers.put("charset", "utf-8");
            headers.put("Accept-Encoding", "gzip");
            headers.put("referer", authorInfoMap.get(headerReferer).toString());
            headers.put("device-fingerprint",authorInfoMap.get(headerDeviceFingerprint).toString());
            headers.put("authorization", authorInfoMap.get(headerAuthorization).toString());
            headers.put("content-type", "application/json");
            headers.put("User-Agent", "Mozilla/5.0 (Linux; Android 6.0; DIG-AL00 Build/HUAWEIDIG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/55.0.2883.91 Mobile Safari/537.36 MicroMessenger/7.0.3.1400(0x27000334) Process/appbrand0 NetType/WIFI Language/zh_CN");
            headers.put("Host", "www.xiaohongshu.com");
            headers.put("Connection", "Keep-Alive");
        }
    }

    // Routes this request through the shared proxy instance held by the script
    // and flips the per-request flag that enables proxying.
    private void  fillCurrentRecordRequestProxy(CrawlerRequestRecord requestRecord){
        requestRecord.getHttpConfig().setProxy(proxy);
        requestRecord.getHttpConfig().setUseProxy(true);
    }

    // Intentionally empty: kept only as a local debugging entry point.
    public static void main(String[] args) {
    }

    /**
     * Re-enqueues the given request as a fresh record (retry / turn-page
     * follow-up), copying its HTTP request, business tags, and page-processing
     * flags, and appending the new record to {@code crawlerRequestRecords}.
     *
     * NOTE(review): a retry-count cap (max 20 re-enqueues, tracked via a
     * "download_retry_count" business tag) used to live here but was commented
     * out — requests are currently re-queued without limit; confirm intended.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord){
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();

        // Preserve item-level paging: when the source record was tagged
        // turnPageItem, swap the new record's turnPage tag for turnPageItem.
        if(crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)){
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            crawlerRequestRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());

        crawlerRequestRecords.add(crawlerRequestRecord);
    }


    /**
     * Converts a xiaohongshu display time ("刚刚", "N分钟前", "今天 12:30",
     * "02月03日", "2021-02-03", ...) into epoch milliseconds. Relative forms are
     * resolved against the current clock; absolute forms are normalized (year /
     * day prefix added) and parsed against {@link TimeForamtEnum#allFormats()}.
     *
     * @param dataStr the display time text
     * @return epoch milliseconds
     * @throws ParseException if the normalized string matches no known format
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        String regEx = "[^0-9]+"; // splits the input on runs of non-digit characters
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){                                    // "just now"
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){                // "N seconds ago"
            int seconds = Integer.parseInt(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - seconds*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){              // "N minutes ago"
            int minutes = Integer.parseInt(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - minutes*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){              // "N hours ago"
            int hours = Integer.parseInt(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - hours*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){      // "N hours M minutes ago"
            String[] parts = pattern.split(dataStr);                    // split once, reuse
            int hours = Integer.parseInt(parts[0]);
            int minutes = Integer.parseInt(parts[1]);                   // was misleadingly named "secondNumber"
            long totalMillis = hours*60*60*1000L + minutes*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){                         // "today ..."
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){                         // "yesterday ..."
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){                         // "day before yesterday ..."
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){    // "MM月dd[日]" — assume current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){          // "MM-dd" — assume current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Date/time patterns recognised when parsing xiaohongshu display times.
     * Declaration order is significant: {@link #allFormats()} preserves it, and
     * the parser tries the patterns in that order.
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        private final String format;

        TimeForamtEnum(String format) {
            this.format = format;
        }

        /** Returns every pattern string, in declaration order. */
        public static String[] allFormats() {
            return Arrays.stream(values())
                    .map(TimeForamtEnum::getFormat)
                    .toArray(String[]::new);
        }

        /** The SimpleDateFormat-style pattern for this entry. */
        public String getFormat() {
            return format;
        }
    }


}
