package com.chance.cc.crawler.development.scripts.xiaohongshu.applets;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.comment;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Domain_Result_Json;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * Xiaohongshu (XHS) comment crawler that pulls note comments through the
 * Baidu mini-program (applet) API. For each note it cursor-pages through the
 * comment list, washes every comment (and sub-comment) into {@code CrawlerData},
 * and maintains a pool of "asid" shield tokens required by the API.
 *
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
public class XHSAppletsCommentCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XHSAppletsCommentCrawlerScript.class);

    public static final String site = "xhs-bd-comment";

    //matches any comment-list request for a note
    public static final String noteCommentUrlRegular = "https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/\\S*/comments\\S*";
    //matches only the FIRST comment page (empty endId cursor)
    public static final String noteFilterNoteCommentUrlRegular = "https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/\\S*/comments\\?endId=&hot=no&pageSize=50";
    public static final String noteCommentUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/%s/comments?endId=%s&hot=no&pageSize=50";
    public static final String webNoteUrlFormat = "https://www.xiaohongshu.com/discovery/item/%s";

    public static final String noteCommentItemUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/%s/comments";

    public static final String genSwanIdUrlFormat = "https://minipro.baidu.com/ma/user/swanid?uid=%s&pkgname=com.baidu.searchbox";
    public static final String randomUIDChar = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-";

    public static final String genASIDUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/shield/get_asid";

    public static final String startListUrl = "https://www.xiaohongshu.com/fe_api/burdock/baidu/v2/notes/start/comments";

    //matches runs of non-digit characters; compiled once (used by dateToTimestamp)
    private static final Pattern NON_DIGIT_PATTERN = Pattern.compile("[^0-9]+");

    //pool of applet shield tokens ("asid"); borrowed in prepareRequest, returned in parseLinks
    private final LinkedBlockingQueue<String> asids = new LinkedBlockingQueue<>();

    //volatile is required for the double-checked locking in beforeDownload to publish safely
    private volatile Downloader pageDownloader;

    private static final Proxy proxy = new Proxy();

    static {
        //proxy configuration (abuyun dynamic proxy)
        //NOTE(review): credentials are hard-coded — consider externalizing to configuration
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Lazily captures the page downloader from the first context so that
     * {@link #registerASID(Proxy)} and {@link #swanId()} can issue their own
     * out-of-band token requests.
     */
    @Override
    public void beforeDownload(CrawlerRecordContext context) {
        if (pageDownloader == null){
            synchronized (this){
                if (pageDownloader == null){
                    pageDownloader = context.getPageDownloader();
                }
            }
        }
        super.beforeDownload(context);
    }

    /**
     * Parses a downloaded comment page. Failed downloads are re-queued for retry;
     * successful pages generate the next comment page from the last comment id,
     * and the borrowed asid token is returned to the pool.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();

        if (!page.isDownloadSuccess()){
            logger.error("xhs comment request download has error, status code {},error info [{}] ,will retry",
                    page.getStatusCode(),page.getRawText());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); //download failed, nothing to wash
            return crawlerRequestRecords;
        }

        if (page.getStatusCode() != 200){

            if (isContinue(page)){
                //note is gone (deleted / under review): the token is still usable, return it
                addASID(page.getRequest().getHeaders().get("asid"));
            } else {
                //captcha or unknown status: retry the request; the burned asid is dropped
                addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            }
            crawlerRecord.setNeedWashPage(false);
            return crawlerRequestRecords;
        }


        try {
            Json jsonPage = page.getJson();
            List<String> comments = jsonPage.jsonPath("data.comments").all();
            String noteId = jsonPage.jsonPath("data.targetNoteId").get();

            if (comments != null && comments.size() > 0){
                String lastCommentDetail = comments.get(comments.size() - 1);
                Json dataJson = new Json(lastCommentDetail);
                String lastCommentId = dataJson.jsonPath("id").get();

                //generate the next page (cursor-paged on the last comment id)
                genNoteCommentNextPage(crawlerRecord,lastCommentId,noteId,crawlerRequestRecords);
            } else {
                crawlerRecord.setNeedWashPage(false);
            }
        } catch (Exception e) {
            logger.error("xhs comment download response content parse error:{}, response data:{}",e.getMessage(),page.getRawText());
            crawlerRecord.setNeedWashPage(false);
        }
        addASID(page.getRequest().getHeaders().get("asid"));
        return crawlerRequestRecords;

    }

    /**
     * Builds the request for the next comment page, cursored on {@code lastCommentId},
     * and appends it to {@code pipelineRecords}. No-op when the cursor is blank.
     */
    private void genNoteCommentNextPage(CrawlerRequestRecord crawlerRecord,String lastCommentId,
                                        String noteId, List<CrawlerRequestRecord> pipelineRecords){
        if (StringUtils.isNotBlank(lastCommentId)){

            String noteUrl = String.format(noteCommentUrlFormat, noteId,lastCommentId);

            CrawlerRequestRecord crawlerNextPageCommentRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(noteUrl)
                    .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .needWashed(true)
                    .build();
            //the cached result-json tag belongs to the first page only; drop it on turn pages
            if (crawlerNextPageCommentRequestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
                crawlerNextPageCommentRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
            }
            if (isUrlMatch(crawlerRecord.getHttpRequest().getUrl(),noteFilterNoteCommentUrlRegular)){
                //do not dedupe the second page: highly-liked comments can appear on the first
                //page instead of the newest comments, so overlap is expected
                crawlerNextPageCommentRequestRecord.tagsCreator().requestTags().removeRequestType(filter);
            }
            pipelineRecords.add(crawlerNextPageCommentRequestRecord);
        }
    }

    /**
     * Classifies a non-200 response: returns {@code true} when the note itself is
     * unavailable (deleted or under review) and crawling should simply move on;
     * {@code false} when the request should be retried (captcha / unknown status).
     */
    private boolean isContinue(HttpPage httpPage){

        if (httpPage.getStatusCode() == 404){ //note deleted
            logger.info("applets xhs note:{} is Deleted, response content:{}"
                    ,httpPage.getRequest().getUrl(),httpPage.getRawText());
            return true;
        }

        if (httpPage.getStatusCode() == 423){ //note under review
            logger.info("applets xhs note:{} is under review, response content:{}"
                    ,httpPage.getRequest().getUrl(),httpPage.getRawText());
            return true;
        }
        if (httpPage.getStatusCode() == 461){ //captcha challenge
            logger.info("applets xhs asid:{} Verification code appears,will retry, response content:{}"
                    ,httpPage.getRequest().getHeaders().get("asid"),httpPage.getRawText());
            return false;
        }

        logger.info("applets xhs asid:{} unknown code {} appears,will retry, response content:{}"
                ,httpPage.getRequest().getHeaders().get("asid"),httpPage.getStatusCode(),httpPage.getRawText());
        return false;
    }

    /**
     * Washes a downloaded comment page into crawler data records.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        List<CrawlerData> crawlerDatas = new ArrayList<>();

        washComment(crawlerDatas,crawlerRecord,page);

        return crawlerDatas;
    }

    /**
     * Extracts every top-level comment and sub-comment from the page into
     * {@code crawlerDatas}, then (when the parent article's result-json tag is
     * present) re-emits the article record with its comment counters updated.
     */
    private void washComment(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord, HttpPage page){
        Json commentsJson = new Json(page.getRawText());
        List<String> comments = commentsJson.jsonPath($_type + ".data.comments").all();
        String commentsTotal = commentsJson.jsonPath($_type + ".data.commentsTotal").get();
        if (comments != null && comments.size() > 0){
            for (String comment : comments) {
                Json commentJson = new Json(comment);
                String targetNoteId = commentJson.jsonPath($_type + ".targetNoteId").get();
                List<String> subComments = commentJson.jsonPath($_type + ".subComments").all();
                processCommentRecord(crawlerDatas,crawlerRecord,page,commentJson,targetNoteId);
                if (subComments != null && subComments.size() > 0){
                    for (String subComment : subComments) {
                        processCommentRecord(crawlerDatas,crawlerRecord,page,new Json(subComment),targetNoteId);
                    }
                }
            }
        }

        //parent note id -- accumulate and refresh the last-seen comment totals
        if(crawlerRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
            String resultJson = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(Tag_Field_Domain_Result_Json);
            CrawlerData crawlerData = JSON.parseObject(resultJson, CrawlerData.class);
            Map<String, Object> map = JSON.parseObject(crawlerData.getCrawlerContent());
            map.put("currentComments",commentsTotal);
            map.put("lastComments",commentsTotal);
            crawlerData.setCrawlerContent(JSON.toJSONString(map));
            crawlerDatas.add(crawlerData);
        }

    }

    /**
     * Converts a single comment JSON object into a {@code CrawlerData} record
     * linked to its parent note. Comments whose release time cannot be parsed
     * are logged and skipped.
     */
    private void processCommentRecord(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord,
                                      HttpPage page,Json commentJson,String noteId){

        try {

            String releaseTime = commentJson.jsonPath($_type + ".time").get();
            String commentId = commentJson.jsonPath($_type + ".id").get();
            String article_url = String.format(webNoteUrlFormat, noteId);

            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, comment.enumVal(), commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(),"xhs-bd-article", article.enumVal(), noteId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(commentJson.get())
                    .url(article_url)
                    .resultLabelTag(comment)
                    .requestLabelTag(result)
                    .requestLabelTag(filter)
                    .isEndTimeFromSchedulerTime(false)
                    .flowInPipelineTag("article_comment")
                    .build();
            crawlerArticleData.setFilterPipelineResult(true);
            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            if (crawlerArticleData.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)){
                crawlerArticleData.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
            }
            crawlerDatas.add(crawlerArticleData);
        } catch (ParseException e) {
            logger.error("xhs article comment wash data {} has error {}",commentJson.get(),e.getMessage());
        }
    }

    @Override
    public String domain() {
        return "xhs";
    }

    @Override
    public void initUrlRegulars() {
        addUrlRegular(noteCommentUrlRegular); //applet comment query endpoint
    }

    /**
     * Only handle records tagged with this script's site.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Decorates each outgoing request with the headers the XHS applet API
     * requires (asid token, X-Sign, random trace id, Host). The synthetic
     * start-list record is passed through without downloading.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {

        if (startListUrl.equals(requestRecord.getHttpRequest().getUrl())){
            requestRecord.setDownload(false);
            requestRecord.setNeedWashPage(false);
            requestRecord.setNeedParsedPage(false);
            requestRecord.setSkipPipeline(true);

        } else {
            String asid = getASID();
            requestRecord.getHttpRequest().addHeader("asid",asid);
            requestRecord.getHttpRequest().addHeader("X-Sign",getXSign(requestRecord.getHttpRequest().getUrl()));
            requestRecord.getHttpRequest().addHeader("X-B3-TraceId", RandomStringUtils.random(16,"0123456789abcdef"));
            requestRecord.getHttpRequest().addHeader("Host","www.xiaohongshu.com");
        }
        HttpConfig httpConfig = requestRecord.getHttpConfig();
        //NOTE(review): the proxy is configured but useProxy is false — confirm intentional
        httpConfig.setProxy(proxy);
        httpConfig.setUseProxy(false);
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Borrows an asid token from the pool, registering a fresh one when the
     * pool stays empty for 3 seconds. Returns {@code null} only when the
     * waiting thread is interrupted.
     */
    private String getASID(){
        try {
            String asid = asids.poll(3, TimeUnit.SECONDS);

            if (StringUtils.isBlank(asid)){ //register asid
                asid = registerASID(proxy);
                logger.info("generator asid:{} ...",asid);
            } else {
                logger.info("borrow asid:{} ...",asid);
            }
            return asid;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); //restore the interrupt flag for callers
            logger.error(e.getMessage());
        }
        return null;
    }

    /**
     * Returns a token to the pool. Blank tokens are dropped: a failed download
     * may carry no asid header, and LinkedBlockingQueue rejects null with NPE.
     */
    private void addASID(String asid){
        if (StringUtils.isBlank(asid)){
            return;
        }
        logger.info("return asid:{} ...",asid);
        asids.add(asid);
    }

    /**
     * Computes the X-Sign header: "X" + md5(urlPath + "WSUDD"), where urlPath
     * starts at "/fe_api/".
     */
    private String getXSign(String url) {
        String urlPath = url.substring(url.indexOf("/fe_api/"));

        String x_sign = null;
        try {
            x_sign = "X" + md5(urlPath + "WSUDD");
        } catch (NoSuchAlgorithmException e) {
            logger.error(e.getMessage());
        }
        return x_sign;
    }

    /**
     * Registers a new asid token against the shield endpoint using a freshly
     * generated swanId payload. Retries (with a new swanId) until it succeeds;
     * iterative rather than recursive to avoid stack overflow under sustained
     * failure.
     */
    private String registerASID(Proxy proxy){
        while (true) {
            String swanStr = swanId();
            HttpPage httpPage = null;
            try {
                HttpRequest request = new HttpRequest();

                request.setUrl(genASIDUrlFormat);
                HttpConfig asidHttpConfig = HttpConfig.me("gen_asid");
                if(proxy != null){
                    asidHttpConfig.setProxy(proxy);
                    asidHttpConfig.setUseProxy(false);
                }
                request.setMethod(HttpConstant.Method.POST);
                request.addHeader("X-Sign","X2b57136ed2cac4f60e05aec832825a97");
                request.setRequestBody(HttpRequestBody.json(swanStr,"utf-8"));
                httpPage = pageDownloader.download(request, asidHttpConfig);

                if (!httpPage.isDownloadSuccess()) {
                    throw new Exception("register asid download is not ok ...");
                }
                return httpPage.getJson().jsonPath("data").get();

            } catch (Exception e) {
                logger.error("register asid download error:{}, download response: {},will retry",e.getMessage(),httpPage == null ? "" : httpPage.getRawText());
            }
        }
    }

    /**
     * Fetches a Baidu swanId for a random uid and returns it as a JSON payload
     * ({"swanId":..., "swanIdSignature":...}) for asid registration. Retries
     * until it succeeds; iterative rather than recursive to avoid stack
     * overflow under sustained failure.
     */
    private String swanId(){
        while (true) {
            HttpPage httpPage = null;
            try {
                HttpRequest request = new HttpRequest();

                request.setUrl(String.format(genSwanIdUrlFormat, RandomStringUtils.random(57,randomUIDChar)));
                HttpConfig swanIdHttpConfig = HttpConfig.me("gen_swan_id");
                if(proxy != null){
                    swanIdHttpConfig.setProxy(proxy);
                    swanIdHttpConfig.setUseProxy(false);
                }
                request.setMethod(HttpConstant.Method.POST);
                Map<String,Object> params = new HashMap<>();
                params.put("data","{\"ma_id\":\"KuRdr9OR39BqyAGIg7mYK7Bytityu0Vi\"}");
                request.setRequestBody(HttpRequestBody.form(params,"utf-8"));
                httpPage = pageDownloader.download(request, swanIdHttpConfig);

                if (!httpPage.isDownloadSuccess()) {
                    throw new Exception("gen swanId download is not ok ...");
                }
                Map<String,String> swanMap = new HashMap<>();
                String swanId = httpPage.getJson().jsonPath("data.swanid").get();
                String signature = httpPage.getJson().jsonPath("data.swanid_signature").get();
                swanMap.put("swanId",swanId);
                swanMap.put("swanIdSignature",signature);
                return JSON.toJSONString(swanMap);
            } catch (Exception e) {
                logger.error("gen swan id download error:{}, download response: {},will retry",e.getMessage(),httpPage == null ? "" : httpPage.getRawText());
            }
        }
    }

    public static void main(String[] args) {
    }

    /**
     * Re-queues {@code crawlerRecord} for retry as a new turn-page request,
     * carrying over its biz tags and page-processing flags.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();
        crawlerRequestRecords.add(crawlerRequestRecord);

        //preserve the original turn-page-item classification on the retry record
        if(crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)){
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            crawlerRequestRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
    }


    /**
     * Converts XHS relative/absolute time strings ("刚刚", "30秒前", "3分钟前",
     * "今天 12:30", "昨天 ...", "01月08日", "01-08", full dates ...) into an
     * epoch-millisecond timestamp.
     *
     * @param dataStr the raw time string from the comment payload
     * @return epoch milliseconds
     * @throws ParseException when the string matches none of the known formats
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        if (dataStr.equals("刚刚")){
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){
            int number = Integer.parseInt(NON_DIGIT_PATTERN.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){
            int number = Integer.parseInt(NON_DIGIT_PATTERN.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){
            int number = Integer.parseInt(NON_DIGIT_PATTERN.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){
            int hourNumber = Integer.parseInt(NON_DIGIT_PATTERN.split(dataStr)[0]);
            int minuteNumber = Integer.parseInt(NON_DIGIT_PATTERN.split(dataStr)[1]);
            long totalMillis = hourNumber*60*60*1000L + minuteNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){
            //month-day only: prepend the current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){
            //MM-dd only: prepend the current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Date formats accepted by {@link #dateToTimestamp(String)}.
     * (Name spelling kept for compatibility with existing callers.)
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        private final String format;

        private TimeForamtEnum(String format) {
            this.format = format;
        }

        /**
         * Returns every declared pattern, in declaration order, for use with
         * {@code DateUtils.parseDateStrictly}.
         */
        public static String[] allFormats() {
            TimeForamtEnum[] timeForamtEnums = TimeForamtEnum.values();
            String[] formats = new String[timeForamtEnums.length];
            for (int count = 0; count < timeForamtEnums.length; count++) {
                formats[count] = timeForamtEnums[count].format;
            }
            return formats;
        }

        public String getFormat() {
            return format;
        }
    }


}
