package com.chance.cc.crawler.development.scripts.yishengzhan;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang.CharEncoding;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPage;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPageItem;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-06-23 13:05:26
 * @email okprog@sina.com
 */
/**
 * Crawler script for the "yishengzhan" site (news). The target API encrypts
 * both request bodies and responses; this script round-trips payloads through
 * a local Node.js helper service ({@link #nodeServerApi}) for encode/decode.
 */
public class YSZCrawlerScript extends CrawlerCommonScript {

    private Logger logger = LoggerFactory.getLogger(YSZCrawlerScript.class);

    // crawler identity: domain name and the site variant this script serves
    public static final String domain = "yishengzhan";
    public static final String scriptSite = "news";

    // keyword meta-service URL pattern and the site's three API endpoints
    public static final String keysRegex = "https?://\\S*v1/meta/" + domain + "/keys\\S*";
    public static final String listUrl = "https://api.yishengzhan.cn/gw/content/searchArticle";
    public static final String articleUrl = "https://api.yishengzhan.cn/gw/content/getArticleInfo";
    public static final String commentUrl = "https://api.yishengzhan.cn/gw/content/getCommentList";
    // regexes used in parseLinks to route a downloaded page to its parser
    public static final String listUrlRegex = "https://api\\.yishengzhan\\.cn/gw/content/searchArticle";
    public static final String articleUrlRegex = "https://api\\.yishengzhan\\.cn/gw/content/getArticleInfo\\S*";
    public static final String commentUrlRegex = "https://api\\.yishengzhan\\.cn/gw/content/getCommentList";
    // web front-end URL template used for the url field of emitted data
    public static final String articleUrlFormat = "http://web.yishengzhan.cn/#/articleDetails?articleId=%s&source=7";

    // method names accepted by the Node.js crypto helper
    private static final String encode = "encode";
    private static final String decode = "decode";
    private static final String nodeServerApi = "http://192.168.1.210:8899/encrypt/yishengzhan?method=";


    // page downloader captured lazily in beforeDownload; reused by
    // nodeServerEncrypt to call the Node.js helper service
    private static Downloader downloader;


    /**
     * Captures the page downloader from the first record context so
     * {@link #nodeServerEncrypt(String, String)} can reuse it later.
     */
    @Override
    public void beforeDownload(CrawlerRecordContext context) {
        if (downloader == null) {
            // NOTE(review): unsynchronized lazy init of a static field — assumes
            // beforeDownload is not invoked concurrently; confirm scheduler behavior.
            downloader = context.getPageDownloader();
        }
        super.beforeDownload(context);
    }

    /**
     * Routes a downloaded page to the matching parser (list / article / comment)
     * based on the URL of the request that produced it. Failed downloads are
     * re-queued via {@link #addCrawlerRecords(List, CrawlerRequestRecord)}.
     *
     * @param crawlerRequestRecord record whose request produced {@code httpPage}
     * @param httpPage             downloaded page (still encrypted)
     * @return follow-up request records; never {@code null}
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        if (doHttpPageCheck(crawlerRequestRecord,httpPage)){
            // download failed: schedule a retry and skip washing this page
            addCrawlerRecords(parsedLinks,crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        String lastRequestUrl = lastRequest.getUrl();
        if (lastRequestUrl.matches(listUrlRegex)){
            return parseListLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(articleUrlRegex)){
            return parseArticleLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(commentUrlRegex)){
            return parseCommentLink(crawlerRequestRecord,httpPage,parsedLinks);
        }
        // return the empty list instead of null for unmatched URLs so callers
        // never need a null check
        return parsedLinks;
    }

    /**
     * Parses a decrypted comment-list response and, while the server reports
     * more comments ({@code isEnd == false}), enqueues the next comment page
     * by advancing {@code start} by {@code limit} in the re-encrypted body.
     *
     * @param crawlerRequestRecord record that produced this page
     * @param httpPage             encrypted comment-list response
     * @param parsedLinks          accumulator for follow-up requests
     * @return {@code parsedLinks}, possibly extended with one turn-page record
     */
    private List<CrawlerRequestRecord> parseCommentLink(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String rawText = null;
        try {
            rawText = nodeServerEncrypt(httpPage.getRawText(),decode);
        } catch (Exception e) {
            // keep the cause so decryption failures are diagnosable
            logger.error("comment result decode error", e);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        JSONObject pageObj = JSONObject.parseObject(rawText);
        if (200 != pageObj.getIntValue("code")){
            logger.error("{} comment page download result is not ok,page is {}",domain, rawText);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        JSONObject contentObj = pageObj.getJSONObject("body");
        boolean isEnd = contentObj.getBooleanValue("isEnd");
        if (!isEnd){
            // decode the request body we previously sent, bump the paging
            // offset, then re-encode it for the next request
            HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
            String body = new String(httpRequest.getRequestBody().getBody(), StandardCharsets.UTF_8);
            JSONObject reqBodyJson = null;
            try {
                reqBodyJson = JSONObject.parseObject(nodeServerEncrypt(body,decode));
            } catch (Exception e) {
                logger.error("{} comment body decode error: {}",domain, body, e);
                return parsedLinks;
            }
            int limit = reqBodyJson.getJSONObject("body").getIntValue("limit");
            int start = reqBodyJson.getJSONObject("body").getIntValue("start");
            start = start + limit;
            reqBodyJson.getJSONObject("body").put("start",start);
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(commentUrl)
                    .recordKey(commentUrl + System.currentTimeMillis())
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .resultLabelTag(comment)
                    .resultLabelTag(interaction)
                    .needWashed(true)
                    .build();
            HttpRequest cmtHttpRequest = commentRecord.getHttpRequest();
            cmtHttpRequest.setMethod(HttpConstant.Method.POST);

            try {
                String encodeParam = nodeServerEncrypt(reqBodyJson.toJSONString(),encode);
                HttpRequestBody articleRequestBody = HttpRequestBody.json(encodeParam, CharEncoding.UTF_8);
                cmtHttpRequest.setRequestBody(articleRequestBody);
                parsedLinks.add(commentRecord);
            } catch (Exception e) {
                logger.error("{} comment list param encode error",domain, e);
            }

        }

        return parsedLinks;
    }

    /**
     * Parses a decrypted article-detail response: stores the articleId as a
     * business tag and, when the article has comments and the schedule asks
     * for them, enqueues the first comment-list page.
     *
     * @return {@code parsedLinks}, possibly extended with one comment request
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String rawText = null;
        try {
            rawText = nodeServerEncrypt(httpPage.getRawText(),decode);
        } catch (Exception e) {
            logger.error("article result decode error", e);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        JSONObject pageObj = null;
        try {
            pageObj = JSONObject.parseObject(rawText);
        } catch (Exception e) {
            // bail out on malformed JSON instead of falling through to an NPE
            // on pageObj below (the original only logged and continued)
            logger.error(rawText, e);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        if (200 != pageObj.getIntValue("code")){
            logger.error("{} article page download result is not ok,page is {}",domain, rawText);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        JSONObject contentObj = pageObj.getJSONObject("body");
        String articleId = contentObj.getString("articleId");
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("articleId",articleId);

        int commNum = contentObj.getIntValue("totalCommNum");
        if (commNum > 0){
            // decide whether comments should be crawled for this schedule
            CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
            if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null) {
                if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                    logger.error("{} crawler comment need to filter information!",domain);
                    return parsedLinks;
                }
                crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(comment);
                crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(interaction);
                KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);

                CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(commentUrl)
                        .recordKey(commentUrl + System.currentTimeMillis())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .notFilterRecord()
                        .needWashed(true)
                        .build();
                // propagate the comment filter settings carried in the biz tag
                commentRecord.setFilter(filterInfoRecord.getFilter());
                commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                HttpRequest cmtHttpRequest = commentRecord.getHttpRequest();
                cmtHttpRequest.setMethod(HttpConstant.Method.POST);
                // first comment page: start=0, fixed page size 50
                JSONObject cmtJson = new JSONObject();
                cmtJson.put("start",0);
                cmtJson.put("limit",50);
                cmtJson.put("order",0);
                cmtJson.put("commentId",1000000000000000000L);
                cmtJson.put("type",1);
                cmtJson.put("typeId",articleId);
                try {
                    String encodeParam = nodeServerEncrypt(baseRequestBodyInit(cmtJson),encode);
                    HttpRequestBody articleRequestBody = HttpRequestBody.json(encodeParam, CharEncoding.UTF_8);
                    cmtHttpRequest.setRequestBody(articleRequestBody);
                    parsedLinks.add(commentRecord);
                } catch (Exception e) {
                    logger.error("{} comment list param encode error",domain, e);
                }
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a decrypted search-list response: enqueues the next list page
     * (when {@code isEnd == false}) and one article-detail request per item.
     *
     * @return {@code parsedLinks} extended with turn-page and item records
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String rawText = null;
        try {
            rawText = nodeServerEncrypt(httpPage.getRawText(),decode);
        } catch (Exception e) {
            logger.error("list result decode error", e);
            return parsedLinks;
        }

        JSONObject pageObj = JSONObject.parseObject(rawText);
        if (200 != pageObj.getIntValue("code")){
            logger.error("{} list page download result is not ok,page is {}",domain, rawText);
            return parsedLinks;
        }
        // decode the request body we sent so we can advance its paging offset
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        HttpRequestBody requestBody = httpRequest.getRequestBody();
        String body = new String(requestBody.getBody(), StandardCharsets.UTF_8);
        JSONObject reqBodyJson = null;
        try {
            reqBodyJson = JSONObject.parseObject(nodeServerEncrypt(body,decode));
        } catch (Exception e) {
            logger.error("{} list body decode error: {}",domain, body, e);
            return parsedLinks;
        }
        boolean isEnd = pageObj.getJSONObject("body").getBooleanValue("isEnd");
        if (!isEnd){
            try {
                int limit = reqBodyJson.getJSONObject("body").getIntValue("limit");
                int start = reqBodyJson.getJSONObject("body").getIntValue("start");
                start = start + limit;
                reqBodyJson.getJSONObject("body").put("start",start);
                CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(listUrl)
                        .recordKey(listUrl + System.currentTimeMillis())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                HttpRequest nextHttpRequest = nextPageRecord.getHttpRequest();
                nextHttpRequest.setMethod(HttpConstant.Method.POST);
                String encodeParam = nodeServerEncrypt(reqBodyJson.toJSONString(),encode);
                HttpRequestBody nextRequestBody = HttpRequestBody.json(encodeParam, CharEncoding.UTF_8);
                nextHttpRequest.setRequestBody(nextRequestBody);
                // BUGFIX: the turn-page record was built but never enqueued,
                // so list pagination silently stopped after the first page
                parsedLinks.add(nextPageRecord);
            } catch (Exception e) {
                logger.error("{} list turn page record build error", domain, e);
            }
        }
        JSONArray itemStrings = pageObj.getJSONObject("body").getJSONArray("list");
        for (Object itemStr : itemStrings) {
            try {
                JSONObject itemObj = (JSONObject)itemStr;
                String articleId = itemObj.getString("artid");
                // the API reports seconds; records expect milliseconds
                long releaseTime = itemObj.getLongValue("artDate") * 1000;
                CrawlerRequestRecord articleRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(articleUrl)
                        .recordKey(StringUtils.joinWith("-",articleUrl,articleId))
                        .releaseTime(releaseTime)
                        .copyBizTags()
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                        .needParsed(true)
                        .needWashed(true)
                        .build();
                HttpRequest articleHttpRequest = articleRecord.getHttpRequest();
                articleHttpRequest.setMethod(HttpConstant.Method.POST);
                JSONObject listJson = new JSONObject();
                listJson.put("articleId",articleId);
                listJson.put("source","7");
                String encodeParam = nodeServerEncrypt(baseRequestBodyInit(listJson),encode);
                HttpRequestBody articleRequestBody = HttpRequestBody.json(encodeParam, CharEncoding.UTF_8);
                articleHttpRequest.setRequestBody(articleRequestBody);
                parsedLinks.add(articleRecord);
            } catch (Exception e) {
                logger.error("item record encode error", e);
            }
        }
        return parsedLinks;
    }

    /**
     * Decodes the encrypted page body and dispatches to article and/or comment
     * washing according to the record's result data-type tags.
     *
     * @return the washed CrawlerData items (empty on decode failure or blank page)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String articleKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        String rawText = httpPage.getRawText();
        if (StringUtils.isBlank(rawText)){
            logger.error("httpPage is empty !");
            return crawlerDataList;
        }
        try {
            // reuse rawText instead of re-reading the page a second time
            rawText = nodeServerEncrypt(rawText, decode);
        } catch (Exception e) {
            logger.error("wash result decode error", e);
            return crawlerDataList;
        }

        JSONObject pageObj = JSONObject.parseObject(rawText);
        String site = crawlerRequestRecord.tagsCreator().bizTags().site();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)){
            washArticle(crawlerRequestRecord,httpPage,pageObj,crawlerDataList,site,articleKey);
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)){
            washComment(crawlerRequestRecord,httpPage,pageObj,crawlerDataList,site,articleKey);
        }

        return crawlerDataList;
    }

    /**
     * Emits the article CrawlerData (title/author/content/images) and, when
     * the record also carries the interaction tag, a second record with
     * likes/comments/collection counts keyed to the article.
     */
    private void washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, JSONObject pageObj, List<CrawlerData> crawlerDataList, String site, String articleKey) {
        JSONObject contentObj = pageObj.getJSONObject("body");
        String title = contentObj.getString("title");
        String author = contentObj.getString("author");
        String source = contentObj.getString("source");
        String authorId = contentObj.getString("userName");
        String htmlSource = contentObj.getString("content");
        // the API reports seconds; CrawlerData expects milliseconds
        long releaseTime = contentObj.getLongValue("onlineDate") * 1000;
        Html html = new Html(htmlSource);
        // plain text of the article body (all text nodes under <section>)
        String content = String.join("", html.xpath("//section//text()").all());
        // image URLs joined with the pipeline's literal "\x01" separator
        StringBuilder sbImage = new StringBuilder();
        for (String image : html.xpath("//p//img/@src").all()) {
            sbImage.append(image).append("\\x01");
        }
        String likes = contentObj.getString("supported");
        String comments = contentObj.getString("totalCommNum");
        String collections = contentObj.getString("collected");
        CrawlerData crawlerAData = CrawlerData.builder()
                .data(crawlerRequestRecord,httpPage)
                .url(String.format(articleUrlFormat,articleKey))
                .dataId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                .releaseTime(releaseTime)
                .addContentKV(Field_Title,title)
                .addContentKV(Field_Author,author)
                .addContentKV(Field_Author_Id,authorId)
                .addContentKV(Field_Source,source)
                .addContentKV(Field_Content,content)
                .addContentKV(Field_Images,sbImage.toString())
                .resultLabelTag(article)
                .build();
        crawlerDataList.add(crawlerAData);
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
            // interaction record is parented to the article record's dataId
            CrawlerData crawlerInteractionData = CrawlerData.builder()
                    .data(crawlerRequestRecord,httpPage)
                    .url(String.format(articleUrlFormat,articleKey))
                    .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),articleKey))
                    .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                    .releaseTime(releaseTime)
                    .addContentKV(Field_I_Comments,comments)
                    .addContentKV(Field_I_Likes,likes)
                    .addContentKV(Field_I_Collection,collections)
                    .resultLabelTag(interaction)
                    .build();
            crawlerDataList.add(crawlerInteractionData);
        }
    }

    /**
     * Emits CrawlerData for every comment in the decoded comment-list page,
     * plus (when requested) an interaction record per comment, and repeats the
     * same for each entry in a comment's {@code replyList}.
     *
     * Note the deliberate asymmetries: replies use the {@code time} field
     * (comments use {@code commentTime}), and reply records are parented to
     * the ARTICLE, not to the comment they reply to.
     */
    private void washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, JSONObject pageObj, List<CrawlerData> crawlerDataList, String site, String articleKey) {
        JSONObject contentObj = pageObj.getJSONObject("body");
        JSONArray cmtJsons = contentObj.getJSONArray("list");
        for (Object cmtJson : cmtJsons) {
            JSONObject cmtObj = (JSONObject)cmtJson;
            String commentId = cmtObj.getString("commentId");
            // API reports seconds; records expect milliseconds
            long releaseTime = cmtObj.getLongValue("commentTime") * 1000;
            String content = cmtObj.getString("content");
            String author = cmtObj.getString("nickName");
            String authorId = cmtObj.getString("uid");
            String comments = cmtObj.getString("num");
            String likes = cmtObj.getString("praise");
            // comment record, parented to the article
            CrawlerData crawlerCData = CrawlerData.builder()
                    .data(crawlerRequestRecord,httpPage)
                    .url(String.format(articleUrlFormat,articleKey))
                    .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                    .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Author,author)
                    .addContentKV(Field_Author_Id,authorId)
                    .addContentKV(Field_Content,content)
                    .resultLabelTag(comment)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .build();
            crawlerDataList.add(crawlerCData);
            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                // per-comment interaction counts, parented to the comment record
                CrawlerData crawlerInteractionData = CrawlerData.builder()
                        .data(crawlerRequestRecord,httpPage)
                        .url(String.format(articleUrlFormat,articleKey))
                        .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),commentId))
                        .parentId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                        .releaseTime(releaseTime)
                        .addContentKV(Field_I_Comments,comments)
                        .addContentKV(Field_I_Likes,likes)
                        .resultLabelTag(interaction)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .build();
                crawlerDataList.add(crawlerInteractionData);
            }
            JSONArray replyList = cmtObj.getJSONArray("replyList");
            if (null != replyList && replyList.size() > 0){
                for (Object reply : replyList) {
                    JSONObject repObj = (JSONObject)reply;
                    String rId = repObj.getString("commentId");
                    // replies use "time" rather than "commentTime"
                    long rReleaseTime = repObj.getLongValue("time") * 1000;
                    String rContent = repObj.getString("content");
                    String rAuthor = repObj.getString("nickName");
                    String rAuthorId = repObj.getString("uid");
                    String rComments = repObj.getString("num");
                    String rLikes = repObj.getString("praise");
                    // reply record — note it is parented to the ARTICLE, not the
                    // parent comment; presumably intentional flattening — TODO confirm
                    CrawlerData crawlerRData = CrawlerData.builder()
                            .data(crawlerRequestRecord,httpPage)
                            .url(String.format(articleUrlFormat,articleKey))
                            .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),rId))
                            .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                            .releaseTime(rReleaseTime)
                            .addContentKV(Field_Author,rAuthor)
                            .addContentKV(Field_Author_Id,rAuthorId)
                            .addContentKV(Field_Content,rContent)
                            .resultLabelTag(comment)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .build();
                    crawlerDataList.add(crawlerRData);
                    if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                        CrawlerData crawlerInteractionData = CrawlerData.builder()
                                .data(crawlerRequestRecord,httpPage)
                                .url(String.format(articleUrlFormat,articleKey))
                                .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),rId))
                                .parentId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),rId))
                                .releaseTime(rReleaseTime)
                                .addContentKV(Field_I_Comments,rComments)
                                .addContentKV(Field_I_Likes,rLikes)
                                .resultLabelTag(interaction)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .build();
                        crawlerDataList.add(crawlerInteractionData);
                    }
                }
            }
        }
    }

    /**
     * Builds the initial list requests from support source records (the
     * keyword meta service); falls back to the default behavior when no
     * support records are supplied.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (null == supportSourceRecords || supportSourceRecords.size() == 0){
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String supportUrl = supportSourceRecord.getHttpRequest().getUrl();
            if (supportUrl.matches(keysRegex)){
                initKeywordsRecord(allItemRecords,requestRecord,supportSourceRecord);
            }
        }
        return allItemRecords;
    }

    /**
     * Builds one search-list request per keyword returned by the keys meta
     * service and appends it to {@code allItemRecords}.
     */
    private void initKeywordsRecord(List<CrawlerRecord> allItemRecords, CrawlerRequestRecord requestRecord, CrawlerRequestRecord keywordRecord) {
        try {
            JSONObject response = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
            if (response.getIntValue("status") != 0) {
                return;
            }
            JSONArray entries = response.getJSONArray("content");
            for (Object entry : entries) {
                String keyword = ((JSONObject) entry).getString("keyword");
                CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(listUrl)
                        .recordKey(listUrl + System.currentTimeMillis())
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                HttpRequest listHttpRequest = listRecord.getHttpRequest();
                listHttpRequest.setMethod(HttpConstant.Method.POST);

                // first page of the keyword search, page size 50
                JSONObject searchBody = new JSONObject();
                searchBody.put("content",keyword);
                searchBody.put("start",0);
                searchBody.put("limit",50);
                String encodeParam = nodeServerEncrypt(baseRequestBodyInit(searchBody),encode);
                listHttpRequest.setRequestBody(HttpRequestBody.json(encodeParam, CharEncoding.UTF_8));

                listRecord.tagsCreator().bizTags().addKeywords(keyword);
                allItemRecords.add(listRecord);
            }
        } catch (Exception e) {
            logger.error("from keywords init urls failed");
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Calls the local Node.js helper service to encrypt or decrypt a payload,
     * retrying until a non-blank response arrives (at most 20 attempts).
     *
     * @param str    payload to transform
     * @param method {@link #encode} or {@link #decode}
     * @return the transformed text
     * @throws Exception after 20 consecutive failed attempts
     */
    private String nodeServerEncrypt(String str, String method) throws Exception {
        HttpPage httpPage = null;
        int times = 0;
        while (null == httpPage || StringUtils.isBlank(httpPage.getRawText())){
            times ++;
            if (times > 20){
                throw new Exception("node server encrypt error: more than 20 times request failed");
            }
            try {
                HttpConfig httpConfig = HttpConfig.me("node_encrypt");
                HttpRequest httpRequest = new HttpRequest();
                httpRequest.setUrl(nodeServerApi + method);
                httpRequest.setMethod(HttpConstant.Method.POST);
                // the body is declared as UTF-8: encode the bytes explicitly
                // instead of relying on the platform default charset
                HttpRequestBody httpRequestBody = HttpRequestBody.custom(str.getBytes(StandardCharsets.UTF_8), HttpRequestBody.ContentType.JSON, CharEncoding.UTF_8);
                httpRequest.setRequestBody(httpRequestBody);
                httpPage = downloader.download(httpRequest,httpConfig);
            } catch (Exception e) {
                // keep the stack trace; the loop will retry
                logger.error("node server encrypt request error", e);
            }
        }
        return httpPage.getRawText();
    }


    /** Registers the URL patterns this script handles. */
    @Override
    public void initUrlRegulars() {
        for (String regex : new String[]{listUrlRegex, articleUrlRegex, commentUrlRegex}) {
            addUrlRegular(regex);
        }
    }

    /**
     * Accepts only records whose business "site" tag matches {@link #scriptSite}.
     *
     * @return {@code true} when this script should handle the record
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        String crawlerSite = crawlerRequestRecord.tagsCreator().bizTags().site();
        // compare from the constant side: a missing site yields false, not an NPE
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /** No post-execution cleanup is needed for this script. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /** @return the crawler domain identifier for this script. */
    @Override
    public String domain() {
        return domain;
    }

    /**
     * Wraps a business payload in the site's common request envelope
     * (device/platform metadata, timestamp, app version).
     *
     * @param body business parameters to send
     * @return the full request body serialized as a JSON string
     */
    public static String baseRequestBodyInit(JSONObject body){
        String guid = UUID.randomUUID().toString();
        // the server expects the timestamp in seconds
        int timestampSeconds = (int) (System.currentTimeMillis() / 1000);
        JSONObject envelope = new JSONObject();
        envelope.put("brand","");
        envelope.put("guid",guid);
        envelope.put("model","");
        envelope.put("platform","PC");
        envelope.put("sysVersion","");
        envelope.put("time",timestampSeconds);
        envelope.put("uid",0);
        envelope.put("version","4.8.0");
        envelope.put("session","");
        envelope.put("body",body);
        return envelope.toJSONString();
    }

    /**
     * Re-queues a failed download as a new request record, tracking the retry
     * count in the "download_retry_count" biz tag and giving up after 100.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){

        int count = 1;
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        if (crawlerBusinessTags.hasKVTag("download_retry_count")){
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            if (count >= 100){
                logger.error("{} download has number of retries exceeds the limit" +
                        ",request url {}",domain,crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        // bump the counter BEFORE building the record: copyBizTags() below must
        // pick up the updated value
        count++;
        crawlerBusinessTags.addCustomKV("download_retry_count",count);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();
        crawlerRequestRecords.add(crawlerRequestRecord);

        // preserve the original request's type: a turn-page-item request must
        // not be re-labelled as a plain turn-page request
        if(crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)){
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            crawlerRequestRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        // carry over the parse/wash flags from the failed record
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
    }

    /**
     * Checks whether the page download succeeded and is complete.
     *
     * @param crawlerRequestRecord last record
     * @param httpPage             page
     * @return {@code true} when the download failed (non-200 status, failure
     *         flag, or blank body) and the request should be retried
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = lastRequest.getUrl();
        int statusCode = httpPage.getStatusCode();
        if (statusCode != 200) {
            logger.error("download page {} error, status code is {}", lastRequestUrl, statusCode);
            return true;
        }
        if (!httpPage.isDownloadSuccess()) {
            logger.error("download page failed, check your link {}", lastRequestUrl);
            return true;
        }
        // use the already-imported StringUtils instead of the fully-qualified name
        if (StringUtils.isBlank(httpPage.getRawText())) {
            logger.error("download page empty, check your link {}", lastRequestUrl);
            return true;
        }
        return false;
    }
}
