package com.chance.cc.crawler.development.scripts.autohome.forum;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Field_Images;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-06-29 13:59:25
 * @email okprog@sina.com
 */
public class AutoHomeForumCrawlerScript extends CrawlerCommonScript {

    // SLF4J convention: loggers are static final — one shared instance per class.
    private static final Logger logger = LoggerFactory.getLogger(AutoHomeForumCrawlerScript.class);

    // Topic-list API template; fill order: page_num, club_bbs_id (forum key), club_order_type.
    // Was a mutable static; nothing reassigns it, so it is now final.
    private static final String listUrlsFormat = "https://club.autohome.com.cn/frontapi/data/page/club_get_topics_list?" +
            "page_num=%s&page_size=50&club_bbs_type=c&club_bbs_id=%s&club_order_type=%s"; //page   bbsId  order
    // View/reply counters for a topic id.
    private static final String viewsUrlFormat = "https://club.autohome.com.cn/frontapi/getclicksandreplys?topicids=%s";
    // Like list for a topic id.
    private static final String likesUrlFormat = "https://club.api.autohome.com.cn/web/zan/list?input=%s-";
    // Author profile fragment for a member id.
    private static final String authorInfoFormat = "https://club.autohome.com.cn/frontnc/user/getdetailusertpl/%s-0";
    private static final String homeUrlFormat = "https://www.autohome.com.cn/%s/";
    // Author home page on i.autohome.com.cn (follower info).
    private static final String followsUrlFormat = "https://i.autohome.com.cn/%s";
    // Paginated comment API for a (topicId, replyId) floor.
    private static final String commentsFormat = "https://club.autohome.com.cn/frontapi/comment/getcommentwithpagination?topicId=%s&replyId=%s&pageIndex=1&pageSize=50";

    // URL classification patterns used by parseLinks to dispatch downloaded pages.
    private static final String indexRegex = "https?://www\\.autohome\\.com\\.cn/";
    private static final String homeRegex = "https?://www\\.autohome\\.com\\.cn/\\d*/";
    private static final String listRegex = "https://club\\.autohome\\.com\\.cn/frontapi/data/page/club_get_topics_list\\S*";
    private static final String articleRegex = "https?://club\\.autohome\\.com\\.cn/bbs/thread/\\S*/\\d*-\\d*\\.html\\S*";
    private static final String articleFirstRegex = "https?://club\\.autohome\\.com\\.cn/bbs/thread/\\S*/\\d*-1\\.html\\S*";
    private static final String articleCommentsRegex = "https?://club\\.autohome\\.com\\.cn/frontapi/comment/getcommentwithpagination\\S*";
    private static final String followsUrlRegex = "https://i\\.autohome\\.com\\.cn/\\d*";
    private static final String keysRegex = "https?://\\S*v1/meta/autohome/keys\\S*";
    private static final String parseFontRegex = "https?://\\S*/crawler/font/api/v1/parseTTFont";

    private static final String domain = "autohome";
    private static final String scriptSite = "forum";
    private static final String OrderType = "order_type";
    // NOTE(review): hard-coded internal font-decoding service address — presumably
    // environment-specific; consider externalizing to configuration.
    private static final String parseFontUrl = "http://192.168.1.217:9599/crawler/font/api/v1/parseTTFont";

    @Override
    /**
     * Entry point for link extraction. Dispatches the downloaded page to the
     * handler matching the request URL (home, list, article or comment API);
     * a failed download is rescheduled for retry instead.
     *
     * @return the parsed follow-up requests, or {@code null} when the URL
     *         matches none of the known patterns
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> links = new ArrayList<>();
        // Download failure: requeue for retry and skip page washing.
        if (doHttpPageCheck(crawlerRequestRecord, httpPage)) {
            logger.error("{} request download has error, status code {},error info [{}] ,will retry", domain,
                    httpPage.getStatusCode(), httpPage.getRawText());
            addCrawlerRecords(links, crawlerRequestRecord, true);
            crawlerRequestRecord.setNeedWashPage(false); // download failed, nothing to wash
            return links;
        }
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        if (requestUrl.matches(homeRegex)) {
            return parseHomeLinks(crawlerRequestRecord, httpPage, links);
        } else if (requestUrl.matches(listRegex)) {
            return parseListLinks(crawlerRequestRecord, httpPage, links);
        } else if (requestUrl.matches(articleRegex)) {
            return parseArticleLinks(crawlerRequestRecord, httpPage, links);
        } else if (requestUrl.matches(articleCommentsRegex)) {
            return parseCommentLinks(crawlerRequestRecord, httpPage, links);
        }
        // Unrecognized URL shape — caller treats null as "no links".
        return null;
    }

    /**
     * Handles one comment-API JSON page. On success it (a) collects the hex code
     * points of single non-Chinese characters in the comment text and, when a
     * TTF obfuscation font URL is present in the biz tags, emits an internal
     * request to the font-decoding service, and (b) emits an internal request
     * for the comment author's profile. An invalid or failed JSON response is
     * rescheduled for retry instead.
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        boolean flag = false;
        try {
            // The API signals success with returncode == 0.
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            if (jsonObject.getIntValue("returncode") != 0){
                logger.info("comment json download failed");
                flag = true;
            }
        } catch (Exception e) {
            // Body was not valid JSON — treat the download as failed.
            logger.info("comment json parse failed");
            flag = true;
        }
        if (flag){
            logger.error("{} comment links download has error, status code {},error info [{}] ,will retry", domain,
                    httpPage.getStatusCode(), httpPage.getRawText());
            addCrawlerRecords(parsedLinks, crawlerRequestRecord,false);
            crawlerRequestRecord.setNeedWashPage(false); // download failed, no page washing needed
            return parsedLinks;
        }
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String ttfUrl = categoryTag.getKVTagStrVal("ttfUrl");
        // Comment text fragments stashed on the record by genCommentRecords.
        List<String> allContent = castList(categoryTag.getKVTag("comContents").getVal() ,String.class);
        Set<String> hexList = new HashSet<>();
        for (String text : allContent) {
            // A single non-Chinese character is (presumably) a glyph obfuscated by
            // the site's custom TTF font; collect its hex code point for decoding.
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            if (codes.length == 1){
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0])){
                    hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                }
            }
        }
        if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("cmtHexList",hexList);
            // Internal download: ask the font service for the ttf glyph map.
            CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                    .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpUrl(parseFontUrl)
                    .recordKey(parseFontUrl)
                    .needParsed(false)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpConfig(HttpConfig.me(domain()))
                    .build();

            // The request the font service should perform to fetch the ttf itself.
            HttpRequest ttfRequest = new HttpRequest();
            ttfRequest.setUrl(ttfUrl);
            ttfRequest.addHeader("Origin","https://club.autohome.com.cn");
            ttfRequest.addHeader("Referer","https://club.autohome.com.cn/");
            ttfRequest.addHeader("User-Agent",getRandomUA());

            HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
            httpConfig.setResponseTextGenerateHtml(false);
            HttpRequest httpRequest = parseFontRecord.getHttpRequest();
            // POST body: the fetch request, config, and the code points to decode.
            Map<String,Object> params = new HashMap<>();
            params.put("httpRequest",ttfRequest);
            params.put("httpConfig",httpConfig);
            params.put("needParseList",hexList);
            params.put("domain",domain());
            httpRequest.setMethod(HttpConstant.Method.POST);
            httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
            parsedLinks.add(parseFontRecord);
        }
        // Internal download for the comment author's profile page.
        String authorInfoUrl = String.format(authorInfoFormat,categoryTag.getKVTagStrVal("authorId"));
        CrawlerRequestRecord itemInternalInfoRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(authorInfoUrl)
                .httpUrl(authorInfoUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .needParsed(false)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();

        itemInternalInfoRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
        itemInternalInfoRecord.getHttpRequest().addHeader("Referer", categoryTag.getKVTagStrVal("topicUrl"));
        itemInternalInfoRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
        itemInternalInfoRecord.getHttpRequest().setResponseCharset("UTF-8");
        parsedLinks.add(itemInternalInfoRecord);
        return parsedLinks;
    }

    /**
     * Handles one article (thread) page. Extracts the topic id, the obfuscation
     * TTF font URL and the author id; on page 1 it jumps to the last page,
     * schedules the internal counter/author downloads and the font decoding;
     * on later pages it pages backwards (page N -> N-1). When comment crawling
     * is enabled it also emits one comment-API request per floor on the page.
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Anti-bot interstitial page — retry the download.
        if (httpPage.getRawText().contains("尊敬的用户您好，您的访问出现异常，为确认本次访问为正常用户行为")) {
            logger.error("{} article page download has error, status code {},error info [尊敬的用户您好，您的访问出现异常，为确认本次访问为正常用户行为] ,will retry", domain,
                    httpPage.getStatusCode());
            addCrawlerRecords(parsedLinks, crawlerRequestRecord,true);
            crawlerRequestRecord.setNeedWashPage(false); // download failed, no page washing needed
            return parsedLinks;
        }
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        // URL shape: .../thread/<...>/<topicId>-<page>.html
        String topicId = httpRequestUrl.substring(httpRequestUrl.lastIndexOf("/") + 1).split("-")[0];
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("topicId",topicId);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("topicUrl",httpRequestUrl);
        int currentPage = Integer.parseInt(httpRequestUrl.substring(httpRequestUrl.lastIndexOf("-") + 1).split("\\.")[0]);
        String pageBaseUrl = httpRequestUrl.split(topicId)[0];
        // Obfuscation font URL embedded in the page CSS (last match wins).
        String ttfUrl = "";
        Matcher ttfMatcher = Pattern.compile("url\\('//k3\\.autoimg\\.cn/g\\d*/\\w*/\\w*/\\S*/\\S*\\.\\.ttf'\\)\\s*format").matcher(httpPage.getRawText());
        while (ttfMatcher.find()) {
            ttfUrl = "https:" + ttfMatcher.group(0).split("'")[1];
        }
        // Author id from inline script, falling back to a DOM attribute.
        String articleAuthorId = "";
        Matcher mtUid = Pattern.compile("topicMemberId:\\s*\\d*").matcher(httpPage.getRawText());
        while(mtUid.find()){
            articleAuthorId = mtUid.group(0).split(":")[1].trim();
        }
        if (StringUtils.isBlank(articleAuthorId)){
            articleAuthorId = httpPage.getHtml().xpath("//*[@id=\"js-sticky-user\"]/@data-user-id").get();
        }
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("articleAuthorId",articleAuthorId);
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
        // Comment crawling requires filter info carried over from scheduling.
        CrawlerRecord filterInfoRecord = null;
        if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null) {
            if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                logger.error("{} crawler comment need to filter information!",domain);
                return parsedLinks;
            }
            KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
            filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
        }
        /* On page 1, jump straight to the last page (paging then runs backwards). */
        if (1 == currentPage) {
            /* Total page count drives the jump. */
            int totalPages = getTotalPages(httpPage);
            if (null != filterInfoRecord && totalPages > currentPage){
                String articlePageUrl = pageBaseUrl + topicId + "-" + totalPages + ".html";
                CrawlerRequestRecord turnPageRecord = CrawlerRequestRecord.builder()
                        .startPageRequest(domain,turnPage)
                        .recordKey(articlePageUrl)
                        .httpUrl(articlePageUrl)
                        .httpHeads(httpRequest.getHeaders())
                        .httpConfig(crawlerRequestRecord.getHttpConfig())
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .build();
                // Carry both biz and schedule tags onto the jump request.
                CategoryTag bizTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
                turnPageRecord.tagsCreator().addCategoryTag(bizTag);
                CategoryTag scheduleTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
                turnPageRecord.tagsCreator().addCategoryTag(scheduleTag);
                turnPageRecord.setFilter(filterInfoRecord.getFilter());
                turnPageRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                parsedLinks.add(turnPageRecord);
            }
            /* Internal downloads: counters, likes, author profile, follows. */
            genInternalDownloadRecords(crawlerRequestRecord, parsedLinks, httpRequestUrl, topicId, articleAuthorId);
            // Font decoding for obfuscated characters.
            genArticleTTFRecord(crawlerRequestRecord, httpPage, parsedLinks, ttfUrl);
        } else if (currentPage > 2){
            // Page backwards by decrementing the page number.
            // NOTE(review): page 2 neither pages backwards nor matches the page-1
            // branch — confirm this gap is intentional.
            int pageNum = currentPage - 1;
            String articlePageUrl = pageBaseUrl + topicId + "-" + pageNum + ".html";
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(articlePageUrl)
                    .httpUrl(articlePageUrl)
                    .httpHeads(httpRequest.getHeaders())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.turnPageItem)
                    .needWashed(false)
                    .copyBizTags()
                    .build();
            parsedLinks.add(turnPageRequest);
        }

        // Parse the floor list of this page, newest floor first.
        if (null != filterInfoRecord){
            List<Selectable> commentNodes = httpPage.getHtml().xpath("//ul[@class=\"reply-wrap\"]/li").nodes();
            Collections.reverse(commentNodes);
            for (Selectable commentNode : commentNodes) {
                genCommentRecords(crawlerRequestRecord, parsedLinks, httpRequestUrl, topicId, commentNode);
            }
        }

        return parsedLinks;
    }

    /**
     * Builds an item request for one comment floor of an article page and adds
     * it to {@code parsedLinks}. Floors without a reply id are skipped, as are
     * floors whose reply timestamp cannot be parsed (best effort).
     *
     * @param httpRequestUrl the article page URL, used as the Referer header
     * @param commentNode    the {@code <li>} node of one floor
     */
    private void genCommentRecords(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parsedLinks, String httpRequestUrl, String topicId, Selectable commentNode) {
        String commentId = commentNode.xpath("./@data-reply-id").get();
        if (StringUtils.isBlank(commentId)) {
            return;
        }
        String commentUrl = String.format(commentsFormat, topicId, commentId);
        String authorId = commentNode.xpath("./@data-member-id").get();
        String floor = commentNode.xpath("./@data-floor").get();
        String replyTime = commentNode.xpath("./div/div[@class=\"reply\"]/div/span[@class=\"reply-static-text fn-fl\"]/strong/text() | ./div[@class=\"reply\"]/div/div/div/span[@class=\"reply-static-text fn-fl\"]/strong/text()").get();
        long releaseTime = crawlerRequestRecord.getReleaseTime();
        try {
            releaseTime = DateUtils.parseDate(replyTime, "yyyy-MM-dd HH:mm:ss").getTime();
        } catch (Exception e) {
            // Unparsable floor timestamp: skip the floor rather than emit a record
            // with a wrong release time. (Was logger.warn(msg) without the cause.)
            logger.warn("parse comment date error", e);
            return;
        }

        try {
            CrawlerRequestRecord itemCommentRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .recordKey(commentUrl)
                    .httpUrl(commentUrl)
                    .releaseTime(releaseTime)
                    .resultLabelTag(comment)
                    .resultLabelTag(interaction)
                    .copyBizTags()
                    .build();
            // Stash the visible floor text so parseCommentLinks can collect the
            // obfuscated glyph code points later.
            List<String> allContent = commentNode.xpath(".//div[@class=\"reply\"]//div[@class=\"reply-detail\"]//text()").all();
            itemCommentRecord.tagsCreator().bizTags().addCustomKV("comContents", allContent);
            itemCommentRecord.tagsCreator().bizTags().addCustomKV("authorId", authorId);
            itemCommentRecord.tagsCreator().bizTags().addCustomKV("commentId", commentId);
            itemCommentRecord.tagsCreator().bizTags().addCustomKV("author", "");
            itemCommentRecord.tagsCreator().bizTags().addCustomKV("floor", floor);
            itemCommentRecord.getHttpRequest().addHeader("Host", "club.autohome.com.cn");
            itemCommentRecord.getHttpRequest().addHeader("Referer", httpRequestUrl);
            itemCommentRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());
            itemCommentRecord.getHttpRequest().setResponseCharset("UTF-8");
            itemCommentRecord.tagsCreator().bizTags().getCategoryTag().removeLabelTag("download_retry_count");
            parsedLinks.add(itemCommentRecord);
        } catch (Exception e) {
            // BUGFIX: was logger.error(e.getMessage(), "parse comment error") —
            // the exception message became the format string and the throwable
            // (with its stack trace) was dropped.
            logger.error("parse comment error", e);
        }
    }

    /**
     * Collects the obfuscated glyph code points from the article body and, when
     * a TTF font URL was found on the page, emits an internal POST request to
     * the font-decoding service. Also stashes the ttf URL, the raw contents and
     * the code-point list in the record's biz tags for later reassembly.
     */
    private void genArticleTTFRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, String ttfUrl) {
        List<String> contents = httpPage.getHtml().xpath("//div[@class=\"post-container\"]//text() | //div[@class=\"post-container post-container--qa\"]//text()").all();
        Set<String> hexList = new HashSet<>();
        for (String text : contents) {
            // A single non-Chinese character is (presumably) a glyph obfuscated by
            // the site's custom TTF font; collect its hex code point for decoding.
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            if (codes.length == 1){
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0])){
                    hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                }
            }
        }
        if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("ttfUrl",ttfUrl);
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("articleContents",contents);
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("hexList",hexList);
            // Internal download: ask the font service for the ttf glyph map.
            CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                    .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpUrl(parseFontUrl)
                    .recordKey(parseFontUrl)
                    .needParsed(false)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpConfig(HttpConfig.me(domain()))
                    .build();

            // The request the font service should perform to fetch the ttf itself.
            HttpRequest ttfRequest = new HttpRequest();
            ttfRequest.setUrl(ttfUrl);
            ttfRequest.addHeader("Origin","https://club.autohome.com.cn");
            ttfRequest.addHeader("Referer","https://club.autohome.com.cn/");
            ttfRequest.addHeader("User-Agent",getRandomUA());

            HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
            httpConfig.setResponseTextGenerateHtml(false);
            HttpRequest httpRequest = parseFontRecord.getHttpRequest();
            // POST body: the fetch request, config, and the code points to decode.
            Map<String,Object> params = new HashMap<>();
            params.put("httpRequest",ttfRequest);
            params.put("httpConfig",httpConfig);
            params.put("needParseList",hexList);
            params.put("domain",domain());
            httpRequest.setMethod(HttpConstant.Method.POST);
            httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
            parsedLinks.add(parseFontRecord);
        }
    }

    /**
     * Emits the four auxiliary internal-download requests for an article:
     * view/reply counters, the like list, the author profile fragment and the
     * author's home page (follow info). All are appended to {@code parsedLinks}.
     *
     * @param httpRequestUrl the article page URL, used as the Referer header
     */
    private void genInternalDownloadRecords(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parsedLinks, String httpRequestUrl, String topicId, String authorId) {
        // 1) View and reply counters.
        String viewsUrl = String.format(viewsUrlFormat, topicId);
        CrawlerRequestRecord viewsRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(viewsUrl)
                .httpUrl(viewsUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .needParsed(false)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        HttpRequest viewsRequest = viewsRecord.getHttpRequest();
        viewsRequest.addHeader("Host", "club.autohome.com.cn");
        viewsRequest.addHeader("Referer", httpRequestUrl);
        viewsRequest.addHeader("User-Agent", getRandomUA());
        viewsRequest.setResponseCharset("UTF-8");
        parsedLinks.add(viewsRecord);

        // 2) Like list (served from the api subdomain).
        String likesUrl = String.format(likesUrlFormat, topicId);
        CrawlerRequestRecord likesRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(likesUrl)
                .httpUrl(likesUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .needParsed(false)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        HttpRequest likesRequest = likesRecord.getHttpRequest();
        likesRequest.addHeader("Host", "club.api.autohome.com.cn");
        likesRequest.addHeader("Referer", httpRequestUrl);
        likesRequest.addHeader("User-Agent", getRandomUA());
        likesRequest.setResponseCharset("UTF-8");
        parsedLinks.add(likesRecord);

        // 3) Author profile fragment (washed, not parsed).
        String authorInfoUrl = String.format(authorInfoFormat, authorId);
        CrawlerRequestRecord authorInfoRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(authorInfoUrl)
                .httpUrl(authorInfoUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .needParsed(false)
                .needWashed(true)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        HttpRequest authorInfoRequest = authorInfoRecord.getHttpRequest();
        authorInfoRequest.addHeader("Host", "club.autohome.com.cn");
        authorInfoRequest.addHeader("Referer", httpRequestUrl);
        authorInfoRequest.addHeader("User-Agent", getRandomUA());
        authorInfoRequest.setResponseCharset("UTF-8");
        parsedLinks.add(authorInfoRecord);

        // 4) Author home page on i.autohome.com.cn (follow info; washed, not parsed).
        String followsUrl = String.format(followsUrlFormat, authorId);
        CrawlerRequestRecord followsRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(followsUrl)
                .httpUrl(followsUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .needParsed(false)
                .needWashed(true)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        HttpRequest followsRequest = followsRecord.getHttpRequest();
        followsRequest.addHeader("Host", "i.autohome.com.cn");
        followsRequest.addHeader("Referer", "https://club.autohome.com.cn/");
        followsRequest.addHeader("User-Agent", getRandomUA());
        followsRequest.setResponseCharset("UTF-8");
        parsedLinks.add(followsRecord);
    }

    /**
     * Reads the pager text of an article page (fragments like "/" and "N页")
     * and returns the total page count. Falls back to 1 when the pager is
     * absent or its digits cannot be parsed (single-page threads).
     */
    private int getTotalPages(HttpPage httpPage) {
        StringBuilder pager = new StringBuilder();
        for (String fragment : httpPage.getHtml().xpath("//div[@class=\"athm-page__editor\"]/text()").all()) {
            pager.append(fragment.trim());
        }
        // Strip the separator and the "页" (page) suffix, leaving the digits.
        String pageNum = pager.toString().replace("/", "").replace("页", "");
        try {
            // Removed the unused local `totalPages` from the original.
            return Integer.parseInt(pageNum);
        } catch (NumberFormatException e) {
            // No pager rendered — treat the thread as a single page.
            return 1;
        }
    }

    /**
     * Handles one topic-list API JSON page: schedules the next list page and one
     * article item request per topic in the result. The release time of an item
     * comes from publish_time (order_type 2) or club_topic_lastPostDate
     * (order_type 1); items with an unknown order type or no pc_url are skipped.
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String orderType = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(OrderType);

        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        if (null != pageObj && pageObj.getIntValue("returncode") == 0) {
            HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
            Map<String, Object> urlParams = getUrlParams(httpRequest.getUrl());
            if (null == urlParams) {
                logger.error("{} parse list url params get [null], list url is [{}]", domain, httpRequest.getUrl());
                return parsedLinks;
            }
            // Turn the page by incrementing page_num of the current request.
            int currentPageIndex = Integer.parseInt((String) urlParams.get("page_num"));
            String forumKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("forumKey");
            String nextPageUrl = String.format(listUrlsFormat, (currentPageIndex + 1), forumKey, orderType);

            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .httpHeads(httpRequest.getHeaders())
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            parsedLinks.add(turnPageRequest);

            JSONArray jsonArray = pageObj.getJSONObject("result").getJSONArray("items");
            for (Object itemObject : jsonArray) {
                try {
                    JSONObject itemJsonObject = (JSONObject) itemObject;
                    String itemUrl = itemJsonObject.getString("pc_url");
                    if (StringUtils.isBlank(itemUrl)) {
                        logger.error("pc_url null");
                        continue;
                    }
                    if (itemUrl.startsWith("http:")) {
                        itemUrl = itemUrl.replace("http:", "https:");
                    }
                    String title = itemJsonObject.getString("title");
                    // Which date field applies depends on the list ordering.
                    String postdate = "";
                    switch (orderType) {
                        case "2":
                            postdate = itemJsonObject.getString("publish_time");
                            break;
                        case "1":
                            postdate = itemJsonObject.getString("club_topic_lastPostDate");
                            break;
                        default:
                            logger.error("can not get item date");
                            continue;
                    }

                    long releaseTime = DateUtils.parseDate(postdate, "yyyy/MM/dd HH:mm:ss").getTime();
                    CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRequestRecord)
                            .recordKey(itemUrl)
                            .httpUrl(itemUrl)
                            .releaseTime(releaseTime)
                            .resultLabelTag(article)
                            .resultLabelTag(interaction)
                            .copyBizTags()
                            .build();
                    itemRecord.getHttpRequest().addHeader("Host", "club.autohome.com.cn");
                    itemRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());
                    itemRecord.getHttpRequest().setResponseCharset("UTF-8");
                    itemRecord.tagsCreator().bizTags().addCustomKV("title", title);
                    parsedLinks.add(itemRecord);
                } catch (Exception e) {
                    // BUGFIX: was logger.error(e.getMessage()) — the exception message
                    // became the format string and the stack trace was lost.
                    logger.error("parse list item error", e);
                }
            }
        } else {
            logger.error("{} list URL download failed , download page is [{}]", domain, httpPage.getRawText());
        }
        return parsedLinks;
    }

    /**
     * Turns a series home page into the first topic-list API request, carrying
     * the brand/series metadata scraped from the page as business tags.
     * Requires the "order_type" biz tag; without it no request is emitted.
     */
    private List<CrawlerRequestRecord> parseHomeLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String homePageUrl = lastRequest.getUrl();
        String orderType = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(OrderType);
        if (StringUtils.isBlank(orderType)) {
            logger.error("{} need [order_type] to fill forum url, it can't be empty", domain);
            return parsedLinks;
        }
        Html html = httpPage.getHtml();
        String brandName = html.xpath("//div[@class=\"container\"]/div/a[2]/text()|//div[@class=\"path\"]/a[3]/text()").get();
        String seriesName = html.xpath("//div[@class=\"athm-sub-nav__car__name\"]//h1//text()|//div[@class=\"subnav-title-name\"]/a/text()").get();
        // The path segment after "cn/" is the bbs id (forum key) the list API expects.
        String forumKey = homePageUrl.split("cn/")[1].replace("/", "");
        String firstListUrl = String.format(listUrlsFormat, 1, forumKey, orderType);
        CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(firstListUrl)
                .recordKey(firstListUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();
        // Series metadata travels as a single-entry list of maps.
        Map<String, String> seriesEntry = new HashMap<>();
        seriesEntry.put("series_name", seriesName);
        seriesEntry.put("series_url", homePageUrl);
        seriesEntry.put("series_id", forumKey);
        List<Map<String, String>> seriesList = new ArrayList<>();
        seriesList.add(seriesEntry);
        listRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series, seriesList);
        listRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brandName);
        listRecord.tagsCreator().bizTags().addCustomKV("forumKey", forumKey);
        HttpRequest listHttpRequest = listRecord.getHttpRequest();
        listHttpRequest.addHeader("Host", "club.autohome.com.cn");
        listHttpRequest.addHeader("User-Agent", getRandomUA());
        parsedLinks.add(listRecord);
        return parsedLinks;
    }

    /**
     * Enriches the main topic record with data gathered by its supporting
     * (internal) downloads: the font de-obfuscation table, reply/view counts,
     * like counts, author profile fields and follower count — all stored as
     * business tags on {@code crawlerRecord}. A failed mandatory download
     * re-queues the record via {@link #addCrawlerRecords} and skips washing.
     *
     * @param crawlerRecord           main topic request record to enrich
     * @param internalDownloadRecords supporting downloads performed for it
     * @param links                   output list that retry records are appended to
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        //获取回复数 放入extra (collect counts and author info into biz tags)
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            HttpRequest internalDownloadRecordHttpRequest = internalDownloadRecord.getHttpRequest();
            String internalRequestUrl = internalDownloadRecordHttpRequest.getUrl();
            HttpPage downloadPage = internalDownloadRecord.getInternalDownloadPage();
            if (downloadPage.isDownloadSuccess()) {
                if (internalRequestUrl.matches(parseFontRegex)) {
                    JSONObject pageObj = JSONObject.parseObject(downloadPage.getRawText());
                    if (pageObj.getIntValue("status") == 0) {
                        JSONObject fonts = pageObj.getJSONObject("content");
                        crawlerRecord.tagsCreator().bizTags().addCustomKV("fonts", fonts);
                    } else {
                        // The font table is mandatory for decoding body text; retry the record.
                        addCrawlerRecords(links, crawlerRecord, false);
                        crawlerRecord.setNeedWashPage(false);
                        return;
                    }
                } else {
                    String urlSplit = internalRequestUrl.split("cn/")[1];
                    //获取views 和 reply (views / reply counts from the front API)
                    if (urlSplit.startsWith("frontapi")) {
                        try {
                            JSONObject jsonObject = JSONObject.parseObject(downloadPage.getRawText());
                            JSONArray results = jsonObject.getJSONArray("result");
                            JSONObject resultObject = (JSONObject) results.get(0);
                            String comments = resultObject.getString("replys");
                            String allComments = resultObject.getString("allreplys");
                            String views = resultObject.getString("views");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("comments", comments);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("allComments", allComments);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("views", views);
                        } catch (Exception e) {
                            // Was logger.error(e.getMessage(), "get views failed"): the second
                            // argument was silently dropped and the stack trace lost.
                            logger.error("get views failed", e);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("comments", "0");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("allComments", "0");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("views", "0");
                        }
                    }
                    //获取likes (like count from the "web" endpoint's JSONP-ish payload)
                    if (urlSplit.startsWith("web")) {
                        try {
                            JSONObject jsonObject = JSONObject.parseObject(downloadPage.getRawText().split("\\[")[1].split("]")[0]);
                            String likes = jsonObject.getString("z");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("likes", likes);
                        } catch (Exception e) {
                            // Missing/unparsable payload: default to zero likes.
                            String likes = "0";
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("likes", likes);
                            logger.debug(downloadPage.getRawText());
                        }
                    }
                    //获取作者信息 (author profile from the "frontnc" page)
                    if (urlSplit.startsWith("frontnc")) {
                        try {
                            Html infoHtml = downloadPage.getHtml();
                            String author = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-info\"]//a[@class=\"name\"]/text()").get();
                            String signTime = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile\"]/div/text()").get();
                            String authorAddr = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile\"]/a/text()").get();
                            String jingHua = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][2]/strong/text()").get();
                            String topicCount = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][1]/strong/text()").get();
                            String replyCount = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][3]/strong/text()").get();
                            String identification = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile-rz\"]//a[@class=\"profile-cars-item\"]/@title").get();
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("author", author);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("signTime", signTime);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("authorAddr", authorAddr);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("jingHua", jingHua);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("topicCount", topicCount);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("replyCount", replyCount);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("identification", identification);
                        } catch (Exception e) {
                            // Was logger.warn(e.getMessage(), "get user info failed"): same
                            // SLF4J misuse as above; log the message and throwable properly.
                            logger.warn("get user info failed", e);
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("author", "");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("signTime", "");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("authorAddr", "");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("jingHua", "0");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("topicCount", "0");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("replyCount", "0");
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("identification", "0");
                        }

                    }
                    // Follower count (optional — default to 0 when missing or blank).
                    if (internalRequestUrl.matches(followsUrlRegex)) {
                        try {
                            Html infoHtml = downloadPage.getHtml();
                            String follows = infoHtml.xpath("//div[@class=\"user-lv\"]/a[3]/span/text()").get();
                            if (StringUtils.isBlank(follows)) {
                                follows = "0";
                            }
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("follows", follows);
                        } catch (Exception e) {
                            crawlerRecord.tagsCreator().bizTags().addCustomKV("follows", "0");
                        }
                    }
                }
            } else {
                if (internalRequestUrl.matches(followsUrlRegex)) {
                    // A failed follows download is tolerated; everything else triggers a retry.
                    crawlerRecord.tagsCreator().bizTags().addCustomKV("follows", "0");
                } else {
                    addCrawlerRecords(links, crawlerRecord, false);
                    crawlerRecord.setNeedWashPage(false);
                }
            }
        }
    }

    /**
     * Dispatches page washing by the record's declared result data types.
     * A single page may yield both an article and a comment record, so both
     * branches can contribute to the same result list.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {
            washArticle(results, crawlerRecord, page);
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {
            washComment(results, crawlerRecord, page);
        }
        return results;
    }

    /**
     * Washes a topic page into an article {@link CrawlerData}, plus an optional
     * interaction record carrying comment/view/like counts. Single characters
     * obfuscated via AutoHome's custom font are decoded through the "fonts" tag.
     *
     * @param crawlerDataList      output list the washed records are appended to
     * @param crawlerRequestRecord request record carrying the article's biz tags
     * @param httpPage             the downloaded topic page
     */
    private void washArticle(List<CrawlerData> crawlerDataList, CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String topicId = categoryTag.getKVTagStrVal("topicId");
        Html html = httpPage.getHtml();
        List<String> contents = html.xpath("//div[@class=\"post-container\"]//text() | //div[@class=\"post-container post-container--qa\"]//text()").all();
        StringBuffer content = new StringBuffer();
        KVTag kvTag = categoryTag.getKVTag("fonts");
        for (String text : contents) {
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            // The anti-crawler font only replaces single characters, so only
            // one-codepoint fragments are candidates for decoding.
            if (codes.length == 1) {
                char[] chars = Character.toChars(codes[0]);
                //判断是否是正常中文字符 (ordinary Chinese characters need no decoding)
                if (!isChinese(chars[0]) && null != kvTag) {
                    String code = Integer.toHexString(codes[0]).toUpperCase();
                    JSONObject fonts = (JSONObject) kvTag.getVal();
                    JSONArray fontsArray = fonts.getJSONArray(code);
                    //判断是否存在该字符的中文解密 (append the decoded text when a mapping exists)
                    if (null != fontsArray && fontsArray.size() > 0) {
                        content.append(fontsArray.getJSONObject(0).getString("textCode"));
                        continue;
                    }
                }
            }
            // All non-decoded fragments pass through verbatim.
            content.append(text);
        }
        List<String> images = html.xpath("//div[@class=\"post-container\"]/div[@class=\"tz-picture\"]/img/@data-src | //div[@class=\"post-container \"]/div[@class=\"tz-picture\"]/img/@data-src").all();
        StringBuffer imgs = new StringBuffer();
        for (String image : images) {
            // Image URLs are joined with the project's literal "\0x1" separator.
            imgs.append(image).append("\\0x1");
        }

        String articleTime = html.xpath("//span[@class=\"post-handle-publish\"]/strong/text() | //div[@class=\"post-site\"]/div[@class=\"post-site-txt\"]/strong/text()|//span[@class=\"publish-time\"]/text()|//div[@class=\"post-info\"]/span[2]/strong/text()").get();

        String elite = html.xpath("//span[@class=\"stamp-text\"]/text()").get();
        // "是"/"否" flags are the convention used by downstream consumers.
        String isElite = StringUtils.isNotBlank(elite) ? "是" : "否";

        String forumName = html.xpath("//div[@class=\"name-wrap\"]/div/a/text()").get();

        List<String> allTags = html.xpath("//div[@class=\"post-site\"]/div[@class=\"post-site-tags\"]/a/text()").all();
        List<String> titles = html.xpath("//div[@class=\"post-title\"]//text()").all();
        StringBuilder title = new StringBuilder();
        for (String str : titles) {
            title.append(str.trim());
        }
        String videoWrap = html.xpath("//div[@id=\"videoWrap\"]/@class").get();
        String isVideo = StringUtils.isNotBlank(videoWrap) ? "是" : "否";
        try {
            String site = categoryTag.getKVTagStrVal("site");
            //更新去重信息 (normalize the reply count used for dedup)
            String replyCount = categoryTag.getKVTagStrVal("replyCount") == null ? "0" : categoryTag.getKVTagStrVal("replyCount");
            if (replyCount.endsWith("万")) {
                if (replyCount.contains(".")) {
                    // Parenthesize before the cast: the original "(int)d * 10000"
                    // truncated "1.5" to 1 first, turning 1.5万 into 10000 not 15000.
                    replyCount = String.valueOf((int) (Double.parseDouble(replyCount.replace("万", "")) * 10000));
                } else {
                    replyCount = replyCount.replace("万", "0000");
                }
            }
            long releaseTime;
            try {
                releaseTime = DateUtils.parseDate(articleTime, "yyyy-MM-dd HH:mm:ss").getTime();
            } catch (Exception e) {
                // Unparsable publish time: fall back to the record's own release time.
                releaseTime = crawlerRequestRecord.getReleaseTime();
            }
            crawlerRequestRecord.setReleaseTime(releaseTime);
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                    .url(httpRequest.getUrl())
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Author, categoryTag.getKVTagStrVal("author"))
                    .addContentKV(Field_Content, content.toString().trim())
                    .addContentKV(Field_Floor, "楼主")
                    .addContentKV(Field_Title, unescapeHtml2J(title.toString()))
                    .addContentKV(Field_Author_Id, categoryTag.getKVTagStrVal("articleAuthorId"))
                    .addContentKV(Field_Author_Follows, categoryTag.getKVTagStrVal("follows"))
                    .addContentKV(Field_Author_Identification_Model, categoryTag.getKVTagStrVal("identification"))
                    .addContentKV(Field_Author_Topic_Count, categoryTag.getKVTagStrVal("topicCount"))
                    .addContentKV(Field_Author_Pick_Count, categoryTag.getKVTagStrVal("jingHua"))
                    .addContentKV(Field_Author_Reply_Count, replyCount)
                    .addContentKV(Field_Author_From, categoryTag.getKVTagStrVal("authorAddr"))
                    .addContentKV(Field_Author_Sign_In, categoryTag.getKVTagStrVal("signTime"))
                    .addContentKV(Field_Images, imgs.toString().trim())
                    .resultLabelTag(article)
                    .build();
            if (null != allTags && allTags.size() > 0) {
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, allTags);
            }
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Video, isVideo);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite, isElite);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Forum_Name, forumName);
            crawlerDataList.add(crawlerArticleData);

            // Optional interaction record (comment/view/like counts) linked to the article.
            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                String comments = categoryTag.getKVTagStrVal("comments");
                String views = categoryTag.getKVTagStrVal("views");
                String likes = categoryTag.getKVTagStrVal("likes");

                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), topicId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                        .url(httpRequest.getUrl())
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .addContentKV(Field_I_Comments, comments)
                        .addContentKV(Field_I_Views, views)
                        .addContentKV(Field_I_Likes, likes)
                        .resultLabelTag(interaction)
                        .build();
                crawlerDataList.add(crawlerData);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Washes a comment reply-list response into CrawlerData records: the main
     * floor comment (built from biz tags), its sub-comments (from the JSON
     * "result.list") and an interaction record carrying the sub-comment count.
     *
     * @param crawlerDataList      output list the washed records are appended to
     * @param crawlerRequestRecord request record carrying the comment's biz tags
     * @param httpPage             the downloaded reply-list API response
     */
    private void washComment(List<CrawlerData> crawlerDataList, CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String commentId = categoryTag.getKVTagStrVal("commentId");
        String topicId = categoryTag.getKVTagStrVal("topicId");
        String itemUrl = categoryTag.getKVTagStrVal("topicUrl");
        List<String> allContent = castList(categoryTag.getKVTag("comContents").getVal(), String.class);
        StringBuffer content = new StringBuffer();
        KVTag kvTag = categoryTag.getKVTag("fonts");
        for (String text : allContent) {
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            // Single non-Chinese characters may be obfuscated glyphs; decode
            // them through the "fonts" mapping when one exists.
            if (codes.length == 1) {
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0]) && null != kvTag) {
                    String code = Integer.toHexString(codes[0]).toUpperCase();
                    JSONObject fonts = (JSONObject) kvTag.getVal();
                    JSONArray fontsArray = fonts.getJSONArray(code);
                    if (null != fontsArray && fontsArray.size() > 0) {
                        content.append(fontsArray.getJSONObject(0).getString("textCode"));
                        continue;
                    }
                }
            }
            content.append(text);
        }
        String replyCount = categoryTag.getKVTagStrVal("replyCount");
        if (null != replyCount && replyCount.endsWith("万")) {
            if (replyCount.contains(".")) {
                // Parenthesize before the cast: "(int)d * 10000" truncated the
                // fraction first, so "1.5万" became 10000 instead of 15000.
                replyCount = String.valueOf((int) (Double.parseDouble(replyCount.replace("万", "")) * 10000));
            } else {
                replyCount = replyCount.replace("万", "0000");
            }
        }
        String site = categoryTag.getKVTagStrVal("site");

        CrawlerData crawlerCommentData = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                .url(itemUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .addContentKV(Field_Author, categoryTag.getKVTagStrVal("author"))
                .addContentKV(Field_Content, content.toString().trim())
                .addContentKV(Field_Title, unescapeHtml2J(categoryTag.getKVTagStrVal("title")))
                .addContentKV(Field_Author_Id, categoryTag.getKVTagStrVal("authorId"))
                .addContentKV(Field_Floor, categoryTag.getKVTagStrVal("floor"))
                .addContentKV(Field_Author_Identification_Model, categoryTag.getKVTagStrVal("identification"))
                .addContentKV(Field_Author_Topic_Count, categoryTag.getKVTagStrVal("topicCount"))
                .addContentKV(Field_Author_Pick_Count, categoryTag.getKVTagStrVal("jingHua"))
                .addContentKV(Field_Author_Reply_Count, replyCount)
                .addContentKV(Field_Author_From, categoryTag.getKVTagStrVal("authorAddr"))
                .addContentKV(Field_Author_Sign_In, categoryTag.getKVTagStrVal("signTime"))
                .resultLabelTag(comment)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                .build();
        crawlerDataList.add(crawlerCommentData);


        try {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            JSONObject resultObj = jsonObject.getJSONObject("result");
            int commentCount = resultObj.getIntValue("rowCount");
            if (commentCount > 0) {
                JSONArray comments = resultObj.getJSONArray("list");
                // Renamed from "comment": the old name shadowed the statically
                // imported enum constant, forcing the fully-qualified form below.
                for (Object subComment : comments) {
                    JSONObject commentObj = (JSONObject) subComment;
                    String subCommentId = commentObj.getString("commentId");
                    String subAuthorId = commentObj.getString("memberId");
                    String subCommentDate = commentObj.getString("date");
                    JSONObject contentObj = (JSONObject) commentObj.getJSONArray("content").get(0);
                    String subContent = contentObj.getString("content");

                    //封装数据 (build the sub-comment record)
                    CrawlerData crawlerSubCommentData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.comment.enumVal(), subCommentId))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                            .url(itemUrl)
                            .releaseTime(DateUtils.parseDate(subCommentDate, "yyyy-MM-dd HH:mm:ss").getTime())
                            .addContentKV(Field_Content, subContent)
                            .addContentKV(Field_Author_Id, subAuthorId)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .build();
                    crawlerDataList.add(crawlerSubCommentData);
                }
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                    .url(itemUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_I_Comments, String.valueOf(commentCount))
                    .resultLabelTag(interaction)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .build();
            crawlerDataList.add(crawlerData);
        } catch (Exception e) {
            // Attach the throwable so parse failures are diagnosable (was message-only).
            logger.warn("read api result error , state code is : {}, http page is : {}", httpPage.getStatusCode(), httpPage.getRawText(), e);
        }
    }

    /**
     * Prepares the next round of requests. Three modes:
     * 1) a "path" extra points at a local dump file — replay article records from it;
     * 2) a keyword support record is present — build forum-home requests per keyword;
     * 3) otherwise — delegate to the framework's default behavior.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        if (extras != null) {
            // "path" may be absent from extras: the original called toString()
            // on the raw get() result and threw NPE in that case.
            Object pathVal = extras.get("path");
            String path = pathVal == null ? "" : pathVal.toString();
            if (StringUtils.isNotBlank(path)) {
                initArticleListRecord(requestRecord, allItemRecords, path);
                return allItemRecords;
            }
        }


        if (supportSourceRecords == null || supportSourceRecords.size() < 1) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = null;
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            if (supportSourceRecord.getHttpRequest().getUrl().matches(keysRegex)) {
                keywordRecord = supportSourceRecord;
            }
        }
        if (null == keywordRecord) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        // keywordRecord's URL is guaranteed to match keysRegex by the loop above.
        initKeywordsRecord(requestRecord, allItemRecords, keywordRecord);

        if (allItemRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;

    }

    /**
     * Replays article item requests from a local JSON-lines dump file: each
     * non-blank line is a serialized {@link CrawlerData} whose URL and tags
     * seed a new item-page request.
     *
     * @param requestRecord  the triggering request (parent of the new records)
     * @param allItemRecords output list the replayed records are appended to
     * @param path           path to the UTF-8 JSON-lines file
     */
    private void initArticleListRecord(CrawlerRequestRecord requestRecord, List<CrawlerRecord> allItemRecords, String path) {
        if (StringUtils.isBlank(path)) {
            logger.error("path can not is null!");
            return;
        }

        // try-with-resources: the original never closed the reader, leaking a
        // file handle on every call (including exception paths).
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(path), "UTF-8"))) {
            String readLine;
            while ((readLine = bufferedReader.readLine()) != null) {
                if (StringUtils.isBlank(readLine)) {
                    continue;
                }

                CrawlerData crawlerData = JSONObject.parseObject(readLine, CrawlerData.class);
                String requestUrl = crawlerData.getUrl();
                CategoryTag categoryTag = crawlerData.tagsCreator().bizTags().getCategoryTag();
                Map seriesMap = categoryTag.getKVTagObjVal("series", Map.class);
                String brand = categoryTag.getKVTagObjVal("brand", JSONObject.class).getString("brand_name");
                String forumKey = categoryTag.getKVTagStrVal("carSeriesId");
                Long releaseTime = categoryTag.getKVTagObjVal("time", Long.class);

                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(requestRecord)
                        .httpUrl(requestUrl)
                        .releaseTime(releaseTime)
                        .copyBizTags()
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                        .build();
                // NOTE(review): tags are added to requestRecord AFTER itemRecord was
                // built; if copyBizTags() snapshots at build time, this itemRecord
                // misses them (only later iterations see them) — confirm intent.
                requestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series, Arrays.asList(seriesMap));
                requestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brand);
                requestRecord.tagsCreator().bizTags().addCustomKV("forumKey", forumKey);
                allItemRecords.add(itemRecord);
            }
        } catch (Exception e) {
            // Include the throwable so the stack trace is kept (original logged message only).
            logger.error(e.getMessage(), e);
        }

    }

    /**
     * Builds one forum-home request per keyword found in the keyword API
     * response carried by {@code keywordRecord}. Responses whose status is
     * non-zero are ignored; parse failures are logged and swallowed.
     *
     * @param requestRecord  the parent request the new records page from
     * @param allItemRecords output list the keyword records are appended to
     * @param keywordRecord  support record holding the keyword API payload
     */
    private void initKeywordsRecord(CrawlerRequestRecord requestRecord, List<CrawlerRecord> allItemRecords, CrawlerRequestRecord keywordRecord) {
        try {
            JSONObject response = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
            if (response.getIntValue("status") != 0) {
                return;
            }
            JSONArray keywordItems = response.getJSONArray("content");
            for (Object item : keywordItems) {
                String keyword = ((JSONObject) item).getString("keyword");
                String homeUrl = String.format(homeUrlFormat, keyword);
                CrawlerRequestRecord homeRequest = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(homeUrl)
                        .recordKey(homeUrl)
                        .releaseTime(System.currentTimeMillis())
                        .needWashed(false)
                        .needParsed(true)
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                homeRequest.getHttpRequest().setMethod(HttpConstant.Method.GET);
                allItemRecords.add(homeRequest);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Re-queues a record for another download attempt. When {@code isRetryCount}
     * is true, a "download_retry_count" biz tag is incremented and the retry is
     * abandoned once 20 attempts are reached. The clone reuses the original
     * request, tags and paging type.
     *
     * @param crawlerRequestRecords output list the retry record is appended to
     * @param crawlerRecord         the record whose download failed
     * @param isRetryCount          whether to count (and cap) this retry
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, Boolean isRetryCount) {
        if (isRetryCount) {
            int count = 1;
            CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
            if (crawlerBusinessTags.hasKVTag("download_retry_count")) {
                count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
                if (count >= 20) {
                    // Message typo fixed ("he number" -> "the number").
                    logger.error("{} request download the number of retries exceeds the limit" +
                            ",request url {}", domain, crawlerRecord.getHttpRequest().getUrl());
                    return;
                }
            }

            count++;
            crawlerBusinessTags.addCustomKV("download_retry_count", count);
        }
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(crawlerRecord.getReleaseTime())
                .copyBizTags()
                .copyResultTags()
                .notFilterRecord()
                .build();

        // Preserve the item-turn-page request type on the clone.
        if (crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)) {
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            crawlerRequestRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());

        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Registers every URL pattern this script handles: index, forum home,
     * topic list, article detail, article comments and the keyword API.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(indexRegex);
        addUrlRegular(homeRegex);
        addUrlRegular(listRegex);
        addUrlRegular(articleRegex);
        addUrlRegular(articleCommentsRegex);
        addUrlRegular(keysRegex);
    }

    /**
     * A record belongs to this script iff its biz-tag site matches the
     * script's site, ignoring case.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        return crawlerRequestRecord.tagsCreator().bizTags().site().equalsIgnoreCase(scriptSite);
    }

    /**
     * Intentionally a no-op: this script needs no post-execution cleanup,
     * but the framework requires the override.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * @return the site domain this crawler script is bound to (the {@code domain} field)
     */
    @Override
    public String domain() {
        return domain;
    }

    /***
     * Returns true when the given character renders as Chinese text:
     * CJK unified/compatibility ideographs, or the punctuation blocks that
     * carry full-width Chinese punctuation such as “ 。 ，.
     * @param ch the character to classify
     * @return true for Chinese characters/punctuation, false otherwise
     */
    private static boolean isChinese(char ch) {
        // Classify by the character's Unicode block.
        Character.UnicodeBlock block = Character.UnicodeBlock.of(ch);
        return block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B
                || block == Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION   // full-width 。
                || block == Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS // full-width ，
                || block == Character.UnicodeBlock.GENERAL_PUNCTUATION;          // curly quotes “
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size() - 1));
    }

    /**
     * Parses the query string of a URL into a key/value map.
     *
     * @param url e.g. http://*.*.com?aa=11&bb=22&cc=33
     * @return map of raw (undecoded) query parameters, or {@code null} when
     *         the url carries no non-blank query string (existing callers
     *         depend on the null contract, so it is preserved)
     */
    private Map<String, Object> getUrlParams(String url) {
        Map<String, Object> map = new HashMap<String, Object>(0);
        String param = null;
        if (url != null && url.contains("?")) {
            // Bug fix: split("\\?")[1] threw ArrayIndexOutOfBoundsException for
            // a URL ending in a bare "?" (trailing empty strings are dropped by
            // split). Limit 2 also keeps any "?" inside the query intact.
            String[] parts = url.split("\\?", 2);
            if (parts.length == 2) {
                param = parts[1];
            }
        }
        // Equivalent to StringUtils.isBlank: null, empty, or whitespace-only.
        if (param == null || param.trim().isEmpty()) {
            return null;
        }
        String[] params = param.split("&");
        for (String s : params) {
            String[] p = s.split("=");
            // Pairs without exactly one "=" separator are silently skipped,
            // matching the original behavior.
            if (p.length == 2) {
                map.put(p[0], p[1]);
            }
        }
        return map;
    }


    /**
     * Repeatedly HTML-unescapes {@code str} to undo multiply-escaped text
     * (e.g. "&amp;amp;lt;" needs two passes to become "&lt;").
     *
     * @param str possibly multiply-escaped HTML text, may be null
     * @return the fully unescaped text, or null when the input is null
     */
    public static String unescapeHtml2J(String str) {
        if (null == str) {
            return null;
        }
        // Cap at 6 passes (same bound as before: break when times > 5) so
        // pathological input cannot loop forever. Improvement: stop as soon as
        // a pass no longer changes the text — a literal "&...;" that is not an
        // HTML entity previously burned all remaining passes for nothing.
        for (int times = 0; times <= 5 && str.contains("&") && str.contains(";"); times++) {
            String unescaped = StringEscapeUtils.unescapeHtml(str);
            if (unescaped.equals(str)) {
                break;
            }
            str = unescaped;
        }
        return str;
    }

    /**
     * Casts an untyped Object known to be a {@code List} into a typed
     * {@code List<T>}, element by element.
     *
     * @param obj   the object to convert; anything other than a List yields null
     * @param clazz target element class used for the per-element cast
     * @param <T>   target element type
     * @return a new typed list, or {@code null} when {@code obj} is not a List
     */
    public static <T> List<T> castList(Object obj, Class<T> clazz) {
        if (!(obj instanceof List<?>)) {
            return null;
        }
        List<?> source = (List<?>) obj;
        List<T> typed = new ArrayList<T>(source.size());
        for (Object element : source) {
            // Class.cast throws ClassCastException on a mismatched element,
            // same as the original implementation.
            typed.add(clazz.cast(element));
        }
        return typed;
    }

    /**
     * Validates that a page download succeeded and is complete.
     *
     * @param crawlerRequestRecord last record (supplies the request URL for logging)
     * @param httpPage             downloaded page
     * @return {@code true} when the download is broken — non-200 status,
     *         failed download flag, or empty body; {@code false} when the
     *         page looks complete
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        int status = httpPage.getStatusCode();
        // Each failure mode gets its own log line so operators can tell a bad
        // status from a transport failure from a truncated/empty body.
        if (status != 200) {
            logger.error("download page {} error, status code is {}", requestUrl, status);
            return true;
        }
        if (!httpPage.isDownloadSuccess()) {
            logger.error("download page failed, check your link {}", requestUrl);
            return true;
        }
        if (StringUtils.isBlank(httpPage.getRawText())) {
            logger.error("download page empty, check your link {}", requestUrl);
            return true;
        }
        return false;
    }
}
