package com.chance.cc.crawler.development.scripts.autohome.forum;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2020-11-18 09:48:59
 * @email okprog@sina.com
 */
public class AutoHomeForumReplyCrawlerScript extends CrawlerCommonScript {

    // FIX: the original obtained the logger for AutoHomeForumFabuCrawlerScript,
    // which mislabeled every log line emitted by this class.
    private Logger logger = LoggerFactory.getLogger(AutoHomeForumReplyCrawlerScript.class);

    // URL templates for the autohome.com.cn endpoints this script requests.
    private static final String listUrlFormat = "https://club.autohome.com.cn/o/bbs/forum-c-%s-%s.html?qaType=-1";
    private static final String viewsUrlFormat = "https://club.autohome.com.cn/frontapi/getclicksandreplys?topicids=%s";
    private static final String likesUrlFormat = "https://club.api.autohome.com.cn/web/zan/list?input=%s-";
    private static final String authorInfoFormat = "https://club.autohome.com.cn/frontnc/user/getdetailusertpl/%s-0";
    private static final String commentsFormat = "https://club.autohome.com.cn/frontapi/comment/getcommentwithpagination?topicId=%s&replyId=%s&pageIndex=1&pageSize=50";
    private static final String followsUrlFormat = "https://i.autohome.com.cn/%s";
    private static final String homeUrlFormat = "https://www.autohome.com.cn/%s/";
    private static final String baseUrl = "https://club.autohome.com.cn";

    // Regexes used to classify request URLs in initUrlRegulars()/parseLinks().
    private static final String indexRegex = "https?://www\\.autohome\\.com\\.cn/";
    private static final String homeRegex = "https?://www\\.autohome\\.com\\.cn/\\d*/";
    private static final String listRegex = "https?://club\\.autohome\\.com\\.cn/o/bbs/forum-c-\\d*-\\d*\\.html\\S*";
    private static final String articleRegex = "https?://club\\.autohome\\.com\\.cn/bbs/thread/\\S*/\\d*-\\d*\\.html\\S*";
    private static final String articleFirstRegex = "https?://club\\.autohome\\.com\\.cn/bbs/thread/\\S*/\\d*-1\\.html\\S*";
    private static final String articleCommentsRegex = "https?://club\\.autohome\\.com\\.cn/frontapi/comment/getcommentwithpagination\\S*";
    private static final String followsUrlRegex = "https://i\\.autohome\\.com\\.cn/\\d*";
    private static final String keysRegex = "https?://\\S*v1/meta/autohome/keys\\S*";
    private static final String parseFontRegex = "https?://\\S*/crawler/font/api/v1/parseTTFont";

    // Value of the "site" category tag this script accepts (see crawlerCheck()).
    private static final String scriptSite = "forum_reply";

    // Pool of User-Agent strings, populated from the fake-useragent support record
    // in prepareRequest() via initUserAgents() (defined outside this view).
    private List<String> userAgents = new ArrayList<>();

    // Cursor into userAgents — presumably advanced by getOneUserAgent(); not visible here.
    private AtomicInteger userAgentIndex = new AtomicInteger(0);

    // Monitor object — presumably guards userAgents initialization in code outside this view.
    private Object lock = new Object();

    /** Crawler domain identifier for this script. */
    @Override
    public String domain() {
        final String crawlerDomain = "autohome";
        return crawlerDomain;
    }

    /**
     * Registers the URL patterns this script is allowed to follow.
     * Note: follows/parse-font/article-first regexes are intentionally not
     * registered here, matching the original behavior.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                indexRegex,
                homeRegex,
                listRegex,
                articleRegex,
                articleCommentsRegex,
                keysRegex
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Decides whether this script should handle the given request.
     *
     * @param crawlerRequestRecord incoming request record carrying biz tags
     * @return true when the record's "site" category tag equals {@code forum_reply}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // Compare from the constant side so a missing "site" tag yields false
        // instead of throwing a NullPointerException.
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /**
     * Builds the initial request set from support records:
     * one record seeds the User-Agent pool, another (matching {@code keysRegex})
     * supplies car-series keywords; each keyword becomes a home-page request.
     * Falls back to the default preparation when nothing usable is found.
     *
     * @param requestRecord        the triggering request
     * @param supportSourceRecords pre-downloaded support records (may be null/empty)
     * @return home-page requests per keyword, or the superclass default
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.size() < 1) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = null;
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            if (supportSourceRecord.getHttpRequest().getUrl().contains("fake-useragent.herokuapp.com/browsers")) {
                initUserAgents(supportSourceRecord); // seed the User-Agent pool
            } else if (supportSourceRecord.getHttpRequest().getUrl().matches(keysRegex)) {
                keywordRecord = supportSourceRecord;
            }
        }

        // FIX: the original dereferenced keywordRecord unconditionally and threw
        // an NPE when no support record matched keysRegex.
        if (keywordRecord != null) {
            try {
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                if (jsonObject.getIntValue("status") == 0) {
                    JSONArray contents = jsonObject.getJSONArray("content");
                    for (Object content : contents) {
                        String keyword = ((JSONObject) content).getString("keyword");
                        String homeUrl = String.format(homeUrlFormat, keyword);

                        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .httpUrl(homeUrl)
                                .recordKey(homeUrl)
                                .releaseTime(System.currentTimeMillis())
                                .needWashed(false)
                                .needParsed(true)
                                .notFilterRecord()
                                .copyBizTags()
                                .build();
                        crawlerRequestRecord.getHttpRequest().setMethod(HttpConstant.Method.GET);
                        allItemRecords.add(crawlerRequestRecord);
                    }
                }
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }

        if (allItemRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;
    }

    /**
     * Routes a downloaded page to the matching link parser.
     * Failed downloads (blank body / non-200 / transport failure) are re-queued
     * with a fresh User-Agent, up to 10 attempts tracked via the
     * "downloadTimes" extra on the request.
     *
     * @param crawlerRequestRecord the request that produced this page
     * @param httpPage             the downloaded page
     * @return follow-up requests; empty when the URL matches no known pattern
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200) {
            Map<String, Object> extras = lastRequest.getExtras();
            if (null == extras) {
                extras = new HashMap<>();
                // FIX: the original never attached the new map to the request,
                // so the retry counter was lost on every re-queue.
                lastRequest.setExtras(extras);
            }
            int downloadTimes = 1;
            try {
                // FIX: "downloadTimes" is stored as an Integer elsewhere in this
                // class, but the original cast it to String before parseInt —
                // a guaranteed ClassCastException that reset the counter to 1,
                // so the 10-attempt cap never triggered.
                Object recorded = extras.get("downloadTimes");
                if (recorded != null) {
                    downloadTimes = Integer.parseInt(String.valueOf(recorded));
                }
            } catch (NumberFormatException ignored) {
                // malformed counter value — restart from 1
            }
            extras.put("downloadTimes", downloadTimes + 1);
            if (null != httpPage.getRawText() && httpPage.getRawText().contains("主楼已被删除") || downloadTimes > 10) {
                logger.error("页面不存在：{},本页面下载次数：{}", statusCode, downloadTimes);
                return parsedLinks;
            }

            // Re-queue the same request with a fresh User-Agent.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            crawlerRequestRecord.getHttpRequest().addHeader("User-Agent", getOneUserAgent());
            logger.error("页面下载状态：{}，状态码：{}，内容为空：{}，实行回推", httpPage.isDownloadSuccess(), statusCode, StringUtils.isBlank(httpPage.getRawText()));
            return parsedLinks;
        }
        if (lastRequestUrl.matches(homeRegex)) {
            return parseHomeLinks(crawlerRequestRecord, parsedLinks, lastRequestUrl, httpPage);
        }
        if (lastRequestUrl.matches(listRegex)) {
            return parseListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(articleRegex)) {
            return parseArticleLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest, lastRequestUrl);
        }
        if (lastRequestUrl.matches(articleCommentsRegex)) {
            return parseCommentLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest);
        }

        // FIX: return an empty list instead of null so callers need no null check.
        return parsedLinks;
    }

    /**
     * Handles a downloaded comment-pagination JSON page.
     * On failure (unparseable JSON or non-zero returncode) the request is
     * re-queued. On success it schedules: (a) an internal font-parse request
     * when the comment text contains obfuscated glyphs and a TTF URL is known,
     * and (b) an internal author-info download for the comment's author.
     *
     * @param crawlerRequestRecord the request that produced this page
     * @param httpPage             downloaded comment JSON
     * @param parsedLinks          accumulator for follow-up requests
     * @param lastRequest          the request whose extras carry topic/author context
     * @return parsedLinks with any follow-up requests appended
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest) {
        // flag marks an invalid/failed JSON payload that should be retried.
        boolean flag = false;
        try {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            if (jsonObject.getIntValue("returncode") != 0){
                logger.info("comment json download failed");
                flag = true;
            }
        } catch (Exception e) {
            logger.info("comment json parse failed");
            flag = true;
        }
        if (flag){
            // Re-queue the same request (unfiltered, no wash) for another attempt.
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            parsedLinks.add(crawlerRequestRecord);
            return parsedLinks;
        }

        Map<String, Object> extras = lastRequest.getExtras();
        crawlerRequestRecord.setNeedWashPage(true);
        String ttfUrl = (String) extras.get("ttfUrl");
        List<String> allContent = (List<String>) extras.get("allContent");
        Set<String> hexList = new HashSet<>();
        for (String text : allContent) {
            // Collect hex code points of single non-Chinese characters —
            // these are the glyphs obfuscated via the custom TTF font.
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            if (codes.length == 1){
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0])){
                    hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                }
            }
        }
        if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
            extras.put("ttfUrl",ttfUrl);
            extras.put("comContents",allContent);
            extras.put("hexList",hexList);
            // Internal download to resolve the TTF glyph mapping.
            // NOTE(review): hard-coded internal service IP — consider moving to config.
            String parseFontUrl = "http://192.168.1.217:9599/crawler/font/api/v1/parseTTFont";
            CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                    .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpUrl(parseFontUrl)
                    .recordKey(parseFontUrl)
                    .needParsed(false)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpConfig(HttpConfig.me(domain()))
                    .build();

            // Request the font-parse service will use to fetch the TTF itself.
            HttpRequest ttfRequest = new HttpRequest();
            ttfRequest.setUrl(ttfUrl);
            ttfRequest.addHeader("Origin","https://club.autohome.com.cn");
            ttfRequest.addHeader("Referer","https://club.autohome.com.cn/");
            ttfRequest.addHeader("User-Agent",getOneUserAgent());

            HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
            httpConfig.setResponseTextGenerateHtml(false);
            HttpRequest httpRequest = parseFontRecord.getHttpRequest();
            Map<String,Object> params = new HashMap<>();
            params.put("httpRequest",ttfRequest);
            params.put("httpConfig",httpConfig);
            params.put("needParseList",hexList);
            params.put("domain",domain());
            httpRequest.setMethod(HttpConstant.Method.POST);
            httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
            parsedLinks.add(parseFontRecord);
        }
        // Internal download of the comment author's profile info.
        String authorInfoUrl = String.format(authorInfoFormat,extras.get("authorId"));
        CrawlerRequestRecord itemInternalInfoRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(authorInfoUrl)
                .httpUrl(authorInfoUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .needParsed(false)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();

        itemInternalInfoRecord.getHttpRequest().setExtras(copyExtras(extras));
        itemInternalInfoRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
        itemInternalInfoRecord.getHttpRequest().addHeader("Referer", (String) extras.get("topicUrl"));
        itemInternalInfoRecord.getHttpRequest().addHeader("User-Agent",getOneUserAgent());
        itemInternalInfoRecord.getHttpRequest().setResponseCharset("UTF-8");
        itemInternalInfoRecord.getHttpConfig().setUserAgent(getOneUserAgent());
        parsedLinks.add(itemInternalInfoRecord);
        return parsedLinks;
    }

    /**
     * Parses a forum-article page.
     * Flow: (1) detect the anti-bot interstitial and re-queue with a new
     * User-Agent; (2) on page 1, discover the total page count and jump to the
     * LAST page, schedule views/likes/author/follows internal downloads, and
     * (if obfuscated glyphs exist) a font-parse request; (3) on pages > 2,
     * schedule the previous page (pages are walked backwards from last to 2);
     * (4) on every page, emit one comment-API request per reply floor.
     *
     * @param crawlerRequestRecord the request that produced this page
     * @param httpPage             downloaded article HTML
     * @param parsedLinks          accumulator for follow-up requests
     * @param lastRequest          request whose extras carry topicId/authorId context
     * @param lastRequestUrl       URL of the downloaded page
     * @return parsedLinks with follow-up requests appended
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest, String lastRequestUrl) {
        // Anti-bot interstitial page — re-queue with a fresh User-Agent.
        if (httpPage.getRawText().contains("尊敬的用户您好，您的访问出现异常，为确认本次访问为正常用户行为")){
            HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
            httpRequest.addHeader("User-Agent",getOneUserAgent());
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.warn("文章详情: {} 访问出现异常，需要重新请求",httpPage.getRequest().getUrl());
            return parsedLinks;
        }
        crawlerRequestRecord.setNeedWashPage(true);
        Map<String, Object> listExtras = lastRequest.getExtras();
        // Extract the obfuscation TTF font URL embedded in the page CSS.
        Matcher ttfMatcher = Pattern.compile("url\\('//k3\\.autoimg\\.cn/g\\d*/\\w*/\\w*/\\S*/\\S*\\.\\.ttf'\\)\\s*format").matcher(httpPage.getRawText());
        String ttfUrl = "";
        while (ttfMatcher.find()){
            ttfUrl = "https:" + ttfMatcher.group(0).split("'")[1];
        }
        // Pagination first: page number is the trailing "-N" in the URL.
        int currentPage = Integer.parseInt(lastRequestUrl.substring(lastRequestUrl.lastIndexOf("-") + 1).split("\\.")[0]);
        String topicId = (String) listExtras.get("topicId");
        String url = lastRequestUrl.split(topicId)[0];
        if (1 == currentPage){
            // Total page count is rendered as e.g. "/12页" in the pager editor.
            List<String> strings = httpPage.getHtml().xpath("//div[@class=\"athm-page__editor\"]/text()").all();
            StringBuilder pages = new StringBuilder();
            for (String string : strings) {
                pages.append(string.trim());
            }
            String page = pages.toString();
            String pageNum = page.replace("/", "").replace("页", "");
            int totalPages = 0;
            try {
                totalPages = Integer.parseInt(pageNum);
            } catch (NumberFormatException e) {
                // No pager found — single-page article.
                totalPages = 1;
            }
            if (totalPages > currentPage){
                // Jump straight to the LAST page; parsing then walks backwards
                // (see the currentPage > 2 branch below).
                String articlePageUrl = url + topicId + "-" + totalPages + ".html";
//                String articlePageUrl = String.format(listUrlFormat,topicId,totalPages);
                CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .recordKey(articlePageUrl)
                        .httpUrl(articlePageUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.turnPageItem)
                        .needWashed(false)
                        .copyBizTags()
                        .notFilterRecord()
                        .build();
                turnPageRequest.getHttpRequest().setExtras(copyExtras(listExtras));
                turnPageRequest.getHttpRequest().getExtras().put("downloadTimes",0);
                parsedLinks.add(turnPageRequest);
            }
            if (lastRequestUrl.matches(articleFirstRegex)){
                // Views and replies counter API for this topic.
                String viewsUrl = String.format(viewsUrlFormat,listExtras.get("topicId"));
                CrawlerRequestRecord itemInternalViewsRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(viewsUrl)
                        .httpUrl(viewsUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                itemInternalViewsRecord.getHttpRequest().setExtras(copyExtras(listExtras));
                itemInternalViewsRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
                itemInternalViewsRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemInternalViewsRecord.getHttpRequest().addHeader("User-Agent",getOneUserAgent());
                itemInternalViewsRecord.getHttpRequest().setResponseCharset("UTF-8");
                itemInternalViewsRecord.getHttpConfig().setUserAgent(getOneUserAgent());
                parsedLinks.add(itemInternalViewsRecord);
                // Likes API for this topic.
                String likesUrl = String.format(likesUrlFormat, listExtras.get("topicId"));
                CrawlerRequestRecord itemInternalLikesRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(likesUrl)
                        .httpUrl(likesUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                itemInternalLikesRecord.getHttpRequest().setExtras(copyExtras(listExtras));
                itemInternalLikesRecord.getHttpRequest().addHeader("Host","club.api.autohome.com.cn");
                itemInternalLikesRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemInternalLikesRecord.getHttpRequest().addHeader("User-Agent",getOneUserAgent());
                itemInternalLikesRecord.getHttpRequest().setResponseCharset("UTF-8");
                itemInternalLikesRecord.getHttpConfig().setUserAgent(getOneUserAgent());
                parsedLinks.add(itemInternalLikesRecord);
                // Author profile info.
                String authorInfoUrl = String.format(authorInfoFormat,listExtras.get("authorId"));
                CrawlerRequestRecord itemInternalInfoRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(authorInfoUrl)
                        .httpUrl(authorInfoUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .needWashed(true)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                itemInternalInfoRecord.getHttpRequest().setExtras(copyExtras(listExtras));
                itemInternalInfoRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
                itemInternalInfoRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemInternalInfoRecord.getHttpRequest().addHeader("User-Agent",getOneUserAgent());
                itemInternalInfoRecord.getHttpRequest().setResponseCharset("UTF-8");
                itemInternalInfoRecord.getHttpConfig().setUserAgent(getOneUserAgent());
                parsedLinks.add(itemInternalInfoRecord);

                // Author follows/fans page.
                String followsUrl = String.format(followsUrlFormat,listExtras.get("authorId"));
                CrawlerRequestRecord followsInfoRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(followsUrl)
                        .httpUrl(followsUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .needWashed(true)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                followsInfoRecord.getHttpRequest().setExtras(copyExtras(listExtras));
                followsInfoRecord.getHttpRequest().addHeader("Host","i.autohome.com.cn");
                followsInfoRecord.getHttpRequest().addHeader("Referer","https://club.autohome.com.cn/");
                followsInfoRecord.getHttpRequest().addHeader("User-Agent",getOneUserAgent());
                followsInfoRecord.getHttpRequest().setResponseCharset("UTF-8");
                followsInfoRecord.getHttpConfig().setUserAgent(getOneUserAgent());
                parsedLinks.add(followsInfoRecord);
            }
            // Character de-obfuscation: collect hex code points of single
            // non-Chinese characters rendered through the custom TTF font.
            List<String> contents = httpPage.getHtml().xpath("//div[@class=\"post-container\"]//text() | //div[@class=\"post-container post-container--qa\"]//text()").all();
            Set<String> hexList = new HashSet<>();
            for (String text : contents) {
                text = text.trim();
                int[] codes = StringUtils.toCodePoints(text);
                if (codes.length == 1){
                    char[] chars = Character.toChars(codes[0]);
                    if (!isChinese(chars[0])){
                        hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                    }
                }
            }
            if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
                listExtras.put("ttfUrl",ttfUrl);
                listExtras.put("articleContents",contents);
                listExtras.put("hexList",hexList);
                // Internal download to resolve the TTF glyph mapping.
                // NOTE(review): hard-coded internal service IP — consider moving to config.
                String parseFontUrl = "http://192.168.1.217:9599/crawler/font/api/v1/parseTTFont";
                CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                        .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                        .httpUrl(parseFontUrl)
                        .recordKey(parseFontUrl)
                        .needParsed(false)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .httpConfig(HttpConfig.me(domain()))
                        .build();

                // Request the font-parse service will use to fetch the TTF itself.
                HttpRequest ttfRequest = new HttpRequest();
                ttfRequest.setUrl(ttfUrl);
                ttfRequest.addHeader("Origin","https://club.autohome.com.cn");
                ttfRequest.addHeader("Referer","https://club.autohome.com.cn/");
                ttfRequest.addHeader("User-Agent",getOneUserAgent());

                HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
                httpConfig.setResponseTextGenerateHtml(false);
                HttpRequest httpRequest = parseFontRecord.getHttpRequest();
                Map<String,Object> params = new HashMap<>();
                params.put("httpRequest",ttfRequest);
                params.put("httpConfig",httpConfig);
                params.put("needParseList",hexList);
                params.put("domain",domain());
                httpRequest.setMethod(HttpConstant.Method.POST);
                httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
                parsedLinks.add(parseFontRecord);
            }
        }
        if (currentPage > 2){
            // Walk backwards: schedule the previous page. Stops at page 2,
            // since page 1 was already crawled as the entry point.
            int pageNum = currentPage - 1;
            String articlePageUrl = url + topicId + "-" + pageNum + ".html";
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(articlePageUrl)
                    .httpUrl(articlePageUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.turnPageItem)
                    .needWashed(false)
                    .copyBizTags()
                    .build();
            turnPageRequest.getHttpRequest().setExtras(copyExtras(listExtras));
            turnPageRequest.getHttpRequest().getExtras().put("downloadTimes",0);
            parsedLinks.add(turnPageRequest);
        }


        // Parse the reply floors on this page, newest-first after the reverse.
        List<Selectable> commentNodes = httpPage.getHtml().xpath("//ul[@class=\"reply-wrap\"]/li").nodes();
        Collections.reverse(commentNodes);
        for (Selectable commentNode : commentNodes) {
            Map<String, Object> lastExtras = copyExtras(listExtras);
            lastExtras.put("ttfUrl",ttfUrl);
            String commentId= commentNode.xpath("./@data-reply-id").get();
            if (StringUtils.isBlank(commentId)){
                continue;
            }
            String commentUrl = String.format(commentsFormat,listExtras.get("topicId"),commentId);
            String authorId = commentNode.xpath("./@data-member-id").get();
            String floor = commentNode.xpath("./@data-floor").get();
            String replyTime = commentNode.xpath("./div/div[@class=\"reply\"]/div/span[@class=\"reply-static-text fn-fl\"]/strong/text() | ./div[@class=\"reply\"]/div/div/div/span[@class=\"reply-static-text fn-fl\"]/strong/text()").get();
            long releaseTime = crawlerRequestRecord.getReleaseTime();
            try {
                releaseTime = DateUtils.parseDate(replyTime,"yyyy-MM-dd HH:mm:ss").getTime();
            } catch (Exception e) {
                // Unparseable reply timestamp — fall back to the request's releaseTime.
                logger.warn("parse comment date error");
            }

            try {
                CrawlerRequestRecord itemCommentRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(commentUrl)
                        .httpUrl(commentUrl)
                        .releaseTime(releaseTime)
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                List<String> allContent = commentNode.xpath("./div[@class=\"fn-cont-right\"]/div[@class=\"reply\"]//div[@class=\"reply-detail\"]//text()").all();
                lastExtras.put("allContent",allContent);
                lastExtras.put("authorId",authorId);
                lastExtras.put("topicUrl",lastRequestUrl);
                lastExtras.put("commentId",commentId);
                lastExtras.put("author","");
                lastExtras.put("floor",floor);
                lastExtras.put("downloadTimes",0);
                itemCommentRecord.getHttpRequest().setExtras(lastExtras);
                itemCommentRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
                itemCommentRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemCommentRecord.getHttpRequest().addHeader("User-Agent",getOneUserAgent());
                itemCommentRecord.getHttpRequest().setResponseCharset("UTF-8");
                itemCommentRecord.getHttpConfig().setUserAgent(getOneUserAgent());
                parsedLinks.add(itemCommentRecord);
            } catch (Exception e) {
                // NOTE(review): SLF4J arguments look swapped here — the message
                // string should come first; as written the exception message is
                // used as the format. Confirm and fix in a code change.
                logger.error(e.getMessage(),"parse comment error");
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a forum list page: schedules the next list page (when a pager link
     * exists) and one article request per topic row, stashing row metadata
     * (topicId, title, author, dates, video flag) into the request extras.
     *
     * @param crawlerRequestRecord the request that produced this page
     * @param httpPage             downloaded list HTML
     * @param parsedLinks          accumulator for follow-up requests
     * @return parsedLinks with follow-up requests appended
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String nextHref = httpPage.getHtml().xpath("//a[@class=\"afpage\"]/@href").get();
        if (null != nextHref) {
            String nextPageUrl = baseUrl + nextHref;
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            parsedLinks.add(turnPageRequest);
        }

        // One <dl class="list_dl"> element per topic row.
        List<Selectable> selectables = httpPage.getHtml().xpath("//dl[@class=\"list_dl\" and @lang]").nodes();
        for (Selectable selectable : selectables) {
            String href = selectable.xpath("./dt/a/@href").get();
            String title = selectable.xpath("./dt/a/text()").get();
            String authorHref = selectable.xpath("./dd[1]/a/@href").get();
            // FIX: the original NPE'd on rows missing a link, title, or author
            // href (ads/placeholders); skip such rows instead.
            if (StringUtils.isBlank(href) || title == null || authorHref == null) {
                logger.warn("skip malformed list row on {}", crawlerRequestRecord.getHttpRequest().getUrl());
                continue;
            }
            String itemUrl = baseUrl + href;
            String topicId = itemUrl.substring(itemUrl.lastIndexOf("/") + 1).split("-")[0];
            if (title.contains("\n")) {
                // Rows with a type prefix render the real title after a newline.
                title = title.split("\\n")[1];
            }
            title = title.trim();
            String author = selectable.xpath("./dd[1]/a/text()").get();
            String authorId = authorHref.substring(authorHref.lastIndexOf("/") + 1);
            String topicDate = selectable.xpath("./dd/span[@class=\"tdate\"]/text()").get();
            String lastReplyTime = selectable.xpath("./dd/span[@class=\"ttime\"]/text()").get();
            String video = selectable.xpath("./dd[@class=\"outvideo\"]/span/a/@href").get();
            String isVideo = video == null ? "否" : "是";

            try {
                long releaseTime = DateUtils.parseDate(lastReplyTime, "yyyy-MM-dd HH:mm").getTime();

                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(itemUrl)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTime)
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                Map<String, Object> extras = new HashMap<>();
                extras.put("topicId", topicId);
                extras.put("title", title);
                extras.put("author", author);
                extras.put("authorId", authorId);
                extras.put("topicDate", topicDate);
                extras.put("isVideo", isVideo);
                extras.put("lastReplyTime", lastReplyTime);
                extras.put("itemUrl", itemUrl);
                itemRecord.getHttpRequest().setExtras(extras);

                itemRecord.getHttpRequest().addHeader("Host", "club.autohome.com.cn");
                itemRecord.getHttpRequest().addHeader("Referer", crawlerRequestRecord.getHttpRequest().getUrl());
                itemRecord.getHttpRequest().addHeader("User-Agent", getOneUserAgent());
                itemRecord.getHttpRequest().setResponseCharset("UTF-8");
                itemRecord.getHttpRequest().setMethod(HttpConstant.Method.GET);
                itemRecord.getHttpConfig().setUserAgent(getOneUserAgent());
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                // FIX: the original logged only e.getMessage(), losing the stack trace.
                logger.error("parse list row lastReplyTime failed: {}", lastReplyTime, e);
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a car-series forum home page: reads the brand and series names from the
     * page and emits one turn-page request pointing at page 1 of that series' topic
     * list, tagged with the series/brand business tags.
     *
     * @param crawlerRequestRecord the request that produced this page
     * @param parsedLinks          accumulator the new request is appended to
     * @param lastRequestUrl       URL of the page just downloaded (series home page)
     * @param httpPage             the downloaded page
     * @return the same {@code parsedLinks} list, with the forum list request appended
     */
    private List<CrawlerRequestRecord> parseHomeLinks(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parsedLinks, String lastRequestUrl, HttpPage httpPage) {
        Html page = httpPage.getHtml();
        String brandName = page.xpath("//div[@class=\"container\"]/div/a[2]/text()").get();
        String seriesName = page.xpath("//div[@class=\"athm-sub-nav__car__name\"]//h1//text()|//div[@class=\"subnav\"]/div/div/a//text()").get();
        seriesName = unescapeHtml2J(seriesName);
        // The forum key is the path segment that follows "cn/", with slashes stripped.
        String forumKey = lastRequestUrl.split("cn/")[1].replace("/", "");

        String firstPageUrl = String.format(listUrlFormat, forumKey, 1);
        CrawlerRequestRecord homeRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(firstPageUrl)
                .recordKey(firstPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();
        Map<String, String> seriesInfo = new HashMap<>();
        seriesInfo.put("series_name", seriesName);
        seriesInfo.put("series_url", lastRequestUrl);
        seriesInfo.put("series_id", forumKey);
        List<Map<String, String>> seriesList = new ArrayList<>();
        seriesList.add(seriesInfo);
        homeRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series, seriesList);
        homeRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brandName);
        parsedLinks.add(homeRecord);
        return parsedLinks;
    }

    /**
     * Consumes the auxiliary (internal) downloads attached to a topic-page request and
     * copies what they carry into the main request's extras for the wash phase:
     * the anti-crawl font map, view/reply counts, like count, author profile fields,
     * and the author's follower count. When a critical auxiliary download failed
     * (font map or any non-follows request), the topic request is re-queued instead
     * of being washed with incomplete data.
     *
     * @param crawlerRecord           the main topic-page request whose extras are enriched
     * @param internalDownloadRecords the auxiliary download records (font / views / likes / author / follows)
     * @param links                   output list; receives a retry record when a critical download failed
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        // Collect reply counts and author info into the main request's extras.
        Map<String, Object> extras = crawlerRecord.getHttpRequest().getExtras();
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            HttpRequest internalDownloadRecordHttpRequest = internalDownloadRecord.getHttpRequest();
            String internalRequestUrl = internalDownloadRecordHttpRequest.getUrl();
            HttpPage downloadPage = internalDownloadRecord.getInternalDownloadPage();
            if (downloadPage.isDownloadSuccess()){
                if (internalRequestUrl.matches(parseFontRegex)){
                    // Font de-obfuscation API: status 0 carries the glyph -> text mapping.
                    JSONObject pageObj = JSONObject.parseObject(downloadPage.getRawText());
                    if (pageObj.getIntValue("status") == 0){
                        JSONObject fonts = pageObj.getJSONObject("content");
                        extras.put("fonts",fonts);
                    }else {
                        // Without the font map the content would be garbled — retry the topic.
                        requeueForRetry(crawlerRecord, extras, links);
                    }
                }else {
                    String urlSplit = internalRequestUrl.split("cn/")[1];
                    // views and reply counts from the clicks/replys API
                    if (urlSplit.startsWith("frontapi")){
                        try {
                            JSONObject jsonObject = JSONObject.parseObject(downloadPage.getRawText());
                            JSONArray results = jsonObject.getJSONArray("result");
                            JSONObject resultObject = (JSONObject) results.get(0);
                            String comments = resultObject.getString("replys");
                            String allComments = resultObject.getString("allreplys");
                            String views = resultObject.getString("views");
                            extras.put("comments",comments);
                            extras.put("allComments",allComments);
                            extras.put("views",views);
                        } catch (Exception e) {
                            // Fix: was logger.error(e.getMessage(), "get views failed"), which used the
                            // exception message as the format string and dropped the stack trace.
                            logger.error("get views failed", e);
                            extras.put("comments","0");
                            extras.put("allComments","0");
                            extras.put("views","0");
                            logger.debug(downloadPage.getRawText());
                        }
                    }
                    // like count from the zan API
                    if (urlSplit.startsWith("web")){
                        try{
                            JSONObject jsonObject = JSONObject.parseObject(downloadPage.getRawText().split("\\[")[1].split("]")[0]);
                            String likes = jsonObject.getString("z");
                            extras.put("likes",likes);
                        } catch (Exception e) {
                            // Best-effort: default to 0 likes when the response cannot be parsed.
                            String likes = "0";
                            extras.put("likes",likes);
                            logger.debug(downloadPage.getRawText());
                        }
                    }
                    // author profile fields scraped from the topic-page author panel
                    if (urlSplit.startsWith("frontnc")){
                        try {
                            Html infoHtml = downloadPage.getHtml();
                            String author = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-info\"]//a[@class=\"name\"]/text()").get();
                            String signTime = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile\"]/div/text()").get();
                            String authorAddr = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile\"]/a/text()").get();
                            String jingHua = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][2]/strong/text()").get();
                            String topicCount = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][1]/strong/text()").get();
                            String replyCount = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][3]/strong/text()").get();
                            String identification = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile-rz\"]//a[@class=\"profile-cars-item\"]/@title").get();
                            extras.put("author",author);
                            extras.put("signTime",signTime);
                            extras.put("authorAddr",authorAddr);
                            extras.put("jingHua",jingHua);
                            extras.put("topicCount",topicCount);
                            extras.put("replyCount",replyCount);
                            extras.put("identification",identification);
                        } catch (Exception e) {
                            // Fix: same SLF4J misuse as above — log properly and fall back to defaults.
                            logger.warn("get user info failed", e);
                            extras.put("author","");
                            extras.put("signTime","");
                            extras.put("authorAddr","");
                            extras.put("jingHua","0");
                            extras.put("topicCount","0");
                            extras.put("replyCount","0");
                            extras.put("identification","");
                        }

                    }
                    // author follower count from the follows page
                    if (internalRequestUrl.matches(followsUrlRegex)){
                        try {
                            Html infoHtml = downloadPage.getHtml();
                            String follows = infoHtml.xpath("//div[@class=\"user-lv\"]/a[3]/span/text()").get();
                            if (StringUtils.isBlank(follows)){
                                follows = "0";
                            }
                            extras.put("follows",follows);
                        } catch (Exception e) {
                            extras.put("follows","0");
                        }
                    }
                }
            }
            else {
                if (internalRequestUrl.matches(followsUrlRegex)){
                    // Follower count is non-critical: default it and continue.
                    extras.put("follows","0");
                }else {
                    // Any other failed auxiliary request is critical: retry the topic.
                    requeueForRetry(crawlerRecord, extras, links);
                }
            }
        }
    }

    /**
     * Marks the current page as not washable/parsable and appends a fresh retry record
     * (carrying the extras collected so far) so the topic is downloaded again.
     */
    private void requeueForRetry(CrawlerRequestRecord crawlerRecord, Map<String, Object> extras, List<CrawlerRequestRecord> links) {
        crawlerRecord.setNeedWashPage(false);
        crawlerRecord.setNeedParsedPage(false);
        CrawlerRequestRecord newRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .recordKey(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(crawlerRecord.getReleaseTime())
                .copyBizTags()
                .needParsed(true)
                .needWashed(true)
                .notFilterRecord()
                .build();
        newRecord.getHttpRequest().setExtras(extras);
        links.add(newRecord);
    }

    /**
     * Dispatches washing of a downloaded page to the type-specific routines, based on
     * which data-type labels (article / interaction / comment) the request carries.
     *
     * @param crawlerRequestRecord the request whose result tags select the wash routines
     * @param httpPage             the downloaded page to wash
     * @return all CrawlerData produced by the applicable wash routines
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> washed = new ArrayList<>();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)) {
            washed.addAll(washArticle(crawlerRequestRecord, httpPage));
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            washed.addAll(washInteraction(crawlerRequestRecord, httpPage));
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)) {
            washed.addAll(washComment(crawlerRequestRecord, httpPage));
        }
        return washed;
    }

    /**
     * Washes the main post of a topic page into an article CrawlerData record,
     * de-obfuscating single anti-crawl font glyphs in the content via the font map
     * collected in {@code afterInternalDownload}.
     *
     * @param crawlerRequestRecord the topic-page request carrying counters/author info in extras
     * @param httpPage             the downloaded topic page
     * @return a list with one article record, or an empty list when the publish time cannot be parsed
     */
    public List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord,HttpPage httpPage) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String topicId = (String) extras.get("topicId");
        Html html = httpPage.getHtml();
        List<String> contents = html.xpath("//div[@class=\"post-container\"]//text() | //div[@class=\"post-container post-container--qa\"]//text()").all();
        StringBuffer content = new StringBuffer();
        JSONObject fonts = (JSONObject) extras.get("fonts");
        for (String text : contents) {
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            // Single non-Chinese characters may be obfuscated glyphs from the custom font;
            // look them up in the font map and substitute the decoded text when found.
            if (codes.length == 1) {
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0])) {
                    String code = Integer.toHexString(codes[0]).toUpperCase();
                    // Guard: the font map extra may be absent when the font API call failed.
                    JSONArray fontsArray = fonts == null ? null : fonts.getJSONArray(code);
                    if (null != fontsArray && fontsArray.size() > 0) {
                        JSONObject fontObj = fontsArray.getJSONObject(0);
                        content.append(fontObj.getString("textCode"));
                        continue;
                    }
                }
            }
            // Every non-decoded piece of text is appended verbatim.
            content.append(text);
        }
        List<String> images = html.xpath("//div[@class=\"post-container\"]/div[@class=\"tz-picture\"]/img/@data-src | //div[@class=\"post-container \"]/div[@class=\"tz-picture\"]/img/@data-src").all();
        StringBuffer imgs = new StringBuffer();
        for (String image : images) {
            imgs.append(image).append("\\0x1");
        }

        String articleTime = html.xpath("//div[@class=\"post-handle\"]/span[@class=\"post-handle-publish\"]/strong/text() | //div[@class=\"post-site\"]/div[@class=\"post-site-txt\"]/strong/text()").get();
        if (StringUtils.isBlank(articleTime)){
            // Fallback layout: some topic pages carry the publish time in a dedicated span.
            if (StringUtils.isNotBlank(html.xpath("//span[@class=\"publish-time\"]/text()").get())){
                articleTime = html.xpath("//span[@class=\"publish-time\"]/text()").get();
            }
        }

        String elite = html.xpath("//span[@class=\"stamp-text\"]/text()").get();
        String isElite = "否";
        if (StringUtils.isNotBlank(elite)){
            isElite = "是";
        }

        String forumName = html.xpath("//div[@class=\"name-wrap\"]/div/a/text()").get();

        List<String> allTags = html.xpath("//div[@class=\"post-site\"]/div[@class=\"post-site-tags\"]/a/text()").all();

        try {
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            // Normalize counts like "1.5万" to plain integers.
            String replyCount = extras.get("replyCount") == null ? "0" : (String) extras.get("replyCount");
            if (replyCount.endsWith("万")){
                if (replyCount.contains(".")){
                    // Fix: cast AFTER multiplying — the old "(int)parse * 10000" truncated the
                    // fraction first, turning "1.5万" into 10000 instead of 15000.
                    replyCount = String.valueOf((int) (Double.parseDouble(replyCount.replace("万","")) * 10000));
                }else {
                    replyCount = replyCount.replace("万","0000");
                }
            }
            long releaseTime = DateUtils.parseDate(articleTime, "yyyy-MM-dd HH:mm:ss").getTime();
            crawlerRequestRecord.setReleaseTime(releaseTime);
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                    .url(httpRequest.getUrl())
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Author, (String) extras.get("author"))
                    .addContentKV(Field_Content,content.toString().trim())
                    .addContentKV(Field_Floor, "楼主")
                    .addContentKV(Field_Title, (String) extras.get("title"))
                    .addContentKV(Field_Author_Id, (String) extras.get("authorId"))
                    .addContentKV(Field_Author_Follows, (String) extras.get("follows"))
                    .addContentKV(Field_Author_Identification_Model, (String) extras.get("identification"))
                    .addContentKV(Field_Author_Topic_Count, (String) extras.get("topicCount"))
                    .addContentKV(Field_Author_Pick_Count, (String) extras.get("jingHua"))
                    .addContentKV(Field_Author_Reply_Count, replyCount)
                    .addContentKV(Field_Author_From, (String) extras.get("authorAddr"))
                    .addContentKV(Field_Author_Sign_In, (String) extras.get("signTime"))
                    .addContentKV(Field_Images, imgs.toString().trim())
                    .resultLabelTag(article)
                    .build();
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz("forum");
            if (null != allTags && allTags.size() > 0){
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type,allTags);
            }
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Video, (String) extras.get("isVideo"));
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite, isElite);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Forum_Name, forumName);
            crawlerArticleDataList.add(crawlerArticleData);
        } catch (ParseException e) {
            // Fix: was logger.error(e.getMessage(), "parse date error") — wrong SLF4J argument order.
            logger.error("parse article publish time error: {}", articleTime, e);
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes interaction (counter) data. A request carrying a "commentId" extra is a
     * comment-level interaction whose count is read from the reply-list API response;
     * otherwise a topic-level interaction is built from the counters collected into
     * the extras by {@code afterInternalDownload}.
     *
     * @param crawlerRequestRecord the request carrying counters in its extras
     * @param httpPage             the downloaded page (API response for comment-level)
     * @return a list with exactly one interaction record
     */
    public List<CrawlerData> washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> interactions = new ArrayList<>();
        HttpRequest request = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = request.getExtras();
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // The presence of a commentId distinguishes comment-level from topic-level interaction.
        Object commentId = extras.get("commentId");
        if (null != commentId) {
            int commentCount = 0;
            try {
                commentCount = JSONObject.parseObject(httpPage.getRawText()).getIntValue("commentCount");
            } catch (Exception e) {
                logger.error(e.getMessage());
            }

            CrawlerData commentInteraction = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                    .url(request.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_I_Comments, String.valueOf(commentCount))
                    .resultLabelTag(interaction)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .build();
            commentInteraction.tagsCreator().bizTags().addSiteBiz("forum");
            commentInteraction.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
            interactions.add(commentInteraction);
            return interactions;
        }

        // Topic-level interaction: all counters were pre-collected into the extras.
        String topicId = (String) extras.get("topicId");
        CrawlerData topicInteraction = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), topicId))
                .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                .url((String) extras.get("itemUrl"))
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .addContentKV(Field_I_Comments, (String) extras.get("comments"))
                .addContentKV(Field_I_Views, (String) extras.get("views"))
                .addContentKV(Field_I_Likes, (String) extras.get("likes"))
                .resultLabelTag(interaction)
                .build();
        topicInteraction.tagsCreator().bizTags().addSiteBiz("forum");
        interactions.add(topicInteraction);
        return interactions;
    }

    /**
     * Washes a comment (reply floor) into a comment CrawlerData record, plus one record
     * per sub-comment found in the reply-list API response carried by {@code httpPage}.
     * Comment text is de-obfuscated via the font map collected in {@code afterInternalDownload}.
     *
     * @param crawlerRequestRecord the comment request carrying text/author info in extras
     * @param httpPage             the downloaded sub-comment API response
     * @return the comment record followed by any sub-comment records
     */
    public List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord,HttpPage httpPage) {
        List<CrawlerData> crawlerCommentDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String commentId = (String) extras.get("commentId");
        String topicId = (String) extras.get("topicId");
        String itemUrl = (String) extras.get("itemUrl");
        List<String> allContent = castList(extras.get("allContent"), String.class);
        StringBuffer content = new StringBuffer();
        JSONObject fonts = (JSONObject) extras.get("fonts");
        // Guard: "allContent" may be missing or not a list (castList can yield null/empty).
        if (null != allContent) {
            for (String text : allContent) {
                text = text.trim();
                int[] codes = StringUtils.toCodePoints(text);
                // Single non-Chinese characters may be obfuscated glyphs from the custom
                // font; substitute the decoded text when the font map has an entry.
                if (codes.length == 1) {
                    char[] chars = Character.toChars(codes[0]);
                    if (!isChinese(chars[0])) {
                        String code = Integer.toHexString(codes[0]).toUpperCase();
                        // Guard: the font map extra may be absent when the font API call failed.
                        JSONArray fontsArray = fonts == null ? null : fonts.getJSONArray(code);
                        if (null != fontsArray && fontsArray.size() > 0) {
                            JSONObject fontObj = fontsArray.getJSONObject(0);
                            content.append(fontObj.getString("textCode"));
                            continue;
                        }
                    }
                    content.append(text);
                    continue;
                }
                // Multi-character fragments are separated by a space, as in the original layout.
                content.append(text).append(" ");
            }
        }
        // Fix: replyCount could be null here (washArticle already guarded this), causing an NPE.
        String replyCount = extras.get("replyCount") == null ? "0" : (String) extras.get("replyCount");
        if (replyCount.endsWith("万")){
            if (replyCount.contains(".")){
                // Fix: cast AFTER multiplying — the old "(int)parse * 10000" truncated the
                // fraction first, turning "1.5万" into 10000 instead of 15000.
                replyCount = String.valueOf((int) (Double.parseDouble(replyCount.replace("万","")) * 10000));
            }else {
                replyCount = replyCount.replace("万","0000");
            }
        }
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        CrawlerData crawlerCommentData = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                .url(itemUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .addContentKV(Field_Author, (String) extras.get("author"))
                .addContentKV(Field_Content,content.toString().trim())
                .addContentKV(Field_Title, unescapeHtml2J((String) extras.get("title")))
                .addContentKV(Field_Author_Id, (String) extras.get("authorId"))
                .addContentKV(Field_Floor, (String) extras.get("floor"))
                .addContentKV(Field_Author_Identification_Model, (String) extras.get("identification"))
                .addContentKV(Field_Author_Topic_Count, (String) extras.get("topicCount"))
                .addContentKV(Field_Author_Pick_Count, (String) extras.get("jingHua"))
                .addContentKV(Field_Author_Reply_Count, replyCount)
                .addContentKV(Field_Author_From, (String) extras.get("authorAddr"))
                .addContentKV(Field_Author_Sign_In, (String) extras.get("signTime"))
                .resultLabelTag(comment)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                .build();
        crawlerCommentData.tagsCreator().bizTags().addSiteBiz("forum");
        crawlerCommentData.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        crawlerCommentDataList.add(crawlerCommentData);

        // The page body is the sub-comment API response; wash each sub-comment too.
        try {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            JSONObject resultObj = jsonObject.getJSONObject("result");
            int commentCount = resultObj.getIntValue("rowCount");
            if (commentCount > 0){
                JSONArray comments = resultObj.getJSONArray("list");
                for (Object comment : comments) {
                    JSONObject commentObj = (JSONObject)comment;
                    String subCommentId = commentObj.getString("commentId");
                    String subAuthorId = commentObj.getString("memberId");
                    String subCommentDate = commentObj.getString("date");
                    JSONObject contentObj = (JSONObject) commentObj.getJSONArray("content").get(0);
                    String subContent = contentObj.getString("content");

                    CrawlerData crawlerSubCommentData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.comment.enumVal(), subCommentId))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                            .url(httpRequest.getUrl())
                            .releaseTime(DateUtils.parseDate(subCommentDate,"yyyy-MM-dd HH:mm:ss").getTime())
                            .addContentKV(Field_Content,subContent)
                            .addContentKV(Field_Author_Id, subAuthorId)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .build();
                    crawlerSubCommentData.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
                    crawlerSubCommentData.tagsCreator().bizTags().addSiteBiz("forum");
                    crawlerCommentDataList.add(crawlerSubCommentData);
                }
            }
        } catch (Exception e) {
            // Fix: pass the exception so the stack trace is preserved in the log.
            logger.warn("read api result error , state code is : {}, http page is : {}" , httpPage.getStatusCode(), httpPage.getRawText(), e);
        }

        return crawlerCommentDataList;
    }
        // Intentionally a no-op: this script requires no per-context work after execution.
        @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }


    /**
     * Returns whether a character falls in a CJK Unicode block, including the
     * punctuation blocks that cover Chinese quotation marks, commas and full stops.
     *
     * @param ch the character to test
     * @return true for Chinese characters (and CJK punctuation), false otherwise
     */
    private static boolean isChinese(char ch) {
        Character.UnicodeBlock block = Character.UnicodeBlock.of(ch);
        return block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B
                // covers the Chinese full stop "。"
                || block == Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION
                // covers the Chinese comma "，"
                || block == Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS
                // covers the Chinese quotation mark "“"
                || block == Character.UnicodeBlock.GENERAL_PUNCTUATION;
    }

    /**
     * Casts a raw {@code Object} expected to hold a {@code List} into a typed
     * {@code List<T>}, element by element.
     *
     * <p>Fix: previously returned {@code null} when {@code obj} was not a list, which
     * caused an NPE in callers that iterate the result directly (e.g. washComment's
     * enhanced-for over "allContent"). An empty list is returned instead.
     *
     * @param obj   the raw object, expected to be a {@link List}
     * @param clazz the element type each entry is cast to
     * @param <T>   the target element type
     * @return a new list of cast elements; empty when {@code obj} is not a {@code List}
     * @throws ClassCastException if any element is not an instance of {@code clazz}
     */
    public static <T> List<T> castList(Object obj, Class<T> clazz){
        List<T> result = new ArrayList<T>();
        if (obj instanceof List<?>) {
            for (Object element : (List<?>) obj) {
                result.add(clazz.cast(element));
            }
        }
        return result;
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a random User-Agent from the fallback pool.
     *
     * <p>Fix: {@code RandomUtils.nextInt(start, end)} treats {@code end} as exclusive,
     * so the previous bound of {@code size() - 1} could never return the last entry.
     *
     * @return a randomly selected User-Agent string
     */
    private static String getRandomUA(){
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Seeds the shared user-agent pool from the fake-useragent service response carried
     * by the given support-source record. Any failure is logged and otherwise ignored,
     * leaving the pool unchanged.
     *
     * @param supportSourceRecord record whose internal download page holds the browsers JSON
     */
    private void initUserAgents(CrawlerRequestRecord supportSourceRecord) {
        try {
            HttpPage page = supportSourceRecord.getInternalDownloadPage();
            Json body = new Json(page.getRawText());
            boolean fromFakeUserAgentService = supportSourceRecord.getHttpRequest().getUrl().contains("fake-useragent.herokuapp.com/browsers");
            if (fromFakeUserAgentService) {
                String browsersJson = body.jsonPath($_type + ".browsers").get();
                Map<String, Object> browserMap = new Json(browsersJson).toObject(Map.class);
                // Each map value is a list of UA strings for one browser family.
                for (Object uaList : browserMap.values()) {
                    userAgents.addAll((List<String>) uaList);
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Returns the next User-Agent from the shared pool in round-robin order, falling
     * back to a random hard-coded agent when the pool is empty or the lookup fails.
     * Thread-safe: index advance and lookup happen under the shared lock.
     */
    private String getOneUserAgent() {
        synchronized (lock) {
            // Wrap the round-robin index once it runs past the pool.
            if (userAgentIndex.get() >= userAgents.size()) {
                userAgentIndex.set(0);
            }
            try {
                return userAgents.get(userAgentIndex.getAndIncrement());
            } catch (Exception e) {
                // Pool not initialised (or shrank concurrently) — use a built-in UA.
                return getRandomUA();
            }
        }
    }

    /**
     * Returns a shallow copy of the given extras map: the map instance is new, the
     * keys and values are shared with the source.
     *
     * @param inExtras the source map; must not be null
     * @return a new {@link HashMap} containing the same entries
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        return new HashMap<>(inExtras);
    }

    /**
     * Repeatedly HTML-unescapes a string until it no longer looks escaped, handling
     * double/triple-escaped entities such as {@code &amp;amp;quot;}. Capped at 6
     * unescape rounds (matching the original bound), and stops early once a round
     * produces no change.
     *
     * <p>Fix: null input previously threw an NPE on {@code str.contains} — callers
     * pass possibly-absent extras values (e.g. "title" in washComment).
     *
     * @param str the possibly HTML-escaped string; may be null
     * @return the unescaped string, or null when {@code str} is null
     */
    public static String unescapeHtml2J(String str){
        if (str == null) {
            return null;
        }
        int times = 0;
        while (str.contains("&") && str.contains(";")){
            String unescaped = StringEscapeUtils.unescapeHtml(str);
            if (unescaped.equals(str)) {
                // Fixed point reached — the remaining "&...;" is not an entity.
                break;
            }
            str = unescaped;
            times ++;
            if (times > 5){
                break;
            }
        }
        return str;
    }

}
