package com.chance.cc.crawler.development.scripts.autohome.forum;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.interaction;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2020-11-16 09:51:57
 * @email okprog@sina.com
 */
public class AutoHomeForumFabuCrawlerScript extends CrawlerCommonScript {

    /** SLF4J logger; made {@code static final} per convention (one instance per class, immutable). */
    private static final Logger logger = LoggerFactory.getLogger(AutoHomeForumFabuCrawlerScript.class);

    // Legacy topic-list API template (#pageindex / #bbsid placeholders). Not referenced in this
    // chunk but kept (file may use it elsewhere); now final so it cannot be reassigned.
    private static final String listUrlFormat = "https://club.autohome.com.cn/frontapi/topics/getByBbsId?" +
            "pageindex=#pageindex&pagesize=50&bbs=c&bbsid=#bbsid&fields=topicid%2Ctitle%2Cpost_memberid%2Cpost_membername%2C" +
            "postdate%2Cispoll%2Cispic%2Cisrefine%2Creplycount%2Cviewcount%2Cvideoid%2Cisvideo%2Cvideoinfo%2Cqainfo%2C" +
            "tags%2Ctopictype%2Cimgs%2Cjximgs%2Curl%2Cpiccount%2Cisjingxuan%2Cissolve%2Cliveid%2Clivecover%2Ctopicimgs&" +
            "orderby=topicid-";
    /** Topic-list API template; format args: page number, bbs (forum) id. */
    private static final String listUrlsFormat = "https://club.autohome.com.cn/frontapi/data/page/club_get_topics_list?" +
            "page_num=%s&page_size=50&club_bbs_type=c&club_bbs_id=%s&club_order_type=2"; //page   bbsId
    /** Views/replies counter API; format arg: topic id. */
    private static final String viewsUrlFormat = "https://club.autohome.com.cn/frontapi/getclicksandreplys?topicids=%s";
    /** Likes ("zan") API; format arg: topic id (trailing '-' is part of the API input). */
    private static final String likesUrlFormat = "https://club.api.autohome.com.cn/web/zan/list?input=%s-";
    /** Author profile fragment; format arg: author/member id. */
    private static final String authorInfoFormat = "https://club.autohome.com.cn/frontnc/user/getdetailusertpl/%s-0";
    /** Forum home page; format arg: forum/series key. */
    private static final String homeUrlFormat = "https://www.autohome.com.cn/%s/";
    /** Author personal page (used for follower count); format arg: author id. */
    private static final String followsUrlFormat = "https://i.autohome.com.cn/%s";
    private static final String baseUrl = "https://club.autohome.com.cn";

    // URL classification patterns used by initUrlRegulars() and parseLinks() routing.
    private static final String indexRegex = "https?://www\\.autohome\\.com\\.cn/";
    private static final String homeRegex = "https?://www\\.autohome\\.com\\.cn/\\d*/";
    private static final String listRegex = "https://club\\.autohome\\.com\\.cn/frontapi/data/page/club_get_topics_list\\S*";
    private static final String articleRegex = "https?://club\\.autohome\\.com\\.cn/bbs/thread/\\S*/\\d*-\\d*\\.html";
    private static final String keysRegex = "https?://\\S*v1/meta/autohome/keys\\S*";
    private static final String followsUrlRegex = "https://i\\.autohome\\.com\\.cn/\\d*";
    /** Matches only page 1 of an article thread (…-1.html), which triggers the internal downloads. */
    private static final String articleFirstRegex = "https?://club\\.autohome\\.com\\.cn/bbs/thread/\\S*/\\d*-1\\.html\\S*";
    /** Matches the internal TTF font-decryption service endpoint. */
    private static final String parseFontRegex = "https?://\\S*/crawler/font/api/v1/parseTTFont";

    /** "site" tag value this script handles; see {@link #crawlerCheck}. */
    private static final String scriptSite = "forum_release";


    /**
     * Returns the crawler domain key this script is registered under.
     *
     * @return the fixed domain identifier {@code "autohome"}
     */
    @Override
    public String domain() {
        return "autohome";
    }

    /**
     * Registers the URL patterns this script is allowed to follow:
     * site index, forum home, topic-list API, article pages and keyword seeds.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {indexRegex, homeRegex, listRegex, articleRegex, keysRegex};
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Decides whether this script should process the given request, by
     * comparing the request's business "site" tag against {@link #scriptSite}.
     *
     * @param crawlerRequestRecord the incoming request record
     * @return {@code true} only when the "site" tag equals {@code forum_release}
     *         (case-insensitive); {@code false} when the tag is missing
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // Constant-first comparison: a missing "site" tag (null) now yields false
        // instead of throwing a NullPointerException.
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /**
     * Expands keyword seed records into forum home-page requests.
     * <p>
     * When the first supporting record is a keyword-service response (URL
     * matches {@link #keysRegex}), every keyword in its {@code content} array
     * becomes a GET request to the corresponding forum home URL. In every
     * other case (no support records, wrong URL, parse failure, empty result)
     * the superclass default preparation is returned.
     *
     * @param requestRecord        the originating request record
     * @param supportSourceRecords auxiliary records (keyword API responses); may be null
     * @return the expanded home-page requests, or the superclass default
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.size() <1){
            return super.prepareRequest(requestRecord,supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)){
            try {
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                // status == 0 marks a successful keyword-service response.
                if (jsonObject.getIntValue("status") == 0){
                    JSONArray contents = jsonObject.getJSONArray("content");
                    for (Object content : contents) {
                        String keyword = ((JSONObject) content).getString("keyword");
                        // Presumably the keyword is a numeric series/forum id that slots
                        // into https://www.autohome.com.cn/%s/ -- TODO confirm with the keyword service.
                        String homeUrl = String.format(homeUrlFormat,keyword);

                        // Home pages are parsed for links only, never washed into result data.
                        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .httpUrl(homeUrl)
                                .recordKey(homeUrl)
                                .releaseTime(System.currentTimeMillis())
                                .needWashed(false)
                                .needParsed(true)
                                .notFilterRecord()
                                .copyBizTags()
                                .build();
                        crawlerRequestRecord.getHttpRequest().setMethod(HttpConstant.Method.GET);
                        allItemRecords.add(crawlerRequestRecord);
                    }
                }

            }catch (Exception e){
                logger.error(e.getMessage(),e);
            }
        }
        // Fall back to the default behavior if nothing usable was produced.
        if (allItemRecords.isEmpty()){
            return super.prepareRequest(requestRecord,supportSourceRecords);
        }
        return allItemRecords;

    }

    /**
     * Routes a downloaded page to the appropriate link parser (forum home,
     * topic list or article), re-queueing failed downloads up to 10 times.
     *
     * @param crawlerRequestRecord the record that produced this page
     * @param httpPage             the downloaded page
     * @return the follow-up requests; empty when the page is permanently gone,
     *         {@code null} when the URL matches no known pattern
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<CrawlerRequestRecord>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200){
            Map<String, Object> extras = lastRequest.getExtras();
            int downloadTimes = 1;
            if (null == extras){
                extras = new HashMap<>();
                extras.put("downloadTimes",downloadTimes);
                // BUGFIX: attach the new extras map to the request; previously it was
                // created but never set, so the retry counter was lost on re-queue.
                lastRequest.setExtras(extras);
            }else {
                try {
                    // BUGFIX: the stored counter is an Integer; the old (String) cast
                    // always threw ClassCastException, resetting the counter to 1 and
                    // making the "> 10" retry cap unreachable. String.valueOf handles
                    // both Integer and String representations.
                    downloadTimes = Integer.parseInt(String.valueOf(extras.get("downloadTimes")));
                    extras.put("downloadTimes",downloadTimes + 1);
                } catch (Exception e) {
                    extras.put("downloadTimes",downloadTimes);
                }
            }
            // Give up permanently on deleted threads or after 10 failed attempts.
            if ((null != httpPage.getRawText() && httpPage.getRawText().contains("主楼已被删除")) || downloadTimes > 10){
                logger.error("页面不存在：" + statusCode);
                return parsedLinks;
            }

            // Re-queue the same request for another download attempt.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            return parsedLinks;
        }

        CrawlerBusinessTags bizTags = crawlerRequestRecord.tagsCreator().bizTags();
        CategoryTag categoryTag = bizTags.getCategoryTag();
        if (lastRequestUrl.matches(homeRegex)){
            return parseHomeLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequestUrl);
        }
        if (lastRequestUrl.matches(listRegex)){
            return parseListLinks(crawlerRequestRecord, httpPage, parsedLinks, categoryTag);
        }
        if (lastRequestUrl.matches(articleRegex)){
            return parseArticleLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest, lastRequestUrl);
        }
        return null;
    }

    /**
     * Handles a downloaded article (thread) page.
     * <p>
     * For the first page of a thread it schedules internal downloads for the
     * view/reply counters, likes, author profile and follower count. It then
     * scans the post body for single non-Chinese characters (AutoHome's
     * font-obfuscated glyphs) and, when an obfuscation TTF is referenced,
     * schedules a POST to an internal font-decryption service that maps the
     * hex code points back to readable text.
     *
     * @return {@code parsedLinks} with internal-download requests appended,
     *         or the article itself re-queued when an anti-bot page was served
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest, String lastRequestUrl) {
        // Anti-bot interstitial page: retry the request instead of washing it.
        if (httpPage.getRawText().contains("尊敬的用户您好，您的访问出现异常，为确认本次访问为正常用户行为")){
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            return parsedLinks;
        }
        crawlerRequestRecord.setNeedWashPage(true);
        Map<String, Object> extras = lastRequest.getExtras();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article) && crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
            // First page only: internally download interaction counters and author info.
            if (lastRequestUrl.matches(articleFirstRegex)){
                // Views and reply counts.
                String viewsUrl = String.format(viewsUrlFormat,extras.get("topicId"));
                CrawlerRequestRecord itemInternalViewsRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(viewsUrl)
                        .httpUrl(viewsUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                // Shares the article's extras map so results land back on the article record.
                itemInternalViewsRecord.getHttpRequest().setExtras(extras);
                itemInternalViewsRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
                itemInternalViewsRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemInternalViewsRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                itemInternalViewsRecord.getHttpRequest().setResponseCharset("UTF-8");
                parsedLinks.add(itemInternalViewsRecord);
                // Like count.
                String likesUrl = String.format(likesUrlFormat, extras.get("topicId"));
                CrawlerRequestRecord itemInternalLikesRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(likesUrl)
                        .httpUrl(likesUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                itemInternalLikesRecord.getHttpRequest().setExtras(extras);
                itemInternalLikesRecord.getHttpRequest().addHeader("Host","club.api.autohome.com.cn");
                itemInternalLikesRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemInternalLikesRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                itemInternalLikesRecord.getHttpRequest().setResponseCharset("UTF-8");
                parsedLinks.add(itemInternalLikesRecord);
                // Author profile fragment.
                String authorInfoUrl = String.format(authorInfoFormat,extras.get("authorId"));
                CrawlerRequestRecord itemInternalInfoRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(authorInfoUrl)
                        .httpUrl(authorInfoUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .needWashed(true)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                itemInternalInfoRecord.getHttpRequest().setExtras(extras);
                itemInternalInfoRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
                itemInternalInfoRecord.getHttpRequest().addHeader("Referer",lastRequestUrl);
                itemInternalInfoRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                itemInternalInfoRecord.getHttpRequest().setResponseCharset("UTF-8");
                parsedLinks.add(itemInternalInfoRecord);

                // Follower count from the author's personal page.
                String followsUrl = String.format(followsUrlFormat,extras.get("authorId"));
                CrawlerRequestRecord followsInfoRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(followsUrl)
                        .httpUrl(followsUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(false)
                        .needWashed(true)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .build();
                followsInfoRecord.getHttpRequest().setExtras(extras);
                followsInfoRecord.getHttpRequest().addHeader("Host","i.autohome.com.cn");
                followsInfoRecord.getHttpRequest().addHeader("Referer","https://club.autohome.com.cn/");
                followsInfoRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                followsInfoRecord.getHttpRequest().setResponseCharset("UTF-8");
                parsedLinks.add(followsInfoRecord);
            }
        }
        // Character de-obfuscation: locate the custom TTF used to mask glyphs.
        List<String> contents = httpPage.getHtml().xpath("//div[@class=\"post-container\"]//text() | //div[@class=\"post-container post-container--qa\"]//text()").all();
        // NOTE(review): the pattern expects a double dot before "ttf" (\.\.ttf) -- looks
        // deliberate for AutoHome's "..ttf" font filenames, but verify against live pages.
        Matcher ttfMatcher = Pattern.compile("url\\('//k3\\.autoimg\\.cn/g\\d*/\\w*/\\w*/\\S*/\\S*\\.\\.ttf'\\)\\s*format").matcher(httpPage.getRawText());
        String ttfUrl = "";
        while (ttfMatcher.find()){
            ttfUrl = "https:" + ttfMatcher.group(0).split("'")[1];
        }
        Set<String> hexList = new HashSet<>();
        for (String text : contents) {
            // Collect single non-Chinese characters: these are the obfuscated glyphs.
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            if (codes.length == 1){
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0])){
                    // Upper-case hex code point, used as key into the decryption map.
                    hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                }
            }
        }
        if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
            extras.put("ttfUrl",ttfUrl);
            extras.put("articleContents",contents);
            extras.put("hexList",hexList);
            // Internal download that asks the font service for the hex -> glyph map.
            // NOTE(review): hard-coded internal service address; consider moving to configuration.
            String parseFontUrl = "http://192.168.1.217:9599/crawler/font/api/v1/parseTTFont";
            CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                    .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpUrl(parseFontUrl)
                    .recordKey(parseFontUrl)
                    .needParsed(false)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpConfig(HttpConfig.me(domain()))
                    .build();

            // The request the font service should itself perform to fetch the TTF.
            HttpRequest ttfRequest = new HttpRequest();
            ttfRequest.setUrl(ttfUrl);
            ttfRequest.addHeader("Origin","https://club.autohome.com.cn");
            ttfRequest.addHeader("Referer","https://club.autohome.com.cn/");
            ttfRequest.addHeader("User-Agent",getRandomUA());

            HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
            // Raw TTF bytes must not be run through the HTML generator.
            httpConfig.setResponseTextGenerateHtml(false);
            HttpRequest httpRequest = parseFontRecord.getHttpRequest();
            Map<String,Object> params = new HashMap<>();
            params.put("httpRequest",ttfRequest);
            params.put("httpConfig",httpConfig);
            params.put("needParseList",hexList);
            params.put("domain",domain());
            httpRequest.setMethod(HttpConstant.Method.POST);
            httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
            parsedLinks.add(parseFontRecord);
        }
        return parsedLinks;
    }

    /**
     * Parses a topic-list API response: schedules the next list page and one
     * article request per topic, carrying the list metadata (topic id, title,
     * author, counters, flags) along in the request extras.
     *
     * @param crawlerRequestRecord the record that produced this list page
     * @param httpPage             the downloaded list-API JSON response
     * @param parsedLinks          accumulator for the generated requests
     * @param categoryTag          business category tag (currently unused here)
     * @return {@code parsedLinks} with the turn-page and item requests appended
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, CategoryTag categoryTag) {
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
        // returncode == 0 marks a successful list-API response.
        if (null != jsonObject && jsonObject.getIntValue("returncode") == 0){
            HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
            Map<String, Object> urlParams = getUrlParams(httpRequest.getUrl());
            int currentPageIndex = Integer.parseInt((String)urlParams.get("page_num"));
            Map<String, Object> homeExtras = httpRequest.getExtras();
            String forumKey = (String) homeExtras.get("forumKey");
            String nextPageUrl = String.format(listUrlsFormat,(currentPageIndex + 1),forumKey);

            // Keep paging forward; the forum key travels along in the extras.
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            turnPageRequest.getHttpRequest().setExtras(homeExtras);
            parsedLinks.add(turnPageRequest);


            JSONArray jsonArray = jsonObject.getJSONObject("result").getJSONArray("items");
            for (Object itemObject : jsonArray) {
                JSONObject itemJsonObject = (JSONObject) itemObject;
                String topicId = itemJsonObject.getString("biz_id");
                String itemUrl = itemJsonObject.getString("pc_url");
                String postdate = itemJsonObject.getString("publish_time");

                String title = itemJsonObject.getString("title");
                String authorId = itemJsonObject.getString("author_id");
                String author = itemJsonObject.getString("author_name");
                String comments = itemJsonObject.getString("reply_count");
                String views = itemJsonObject.getString("pv");
                String isVideo = itemJsonObject.getString("club_is_video");
                String isJingXuan = itemJsonObject.getString("topictype");
                String imgs = itemJsonObject.getString("imgList");

                try {
                    long releaseTime = DateUtils.parseDate(postdate, "yyyy/MM/dd HH:mm:ss").getTime();

                    // Article pages carry both article and interaction result types.
                    CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRequestRecord)
                            .recordKey(itemUrl)
                            .httpUrl(itemUrl)
                            .releaseTime(releaseTime)
                            .resultLabelTag(article)
                            .resultLabelTag(interaction)
                            .copyBizTags()
                            .build();
                    Map<String,Object> extras = new HashMap<>();
                    extras.put("topicId",topicId);
                    extras.put("title",title);
                    extras.put("author",author);
                    extras.put("authorId",authorId);
                    extras.put("comments",comments);
                    extras.put("views",views);
                    extras.put("isVideo",isVideo);
                    extras.put("isJingXuan",isJingXuan);
                    extras.put("imgs",imgs);
                    itemRecord.getHttpRequest().setExtras(extras);
                    itemRecord.getHttpRequest().addHeader("Host","club.autohome.com.cn");
                    itemRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                    itemRecord.getHttpRequest().setResponseCharset("UTF-8");

                    parsedLinks.add(itemRecord);
                } catch (ParseException e) {
                    // FIX: log the throwable itself so the stack trace is preserved.
                    logger.error(e.getMessage(), e);
                }
            }

        }
        return parsedLinks;
    }

    /**
     * Handles a forum home page: extracts the brand and car-series names,
     * derives the forum key from the URL, and schedules the first page of the
     * topic-list API with series/brand tags attached.
     *
     * @return {@code parsedLinks} with the first list-page request appended
     */
    private List<CrawlerRequestRecord> parseHomeLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, String lastRequestUrl) {
        Html page = httpPage.getHtml();
        // Brand / series names, covering both the new and the legacy page layouts.
        String brandName = page.xpath("//div[@class=\"container\"]/div/a[2]/text()|//div[@class=\"path\"]/a[3]/text()").get();
        String seriesName = page.xpath("//div[@class=\"athm-sub-nav__car__name\"]//h1//text()|//div[@class=\"subnav-title-name\"]/a/text()").get();
        // Forum key is the path segment after ".cn/", e.g. .../4749/ -> 4749.
        String forumKey = lastRequestUrl.split("cn/")[1].replace("/","");
        String firstListUrl = String.format(listUrlsFormat,1,forumKey);

        CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(firstListUrl)
                .recordKey(firstListUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();

        // Attach series/brand metadata as business tags for downstream washing.
        Map<String, String> seriesEntry = new HashMap<>();
        seriesEntry.put("series_name",seriesName);
        seriesEntry.put("series_url",lastRequestUrl);
        seriesEntry.put("series_id",forumKey);
        List<Map<String, String>> seriesList = new ArrayList<>();
        seriesList.add(seriesEntry);
        listRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series,seriesList);
        listRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand,brandName);

        HttpRequest listRequest = listRecord.getHttpRequest();
        Map<String,Object> extras = new HashMap<>();
        extras.put("forumKey",forumKey);
        listRequest.setExtras(extras);
        listRequest.addHeader("Host","club.autohome.com.cn");
        listRequest.addHeader("User-Agent",getRandomUA());

        parsedLinks.add(listRecord);
        return parsedLinks;
    }

    /**
     * Merges the results of the per-article internal downloads (view/reply
     * counters, likes, author profile, follower count, font-decryption map)
     * into the article request's extras so {@code washPage} can use them.
     * <p>
     * When the font service reports failure, or any mandatory internal
     * download fails, the article is re-queued via {@link #buildRetryRecord};
     * a failed follower-count download merely defaults to "0".
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        // Shared extras map on the article record: all counters land here.
        Map<String, Object> extras = crawlerRecord.getHttpRequest().getExtras();
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            HttpRequest internalDownloadRecordHttpRequest = internalDownloadRecord.getHttpRequest();
            String internalRequestUrl = internalDownloadRecordHttpRequest.getUrl();
            HttpPage downloadPage = internalDownloadRecord.getInternalDownloadPage();
            if (downloadPage.isDownloadSuccess()){
                if (internalRequestUrl.matches(parseFontRegex)){
                    // Font-decryption service: status == 0 carries the hex -> glyph map.
                    JSONObject pageObj = JSONObject.parseObject(downloadPage.getRawText());
                    if (pageObj.getIntValue("status") == 0){
                        JSONObject fonts = pageObj.getJSONObject("content");
                        extras.put("fonts",fonts);
                    }else {
                        // Decryption failed: skip washing and retry the article page.
                        links.add(buildRetryRecord(crawlerRecord, extras));
                    }
                }else {
                    String urlSplit = internalRequestUrl.split("cn/")[1];
                    // View and reply counters.
                    if (urlSplit.startsWith("frontapi")){
                        try {
                            JSONObject jsonObject = JSONObject.parseObject(downloadPage.getRawText());
                            JSONArray results = jsonObject.getJSONArray("result");
                            JSONObject resultObject = (JSONObject) results.get(0);
                            String comments = resultObject.getString("replys");
                            String allComments = resultObject.getString("allreplys");
                            String views = resultObject.getString("views");
                            extras.put("comments",comments);
                            extras.put("allComments",allComments);
                            extras.put("views",views);
                        } catch (Exception e) {
                            // FIX: message and throwable were swapped, losing the stack trace.
                            logger.error("get views failed", e);
                            extras.put("comments","0");
                            extras.put("allComments","0");
                            extras.put("views","0");
                        }
                    }
                    // Like count; response body wraps a JSON array.
                    if (urlSplit.startsWith("web")){
                        try{
                            JSONObject jsonObject = JSONObject.parseObject(downloadPage.getRawText().split("\\[")[1].split("]")[0]);
                            String likes = jsonObject.getString("z");
                            extras.put("likes",likes);
                        } catch (Exception e) {
                            String likes = "0";
                            extras.put("likes",likes);
                        }
                    }
                    // Author profile fields scraped from the profile HTML fragment.
                    if (urlSplit.startsWith("frontnc")){
                        try {
                            Html infoHtml = downloadPage.getHtml();
                            String author = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-info\"]//a[@class=\"name\"]/text()").get();
                            String signTime = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile\"]/div/text()").get();
                            String authorAddr = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile\"]/a/text()").get();
                            String jingHua = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][2]/strong/text()").get();
                            String topicCount = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][1]/strong/text()").get();
                            String replyCount = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-about-count\"]/a[@class=\"count-item\"][3]/strong/text()").get();
                            String identification = infoHtml.xpath("//div[@class=\"user fold\"]/div[@class=\"user-profile-rz\"]//a[@class=\"profile-cars-item\"]/@title").get();
                            extras.put("author",author);
                            extras.put("signTime",signTime);
                            extras.put("authorAddr",authorAddr);
                            extras.put("jingHua",jingHua);
                            extras.put("topicCount",topicCount);
                            extras.put("replyCount",replyCount);
                            extras.put("identification",identification);
                        } catch (Exception e) {
                            // FIX: message and throwable were swapped, losing the stack trace.
                            logger.warn("get user info failed", e);
                            extras.put("author","");
                            extras.put("signTime","");
                            extras.put("authorAddr","");
                            extras.put("jingHua","0");
                            extras.put("topicCount","0");
                            extras.put("replyCount","0");
                            extras.put("identification","");
                        }
                    }
                    // Follower count from the author's personal page; default "0".
                    if (internalRequestUrl.matches(followsUrlRegex)){
                        try {
                            Html infoHtml = downloadPage.getHtml();
                            String follows = infoHtml.xpath("//div[@class=\"user-lv\"]/a[3]/span/text()").get();
                            if (StringUtils.isBlank(follows)){
                                follows = "0";
                            }
                            extras.put("follows",follows);
                        } catch (Exception e) {
                            extras.put("follows","0");
                        }
                    }
                }
            }
            else {
                // Download failed: a follower-count failure is tolerable, anything else
                // forces a full retry of the article page.
                if (internalRequestUrl.matches(followsUrlRegex)){
                    extras.put("follows","0");
                }else {
                    links.add(buildRetryRecord(crawlerRecord, extras));
                }
            }
        }
    }

    /**
     * Marks the article record as not washable/parsable in this round and
     * builds a fresh retry request for the same URL carrying the same extras.
     * Extracted from the two identical inline copies in afterInternalDownload.
     */
    private CrawlerRequestRecord buildRetryRecord(CrawlerRequestRecord crawlerRecord, Map<String, Object> extras) {
        crawlerRecord.setNeedWashPage(false);
        crawlerRecord.setNeedParsedPage(false);
        CrawlerRequestRecord newRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .recordKey(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(crawlerRecord.getReleaseTime())
                .copyBizTags()
                .needParsed(true)
                .needWashed(true)
                .notFilterRecord()
                .build();
        newRecord.getHttpRequest().setExtras(extras);
        return newRecord;
    }

    /**
     * Washes a downloaded page into result data, producing article and/or
     * interaction records according to the request's result tags.
     * Returns {@code null} for blank pages (existing caller contract).
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        if (StringUtils.isBlank(httpPage.getRawText())) {
            return null;
        }
        List<CrawlerData> results = new ArrayList<>();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)) {
            results.addAll(washArticle(crawlerRequestRecord, httpPage));
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            results.add(washInteraction(crawlerRequestRecord, httpPage));
        }
        return results;
    }

    /**
     * Washes a topic detail page into an article-type {@link CrawlerData}:
     * de-obfuscates the body text, collects images, flags elite/video topics,
     * normalizes the reply count and attaches business tags.
     *
     * @param crawlerRequestRecord request carrying list-page extras (topicId, author, counts, fonts...)
     * @param httpPage             the downloaded topic page
     * @return a single-element list with the article data, or an empty list on failure
     */
    public List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String topicId = (String) extras.get("topicId");
        Html html = httpPage.getHtml();
        List<String> contents = html.xpath("//div[@class=\"post-container\"]//text() | //div[@class=\"post-container post-container--qa\"]//text()").all();
        // The site obfuscates individual characters with a custom font; "fonts" maps a
        // glyph's hex code point to its real character (under key "textCode").
        StringBuilder content = new StringBuilder();
        JSONObject fonts = (JSONObject) extras.get("fonts");
        for (String text : contents) {
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            // Obfuscated glyphs appear as single-character, non-Chinese text nodes.
            if (codes.length == 1 && !isChinese(Character.toChars(codes[0])[0])) {
                String code = Integer.toHexString(codes[0]).toUpperCase();
                JSONArray fontsArray = (null == fonts) ? null : fonts.getJSONArray(code);
                if (null != fontsArray && fontsArray.size() > 0) {
                    // Replace the fake glyph with its decoded character.
                    content.append(fontsArray.getJSONObject(0).getString("textCode"));
                    continue;
                }
            }
            content.append(text);
        }
        List<String> images = html.xpath("//div[@class=\"post-container\"]/div[@class=\"tz-picture\"]/img/@data-src | //div[@class=\"post-container \"]/div[@class=\"tz-picture\"]/img/@data-src").all();
        StringBuilder imgs = new StringBuilder();
        for (String image : images) {
            imgs.append(image).append("\\0x1"); // multi-value separator used by the pipeline
        }
        String elite = html.xpath("//span[@class=\"stamp-text\"]/text()").get();
        String isElite = StringUtils.isNotBlank(elite) ? "是" : "否";
        List<String> allTags = html.xpath("//div[@class=\"post-site\"]/div[@class=\"post-site-tags\"]/a/text()").all();
        String forumName = html.xpath("//div[@class=\"name-wrap\"]/div/a/text()").get();
        try {
            // Normalize counts like "1.5万" / "3万" to plain integer strings.
            String replyCount = (String) extras.get("replyCount");
            if (replyCount != null && replyCount.endsWith("万")) {
                if (replyCount.contains(".")) {
                    // Multiply BEFORE truncating: "1.5万" must become 15000, not 10000
                    // (the old code cast Double.parseDouble to int first).
                    replyCount = String.valueOf((int) (Double.parseDouble(replyCount.replace("万", "")) * 10000));
                } else {
                    replyCount = replyCount.replace("万", "0000");
                }
            }
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                    .url(httpRequest.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Author, (String) extras.get("author"))
                    .addContentKV(Field_Content, content.toString().trim())
                    .addContentKV(Field_Floor, "楼主")
                    .addContentKV(Field_Title, unescapeHtml2J((String) extras.get("title")))
                    .addContentKV(Field_Author_Id, (String) extras.get("authorId"))
                    .addContentKV(Field_Author_Follows, (String) extras.get("follows"))
                    .addContentKV(Field_Author_Identification_Model, (String) extras.get("identification"))
                    .addContentKV(Field_Author_Topic_Count, (String) extras.get("topicCount"))
                    .addContentKV(Field_Author_Pick_Count, (String) extras.get("jingHua"))
                    .addContentKV(Field_Author_Reply_Count, replyCount)
                    .addContentKV(Field_Author_From, (String) extras.get("authorAddr"))
                    .addContentKV(Field_Author_Sign_In, (String) extras.get("signTime"))
                    .addContentKV(Field_Images, imgs.toString().trim())
                    .resultLabelTag(article)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .build();
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz("forum");
            if (null != allTags && allTags.size() > 0) {
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, allTags);
            }
            // Null-safe: a missing "isVideo" extra must not drop the whole article.
            String isVideo = "1".equals(extras.get("isVideo")) ? "是" : "否";
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Video, isVideo);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite, isElite);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Forum_Name, forumName);
            crawlerArticleDataList.add(crawlerArticleData);
        } catch (Exception e) {
            // Old code passed e.getMessage() as the SLF4J format string and dropped the
            // stack trace entirely; log message first, throwable last.
            logger.error("failed to wash article, topicId={}", topicId, e);
        }
        return crawlerArticleDataList;
    }

    /**
     * Builds the interaction metrics record (comments / views / likes) for a topic,
     * parented to the matching article record via a shared id scheme.
     *
     * @param crawlerRequestRecord request carrying the metric extras from the list page
     * @param httpPage             the downloaded topic page
     * @return the interaction data, or {@code null} when building fails
     */
    public CrawlerData washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        try {
            Map<String, Object> extras = lastRequest.getExtras();
            String topicId = (String) extras.get("topicId");
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            String domain = crawlerRequestRecord.getDomain();
            CrawlerData interactionData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", domain, site, interaction.enumVal(), topicId))
                    .parentId(StringUtils.joinWith("-", domain, site, article.enumVal(), topicId))
                    .url(lastRequest.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_I_Comments, (String) extras.get("comments"))
                    .addContentKV(Field_I_Views, (String) extras.get("views"))
                    .addContentKV(Field_I_Likes, (String) extras.get("likes"))
                    .resultLabelTag(interaction)
                    .build();
            interactionData.tagsCreator().bizTags().addSiteBiz("forum");
            return interactionData;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return null;
        }
    }

    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {
        // Intentionally a no-op: this script needs no post-execution cleanup.
    }

    /**
     * Tells whether a character belongs to one of the Unicode blocks that hold
     * Chinese ideographs or Chinese/full-width punctuation. Characters outside
     * these blocks are treated as obfuscated font glyphs by the content washer.
     *
     * @param ch the character to test
     * @return {@code true} for Chinese characters or punctuation, {@code false} otherwise
     */
    private static boolean isChinese(char ch) {
        Character.UnicodeBlock block = Character.UnicodeBlock.of(ch);
        Character.UnicodeBlock[] chineseBlocks = {
                Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS,
                Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS,
                Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A,
                Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B,
                Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION,   // Chinese "。"
                Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS, // Chinese "，"
                Character.UnicodeBlock.GENERAL_PUNCTUATION            // Chinese quotes
        };
        for (Character.UnicodeBlock candidate : chineseBlocks) {
            if (block == candidate) {
                return true;
            }
        }
        return false;
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks one User-Agent at random from {@code agentList}.
     * <p>
     * Note: commons-lang3 {@code RandomUtils.nextInt(startInclusive, endExclusive)}
     * is end-EXCLUSIVE, so the bound must be {@code size()} — the previous
     * {@code size() - 1} could never select the last entry.
     *
     * @return a randomly chosen User-Agent string
     */
    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Parses a URL's query string into a parameter map.
     *
     * @param url full URL, e.g. {@code http://x.y.com?aa=11&bb=22&cc=33}
     * @return map of parameter name to raw (not URL-decoded) value, or {@code null}
     *         when the URL carries no query string (historical contract kept for callers)
     */
    private Map<String, Object> getUrlParams(String url) {
        // indexOf avoids the old split("\\?")[1], which threw
        // ArrayIndexOutOfBoundsException on a URL ending in a bare '?'.
        int queryStart = url.indexOf('?');
        if (queryStart < 0) {
            return null;
        }
        String query = url.substring(queryStart + 1);
        if (StringUtils.isBlank(query)) {
            return null;
        }
        Map<String, Object> map = new HashMap<String, Object>(0);
        for (String pair : query.split("&")) {
            // Limit 2 keeps values that themselves contain '=' (e.g. base64 padding),
            // which an unlimited split used to drop entirely.
            String[] kv = pair.split("=", 2);
            if (kv.length == 2) {
                map.put(kv[0], kv[1]);
            }
        }
        return map;
    }


    /**
     * Repeatedly HTML-unescapes a string until it stops changing, undoing
     * double/triple-escaped entities (e.g. {@code &amp;lt;} -> {@code <}).
     * Capped at 6 passes to guarantee termination.
     *
     * @param str possibly escaped text; {@code null} is returned as-is
     * @return the fully unescaped text
     */
    public static String unescapeHtml2J(String str) {
        if (str == null) {
            return null;
        }
        int times = 0;
        while (str.contains("&") && str.contains(";")) {
            String unescaped = StringEscapeUtils.unescapeHtml(str);
            if (unescaped.equals(str)) {
                // Contains '&' and ';' but no entity — stop instead of looping uselessly.
                break;
            }
            str = unescaped;
            times++;
            if (times > 5) {
                break;
            }
        }
        return str;
    }

}
