package com.chance.cc.crawler.development.scripts.bitauto.forum;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.DigestUtils;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2020-12-23 13:19:08
 * @email okprog@sina.com
 */
public class BitAutoForumReplyCrawlerScript extends CrawlerCommonScript {

    private Logger logger = LoggerFactory.getLogger(BitAutoForumReplyCrawlerScript.class);
    private static String nowDomain = "bitauto";

    private static final String indexRegex = "https?://www\\.bitauto\\.com/";
    private static final String keysRegex = "https?://\\S*v1/meta/bitauto/keys\\S*";
    private static final String homeUrlRegex = "https?://car\\.\\w*\\.com/\\S*/";
    private static final String forumKeyUrlRegex = "https?://car\\.\\w*\\.com/web_forum/api/pc/forum/getSeoNameBySerialId\\S*";
    private static final String listUrlRegex = "https?://baa\\.\\w*\\.com/\\S*/index-0-0-\\d*.html";
    private static final String articleUrlRegex = "https?://baa\\.\\w*\\.com/\\S*/\\w*-\\d*-\\d*\\.html";
    private static final String parseFontRegex = "https?://\\S*/crawler/font/api/v1/parseTTFont";
    private static final String followsRegex = "https?://i\\.\\w*\\.com/u\\d*/";


    private static final String homeUlrFormat = "https://car.%s.com/%s/";
    private static final String listUrlFormat = "https://baa.%s.com/%s/index-0-0-%s.html";
    private static final String articleUrlFormat = "https://baa.%s.com/%s/%s-%s-%s.html";
    private static final String forumKeyUrlFormat = "https://car.#domain.com/web_forum/api/pc/forum/getSeoNameBySerialId?cid=508&param=%7B%22serialId%22%3A%22#forumId%22%7D";
    private static final String followsUrlFormat = "https://i.%s.com/u%s/";

    private static final String scriptSite = "forum_reply";

    @Override
    public String domain() {
        return "bitauto";
    }

    @Override
    public void initUrlRegulars() {
        addUrlRegular(indexRegex);
        addUrlRegular(keysRegex);
        addUrlRegular(homeUrlRegex);
        addUrlRegular(forumKeyUrlRegex);
        addUrlRegular(listUrlRegex);
        addUrlRegular(articleUrlRegex);
    }

    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.size() < 1) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)) {
            try {
                CrawlerRequestRecord indexRecord = supportSourceRecords.get(1);
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                HttpPage downloadPage = indexRecord.getInternalDownloadPage();
                String indexUrl = downloadPage.getHtml().xpath("//li[@data-ctitle=\"yiche\"]/a/@href").get();
                nowDomain = indexUrl.split("\\.")[1];

                if (jsonObject.getIntValue("status") == 0) {
                    JSONArray objects = jsonObject.getJSONArray("content");
                    for (Object object : objects) {
                        String keyword = ((JSONObject)object).getString("keyword");
                        String homeUlr = String.format(homeUlrFormat,nowDomain,keyword);
                        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .recordKey(homeUlr)
                                .httpUrl(homeUlr)
                                .releaseTime(System.currentTimeMillis())
                                .copyBizTags()
                                .copyScheduleTags()
                                .notFilterRecord()
                                .build();
                        crawlerRequestRecord.getHttpRequest().setMethod("GET");
                        allItemRecords.add(crawlerRequestRecord);
                    }
                }

            }catch (Exception e){
                logger.error(e.getMessage(),e);
            }
        }
        if (allItemRecords.isEmpty()){
            return super.prepareRequest(requestRecord,supportSourceRecords);
        }
        return allItemRecords;

    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        return crawlerSite.equalsIgnoreCase(scriptSite);
    }

    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<CrawlerRequestRecord>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || (statusCode != 200 && statusCode != 404)){
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.error("下载状态：{}，内容为空：{}，请求状态码：{},执行回推",httpPage.isDownloadSuccess(),StringUtils.isBlank(httpPage.getRawText()),statusCode);
            return parsedLinks;
        }
        if (statusCode == 404){
            if (httpPage.getRawText().contains("很抱歉，您访问的页面不存在")){
                logger.error("页面不存在：" + statusCode);
                return parsedLinks;
            }
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            logger.error("伪404状态码，执行回推");
            return parsedLinks;
        }
        if (lastRequestUrl.matches(homeUrlRegex)){
            return parseHomeLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(forumKeyUrlRegex)){
            return parseForumKey(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(listUrlRegex)){
            return parseListLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequestUrl);
        }
        if (lastRequestUrl.matches(articleUrlRegex)){
            return parseCommentLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest, lastRequestUrl);
        }
        return null;
    }

    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest, String lastRequestUrl) {
        crawlerRequestRecord.setNeedWashPage(true);
        Map<String, Object> extras = lastRequest.getExtras();
        String topicId = (String) extras.get("topicId");
        String forumKey = (String) extras.get("forumKey");
        String type = (String) extras.get("type");
        int currentPage = Integer.parseInt(lastRequestUrl.substring(lastRequestUrl.lastIndexOf("-") + 1).split("\\.")[0]);
        if (1 == currentPage){
            //第一页直接翻到最后页
            String pageStr = httpPage.getHtml().xpath("//div[@class=\"page-nation\"]/div/@data-pages").get();
            if (StringUtils.isNotBlank(pageStr)){
                int pageNum = Integer.parseInt(pageStr);
                String articlePageUrl = String.format(articleUrlFormat,nowDomain,forumKey,type,topicId,pageNum);
                CrawlerRequestRecord articlePageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(articlePageUrl)
                        .recordKey(articlePageUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .needParsed(true)
                        .needWashed(true)
                        .resultLabelTag(interaction)
                        .resultLabelTag(comment)
                        .copyBizTags()
                        .copyScheduleTags()
                        .notFilterRecord()
                        .build();
                HttpRequest httpRequest = articlePageRecord.getHttpRequest();
                httpRequest.setExtras(copyExtras(extras));
                parsedLinks.add(articlePageRecord);
            }
            //第一页进行字体解密
            parseTTFLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest, extras);
            //内链接获取粉丝数
            String authorId = httpPage.getHtml().xpath("//div[@data-isalltop]/@data-uid").get();
            String followsUrl = String.format(followsUrlFormat,nowDomain,authorId);
            CrawlerRequestRecord followsRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(followsUrl)
                    .recordKey(followsUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .needParsed(false)
                    .build();
            parsedLinks.add(followsRecord);
        }
        if (currentPage > 2){
            //大于二进行-1翻页
            int pageNum = currentPage - 1;
            String articlePageUrl = String.format(articleUrlFormat,nowDomain,forumKey,type,topicId,pageNum);
            CrawlerRequestRecord articlePageRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(articlePageUrl)
                    .recordKey(articlePageUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .needParsed(true)
                    .needWashed(true)
                    .resultLabelTag(comment)
                    .copyBizTags()
                    .copyScheduleTags()
                    .notFilterRecord()
                    .build();
            HttpRequest httpRequest = articlePageRecord.getHttpRequest();
            httpRequest.setExtras(copyExtras(extras));
            parsedLinks.add(articlePageRecord);
        }

        String commentsCount = httpPage.getHtml().xpath("//i[@id=\"huiNumber\"]/text()").get();
        if (Integer.parseInt(commentsCount) > 0){
            crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(comment);
        }
        return parsedLinks;
    }

    private void parseTTFLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest, Map<String, Object> extras) {
        //第一页进行字体解密
        Matcher ttfMt = Pattern.compile("/yc-pc/comment/font/yc-ft\\.ttf").matcher(httpPage.getRawText());
        String ttfUrl = "";
        while (ttfMt.find()){
            ttfUrl = "https://baa.yiche.com" + ttfMt.group(0);
        }
        if (StringUtils.isNotBlank(ttfUrl)){
            List<String> contents = httpPage.getHtml().xpath("//div[@data-isalltop]/div/div[@class=\"post-content\"]//div[@class=\"post-wrap\"]//p//text()").all();
            Set<String> hexList = new HashSet<>();
            for (String text : contents) {
                // 判断出非中文的单个字符
                text = text.trim();
                int[] codes = StringUtils.toCodePoints(text);
                if (codes.length == 1){
                    char[] chars = Character.toChars(codes[0]);
                    if (!isChinese(chars[0])){
                        hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                    }
                }
            }
            if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
                extras.put("ttfUrl",ttfUrl);
                extras.put("articleContents",contents);
                extras.put("hexList",hexList);
                //内置下载获取ttfMap
                String parseFontUrl = "http://192.168.1.217:9599/crawler/font/api/v1/parseTTFont";
                CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                        .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                        .httpUrl(parseFontUrl)
                        .recordKey(parseFontUrl)
                        .needParsed(false)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                        .httpConfig(HttpConfig.me(domain()))
                        .build();

                HttpRequest ttfRequest = new HttpRequest();
                ttfRequest.setUrl(ttfUrl);
                ttfRequest.addHeader("Origin","https://baa.yiche.com");
                ttfRequest.addHeader("Referer",lastRequest.getUrl());
                ttfRequest.addHeader("User-Agent",getRandomUA());

                HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
                httpConfig.setResponseTextGenerateHtml(false);
                HttpRequest httpRequest = parseFontRecord.getHttpRequest();
                Map<String,Object> params = new HashMap<>();
                params.put("httpRequest",ttfRequest);
                params.put("httpConfig",httpConfig);
                params.put("needParseList",hexList);
                params.put("domain",domain());
                httpRequest.setMethod(HttpConstant.Method.POST);
                httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
                parsedLinks.add(parseFontRecord);
            }
        }
    }

    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, String lastRequestUrl) {
        String forumName = httpPage.getHtml().xpath("//h1[@class=\"top-des-title\"]/text()").get().trim();
        String forumKey = lastRequestUrl.split("com/")[1].split("/")[0];
        int currentPage = Integer.parseInt(lastRequestUrl.substring(lastRequestUrl.lastIndexOf("-") + 1).split("\\.")[0]);
        String nextPageUrl = String.format(listUrlFormat,nowDomain,forumKey,(currentPage + 1));
        CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(nextPageUrl)
                .recordKey(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyScheduleTags()
                .build();
        parsedLinks.add(listRecord);


        List<Selectable> itemNodes = httpPage.getHtml()
                .xpath("//div[@class=\"power-list-top list-theme\"]/div/div[@class=\"col-panel\"]" +
                        "/div[@class=\"col-item col-4\"]/div[@class=\"tz-item tz-last-rep\"]" +
                        "/div[@class=\"tz-item-txt item-bot\"]").nodes();
        for (Selectable itemNode : itemNodes) {
            String itemUrl = itemNode.xpath("./a/@href").get();
            String articleKey = itemUrl.split("-")[1].split("\\.")[0];
            String type = itemUrl.split("-")[0].split(forumKey)[1].replace("/","");
            itemUrl = String.format(articleUrlFormat,nowDomain,forumKey,type,articleKey,1);

            String pubTime = itemNode.xpath("./a/text()").get().trim();
            try {
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .recordKey(itemUrl)
                        .releaseTime(DateUtils.parseDate(pubTime,"yyyy-MM-dd HH:mm:ss").getTime())
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                        .needParsed(true)
                        .needWashed(true)
                        .copyBizTags()
                        .copyScheduleTags()
                        .build();
                Map<String,Object> extras = new HashMap<>();
                extras.put("topicId",articleKey);
                extras.put("forumKey",forumKey);
                extras.put("type",type);
                itemRecord.getHttpRequest().setExtras(extras);
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                logger.error(e.getMessage(),"parse date error");
            }
        }
        return parsedLinks;
    }

    private List<CrawlerRequestRecord> parseForumKey(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        int status = pageObj.getIntValue("status");
        String message = pageObj.getString("message");
        JSONObject dataObj = pageObj.getJSONObject("data");

        if (status == 1 && "success".equalsIgnoreCase(message) && null != dataObj){
            String forumKey = dataObj.getString("seoName");
            String forumUrl = String.format(listUrlFormat,nowDomain,forumKey,1);
            CrawlerRequestRecord forumIndexRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(forumUrl)
                    .recordKey(forumUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyScheduleTags()
                    .notFilterRecord()
                    .build();
            parsedLinks.add(forumIndexRecord);
        }else if (status != 1 || !"success".equalsIgnoreCase(message)){
            logger.warn("forum key download failed: " + pageObj);
            parsedLinks.add(crawlerRequestRecord);
        }
        return parsedLinks;
    }

    private List<CrawlerRequestRecord> parseHomeLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String lastRequestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        String seriesId = lastRequestUrl.split("com")[1].replaceAll("/","");
        Matcher forumIdMt = Pattern.compile("CarCommonCSID\\s*=\\s*\"\\d*\";").matcher(httpPage.getRawText());
        String forumId = httpPage.getHtml().xpath("//div[@class=\"big-img\"]/a[1]/@href").get().split("serial")[1].replaceAll("/","");
        while (forumIdMt.find()){
            forumId = forumIdMt.group(0).split("\"")[1];
        }
        String carSeries = httpPage.getHtml().xpath("//div[@class=\"cx-brand-info fl\"]/h1//em/text()").get();
        String brand = null;
        try {
            brand = httpPage.getHtml().xpath("//div[@class=\"yiche-breadcrumb\"]//div[@class=\"yiche-breadcrumb_item\"][3]/a/text()").get().trim();
        } catch (Exception e) {
            brand = "";
        }
        String forumKeyUrl = forumKeyUrlFormat.replace("#domain",nowDomain).replace("#forumId",forumId);
        CrawlerRequestRecord forumKeyRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(forumKeyUrl)
                .recordKey(forumKeyUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyScheduleTags()
                .notFilterRecord()
                .build();
        List<Map<String,String>> series = new ArrayList<>();
        Map<String,String> seriesInfo = new HashMap<>();
        seriesInfo.put("series_name",carSeries);
        seriesInfo.put("series_url",lastRequestUrl);
        seriesInfo.put("series_id",seriesId);
        series.add(seriesInfo);
        forumKeyRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series,series);
        forumKeyRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand,brand);
        HttpRequest httpRequest = forumKeyRecord.getHttpRequest();
        String timestamp = String.valueOf(System.currentTimeMillis());
        String sign = generatorSignFromUrl(forumKeyUrl, timestamp);
        httpRequest.addHeader("Host","car."+nowDomain+".com");
        httpRequest.addHeader("Referer",lastRequestUrl);
        httpRequest.addHeader("User-Agent",getRandomUA());
        httpRequest.addHeader("x-platform","pc");
        httpRequest.addHeader("x-sign",sign);
        httpRequest.addHeader("x-timestamp",timestamp);
        httpRequest.addHeader("x-user-guid", UUID.randomUUID().toString());
        forumKeyRecord.getHttpConfig().setCircularRedirectsAllowed(true);
        parsedLinks.add(forumKeyRecord);

        return parsedLinks;
    }
    /**
     * @param url 请求Url
     * @return header的sign值
     */
    private String generatorSignFromUrl(String url, String timestamp){
        Map<String, Object> urlParams = getUrlParams(url);
        try {
            if (null != urlParams){
                String u = "19DDD1FBDFF065D3A4DA777D2D7A81EC";
                String cid = (String) urlParams.get("cid");
                String param = URLDecoder.decode((String) urlParams.get("param"),"utf-8");
                String s = "cid=" + cid + "&param=" + param + u + timestamp;
                return DigestUtils.md5DigestAsHex(s.getBytes());
            }
        } catch (UnsupportedEncodingException e) {
            logger.error(e.getMessage(),"url decode error");
        }
        return UUID.randomUUID().toString().replaceAll("-","");
    }

    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        //获取回复数 放入extra
        Map<String, Object> extras = crawlerRecord.getHttpRequest().getExtras();
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            HttpRequest internalDownloadRecordHttpRequest = internalDownloadRecord.getHttpRequest();
            String internalRequestUrl = internalDownloadRecordHttpRequest.getUrl();
            HttpPage downloadPage = internalDownloadRecord.getInternalDownloadPage();
            if (downloadPage.isDownloadSuccess()){
                if (internalRequestUrl.matches(parseFontRegex)){
                    JSONObject pageObj = JSONObject.parseObject(downloadPage.getRawText());
                    if (pageObj.getIntValue("status") == 0){
                        JSONObject fonts = pageObj.getJSONObject("content");
                        extras.put("fonts",fonts);
                    }
                }
                if (internalRequestUrl.matches(followsRegex)){
                    String follows = downloadPage.getHtml().xpath("//span[@class=\"fansNumber\"]/@data-count").get();
                    extras.put("follows",follows);
                }
            }
            else {
                crawlerRecord.setNeedWashPage(false);
                crawlerRecord.setNeedParsedPage(false);
                CrawlerRequestRecord newRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                        .recordKey(crawlerRecord.getHttpRequest().getUrl())
                        .releaseTime(crawlerRecord.getReleaseTime())
                        .copyBizTags()
                        .needParsed(true)
                        .needWashed(true)
                        .notFilterRecord()
                        .build();
                newRecord.getHttpRequest().setExtras(extras);
                links.add(newRecord);
            }
        }
    }

    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        if (StringUtils.isBlank(httpPage.getRawText())){
            return null;
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)){
            crawlerDataList.addAll(washArticle(crawlerRequestRecord,httpPage));

        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
            crawlerDataList.addAll(washInteraction(crawlerRequestRecord,httpPage));
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)){
            crawlerDataList.addAll(washComment(crawlerRequestRecord,httpPage));
        }
        return crawlerDataList;
    }

    public List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord,HttpPage httpPage) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = httpRequest.getUrl();
        Map<String, Object> extras = httpRequest.getExtras();
        String topicId = (String) extras.get("topicId");

        Html html = httpPage.getHtml();
        List<String> contents = html.xpath("//div[@data-isalltop]/div/div[@class=\"post-content\"]//div[@class=\"post-wrap\"]//p//text()").all();
        StringBuffer content = new StringBuffer();
        JSONObject fonts = (JSONObject) extras.get("fonts");
        for (String text : contents) {
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            //只有一个字符
            if (codes.length == 1 && null != fonts){
                char[] chars = Character.toChars(codes[0]);
                //判断是否是正常中文字符
                if (!isChinese(chars[0])){
                    String code = Integer.toHexString(codes[0]).toUpperCase();
                    JSONArray fontsArray = fonts.getJSONArray(code);
                    //判断是否存在该字符的中文解密
                    if (null != fontsArray && fontsArray.size() > 0){
                        JSONObject fontObj = fontsArray.getJSONObject(0);
                        String str = fontObj.getString("textCode");
                        content.append(str);
                        continue;
                    }
                    content.append(text);
                    continue;
                }
                content.append(text);
                continue;
            }
            content.append(text);
        }
        List<String> images = html.xpath("//div[@data-isalltop]/div/div[@class=\"post-content\"]//div[@class=\"post-wrap\"]//img/@data-original").all();
        StringBuffer imgs = new StringBuffer();
        for (String image : images) {
            imgs.append(image).append("\\0x1");
        }

        String articleTime = html.xpath("//span[@class=\"post-time\"]/text()").get();
        if (StringUtils.isNotBlank(articleTime)){
            articleTime = articleTime.substring(3);
        }
        try {
            String title = html.xpath("//div[@data-isalltop]/div/div[@class=\"post-content\"]//h5//span/text()").get();
            String author = html.xpath("//div[@data-isalltop]/@data-name").get().trim();
            String authorId = html.xpath("//div[@data-isalltop]/@data-uid").get();
            List<Selectable> videoNodes = html.xpath("//div[@data-isalltop]/div/div[@class=\"post-content\"]//div[@class=\"post-wrap\"]/div[@class=\"video-wrapper\"]").nodes();
            String isVideo = "否";
            if (null != videoNodes && videoNodes.size() > 0){
                isVideo = "是";
            }
            List<Selectable> eliteNode = html.xpath("//div[@class=\"detail-icon jin\"]").nodes();
            String isElite = "否";
            if (null != eliteNode && eliteNode.size() > 0){
                isElite = "是";
            }

            String jingHua = html.xpath("//div[@data-isalltop]/div[2]//ul/li[1]/span/a/text()").get().trim();
            String topicCount = html.xpath("//div[@data-isalltop]/div[2]//ul/li[2]/span[2]/a/text()").get().trim();
            String replyCount = html.xpath("//div[@data-isalltop]/div[2]//ul/li[2]/span[4]/a/text()").get().trim();
            if (replyCount.endsWith("万")){
                if (replyCount.contains(".")){
                    replyCount = String.valueOf((int)Double.parseDouble(replyCount.replace("万","")) * 10000);
                }else {
                    replyCount = replyCount.replace("万","0000");
                }
            }
            List<Selectable> infoList = html.xpath("//div[@data-isalltop]//ul[@class=\"grade-list\"]/li").nodes();
            String authorAddr = "";
            String signTime = "";
            String identification = "";
            switch (infoList.size()){
                case 4:
                    signTime = html.xpath("//div[@data-isalltop]/div[2]//ul/li[3]/span[2]/text()").get().trim();
                    break;
                case 5:
                    authorAddr = html.xpath("//div[@data-isalltop]/div[2]//ul/li[3]/span[2]/text()").get().trim();
                    signTime = html.xpath("//div[@data-isalltop]/div[2]//ul/li[4]/span[2]/text()").get().trim();
                    break;
                case 6:
                    authorAddr = html.xpath("//div[@data-isalltop]/div[2]//ul/li[3]/span[2]/text()").get().trim();
                    signTime = html.xpath("//div[@data-isalltop]/div[2]//ul/li[5]/span[2]/text()").get().trim();
                    identification = html.xpath("//div[@data-isalltop]/div[2]//ul/li[4]/span[2]/text()").get().trim();
                    break;
                default:
                    logger.warn("a uncased link");
            }
            String postClient = html.xpath("//div[@class=\"info-tips\"]/p[1]/text()").get();
            String floor = html.xpath("//div[@data-isalltop]//h5[@class=\"title-box\"]/em/text()").get();

            String forumName = html.xpath("//span[@class=\"current-bbs\"]/a/text()").get();
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Forum_Name,forumName);

            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                    .url(lastRequestUrl)
                    .releaseTime(DateUtils.parseDate(articleTime,"yyyy-MM-dd HH:mm:ss").getTime())
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Content,content.toString().trim())
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Author_Identification_Model, identification)
                    .addContentKV(Field_Author_Topic_Count, topicCount)
                    .addContentKV(Field_Author_Pick_Count, jingHua)
                    .addContentKV(Field_Author_Reply_Count, replyCount)
                    .addContentKV(Field_Author_Follows, (String) extras.get("follows"))
                    .addContentKV(Field_Author_From, authorAddr)
                    .addContentKV(Field_Author_Sign_In, signTime)
                    .addContentKV(Field_Floor, floor)
                    .addContentKV(Field_Images, imgs.toString().trim())
                    .resultLabelTag(article)
                    .build();
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz("forum");
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_PostClient,postClient);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Video,isVideo);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite,isElite);
            crawlerArticleDataList.add(crawlerArticleData);
        } catch (ParseException e) {
            logger.error(e.getMessage(), "parse date error");
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes interaction metrics (reply count, view count) of a topic page into a single
     * {@link CrawlerData} record.
     *
     * @param crawlerRequestRecord request record carrying the topic id extras and biz tags
     * @param httpPage             downloaded topic page
     * @return a one-element list on success, or an empty list when extraction fails;
     *         never {@code null}, consistent with the other wash* methods in this script
     */
    public List<CrawlerData> washInteraction(CrawlerRequestRecord crawlerRequestRecord,HttpPage httpPage){
        List<CrawlerData> crawlerInteractionDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        try {
            Map<String, Object> extras = lastRequest.getExtras();
            String topicId = (String)extras.get("topicId");
            Html html = httpPage.getHtml();
            String comments = html.xpath("//i[@id=\"huiNumber\"]/text()").get();
            // the view-num node text is expected to be "<label> <number>" — TODO confirm on live pages
            String views = html.xpath("//span[@class=\"view-num\"]/text()").get().split(" ")[1];
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
            CrawlerData articleInteraction = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), topicId))
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                    .url(lastRequest.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_I_Comments, comments)
                    .addContentKV(Field_I_Views, views)
                    .resultLabelTag(interaction)
                    .build();
            articleInteraction.tagsCreator().bizTags().addSiteBiz("forum");
            crawlerInteractionDataList.add(articleInteraction);
        } catch (Exception e) {
            // fixed message: the old call used e.getMessage() as the SLF4J format string,
            // which breaks when the message is null or contains "{}"
            logger.error("failed to wash interaction data, url={}", lastRequest.getUrl(), e);
        }
        // empty (not null) on failure, so callers never need a null check
        return crawlerInteractionDataList;
    }

    /**
     * Washes the comment (reply) nodes of a topic page into {@link CrawlerData} records,
     * one per comment floor.
     *
     * @param crawlerRequestRecord request record carrying the topic id extras and biz tags
     * @param httpPage             downloaded topic page
     * @return comment records parsed from the page; comments with an unparsable publish
     *         date are logged and skipped
     */
    private List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        List<CrawlerData> crawlerCommentDataList = new ArrayList<>();
        List<Selectable> commentNodes = httpPage.getHtml().xpath("//div[@class=\"club-detail postcontbox\"]/div[@class=\"postcont-list clearfix\"]").nodes();
        // NOTE(review): nodes are reversed before processing — presumably the page lists
        // newest first and downstream expects floor order; confirm before changing
        Collections.reverse(commentNodes);
        String topicId = (String) extras.get("topicId");
        for (Selectable commentNode : commentNodes) {

            List<String> contents = commentNode.xpath("./div[@class=\"postright fr\"]/div[@class=\"post-content\"]/div[@class=\"post-width\"]//p//text()").all();
            // skip floors without any text content
            if (null == contents || contents.isEmpty()){
                continue;
            }
            // StringBuilder instead of StringBuffer: no cross-thread sharing here
            StringBuilder content = new StringBuilder();
            for (String text : contents) {
                content.append(text);
            }
            String author = commentNode.xpath("./@data-name").get();
            String authorId = commentNode.xpath("./@data-uid").get();
            String contentId = commentNode.xpath("./@data-id").get();
            String floor = commentNode.xpath("./@data-floor").get();
            String pubTime = commentNode.xpath("./div[@class=\"postright fr\"]/div[@class=\"post-content\"]/div[@class=\"post-text\"]//span[@class=\"post-time\"]//text()").get();
            // the time node reads like "...于 yyyy-MM-dd HH:mm:ss"; keep the part after "于"
            pubTime = pubTime.split("于")[1];
            String replyFloor = "";
            // a quote box ("yinyongbox") marks a reply to an earlier floor
            Selectable yinNode = commentNode.xpath("./div[@class=\"postright fr\"]/div[@class=\"post-content\"]/div[@class=\"post-width\"]/div[@class=\"yinyongbox\"]");
            if (null != yinNode && yinNode.nodes().size() > 0){
                replyFloor = yinNode.xpath("./div/a[@class=\"floor-link\"]/@data-floor").get();
            }
            String jingHua = commentNode.xpath("./div[2]//ul/li[1]/span/a/text()").get().trim();
            String topicCount = commentNode.xpath("./div[2]//ul/li[2]/span[2]/a/text()").get().trim();
            String replyCount = expandWanCount(commentNode.xpath("./div[2]//ul/li[2]/span[4]/a/text()").get().trim());
            // the author-info list length decides which <li> holds address/sign-in/identification
            List<Selectable> infoList = commentNode.xpath(".//ul[@class=\"grade-list\"]/li").nodes();
            String authorAddr = "";
            String signTime = "";
            String identification = "";
            switch (infoList.size()){
                case 4:
                    signTime = commentNode.xpath("./div[2]//ul/li[3]/span[2]/text()").get().trim();
                    break;
                case 5:
                    authorAddr = commentNode.xpath("./div[2]//ul/li[3]/span[2]/text()").get().trim();
                    signTime = commentNode.xpath("./div[2]//ul/li[4]/span[2]/text()").get().trim();
                    break;
                case 6:
                    authorAddr = commentNode.xpath("./div[2]//ul/li[3]/span[2]/text()").get().trim();
                    signTime = commentNode.xpath("./div[2]//ul/li[5]/span[2]/text()").get().trim();
                    identification = commentNode.xpath("./div[2]//ul/li[4]/span[2]/text()").get().trim();
                    break;
                default:
                    logger.warn("a uncased link");
            }
            try {
                long releaseTime = DateUtils.parseDate(pubTime,"yyyy-MM-dd HH:mm:ss").getTime();

                String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
                CrawlerData crawlerCommentData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), contentId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), topicId))
                        .url(httpRequest.getUrl())
                        .releaseTime(releaseTime)
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content,content.toString().trim())
                        .addContentKV(Field_Author_Identification_Model, identification)
                        .addContentKV(Field_Author_Topic_Count, topicCount)
                        .addContentKV(Field_Author_Pick_Count, jingHua)
                        .addContentKV(Field_Author_Reply_Count, replyCount)
                        .addContentKV(Field_Author_From, authorAddr)
                        .addContentKV(Field_Author_Sign_In, signTime)
                        .addContentKV(Field_Floor, floor)
                        .addContentKV(Field_Reply_Floor,replyFloor)
                        .resultLabelTag(comment)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .build();
                crawlerCommentData.tagsCreator().bizTags().addSiteBiz("forum");
                crawlerCommentData.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
                crawlerCommentDataList.add(crawlerCommentData);
            } catch (ParseException e) {
                logger.error("parse comment date error",e);
            }
        }
        return crawlerCommentDataList;
    }

    /**
     * Expands an abbreviated Chinese count such as "1.5万" into its plain numeric string
     * ("15000"); values without the "万" suffix are returned unchanged.
     */
    private static String expandWanCount(String count) {
        if (!count.endsWith("万")) {
            return count;
        }
        if (count.contains(".")) {
            // fix: multiply BEFORE truncating to int — the old cast-first order turned
            // "1.5万" into 10000 instead of 15000
            return String.valueOf((int) (Double.parseDouble(count.replace("万", "")) * 10000));
        }
        return count.replace("万", "0000");
    }

    /**
     * Post-execution hook inherited from {@link CrawlerCommonScript}; intentionally a
     * no-op — this script performs no post-processing after a record is handled.
     *
     * @param crawlerRecordContext context of the record just processed (unused)
     */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }

    /**
     * Tells whether a character belongs to a Chinese (CJK) Unicode block, including the
     * punctuation blocks used by Chinese text.
     *
     * @param ch character to inspect
     * @return {@code true} for Chinese ideographs and Chinese punctuation, {@code false} otherwise
     */
    private static boolean isChinese(char ch) {
        Character.UnicodeBlock block = Character.UnicodeBlock.of(ch);
        // ideograph blocks plus the punctuation blocks Chinese text uses:
        // GENERAL_PUNCTUATION covers "“", CJK_SYMBOLS_AND_PUNCTUATION covers "。",
        // HALFWIDTH_AND_FULLWIDTH_FORMS covers "，"
        return block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B
                || block == Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION
                || block == Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS
                || block == Character.UnicodeBlock.GENERAL_PUNCTUATION;
    }

    /**
     * Parses the query string of a URL into a parameter map.
     * Pairs without exactly one "=" separator are silently dropped, matching the
     * original behavior; values are NOT URL-decoded.
     *
     * @param url e.g. http://*.*.com?aa=11&amp;bb=22&amp;cc=33
     * @return map of parameter name to value, or {@code null} when the URL carries no
     *         query string (kept as {@code null} to preserve the existing caller contract)
     */
    private static Map<String, Object> getUrlParams(String url) {
        String[] parts = url.split("\\?");
        // fix: the old unguarded parts[1] access threw ArrayIndexOutOfBoundsException
        // for a URL ending in a bare "?" (split drops trailing empty strings)
        if (parts.length < 2) {
            return null;
        }
        String param = parts[1];
        if (param.trim().isEmpty()) {
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        for (String s : param.split("&")) {
            String[] p = s.split("=");
            if (p.length == 2) {
                map.put(p[0], p[1]);
            }
        }
        return map;
    }

    // Pool of real-world browser User-Agent strings; getRandomUA() picks one per
    // request, presumably to make the crawler's traffic look less uniform.
    private static List<String> agentList = new ArrayList<>();

    // Populates the UA pool once at class-load time.
    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a random User-Agent string from {@code agentList}.
     * <p>
     * Fix: {@code RandomUtils.nextInt(startInclusive, endExclusive)} already excludes the
     * upper bound, so the previous bound of {@code size() - 1} could never select the last
     * entry of the pool.
     *
     * @return a randomly chosen User-Agent string
     */
    private static String getRandomUA(){
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Creates a shallow copy of the given extras map so mutations on the copy do not leak
     * back into the map that supplied it.
     *
     * @param inExtras source map; must not be {@code null}
     * @return a new, independent {@link HashMap} holding the same entries
     */
    public static Map<String, Object> copyExtras(Map<String,Object> inExtras){
        // HashMap's copy constructor replaces the hand-rolled entry loop
        // (same semantics, including NPE on a null argument)
        return new HashMap<>(inExtras);
    }
}
