package com.chance.cc.crawler.development.scripts.pcauto.praise;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-01-18 13:59:40
 * @email okprog@sina.com
 */
public class PcAutoPraiseCrawlerScript extends CrawlerCommonScript {

    private Logger logger = LoggerFactory.getLogger(PcAutoPraiseCrawlerScript.class);

    // --- URL recognition patterns: decide which parse branch a request belongs to ---
    // Portal home page (crawl entry point).
    private static final String indexRegex = "https?://www\\.pcauto\\.com\\.cn/";
    // Internal keyword-seed endpoint; yields the series keywords expanded in prepareRequest().
    private static final String keysRegex = "https?://\\S*v1/meta/pcauto/keys\\S*";
    // Car-series home page on the price site.
    private static final String homeUrlRegex = "https?://price\\.pcauto\\.com\\.cn/sg\\S*/";
    // NOTE(review): the three patterns below read "https:?//" (optional colon) rather than
    // "https?://" (optional s) — they still match normal https URLs, but NOT http ones.
    // Presumably a typo; left untouched to avoid changing matching behavior.
    private static final String listUrlRegex = "https:?//price\\.pcauto\\.com\\.cn/comment/\\S*/p\\d*\\.html";
    private static final String firstListUrlRegex = "https:?//price\\.pcauto\\.com\\.cn/comment/\\S*/";
    private static final String articleUrlRegex = "https:?//price\\.pcauto\\.com\\.cn/comment/\\S*/\\S*/view_\\d*\\.html";
    // Comment-count endpoint (internal download) and paged comment-list JSON API.
    private static final String cmtCountUrlRegex = "https?://cmt\\.pcauto\\.com\\.cn/action/topic/get_data\\.jsp\\?url=\\S*";
    private static final String commentRegex = "https?://cmt\\.pcauto\\.com\\.cn/action/comment/list_new_json\\.jsp\\S*";
    // Author profile page, e.g. https://my.pcauto.com.cn/12345/
    private static final String authorInfoUrlRegex = "https?://my\\.pcauto\\.com\\.cn/\\d*/";

    // --- URL builders mirroring the patterns above ---
    // NOTE(review): "Ulr" is a typo for "Url"; kept because the name is referenced elsewhere.
    private static final String homeUlrFormat = "https://price.pcauto.com.cn/%s/";
    private static final String firstListUrlFormat = "https://price.pcauto.com.cn/comment/%s/";
    private static final String listUrlFormat = "https://price.pcauto.com.cn/comment/%s/p%s.html";
    public static final String authorInfoUrlFormat = "https://my.pcauto.com.cn/%s/";
    public static final String cmtCountUrlFormat = "https://cmt.pcauto.com.cn/action/topic/get_data.jsp?url=%s";
    // Placeholders are (url, pageNo); pageSize is fixed at 30 per request.
    public static final String commentUrlFormat = "https://cmt.pcauto.com.cn/action/comment/list_new_json.jsp?encodeHtml=1&urlHandle=1&url=%s&pageNo=%s&pageSize=30&voteId=0";

    // Value of the "site" category tag this script accepts (see crawlerCheck).
    private static final String scriptSite = "praise";

    /**
     * Identifies the site this crawler script belongs to.
     *
     * @return the fixed domain identifier {@code "pcauto"}
     */
    @Override
    public String domain() {
        return "pcauto";
    }

    /**
     * Registers every URL pattern this script can handle; registration
     * order is identical to the original one-by-one calls.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                indexRegex,
                keysRegex,
                homeUrlRegex,
                listUrlRegex,
                firstListUrlRegex,
                articleUrlRegex,
                commentRegex,
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Accepts only records whose business category tag carries {@code site=praise}.
     *
     * @param crawlerRequestRecord the record to check
     * @return true when the record's "site" tag equals {@link #scriptSite} (case-insensitive)
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // BUGFIX: compare from the constant side so a record without a "site" tag
        // (crawlerSite == null) is rejected instead of throwing an NPE.
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /**
     * Expands the keyword-seed response into one series home-page request per keyword.
     * <p>
     * The first support record is expected to be the keys endpoint; its JSON body
     * (when {@code status == 0}) carries a {@code content} array whose objects'
     * {@code keyword} field is the series path segment used to build the home URL.
     * Falls back to the framework's default preparation when nothing can be expanded.
     *
     * @param requestRecord        the scheduling record being prepared
     * @param supportSourceRecords auxiliary records; index 0 should be the keys response
     * @return expanded home-page requests, or the superclass default when none were built
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)) {
            try {
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                if (jsonObject.getIntValue("status") == 0) {
                    JSONArray objects = jsonObject.getJSONArray("content");
                    for (Object object : objects) {
                        String keyword = ((JSONObject) object).getString("keyword");
                        String homeUlr = String.format(homeUlrFormat, keyword);
                        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .recordKey(homeUlr)
                                .httpUrl(homeUlr)
                                .releaseTime(System.currentTimeMillis())
                                .copyBizTags()
                                .copyScheduleTags()
                                .notFilterRecord()
                                .build();
                        // Spoof a browser arriving at the price site from the portal home page.
                        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
                        httpRequest.addHeader("Host", "price.pcauto.com.cn");
                        httpRequest.addHeader("Referer", "https://www.pcauto.com.cn/");
                        httpRequest.addHeader("User-Agent", getRandomUA());
                        allItemRecords.add(crawlerRequestRecord);
                    }
                }
            } catch (Exception e) {
                // A malformed seed response must not break scheduling; log and fall through.
                logger.error(e.getMessage(), e);
            }
        }
        if (allItemRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;
    }

    /**
     * Routes a downloaded page to the matching link parser, retrying failed
     * downloads a bounded number of times.
     * <p>
     * On failure (blank body, download error, or non-200 status) the same request
     * is re-enqueued with a retry counter kept in the request extras; the request
     * is abandoned when the site reports the page as missing or after ~10 retries.
     *
     * @return follow-up requests; never null (empty list when nothing matched)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<CrawlerRequestRecord>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200) {
            Map<String, Object> extras = lastRequest.getExtras();
            if (null == extras) {
                extras = new HashMap<>();
                // BUGFIX: the freshly created map must be attached to the request,
                // otherwise the retry counter below is discarded on every attempt.
                lastRequest.setExtras(extras);
            }
            // BUGFIX: the counter was stored as an Integer but read back through a cast
            // to String, so the cast always failed, the counter never advanced past 1,
            // and the "downloadTimes > 10" retry cap was unreachable (infinite retries).
            int downloadTimes = 1;
            Object storedTimes = extras.get("downloadTimes");
            if (storedTimes instanceof Number) {
                downloadTimes = ((Number) storedTimes).intValue() + 1;
            } else if (storedTimes != null) {
                try {
                    downloadTimes = Integer.parseInt(storedTimes.toString()) + 1;
                } catch (NumberFormatException ignored) {
                    // Unreadable counter — restart the count rather than fail the retry.
                }
            }
            extras.put("downloadTimes", downloadTimes);
            // Give up on pages the site reports as missing, or once the retry cap is hit.
            if ((null != httpPage.getRawText() && httpPage.getRawText().contains("您访问的页面暂时没能找到")) || downloadTimes > 10) {
                logger.error("页面不存在：" + statusCode);
                return parsedLinks;
            }
            // Re-enqueue the same request for another download attempt.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            return parsedLinks;
        }
        if (lastRequestUrl.matches(homeUrlRegex)) {
            return parseHomeLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequestUrl);
        }
        if (lastRequestUrl.matches(listUrlRegex) || lastRequestUrl.matches(firstListUrlRegex)) {
            return parseListLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(articleUrlRegex)) {
            return parseArticleLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(commentRegex)) {
            return parseCommentLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest);
        }
        // BUGFIX: return an empty list for unmatched URLs instead of null so that
        // callers never have to null-check the result.
        return parsedLinks;
    }

    /**
     * Schedules the next comment page based on the comment-list JSON response.
     * Stops when the API reports "topic not found" or the last page was reached.
     *
     * @param lastRequest the comment-list request whose extras carry the article URL
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest) {
        // Compute the next page from the JSON result.
        Map<String, Object> articleExtras = lastRequest.getExtras();
        String articleUrl = (String) articleExtras.get("articleUrl");
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
        // BUGFIX: constant-first equals — "error" is absent on success, and calling
        // equals on the resulting null String threw an NPE for every valid response.
        if ("topic not found".equals(jsonObject.getString("error"))) {
            return parsedLinks;
        }
        int currentPage = jsonObject.getIntValue("page");
        int commentCount = jsonObject.getIntValue("commentcount");
        // BUGFIX: requests are issued with pageSize=30 (see commentUrlFormat), so the
        // page count must divide by 30, not 50 — otherwise tail pages were skipped.
        int maxPage = (int) Math.ceil(commentCount / 30.0);
        if (currentPage < maxPage) {
            // BUGFIX: commentUrlFormat's placeholders are (url, pageNo) in that order;
            // the arguments were swapped and the article *key* was passed where the
            // article URL belongs (parseArticleLinks passes the URL for page 1).
            String commentUrl = String.format(commentUrlFormat, articleUrl, currentPage + 1);
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .recordKey(commentUrl)
                    .httpUrl(commentUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .resultLabelTag(comment)
                    .resultLabelTag(interaction)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.turnPageItem)
                    .copyBizTags()
                    .build();

            // Propagate the article context (articleUrl/articleKey/…) to the next page.
            commentRecord.getHttpRequest().setExtras(copyExtras(articleExtras));
            parsedLinks.add(commentRecord);
        }
        return parsedLinks;
    }

    /**
     * Parses a praise-article page into three follow-up requests:
     * an internal download of the author's profile page (for the follower count),
     * an internal download of the comment-count endpoint, and the first page of
     * the comment-list API for paging.
     *
     * @param parsedLinks accumulator the new requests are appended to; also returned
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        // Author profile link; the author id is its last path segment.
        String authorUrl = httpPage.getHtml().xpath("//div[@class=\"litDy clearfix\"]/table/tbody/tr/td/div/div/div[@class=\"txline\"]/p/a/@href").get();
        String authorId = authorUrl.split("cn/")[1].replace("/", "");
        extras.put("authorId", authorId);
        // Author info — follower count (fetched via internal download).
        authorUrl = String.format(authorInfoUrlFormat, authorId);
        CrawlerRequestRecord authorInfoRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .httpUrl(authorUrl)
                .recordKey(authorUrl)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        parsedLinks.add(authorInfoRecord);

        // Comment count — fetched via internal download of the count endpoint.
        // NOTE(review): split(":")[1] strips the URL scheme, leaving "//price…";
        // presumably the endpoint expects a scheme-less url param — confirm.
        String cmtCountUrl = String.format(cmtCountUrlFormat, crawlerRequestRecord.getHttpRequest().getUrl().split(":")[1]);
        CrawlerRequestRecord cmtCountRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(cmtCountUrl)
                .recordKey(cmtCountUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        parsedLinks.add(cmtCountRecord);

        // Comment paging — first page of the comment-list JSON API (url, pageNo=1).
        String commentUrl = String.format(commentUrlFormat, crawlerRequestRecord.getHttpRequest().getUrl(), 1);
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(commentUrl)
                .recordKey(commentUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .needWashed(true)
                .needParsed(true)
                .resultLabelTag(comment)
                .resultLabelTag(interaction)
                .build();
        // Carry the article URL in the extras so washComment/parseCommentLinks can use it.
        Map<String, Object> cmtExtras = copyExtras(extras);
        cmtExtras.put("articleUrl", crawlerRequestRecord.getHttpRequest().getUrl());
        commentRecord.getHttpRequest().setExtras(cmtExtras);
        parsedLinks.add(commentRecord);
        return parsedLinks;
    }

    /**
     * Parses a praise-listing page: schedules the next listing page and one
     * article request per visible item.
     * <p>
     * Pagination state ({@code praisePage}, {@code seriesId}) travels in the
     * request extras seeded by {@link #parseHomeLinks}.
     * NOTE(review): the next page is scheduled unconditionally; presumably the
     * framework's dedup/filter stops pagination at the end — confirm.
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String seriesId = (String) extras.get("seriesId");
        int praisePage = (int) extras.get("praisePage");
        // Next listing page for this series.
        String nextPageUrl = String.format(listUrlFormat, seriesId, (praisePage + 1));
        CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(nextPageUrl)
                .recordKey(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        Map<String, Object> listExtras = copyExtras(extras);
        listExtras.put("praisePage", (praisePage + 1));
        HttpRequest listRequest = nextPageRecord.getHttpRequest();
        listRequest.setExtras(listExtras);
        parsedLinks.add(nextPageRecord);

        // One article request per list item; release time is taken from the item's
        // publish date so downstream date-range filters work.
        List<Selectable> itemNodes = httpPage.getHtml().xpath("//div[@class=\"scollbody\"]/div").nodes();
        for (Selectable itemNode : itemNodes) {
            String itemUrl = itemNode.xpath("./table/tbody/tr/td/div/div/div[@class=\"txline\"]/span/a/@href").get();
            if (!itemUrl.startsWith("http")) {
                itemUrl = "https:" + itemUrl;
            }
            // Article key = trailing digits of ".../view_<key>.html".
            String articleKey = itemUrl.substring(itemUrl.lastIndexOf("_") + 1).split("\\.")[0];
            String pubTime = itemNode.xpath("./table/tbody/tr/td/div/div/div[@class=\"txline\"]/span/a/text()").get();
            if (StringUtils.isNotBlank(pubTime)) {
                pubTime = pubTime.split(" ")[0];
                try {
                    CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRequestRecord)
                            .httpUrl(itemUrl)
                            .recordKey(itemUrl)
                            .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                            .copyBizTags()
                            .resultLabelTag(article)
                            .resultLabelTag(interaction)
                            .build();
                    Map<String, Object> itemExtras = copyExtras(extras);
                    itemExtras.put("articleUrl", itemUrl);
                    itemExtras.put("articleKey", articleKey);
                    HttpRequest itemRequest = itemRecord.getHttpRequest();
                    itemRequest.setExtras(itemExtras);
                    parsedLinks.add(itemRecord);
                } catch (Exception e) {
                    // BUGFIX: attach the exception so the failing date string is diagnosable.
                    logger.error("parse date error", e);
                }
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a car-series home page and emits a single request for the first
     * praise-comment listing page of that series, carrying the series metadata
     * (brand, series name/url/id) as business tags and the paging state as extras.
     */
    private List<CrawlerRequestRecord> parseHomeLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, String lastRequestUrl) {
        Html pageHtml = httpPage.getHtml();
        // Breadcrumb anchors: 4th = brand, 5th = series name.
        String brandName = pageHtml.xpath("//div[@class=\"pos-mark\"]/a[4]/text()").get();
        String seriesTitle = pageHtml.xpath("//div[@class=\"pos-mark\"]/a[5]/text()").get();
        // The series id is the last path segment of the home URL.
        String seriesId = lastRequestUrl.split("cn/")[1].replace("/", "");

        String praiseUrl = String.format(firstListUrlFormat, seriesId);
        CrawlerRequestRecord praiseIndexRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(praiseUrl)
                .recordKey(praiseUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();

        // Series metadata travels as business tags for downstream washing.
        Map<String, String> seriesInfo = new HashMap<>();
        seriesInfo.put("series_name", seriesTitle);
        seriesInfo.put("series_url", lastRequestUrl);
        seriesInfo.put("series_id", seriesId);
        List<Map<String, String>> seriesList = new ArrayList<>();
        seriesList.add(seriesInfo);
        praiseIndexRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series, seriesList);
        praiseIndexRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brandName);

        // Seed the paging state consumed by parseListLinks, and spoof browser headers.
        Map<String, Object> pagingExtras = new HashMap<>();
        pagingExtras.put("praisePage", 1);
        pagingExtras.put("seriesId", seriesId);
        HttpRequest praiseRequest = praiseIndexRecord.getHttpRequest();
        praiseRequest.setExtras(pagingExtras);
        praiseRequest.addHeader("Host", "price.pcauto.com.cn");
        praiseRequest.addHeader("User-Agent", getRandomUA());

        parsedLinks.add(praiseIndexRecord);
        return parsedLinks;
    }

    /**
     * Merges the results of internal downloads back into the article request's
     * extras before washing: the author's follower count (from the profile page)
     * and the total comment count (from the count endpoint).
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String lastRequestUrl = httpRequest.getUrl();
        if (lastRequestUrl.matches(articleUrlRegex)) {
            for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
                String internalUrl = internalDownloadRecord.getHttpRequest().getUrl();
                if (internalUrl.matches(authorInfoUrlRegex)) {
                    try {
                        // Follower count from the author's profile page sidebar.
                        String follows = internalDownloadRecord.getInternalDownloadPage().getHtml().xpath("//div[@class=\"main clearfix\"]/div[@class=\"col-b widget-area\"]/div[1]//span[2]/b/a/text()").get();
                        extras.put("follows", follows);
                    } catch (Exception e) {
                        // Profile node missing (layout change / private profile) — default to 0.
                        extras.put("follows","0");
                    }
                }
                if (internalUrl.matches(cmtCountUrlRegex)) {
                    // Count endpoint returns JSON with a "total" field.
                    HttpPage internalDownloadPage = internalDownloadRecord.getInternalDownloadPage();
                    JSONObject pageObj = JSONObject.parseObject(internalDownloadPage.getRawText());
                    String comments = pageObj.getString("total");
                    if (StringUtils.isNotBlank(comments)) {
                        extras.put("comments", comments);
                    } else {
                        extras.put("comments", "0");
                    }
                }
            }
        }
    }

    /**
     * Dispatches the downloaded page to the wash routine matching each data
     * type tagged on the record; a single record may carry several types.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> washedData = new ArrayList<>();
        // Article detail pages.
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)) {
            washedData.addAll(washArticle(crawlerRequestRecord, httpPage));
        }
        // Interaction metrics (likes / comment counts).
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            washedData.addAll(washInteraction(crawlerRequestRecord, httpPage));
        }
        // Comment-list JSON responses.
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)) {
            washedData.addAll(washComment(crawlerRequestRecord, httpPage));
        }
        return washedData;
    }

    /**
     * Washes a praise-review article page into one {@link CrawlerData} record
     * containing author info, purchase details, per-dimension scores, images
     * and the review text.
     * <p>
     * Pages lacking the purchase-detail table are treated as "micro reviews"
     * (微点评) and tagged as such via {@code Tag_Field_Topic_Type}.
     */
    private List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = lastRequest.getExtras();
        Html html = httpPage.getHtml();

        // Author link: the author id is the last path segment of the profile URL.
        String authorUrl = html.xpath("//div[@class=\"litDy clearfix\"]/table/tbody/tr/td/div/div/div[@class=\"txline\"]/p/a/@href").get();
        String authorId = authorUrl.split("cn/")[1].replace("/", "");
        String author = html.xpath("//div[@class=\"litDy clearfix\"]/table/tbody/tr/td/div/div/div[@class=\"txline\"]/p/a/text()").get();
        // Strip HTML entities and embedded newlines from the display name.
        author = author.replaceAll("&\\S*?;", "").replaceAll("\n", "").trim();
        String modelName = html.xpath("//div[@class=\"litDy clearfix\"]/table/tbody/tr/td/div/div/div[@class=\"line\"][1]/a/text()").get();
        String modelUrl = html.xpath("//div[@class=\"litDy clearfix\"]/table/tbody/tr/td/div/div/div[@class=\"line\"][1]/a/@href").get();

        String buyTime = "";
        String buyAddr = "";
        String province = "";
        String city = "";
        String buyVendor = "";
        String buyPrice = "";
        String economy = "";
        String driveDistance = "";
        String category = "";
        try {
            buyTime = html.xpath("//em[text()=\"购买时间\"]/../text()").all().get(1);
            buyTime = buyTime.replaceAll("&\\S*?;", "").replaceAll("\n", "").trim();
            buyAddr = html.xpath("//em[text()=\"购买地点\"]/../text()").get();
            // Purchase location is "province city" separated by a space.
            province = buyAddr.split(" ")[0];
            city = buyAddr.split(" ")[1];
            buyVendor = html.xpath("//em[text()=\"购买商家\"]/../text()").get();
            if (StringUtils.isBlank(buyVendor)) {
                buyVendor = "";
            }
            buyVendor = buyVendor.replaceAll("&\\S*?;", "").replaceAll("\n", "").trim();

            // Bare-car price is listed in 万 (10,000 CNY) — convert to CNY.
            buyPrice = html.xpath("//em[text()=\"裸车价格\"]/../i/text()").get();
            buyPrice = String.valueOf(Double.parseDouble(buyPrice) * 10000);
            economy = html.xpath("//em[text()=\"平均油耗\"]/../i/text()").get();
            economy = economy + "L/100km";
            driveDistance = html.xpath("//em[text()=\"行驶里程\"]/../text()").get();
        } catch (Exception e) {
            // Any missing detail node means this is a short "micro review" page.
            category = "微点评";
        }

        // Collect review images; pages with images are flagged as elite posts.
        StringBuilder sbImage = new StringBuilder();
        String isElite = "否";
        List<Selectable> imgNodes = html.xpath("//ul[@class=\"clearfix picul picul6\"]/li").nodes();
        if (!imgNodes.isEmpty()) {
            isElite = "是";
            for (Selectable imgNode : imgNodes) {
                String imgUrl = imgNode.xpath("./a/img/@src").get();
                sbImage.append(imgUrl).append("\\x01");
            }
        }
        // Per-dimension scores keyed by dimension name.
        Map<String, String> valMap = new HashMap<>();
        List<Selectable> tagTitles = html.xpath("//div[@class=\"fzbox\"]/ul/li").nodes();
        for (Selectable tagTitle : tagTitles) {
            String tagName = tagTitle.xpath("./span/text()").get().trim();
            String tagVal = tagTitle.xpath("./b/text()").get().trim();
            valMap.put(tagName, tagVal);
        }
        String values = "";
        for (Map.Entry<String, String> entry : valMap.entrySet()) {
            values = String.format("%s,%s:%s", values, entry.getKey(), entry.getValue());
        }
        // Overall score only appears in an inline script blob, e.g. 'score': '4.5',
        String sumValue = "";
        Matcher sumMtValue = Pattern.compile("'score':\\s*'\\S*',").matcher(httpPage.getRawText());
        while (sumMtValue.find()) {
            sumValue = sumMtValue.group(0).split(":")[1].split("'")[1];
        }
        // BUGFIX: strip the leading "," after appending the overall score, so an empty
        // score map no longer throws StringIndexOutOfBoundsException on substring(1).
        values = (values + ",综合评分:" + sumValue).substring(1);
        List<String> allContents = html.xpath("//div[@class=\"dianPing clearfix\"]/div//text()").all();
        StringBuilder sbContent = new StringBuilder();
        for (String s : allContents) {
            sbContent.append(s);
        }
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRequestRecord, httpPage)
                .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), (String) extras.get("articleKey")))
                .url(lastRequest.getUrl())
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .addContentKV(Field_Author, author)
                .addContentKV(Field_Author_Id, authorId)
                .addContentKV(Field_Author_Follows, (String) extras.get("follows"))
                .addContentKV(Field_Author_From, buyAddr)
                .addContentKV(Field_Content, sbContent.toString())
                .addContentKV(Field_Praise_Address_Buy, buyAddr)
                .addContentKV(Field_Praise_Fuel_Economy, economy)
                .addContentKV(Field_Praise_Price_Buy, buyPrice)
                .addContentKV(Field_Praise_Time_Buy, buyTime)
                .addContentKV(Field_Praise_Vendor, buyVendor)
                .addContentKV(Field_Praise_Drive_Distance, driveDistance)
                .addContentKV(Field_Praise_values, values)
                .addContentKV(Field_Images, sbImage.toString())
                .resultLabelTag(article)
                .build();
        crawlerData.tagsCreator().bizTags().addSiteBiz("praise");
        crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite, isElite);
        Map<String, String> modelMap = new HashMap<>();
        modelMap.put("model_name", modelName);
        modelMap.put("model_url", modelUrl);
        crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Car_Model, modelMap);
        Map<String, String> addr = new HashMap<>();
        addr.put("province", province);
        addr.put("city", city);
        if (StringUtils.isNotBlank(province) && StringUtils.isNotBlank(city)) {
            crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info, addr);
        }
        if (StringUtils.isNotBlank(category)) {
            crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, category);
        }
        crawlerArticleDataList.add(crawlerData);
        return crawlerArticleDataList;
    }

    /**
     * Washes interaction metrics: likes and comment count from an article page,
     * or per-comment like counts from a comment-list JSON response.
     */
    private List<CrawlerData> washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerInteractionDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = lastRequest.getUrl();
        Map<String, Object> extras = lastRequest.getExtras();
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // Article key = trailing digits of ".../view_<key>.html" (meaningful for article URLs).
        String articleKey = lastRequest.getUrl().substring(lastRequest.getUrl().lastIndexOf("_") + 1).split("\\.")[0];

        if (lastRequestUrl.matches(articleUrlRegex)) {
            String comments = (String) extras.get("comments");
            try {
                // Moved inside the try: a missing "good" node used to raise an
                // uncaught NPE here; now it is logged like any other wash failure.
                String likes = httpPage.getHtml().xpath("//a[@class=\"good\"]/em/text()").get().replace("(", "").replace(")", "");
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), articleKey))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                        .url(lastRequestUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .addContentKV(Field_I_Comments, String.valueOf(comments))
                        .addContentKV(Field_I_Likes, likes)
                        .resultLabelTag(interaction)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .build();
                crawlerData.tagsCreator().bizTags().addSiteBiz("praise");
                crawlerInteractionDataList.add(crawlerData);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
        if (lastRequestUrl.matches(commentRegex)) {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            JSONArray commentList = jsonObject.getJSONArray("data");
            for (Object o : commentList) {
                JSONObject commentJson = (JSONObject) o;
                String commentId = commentJson.getString("id");
                String likes = commentJson.getString("support");
                String pubTime = commentJson.getString("createTime");

                try {
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                            .url(lastRequest.getUrl())
                            .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm:ss").getTime())
                            .addContentKV(Field_I_Likes, likes)
                            .resultLabelTag(interaction)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .build();
                    crawlerData.tagsCreator().bizTags().addSiteBiz("praise");
                    crawlerData.setFilter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange);
                    crawlerInteractionDataList.add(crawlerData);
                } catch (ParseException e) {
                    // BUGFIX: pass the exception as the throwable argument; the previous
                    // call logged the message as the format string and dropped the cause.
                    logger.error("parse date error", e);
                }
            }
        }
        return crawlerInteractionDataList;
    }

    /**
     * Washes a comment-list JSON response into one {@link CrawlerData} per comment,
     * parented to the article the comments belong to (via the "articleUrl" extra
     * propagated by parseArticleLinks / parseListLinks).
     */
    private List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerCommentDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = lastRequest.getExtras();
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
        JSONArray commentList = jsonObject.getJSONArray("data");
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String articleUrl = (String) extras.get("articleUrl");
        // Article key = trailing digits of ".../view_<key>.html".
        String articleKey = articleUrl.substring(articleUrl.lastIndexOf("_") + 1).split("\\.")[0];
        for (Object o : commentList) {
            JSONObject commentJson = (JSONObject) o;
            String commentId = commentJson.getString("id");
            String pubTime = commentJson.getString("createTime");
            String author = commentJson.getString("nickName");
            String authorId = commentJson.getString("userId");
            String content = commentJson.getString("content");
            String floor = commentJson.getString("floor");
            try {
                CrawlerData crawlerCommentData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                        .url(articleUrl)
                        .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm:ss").getTime())
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content, content)
                        .addContentKV(Field_Floor, floor)
                        .resultLabelTag(comment)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .build();
                crawlerCommentData.tagsCreator().bizTags().addSiteBiz("praise");
                crawlerCommentData.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
                crawlerCommentDataList.add(crawlerCommentData);
            } catch (ParseException e) {
                // BUGFIX: pass the exception as the throwable argument; the previous
                // call logged the message as the format string and dropped the cause.
                logger.error("parse date error", e);
            }
        }
        return crawlerCommentDataList;
    }

    /**
     * No-op post-execution hook: this script needs no cleanup or follow-up
     * after a record is processed. Presumably the hook is declared abstract in
     * {@code CrawlerCommonScript} and must be overridden — confirm against the
     * base class.
     *
     * @param context the record context of the just-finished crawl step (unused)
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Returns a shallow copy of the given extras map, so callers can mutate the
     * copy without affecting the request the extras were taken from.
     *
     * @param inExtras source map; must not be {@code null}
     * @return a new mutable {@link HashMap} containing the same entries
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        // HashMap's copy constructor performs the same shallow entry copy as
        // the previous manual loop, including throwing NPE on null input.
        return new HashMap<>(inExtras);
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a random User-Agent string from {@code agentList}.
     * <p>
     * Commons-lang3 {@code RandomUtils.nextInt(startInclusive, endExclusive)}
     * treats the upper bound as exclusive, so the bound must be
     * {@code agentList.size()}. The previous {@code size() - 1} bound made the
     * last user agent in the pool unreachable.
     *
     * @return a randomly chosen User-Agent header value
     */
    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }
}
