package com.chance.cc.crawler.development.scripts.autohome.praise;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.record.builder.CrawlerRequestRecordBuilder;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2020-11-25 13:05:25
 * @email okprog@sina.com
 */
public class AutoHomePraiseCrawlerScript extends CrawlerCommonScript {

    private static Logger logger = LoggerFactory.getLogger(AutoHomePraiseCrawlerScript.class);
    private static final List<String> URL_PARAM_YEARS = Arrays.asList("0", "2");     //0:在售  2:停售
    private static final List<String> URL_PARAM_ORDERS = Arrays.asList("1", "2");     //1:发布  2:追加
    private static List<String> contentTagList = new ArrayList<>();
    {
        contentTagList.add("最满意");
        contentTagList.add("最不满意");
        contentTagList.add("为什么选择这款车");
        contentTagList.add("空间");
        contentTagList.add("动力");
        contentTagList.add("操控");
        contentTagList.add("油耗");
        contentTagList.add("舒适性");
        contentTagList.add("外观");
        contentTagList.add("内饰");
        contentTagList.add("性价比");
        contentTagList.add("其他描述");
    }

    private static final String homeUrlFormat = "https://www.autohome.com.cn/%s/";
    private static final String praiseListUrlFormat = "https://k.m.autohome.com.cn/ajax/serieskoubei/getserieskoubeilistbytag?" +
            "seriesId=%s&specId=0&gradeEnum=0&pageIndex=%s&pageSize=20&year=%s&order=%s"; //seriesId pageIndex year order
    private static final String articleUrlFormat = "https://k.m.autohome.com.cn/detail/view_%s.html"; //showId
    private static final String commentUrlFormat = "https://k.m.autohome.com.cn/ajax_v2/getcomments?_appid=koubei&appid=5&koubeiid=%s&pagesize=20&hot=0&lastid=%s";
    private static final String modelUrlFormat = "https://k.autohome.com.cn/spec/%s"; //showId

    private static final String indexRegex = "https?://www\\.autohome\\.com\\.cn/";
    private static final String homeUrlRegex = "https?://www\\.autohome\\.com\\.cn/\\d*/";
    private static final String praiseListUrlRegex = "https://k\\.m\\.autohome\\.com\\.cn/ajax/serieskoubei/getserieskoubeilistbytag\\?seriesId=\\S*";
    private static final String articleUrlRegex = "https?://k\\.m\\.autohome\\.com\\.cn/detail/view_\\S*\\.html";
    private static final String articlePCUrlRegex = "https?://k\\.autohome\\.com\\.cn/detail/view_\\S*\\.html";
    private static final String commentUrlRegex = "https://k\\.m\\.autohome\\.com\\.cn/ajax_v2/getcomments\\?_appid=koubei\\S*";
    private static final String keysRegex = "https?://\\S*v1/meta/autohome/keys\\S*";
    private static final String parseFontRegex = "https?://\\S*/crawler/font/api/v1/parseTTFont";

    public static final String IS_OUTPUT_KV_CONTENT = "is_output_kv_content";

    private static final String scriptSite = "praise";

    private static final long ONE_SECOND = 1000L;
    private static final long ONE_MINUTE = 60000L;
    private static final long ONE_HOUR = 3600000L;
    private static final long ONE_DAY = 86400000L;

    @Override
    public String domain() {
        // Stable site identifier used to route records to this script family.
        final String siteDomain = "autohome";
        return siteDomain;
    }

    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script is allowed to handle,
        // in the same order as before.
        for (String regular : Arrays.asList(
                indexRegex, homeUrlRegex, praiseListUrlRegex,
                articleUrlRegex, commentUrlRegex, keysRegex)) {
            addUrlRegular(regular);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        // Accept only records whose biz "site" tag equals "praise".
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // BUGFIX: compare from the constant side so a missing "site" tag yields
        // false instead of a NullPointerException.
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        // Expand a keyword-service response (one keyword per car series) into
        // one home-page GET request per series; fall back to the default
        // behaviour when there is nothing to expand.
        List<CrawlerRecord> homeRecords = new ArrayList<>();

        if (supportSourceRecords == null || supportSourceRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)) {
            try {
                JSONObject body = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                // status == 0 means the keyword service answered successfully.
                if (body.getIntValue("status") == 0) {
                    for (Object entry : body.getJSONArray("content")) {
                        String keyword = ((JSONObject) entry).getString("keyword");
                        String homeUrl = String.format(homeUrlFormat, keyword);
                        CrawlerRequestRecord homeRequest = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .httpUrl(homeUrl)
                                .recordKey(homeUrl)
                                .releaseTime(System.currentTimeMillis())
                                .needWashed(false)
                                .needParsed(true)
                                .notFilterRecord()
                                .copyBizTags()
                                .build();
                        homeRequest.getHttpRequest().setMethod(HttpConstant.Method.GET);
                        homeRecords.add(homeRequest);
                    }
                }
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
        return homeRecords.isEmpty()
                ? super.prepareRequest(requestRecord, supportSourceRecords)
                : homeRecords;
    }

    /**
     * Dispatches a downloaded page to the matching link parser, after first
     * handling download failures with a bounded retry (max 10 attempts).
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        int statusCode = httpPage.getStatusCode();
        String lastRequestUrl = lastRequest.getUrl();
        if (StringUtils.isBlank(httpPage.getRawText()) || !httpPage.isDownloadSuccess() || statusCode != 200) {
            Map<String, Object> extras = lastRequest.getExtras();
            int downloadTimes = 1;
            if (null == extras) {
                extras = new HashMap<>();
                // BUGFIX: attach the new extras map to the request so the retry
                // counter survives to the next attempt (it was previously created
                // and then discarded).
                lastRequest.setExtras(extras);
            } else {
                try {
                    // BUGFIX: the counter is stored as an Integer; the old code cast
                    // it to String, which always threw ClassCastException and reset
                    // the counter to 1, allowing unlimited retries.
                    downloadTimes = Integer.parseInt(String.valueOf(extras.get("downloadTimes"))) + 1;
                } catch (Exception e) {
                    downloadTimes = 1;
                }
            }
            extras.put("downloadTimes", downloadTimes);
            // Give up on a known "abnormal review" page or after 10 attempts.
            if (null != httpPage.getRawText() && httpPage.getRawText().contains("您访问的口碑存在异常") || downloadTimes > 10) {
                logger.error("页面不存在：" + statusCode);
                return parsedLinks;
            }

            // Re-enqueue the same request, skipping washing and the dedup filter.
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            return parsedLinks;
        }
        if (lastRequestUrl.matches(homeUrlRegex)) {
            return parseHomeLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequestUrl);
        }
        if (lastRequestUrl.matches(praiseListUrlRegex)) {
            return parseListLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequestUrl);
        }
        if (lastRequestUrl.matches(articleUrlRegex)) {
            return parseArticleLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest);
        }
        if (lastRequestUrl.matches(commentUrlRegex)) {
            return parseCommentLinks(crawlerRequestRecord, httpPage, parsedLinks, lastRequest);
        }
        // BUGFIX: return an empty list instead of null for unmatched URLs so
        // callers need no null check.
        return parsedLinks;
    }

    /**
     * Handles one page of the comment API and, when it contained comments,
     * schedules the next page keyed by the last comment's replyid.
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest) {
        crawlerRequestRecord.setNeedWashPage(true);
        JSONObject response = JSONObject.parseObject(httpPage.getRawText());
        if (response.getIntValue("returncode") != 0) {
            return parsedLinks;
        }
        JSONArray commentList = response.getJSONObject("result").getJSONArray("list");
        if (null == commentList || commentList.size() == 0) {
            return parsedLinks;
        }
        // The API pages forward via the "lastid" parameter: pass the replyid of
        // the final comment on this page.
        JSONObject tail = (JSONObject) commentList.get(commentList.size() - 1);
        Map<String, Object> extras = lastRequest.getExtras();
        String contentId = (String) extras.get("contentId");
        String replyId = tail.getString("replyid");
        String nextPageUrl = String.format(commentUrlFormat, contentId, replyId);
        CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .recordKey(nextPageUrl)
                .httpUrl(nextPageUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .resultLabelTag(comment)
                .resultLabelTag(interaction)
                .needWashed(true)
                .notFilterRecord()
                .copyBizTags()
                .build();
        nextPageRecord.getHttpRequest().setExtras(copyExtras(extras));
        nextPageRecord.tagsCreator().bizTags().addCustomKV("replyId", replyId);
        nextPageRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());
        parsedLinks.add(nextPageRecord);
        return parsedLinks;
    }

    /**
     * Parses a mobile review-detail page (view_&lt;showId&gt;.html) and schedules
     * follow-up requests: a font-decode service call (to defeat the site's
     * TTF glyph obfuscation), an internal download of the PC page (for the
     * "target" bullet list), and optionally the first comment-API page.
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, HttpRequest lastRequest) {
        // Anti-bot interstitial detected: re-enqueue the same request unwashed.
        if (httpPage.getRawText().contains("尊敬的用户您好，您的访问出现异常，为确认本次访问为正常用户行为")){
            parsedLinks.add(crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        crawlerRequestRecord.setNeedWashPage(true);
        Map<String, Object> extras = lastRequest.getExtras();
        // All visible text fragments of the review body sections.
        List<String> allContents = httpPage.getHtml().xpath("//div[@class=\"final-timeline-item-content-intro-item\"]//text()").all();
        // Obfuscation-font URL embedded in the page CSS (url('//k….autoimg.cn/…ttf')).
        Matcher ttfMatcher = Pattern.compile("url\\('//k\\d*\\.autoimg\\.cn/g\\d*/\\w*/\\w*/\\S*/\\S*\\.\\.ttf'\\)\\s*format").matcher(httpPage.getRawText());

        // Schedule tag decides whether per-section kv_content is also extracted.
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
        if (categoryTag.isContainKVTag(IS_OUTPUT_KV_CONTENT)){
            // Section title -> list of text fragments, in page order.
            Map<String,List<String>> kvContentListMap = new LinkedHashMap<>();
            List<Selectable> allContentNodes = httpPage.getHtml().xpath("//div[@class=\"final-timeline-item-content-intro-item\"]").nodes();
            for (Selectable allContentNode : allContentNodes) {
                String contentTitle = allContentNode.xpath("./div[@class=\"final-timeline-item-content-intro-item-title\"]/span/text()").get();
                List<String> contentValues = allContentNode.xpath("./div[@class=\"final-timeline-item-content-intro-item-text\"]//text()").all();
                kvContentListMap.put(contentTitle,contentValues);
            }
            extras.put("kvContentListMap",kvContentListMap);
        }

        // Keep the LAST matched font URL (the loop deliberately overwrites).
        String ttfUrl = "";
        while (ttfMatcher.find()){
            ttfUrl = "https:" + ttfMatcher.group(0).split("'")[1];
        }
        // Collect hex code points of single non-Chinese characters: these are
        // the obfuscated glyphs that need decoding via the font service.
        Set<String> hexList = new HashSet<>();
        for (String text : allContents) {
            // Only single-character fragments are candidates for obfuscated glyphs.
            text = text.trim();
            int[] codes = StringUtils.toCodePoints(text);
            if (codes.length == 1){
                char[] chars = Character.toChars(codes[0]);
                if (!isChinese(chars[0])){
                    hexList.add(Integer.toHexString(codes[0]).toUpperCase());
                }
            }
        }
        extras.put("articleContents",allContents);
        if (StringUtils.isNotBlank(ttfUrl) && hexList.size() > 0){
            extras.put("ttfUrl",ttfUrl);
            extras.put("hexList",hexList);
            // Internal download: POST the font URL + glyph list to the in-house
            // TTF-parsing service to obtain the glyph -> character map.
            // NOTE(review): hard-coded internal IP; consider moving to config.
            String parseFontUrl = "http://192.168.1.217:9599/crawler/font/api/v1/parseTTFont";
            CrawlerRequestRecord parseFontRecord = CrawlerRequestRecord.builder()
                    .startPageRequest(domain(), CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpUrl(parseFontUrl)
                    .recordKey(parseFontUrl)
                    .needParsed(false)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .httpConfig(HttpConfig.me(domain()))
                    .build();

            // Request the service will replay against the font host.
            HttpRequest ttfRequest = new HttpRequest();
            ttfRequest.setUrl(ttfUrl);
            ttfRequest.addHeader("Origin","https://club.autohome.com.cn");
            ttfRequest.addHeader("Referer","https://club.autohome.com.cn/");
            ttfRequest.addHeader("User-Agent",getRandomUA());

            HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
            httpConfig.setResponseTextGenerateHtml(false);
            HttpRequest httpRequest = parseFontRecord.getHttpRequest();
            Map<String,Object> params = new HashMap<>();
            params.put("httpRequest",ttfRequest);
            params.put("httpConfig",httpConfig);
            params.put("needParseList",hexList);
            params.put("domain",domain());
            httpRequest.setMethod(HttpConstant.Method.POST);
            httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(params),"utf-8"));
            parsedLinks.add(parseFontRecord);
        }
        // Internal download of the PC page (k.m -> k) to extract the "target" list.
        String pcUrl = lastRequest.getUrl().replace("k.m","k");
        CrawlerRequestRecord targetRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .httpUrl(pcUrl)
                .recordKey(pcUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .notFilterRecord()
                .build();
        HttpRequest targetRequest = targetRecord.getHttpRequest();
        targetRequest.addHeader("User-Agent",getRandomUA());
        parsedLinks.add(targetRecord);

        // Schedule the first comment-API page when the schedule tag asks for comments.
        CategoryTag categoryTag1 = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
        if(categoryTag1.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null){
            String contentId = (String) extras.get("contentId");
            String commentUrl = String.format(commentUrlFormat,contentId,0);
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(commentUrl)
                    .httpUrl(commentUrl)
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .resultLabelTag(comment)
                    .resultLabelTag(interaction)
                    .needWashed(true)
                    .notFilterRecord()
                    .copyBizTags()
                    .build();
            commentRecord.getHttpRequest().setExtras(copyExtras(extras));
            commentRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
            parsedLinks.add(commentRecord);
        }
        return parsedLinks;
    }

    /**
     * Parses one page of the series review-list API: schedules the next page
     * (when more pages exist) and one article request per review on this page.
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, String lastRequestUrl) {
        Map<String, Object> urlParams = getUrlParams(lastRequestUrl);
        if (null == urlParams) {
            return parsedLinks;
        }
        String seriesId = (String) urlParams.get("seriesId");
        int pageIndex = Integer.parseInt((String) urlParams.get("pageIndex"));
        String year = (String) urlParams.get("year");
        String order = (String) urlParams.get("order");

        JSONObject pageJson = JSONObject.parseObject(httpPage.getRawText());
        int sumPage = pageJson.getJSONObject("result").getIntValue("pagecount");
        // Turn the page while more pages remain.
        if (sumPage > pageIndex) {
            String nextPageUrl = String.format(praiseListUrlFormat, seriesId, (pageIndex + 1), year, order);
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            HttpRequest httpRequest = turnPageRequest.getHttpRequest();
            httpRequest.addHeader("Accept", "application/json");
            httpRequest.addHeader("Referer", lastRequestUrl);
            httpRequest.addHeader("X-Requested-With", "XMLHttpRequest");
            httpRequest.addHeader("User-Agent", getRandomUA());
            httpRequest.addHeader("Host", "k.m.autohome.com.cn");
            parsedLinks.add(turnPageRequest);
        }
        JSONArray jsonItems = pageJson.getJSONObject("result").getJSONArray("list");
        for (Object jsonItem : jsonItems) {
            JSONObject itemObj = (JSONObject) jsonItem;
            String showId = itemObj.getString("showId");
            String itemUrl = String.format(articleUrlFormat, showId);
            String praiseId = itemObj.getString("id");
            // Release time: prefer the latest follow-up ("追加") time, else the
            // original publication time.
            JSONArray append_list = itemObj.getJSONArray("append_list");
            String createTime = itemObj.getString("created");
            String updateTime = null;
            // ROBUSTNESS: guard against "append_list" being absent (null).
            if (append_list != null && append_list.size() > 0) {
                JSONObject update = (JSONObject) append_list.get(0);
                updateTime = update.getString("append_updated");
            }
            String releaseTime = null == updateTime ? createTime : updateTime;
            try {
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .recordKey(itemUrl)
                        .httpUrl(itemUrl)
                        .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                Map<String, Object> extras = new HashMap<>();
                extras.put("itemObj", itemObj);
                extras.put("articleUrl", itemUrl);
                extras.put("contentId", praiseId);
                HttpRequest httpRequest = itemRecord.getHttpRequest();
                httpRequest.setExtras(extras);
                // Browser-like headers to reduce the chance of the anti-bot page.
                httpRequest.addHeader("authority", "k.m.autohome.com.cn");
                httpRequest.addHeader("cache-control", "max-age=0");
                httpRequest.addHeader("upgrade-insecure-requests", "1");
                httpRequest.addHeader("user-agent", getRandomUA());
                httpRequest.addHeader("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9");
                httpRequest.addHeader("sec-fetch-site", "same-origin");
                httpRequest.addHeader("sec-fetch-mode", "navigate");
                httpRequest.addHeader("sec-fetch-user", "?1");
                httpRequest.addHeader("sec-fetch-dest", "document");
                httpRequest.addHeader("referer", "https://k.m.autohome.com.cn/" + seriesId + "/");
                httpRequest.addHeader("accept-language", "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7");
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                // BUGFIX: arguments were swapped (exception message used as the
                // log format, literal passed as a parameter), which discarded the
                // stack trace. Log the message with the exception as cause.
                logger.error("parse date error: " + releaseTime, e);
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a series home page; when the nav bar links to k.autohome (i.e. the
     * series has reviews), schedules the first review-list page for every
     * (sale-status, order) combination.
     */
    private List<CrawlerRequestRecord> parseHomeLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks, String lastRequestUrl) {
        // A series has reviews only if some nav link points at k.autohome.
        boolean hasPraise = false;
        List<String> navUrls = httpPage.getHtml().xpath("//div[@id=\"navTop\"]/ul/li/a/@href | //div[@class=\"header\"]/div[@class=\"models_nav\"]/a/@href").all();
        for (String navUrl : navUrls) {
            if (navUrl.contains("k.autohome")) {
                hasPraise = true;
                break;
            }
        }
        if (!hasPraise) {
            return parsedLinks;
        }
        String brand = httpPage.getHtml().xpath("//div[@class=\"container\"]/div/a[2]/text()").get();
        String carSeries = httpPage.getHtml().xpath("//div[@class=\"athm-sub-nav__car__name\"]//h1//text()|//div[@class=\"subnav-title-name\"]/a/text()").get();
        // The series id is the numeric path segment of the home URL.
        String seriesId = lastRequestUrl.split("cn/")[1].replace("/", "");
        for (String yearParam : URL_PARAM_YEARS) {
            for (String orderParam : URL_PARAM_ORDERS) {
                String praiseListUrl = String.format(praiseListUrlFormat, seriesId, 1, yearParam, orderParam);

                CrawlerRequestRecord firstListRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(praiseListUrl)
                        .recordKey(praiseListUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .build();

                // Attach series/brand metadata as biz tags for downstream washing.
                Map<String, String> seriesMap = new HashMap<>();
                seriesMap.put("series_name", carSeries);
                seriesMap.put("series_url", lastRequestUrl.replace("www", "k"));
                seriesMap.put("series_id", seriesId);
                List<Map<String, String>> series = new ArrayList<>();
                series.add(seriesMap);
                firstListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Series, series);
                firstListRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand, brand);

                HttpRequest listRequest = firstListRecord.getHttpRequest();
                listRequest.addHeader("Accept", "application/json");
                listRequest.addHeader("Referer", lastRequestUrl);
                listRequest.addHeader("X-Requested-With", "XMLHttpRequest");
                listRequest.addHeader("User-Agent", getRandomUA());
                listRequest.addHeader("Host", "k.m.autohome.com.cn");
                parsedLinks.add(firstListRecord);
            }
        }
        return parsedLinks;
    }

    /**
     * Consumes the results of internal downloads for an article record: stores
     * the font-service glyph map and the PC page's "target" list in extras; on
     * any failure, cancels this wash and re-enqueues the article for a re-crawl.
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        Map<String, Object> extras = crawlerRecord.getHttpRequest().getExtras();
        for (CrawlerRequestRecord internalDownloadRecord : internalDownloadRecords) {
            String internalRequestUrl = internalDownloadRecord.getHttpRequest().getUrl();
            HttpPage downloadPage = internalDownloadRecord.getInternalDownloadPage();
            if (!downloadPage.isDownloadSuccess()) {
                links.add(buildRetryRecord(crawlerRecord, extras));
                continue;
            }
            if (internalRequestUrl.matches(parseFontRegex)) {
                JSONObject pageObj = JSONObject.parseObject(downloadPage.getRawText());
                if (pageObj.getIntValue("status") == 0) {
                    // Glyph map from the obfuscation-font decode service.
                    extras.put("fonts", pageObj.getJSONObject("content"));
                } else {
                    links.add(buildRetryRecord(crawlerRecord, extras));
                }
            }
            if (internalRequestUrl.matches(articlePCUrlRegex)) {
                // "target" bullet list, only present on the PC article page.
                extras.put("targets", downloadPage.getHtml().xpath("//p[@class=\"obje\"]/text()").all());
            }
        }
    }

    /**
     * Cancels washing/parsing of the current record and builds a fresh item
     * request for the same URL so the whole article gets re-crawled.
     * (Extracted from two previously duplicated inline copies.)
     */
    private CrawlerRequestRecord buildRetryRecord(CrawlerRequestRecord crawlerRecord, Map<String, Object> extras) {
        crawlerRecord.setNeedWashPage(false);
        crawlerRecord.setNeedParsedPage(false);
        CrawlerRequestRecord newRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .recordKey(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(crawlerRecord.getReleaseTime())
                .copyBizTags()
                .needParsed(true)
                .needWashed(true)
                .notFilterRecord()
                .build();
        newRecord.getHttpRequest().setExtras(extras);
        return newRecord;
    }


    /**
     * Washes a downloaded page into structured data. A single record may carry
     * several result data types; each tagged type is washed in turn.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> washed = new ArrayList<>();
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)) {
            washed.addAll(washArticle(crawlerRequestRecord, httpPage));
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            washed.addAll(washInteraction(crawlerRequestRecord, httpPage));
        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)) {
            washed.addAll(washComment(crawlerRequestRecord, httpPage));
        }
        return washed;
    }

    public List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord,HttpPage httpPage) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        List<String> contents = castList(extras.get("articleContents"),String.class);
        StringBuffer content = new StringBuffer();
        JSONObject fonts = (JSONObject) extras.get("fonts");
        decodeTTFText(contents, content, fonts);
        JSONObject itemObj = (JSONObject)extras.get("itemObj");
        String author = itemObj.getString("nickName");
        String authorId = itemObj.getString("userid");
        String title = itemObj.getString("feeling_summary");
        String model = itemObj.getString("specName");
        String contentId = (String) extras.get("contentId");
        String timeBuy = itemObj.getString("bought_date");
        String address = itemObj.getString("boughtCityName");
        String buyPrice = String.valueOf(itemObj.getDouble("price") * 10000);
        String fuelEconomy = String.valueOf(itemObj.getDouble("actual_oil_consumption")) + "L/100km";
        String driveDistance = itemObj.getIntValue("driven_kilometers") + "km";

        JSONArray photos = itemObj.getJSONArray("photos");
        StringBuffer images = new StringBuffer();
        for (Object photo : photos) {
            images = images.append(photo).append("\\0xa");
        }

        String specId = itemObj.getString("specid");
        String modelUrl = String.format(modelUrlFormat,specId);

        String vendor = httpPage.getHtml().xpath("//section[@class=\"final-header\"]/div/div[@class=\"final-header-info-car\"]/div[1]/div[2]/span/text()").get();

        String province = vendor != null && vendor.contains("|") ? vendor.split("\\|")[0] : "";
        Map<String,String> valMap = new HashMap<>(8);
        valMap.put("空间",itemObj.getString("space"));
        valMap.put("动力",itemObj.getString("power"));
        valMap.put("操控",itemObj.getString("maneuverability"));
        valMap.put("油耗",itemObj.getString("consumption"));
        valMap.put("舒适性",itemObj.getString("comfortableness"));
        valMap.put("外观",itemObj.getString("apperance"));
        valMap.put("内饰",itemObj.getString("interior"));
        valMap.put("性价比",itemObj.getString("cost_efficient"));
        String values = "";
        for (Map.Entry<String, String> entry : valMap.entrySet()) {
            values = String.format("%s,%s:%s",values,entry.getKey(),entry.getValue());
        }
        values = values.substring(1);
        String praiseTime = itemObj.getString("created");

        List<String> targets = castList(extras.get("targets"),String.class);
        String target = JSON.toJSONString(targets);

        try {
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), contentId))
                    .url(httpRequest.getUrl())
                    .releaseTime(DateUtils.parseDate(praiseTime,"yyyy-MM-dd HH:mm:ss").getTime())
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Content,content.toString())
                    .addContentKV(Field_Praise_Time_Buy,timeBuy)
                    .addContentKV(Field_Praise_Address_Buy,address)
                    .addContentKV(Field_Praise_Price_Buy,buyPrice)
                    .addContentKV(Field_Praise_Fuel_Economy,fuelEconomy)
                    .addContentKV(Field_Praise_Drive_Distance,driveDistance)
                    .addContentKV(Field_Praise_Target,target)
                    .addContentKV(Field_Praise_Vendor,vendor)
                    .addContentKV(Field_Praise_values,values)
                    .addContentKV(Field_Images,images.toString())
                    .resultLabelTag(article)
                    .build();
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz("praise");
            Map<String, String> modelMap = new HashMap<>();
            modelMap.put("model_name",model);
            modelMap.put("model_url",modelUrl);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Car_Model,modelMap);
            Map<String, String> addr = new HashMap<>();
            addr.put("province",province);
            addr.put("city",address);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info,addr);

            //根据调度标签放入kv_content
            CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
            if (categoryTag.isContainKVTag(IS_OUTPUT_KV_CONTENT)){
                Map<String,String> kvContentMap = new HashMap<>();
                Map<String, List<String>> kvContentListMap = castListMap(extras.get("kvContentListMap"), String.class);
                Set<Map.Entry<String, List<String>>> entries = kvContentListMap.entrySet();
                String flagTag = "";
                for (Map.Entry<String, List<String>> entry : entries) {
                    String contentTitle = entry.getKey();
                    List<String> contentValues = entry.getValue();
                    StringBuffer contentValue = new StringBuffer();
                    decodeTTFText(contentValues,contentValue,fonts);
                    if (contentTagList.contains(contentTitle)){
                        flagTag = contentTitle;
                        kvContentMap.put(contentTitle,contentValue.toString());
                    }else {
                        String flagTagVal = kvContentMap.get(flagTag);
                        kvContentMap.put(flagTag, flagTagVal + contentTitle + "：" + contentValue);
                    }
                }
                crawlerArticleData.tagsCreator().bizTags().addCustomKV("kv_content",kvContentMap);
            }
            crawlerArticleDataList.add(crawlerArticleData);

        } catch (ParseException e) {
            logger.error(e.getMessage(), "parse date error");
        }
        int append_count = itemObj.getIntValue("append_count");
        if (append_count > 0){
            JSONArray appendList = itemObj.getJSONArray("append_list");
            for (Object append : appendList) {
                JSONObject appendObj = (JSONObject)append;
                String appendTime = appendObj.getString("append_created");
                String appendTitle = String.format("%s%s",title,"---追加");
                String appendId = appendObj.getString("append_id");
                String drivenKilometers = appendObj.getString("driven_kilometers");
                String appendFeeling = appendObj.getString("append_feeling");
                String oil_consumption = appendObj.getString("oil_consumption") + "L/100km";

                try {
                    String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
                    CrawlerData crawlerArticleData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), appendId))
                            .url(httpRequest.getUrl())
                            .releaseTime(DateUtils.parseDate(appendTime,"yyyy-MM-dd HH:mm:ss").getTime())
                            .addContentKV(Field_Author, author)
                            .addContentKV(Field_Author_Id, authorId)
                            .addContentKV(Field_Title, appendTitle)
                            .addContentKV(Field_Content,appendFeeling)
                            .addContentKV(Field_Praise_Time_Buy,timeBuy)
                            .addContentKV(Field_Praise_Address_Buy,address)
                            .addContentKV(Field_Praise_Price_Buy,buyPrice)
                            .addContentKV(Field_Praise_Fuel_Economy,oil_consumption)
                            .addContentKV(Field_Praise_Drive_Distance,drivenKilometers)
                            .addContentKV(Field_Praise_Target,target)
                            .addContentKV(Field_Praise_Vendor,vendor)
                            .addContentKV(Field_Praise_values,values)
                            .resultLabelTag(article)
                            .build();
                    crawlerArticleData.tagsCreator().bizTags().addSiteBiz("praise");
                    Map<String, String> modelMap = new HashMap<>();
                    modelMap.put("model_name",model);
                    modelMap.put("model_url",modelUrl);
                    crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Car_Model,modelMap);
                    Map<String, String> addr = new HashMap<>();
                    addr.put("province",province);
                    addr.put("city",address);
                    crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info,addr);
                    crawlerArticleDataList.add(crawlerArticleData);
                } catch (ParseException e) {
                    logger.error(e.getMessage(), "parse date error");
                }

            }

        }
        return crawlerArticleDataList;
    }

    /**
     * Appends the decoded form of each text fragment to {@code content}.
     * Autohome obfuscates single characters with a custom TTF font; a fragment
     * that is exactly one code point and not a normal Chinese character is
     * looked up in {@code fonts} (keyed by the upper-case hex code point) and
     * replaced with its "textCode" plaintext when a mapping exists. Everything
     * else is appended unchanged.
     */
    private void decodeTTFText(List<String> contents, StringBuffer content, JSONObject fonts) {
        for (String raw : contents) {
            String text = raw.trim();
            int[] codePoints = StringUtils.toCodePoints(text);
            // Only single, non-Chinese code points are candidates for font decoding.
            if (codePoints.length == 1 && !isChinese(Character.toChars(codePoints[0])[0])) {
                String hexKey = Integer.toHexString(codePoints[0]).toUpperCase();
                JSONArray mapping = fonts.getJSONArray(hexKey);
                if (null != mapping && mapping.size() > 0) {
                    // Obfuscated glyph with a known plaintext replacement.
                    content.append(mapping.getJSONObject(0).getString("textCode"));
                    continue;
                }
            }
            // Plain text, multi-char fragment, or glyph without a mapping.
            content.append(text);
        }
    }

    /**
     * Extracts interaction metrics (comments/views/likes) for a praise article
     * or its comment list, depending on which URL produced this page.
     *
     * @param crawlerRequestRecord record whose request URL and extras drive the parsing
     * @param httpPage             the fetched page (raw JSON for the comment API)
     * @return one CrawlerData per interaction record found
     */
    public List<CrawlerData> washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerInteractionDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = lastRequest.getUrl();
        Map<String, Object> extras = lastRequest.getExtras();
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        // Article detail page: interaction counters come from the item JSON cached in extras.
        if (lastRequestUrl.matches(articleUrlRegex)) {
            JSONObject itemObj = (JSONObject) extras.get("itemObj");
            String contentId = (String) extras.get("contentId");
            String comments = itemObj.getString("commentCount");
            String views = itemObj.getString("visitCount");
            String likes = itemObj.getString("helpfulCount");
            String pubTime = itemObj.getString("created");
            try {
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), contentId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), contentId))
                        .url(lastRequestUrl)
                        .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm:ss").getTime())
                        .addContentKV(Field_I_Comments, comments)
                        .addContentKV(Field_I_Views, views)
                        .addContentKV(Field_I_Likes, likes)
                        .resultLabelTag(interaction)
                        .build();
                crawlerData.tagsCreator().bizTags().addSiteBiz("praise");
                crawlerInteractionDataList.add(crawlerData);
            } catch (ParseException e) {
                // SLF4J takes the message first and the throwable second; the
                // arguments were previously swapped, losing the stack trace.
                logger.error("parse date error", e);
            }
        }
        // Comment API response: per-comment like counts from the JSON list.
        if (lastRequestUrl.matches(commentUrlRegex)) {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            JSONArray commentList = jsonObject.getJSONObject("result").getJSONArray("list");
            for (Object o : commentList) {
                JSONObject commentJson = (JSONObject) o;
                String commentId = commentJson.getString("replyid");
                String likes = commentJson.getString("rup");
                // "rreplydate" appears to be a .NET-style "/Date(millis+zone)/"
                // value; keep only the epoch-millis digits before the zone sign.
                String pubTime = commentJson.getString("rreplydate");
                pubTime = pubTime.split("\\(")[1];
                if (pubTime.contains("+")) {
                    pubTime = pubTime.split("\\+")[0];
                }
                if (pubTime.contains("-")) {
                    pubTime = pubTime.split("-")[0];
                }
                try {
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                            .url((String) extras.get("articleUrl"))
                            // Long.parseLong replaces the deprecated new Long(String).
                            .releaseTime(Long.parseLong(pubTime))
                            .addContentKV(Field_I_Likes, likes)
                            .resultLabelTag(interaction)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .build();
                    crawlerData.tagsCreator().bizTags().addSiteBiz("praise");
                    crawlerInteractionDataList.add(crawlerData);
                } catch (Exception e) {
                    // Message first, throwable second (previously swapped).
                    logger.error("parse date error", e);
                }
            }
        }
        return crawlerInteractionDataList;
    }

    /**
     * Parses the comment-list JSON response into per-comment CrawlerData
     * records (author, content, floor, publish time), each parented to the
     * praise article identified by extras["contentId"].
     *
     * @param crawlerRequestRecord request whose extras hold contentId/articleUrl
     * @param httpPage             raw JSON page returned by the comment API
     * @return one CrawlerData per comment entry
     */
    public List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerCommentDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
        JSONArray commentList = jsonObject.getJSONObject("result").getJSONArray("list");
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        for (Object o : commentList) {
            JSONObject commentJson = (JSONObject) o;
            String commentId = commentJson.getString("replyid");
            String author = commentJson.getString("rmembername");
            String authorId = commentJson.getString("rmemberid");
            String content = commentJson.getString("rcontent");
            String floor = commentJson.getString("rfloor");
            // "rreplydate" appears to be a .NET-style "/Date(millis+zone)/"
            // value; keep only the epoch-millis digits before the zone sign.
            String pubTime = commentJson.getString("rreplydate");
            pubTime = pubTime.split("\\(")[1];
            if (pubTime.contains("+")) {
                pubTime = pubTime.split("\\+")[0];
            }
            if (pubTime.contains("-")) {
                pubTime = pubTime.split("-")[0];
            }

            try {
                CrawlerData crawlerCommentData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), extras.get("contentId")))
                        .url((String) extras.get("articleUrl"))
                        // Long.parseLong replaces the deprecated new Long(String).
                        .releaseTime(Long.parseLong(pubTime))
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content, content)
                        .addContentKV(Field_Floor, floor)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .resultLabelTag(comment)
                        .build();
                crawlerCommentData.tagsCreator().bizTags().addSiteBiz("praise");
                crawlerCommentDataList.add(crawlerCommentData);
            } catch (Exception e) {
                // SLF4J takes the message first and the throwable second; the
                // arguments were previously swapped, losing the stack trace.
                logger.error("parse date error", e);
            }
        }

        return crawlerCommentDataList;
    }

    /**
     * Post-execution hook inherited from CrawlerCommonScript.
     * Intentionally a no-op: this script performs no cleanup after a record
     * has been processed.
     */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {
        // no-op
    }

    /**
     * Safely converts an untyped object into a {@code List<T>}, casting each
     * element to {@code clazz}.
     *
     * @return a new typed list, or null when {@code obj} is not a List
     */
    public static <T> List<T> castList(Object obj, Class<T> clazz){
        if (!(obj instanceof List<?>)) {
            return null;
        }
        List<T> typed = new ArrayList<>();
        for (Object element : (List<?>) obj) {
            typed.add(clazz.cast(element));
        }
        return typed;
    }


    /**
     * 转换map到url参数字符串 — joins the non-blank entries of {@code source}
     * into a URL query string ("k1=v1&k2=v2"), URL-encoding each value as UTF-8.
     *
     * @param source url参数的map; entries with null/blank values are skipped
     * @return 拼接好的url参数字符串; empty string when no entry survives filtering
     *         (previously substring(1) threw StringIndexOutOfBoundsException here)
     */
    public static String asUrlParams(Map<String, String> source){
        StringBuilder paramStr = new StringBuilder();
        for (Map.Entry<String, String> entry : source.entrySet()) {
            String value = entry.getValue();
            // Skip null/whitespace-only values (same semantics as StringUtils.isBlank).
            if (value == null || value.trim().isEmpty()) {
                continue;
            }
            try {
                // URL-encode the value.
                value = URLEncoder.encode(value, "utf-8");
            } catch (UnsupportedEncodingException e) {
                // UTF-8 support is mandated by the JVM spec; this cannot happen.
                throw new IllegalStateException("UTF-8 encoding unsupported", e);
            }
            paramStr.append("&").append(entry.getKey()).append("=").append(value);
        }
        // Drop the leading '&'; guard against the empty result that used to
        // make substring(1) throw on an empty or all-blank map.
        return paramStr.length() == 0 ? "" : paramStr.substring(1);
    }

    /**
     * 将url参数转换成map — parses the query string of a URL into key/value pairs.
     * Values are returned raw (not URL-decoded), matching the original behavior.
     *
     * @param url http://*.*.com?aa=11&bb=22&cc=33
     * @return map of parameter name to value, or null when the URL has no
     *         query string (null kept for caller compatibility)
     */
    public static Map<String, Object> getUrlParams(String url) {
        // Locate the query string; a missing or trailing '?' means no params.
        // (The previous split("\\?")[1] threw AIOOBE on a URL ending in '?'.)
        int q = url.indexOf('?');
        if (q < 0 || q == url.length() - 1) {
            return null;
        }
        String param = url.substring(q + 1);
        if (param.trim().isEmpty()) {
            return null;
        }
        Map<String, Object> map = new HashMap<>();
        for (String pair : param.split("&")) {
            // Split on the FIRST '=' only, so values that themselves contain
            // '=' (e.g. base64 padding) are no longer silently dropped.
            int idx = pair.indexOf('=');
            if (idx > 0 && idx < pair.length() - 1) {
                map.put(pair.substring(0, idx), pair.substring(idx + 1));
            }
        }
        return map;
    }

    /**
     * Normalizes a publish-time string to epoch milliseconds.
     * Handles a "发布：" ("published:") prefix, the literal "刚刚" ("just now"),
     * relative phrases ending in "前" ("ago", delegated to timeBefore), and
     * absolute dates containing '-' (parsed with {@code patten}).
     *
     * @return epoch millis, or 0 when the format is not recognized
     * @throws ParseException when the absolute date does not match the pattern
     */
    private long cleanTime(String timeStr, String patten) throws ParseException {
        // Strip the "发布：" prefix, keeping only the time text after the colon.
        if (timeStr.contains("发布")) {
            timeStr = timeStr.split("：")[1];
        }
        if (timeStr.contains("刚刚")) {
            return System.currentTimeMillis();
        }
        if (timeStr.contains("前")) {
            return timeBefore(timeStr);
        }
        if (timeStr.contains("-")) {
            return DateUtils.parseDate(timeStr, patten).getTime();
        }
        return 0;
    }

    /**
     * Converts a relative Chinese time phrase ("… ago") to an absolute epoch
     * timestamp in milliseconds by subtracting the parsed offset from "now".
     * Supported forms: N秒前 (seconds), N分钟前 (minutes), N分钟N秒前,
     * N小时前 (hours), N小时N分钟前, N天前 (days), N周前 (weeks),
     * N个月前 (months, approximated as 31 days).
     *
     * NOTE(review): ONE_SECOND/ONE_MINUTE/ONE_HOUR/ONE_DAY are statically
     * imported constants not visible in this chunk; if they are declared as
     * int, products such as timeNum * ONE_DAY * 31 can overflow — confirm
     * they are longs.
     * NOTE(review): "\\d*" also matches an empty digit run (e.g. "天前"),
     * which would make Integer.parseInt throw NumberFormatException.
     *
     * @param timeStr relative time text, e.g. "5分钟前"
     * @return estimated epoch millis, or 0 when no known pattern matches
     */
    private long timeBefore(String timeStr) {
        if (timeStr.matches("\\d*天前")){ // "N days ago"
            int timeNum = Integer.parseInt(timeStr.split("天")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_DAY);

        }else if (timeStr.matches("\\d*秒前")){ // "N seconds ago"
            int timeNum = Integer.parseInt(timeStr.split("秒")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_SECOND);

        }else if (timeStr.matches("\\d*分钟前")){ // "N minutes ago"
            int timeNum = Integer.parseInt(timeStr.split("分钟")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_MINUTE);

        }else if (timeStr.matches("\\d*分钟\\d*秒前")) { // "N minutes N seconds ago"
            String[] split = timeStr.split("分钟");
            int minutes = Integer.parseInt(split[0]);
            int seconds = Integer.parseInt(split[1].split("秒")[0]);
            long times = (minutes * ONE_MINUTE) + (seconds * ONE_SECOND);
            return System.currentTimeMillis() - times;

        }else if (timeStr.matches("\\d*小时前")){ // "N hours ago"
            int timeNum = Integer.parseInt(timeStr.split("小时")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_HOUR);

        }else if (timeStr.matches("\\d*小时\\d*分钟前")){ // "N hours N minutes ago"
            String[] split = timeStr.split("小时");
            int hours = Integer.parseInt(split[0]);
            int minutes = Integer.parseInt(split[1].split("分钟")[0]);
            long times = (hours * ONE_HOUR) + (minutes * ONE_MINUTE);
            return System.currentTimeMillis() - times;

        }else if (timeStr.matches("\\d*周前")){ // "N weeks ago"
            int timeNum = Integer.parseInt(timeStr.split("周")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_DAY * 7);

        }else if (timeStr.matches("\\d*个月前")){ // "N months ago", month ≈ 31 days
            int timeNum = Integer.parseInt(timeStr.split("个")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_DAY * 31);
        }else {
            // Unrecognized phrase: signal "unknown time" with 0.
            return 0;

        }
    }

    /***
     * 判断字符是否为中文 — tests whether a character belongs to a Chinese
     * Unicode block. Besides the CJK ideograph blocks, the punctuation blocks
     * that host full-width Chinese punctuation are treated as Chinese:
     * GENERAL_PUNCTUATION (“), CJK_SYMBOLS_AND_PUNCTUATION (。),
     * HALFWIDTH_AND_FULLWIDTH_FORMS (，).
     *
     * @param ch 需要判断的字符 (character to test)
     * @return 中文返回true，非中文返回false
     */
    private static boolean isChinese(char ch) {
        // Resolve the character's Unicode block and compare against the set
        // of blocks considered "Chinese" by this script.
        Character.UnicodeBlock block = Character.UnicodeBlock.of(ch);
        return block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A
                || block == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B
                || block == Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION
                || block == Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS
                || block == Character.UnicodeBlock.GENERAL_PUNCTUATION;
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/536.6");
        agentList.add("Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.1");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Returns a User-Agent string chosen uniformly at random from agentList.
     * Note: RandomUtils.nextInt(startInclusive, endExclusive) has an EXCLUSIVE
     * upper bound, so the previous bound of size() - 1 could never select the
     * last entry of the list.
     */
    private static String getRandomUA(){
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Returns a shallow copy of the given extras map; the values themselves
     * are shared with the source map.
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras){
        // The HashMap copy constructor performs the same entry-by-entry put
        // as the original loop.
        return new HashMap<>(inExtras);
    }

    /**
     * Safely converts an untyped object into a {@code Map<T,T>}, casting every
     * key and value to {@code clazz}. (The previous version allocated the
     * result and an unused keySet before checking the type.)
     *
     * @return a new typed HashMap, or null when {@code obj} is not a Map
     */
    public static <T> Map<T,T> castMap(Object obj, Class<T> clazz){
        if (!(obj instanceof Map<?,?>)) {
            return null;
        }
        Map<T,T> result = new HashMap<>();
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) obj).entrySet()) {
            result.put(clazz.cast(entry.getKey()), clazz.cast(entry.getValue()));
        }
        return result;
    }

    /**
     * Safely converts an untyped object into a {@code Map<T, List<T>>},
     * casting keys to {@code clazz}; values are cast to List without checking
     * their elements, exactly as before. Iteration order is preserved via
     * LinkedHashMap. (The previous version computed an unused keySet and used
     * the raw {@code List.class.cast}.)
     *
     * @return a new typed LinkedHashMap, or null when {@code obj} is not a Map
     */
    public static <T> Map<T,List<T>> castListMap(Object obj, Class<T> clazz){
        if (!(obj instanceof Map<?,?>)) {
            return null;
        }
        Map<T,List<T>> result = new LinkedHashMap<>();
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) obj).entrySet()) {
            @SuppressWarnings("unchecked") // element types are not verified, matching original behavior
            List<T> value = (List<T>) entry.getValue();
            result.put(clazz.cast(entry.getKey()), value);
        }
        return result;
    }

    /**
     * Smoke test for castListMap: converts an untyped map of string lists and
     * prints the typed result.
     */
    @Test
    public void testCastMap(){
        List<String> values = new ArrayList<>();
        values.add("a");
        values.add("2");
        values.add("b");

        Map<String, List<String>> source = new HashMap<>();
        source.put("one", values);
        source.put("two", values);

        Map<String, List<String>> converted = castListMap((Object) source, String.class);
        System.out.println(converted);
    }

}
