package com.chance.cc.crawler.development.scripts.douban;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Topic_Type;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/3/30 15:43
 * @Description Douban keyword-search crawler (豆瓣关键词采集): crawls search results,
 * item (note) pages, their comments and comment replies.
 **/
public class DobanCrawlerScript extends CrawlerCommonScript {

    private static Logger log = LoggerFactory.getLogger(DobanCrawlerScript.class);
    // Crawler domain + site identifiers used for routing records to this script.
    private static final String DOMAIN = "douban";
    private static final String SITE = "searchKw";
    // Business tag counting how often a failed download has been re-queued (capped at 5).
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";
    // Extras key holding the search-URL template (formatted with a %s keyword placeholder).
    private static final String SEARCH_KW_SOURCE_URL = "searchKwSourceUrl";

    // URL patterns: search entrance, JSON search API, item (note) detail pages,
    // paged comment lists, and the comment-reply JSON endpoint (template + matcher).
    private static final String ENTRANCE_URL = "https://www.douban.com/search";
    private static final String SEARCH_URL = "https://www.douban.com/j/search\\S*";
    private static final String ITEM_URL = "http[s]*://www.douban.com/[a-z]*/\\d+/";
    private static final String COMMENT_URL = "http[s]*://www.douban.com/[a-z]*/\\d+/\\?start=\\d+";
    private static final String COMMENT_REPLY_SOURCE_URL = "https://www.douban.com/j/note/comment/%s/replies?count=50&start=%s";
    private static final String COMMENT_REPLY_URL = "https://www.douban.com/j/note/comment\\S*";

    /**
     * Domain identifier used by the crawler framework to route records to this script.
     *
     * @return the constant domain name "douban"
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers the URL patterns this script handles. Registration order mirrors
     * the crawl flow: entrance -> search JSON -> item -> comment -> comment reply.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {ENTRANCE_URL, SEARCH_URL, ITEM_URL, COMMENT_URL, COMMENT_REPLY_URL};
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Input gate: only records whose "site" business tag equals {@code searchKw}
     * enter this script.
     *
     * @param crawlerRequestRecord incoming request record
     * @return true when the record's "site" tag matches this script's site
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        String siteTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // Compare constant-first: a record without a "site" tag (null) must yield
        // false instead of a NullPointerException.
        return SITE.equals(siteTag);
    }

    /**
     * Builds the initial batch of search requests from the keyword source records
     * and attaches a random User-Agent header to the incoming record.
     *
     * @param requestRecord        the entrance request record
     * @param supportSourceRecords auxiliary records carrying keyword payloads
     * @return seed records produced from the keyword source
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> seeds = new ArrayList<>();
        for (CrawlerRequestRecord sourceRecord : supportSourceRecords) {
            // Only the keyword-list source (its URL contains "keys") seeds search requests.
            if (sourceRecord.getHttpRequest().getUrl().contains("keys")) {
                initKeyword(requestRecord, sourceRecord, seeds);
            }
        }
        requestRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());
        return seeds;
    }

    /**
     * Dispatches a downloaded page to the parser matching its URL pattern,
     * re-queuing failed downloads via the retry mechanism.
     *
     * @param crawlerRequestRecord the record that produced this page
     * @param httpPage             the downloaded page
     * @return follow-up request records discovered on the page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        if (!httpPage.isDownloadSuccess() || httpPage.getStatusCode() != 200) {
            int statusCode = httpPage.getStatusCode();
            if (statusCode == 403 || statusCode == 404) {
                // NOTE(review): appending "1" to the http site looks like a proxy/site
                // switch convention — confirm the downloader interprets it that way.
                HttpConfig httpConfig = crawlerRequestRecord.getHttpConfig();
                httpConfig.setHttpSite(httpConfig.getHttpSite() + "1");
            }
            log.error("{} status code : [{}]!will retry!", requestUrl, statusCode);
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        // Each pattern is a full-match regex; the independent ifs keep the original
        // dispatch semantics in case a URL ever matches more than one.
        if (requestUrl.matches(SEARCH_URL)) {
            searchUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(ITEM_URL)) {
            itemUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(COMMENT_URL)) {
            commentUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(COMMENT_REPLY_URL)) {
            commentReplyUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        return parsedLinks;
    }

    /**
     * Parses a JSON search-result page: queues the next result page (while the
     * JSON "more" flag is true, bumping "start" by the page size 20) and one item
     * request per search hit. A missing "items" node triggers a retry.
     *
     * @param crawlerRequestRecord record that produced this page
     * @param httpPage             downloaded search page
     * @param parsedLinks          output list receiving follow-up requests
     */
    private void searchUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Probe the payload first; a missing "items" node means a bad download -> retry.
        try {
            httpPage.getJson().jsonPath($_type + ".items").all();
        } catch (Exception e) {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        String keyword = (String) httpPage.getRequest().getExtras().get("keyword");
        String requestUrl = httpPage.getRequest().getUrl();
        // NOTE(review): contains("start=1") also matches start=10/start=100...; this
        // looks intended to detect the first page only — confirm the seed URL format.
        if (requestUrl.contains("start=1")) {
            String total = httpPage.getJson().jsonPath($_type + ".total").get();
            if (total.equals("0")) {
                log.error(keyword + " search result is 0");
                return;
            }
        }

        // Rebuild the query string for the next page, advancing "start" by 20 and
        // re-encoding every other parameter.
        String[] split = requestUrl.split("\\?");
        String nextUrl = split[0] + "?";
        List<NameValuePair> parse = URLEncodedUtils.parse(split[1], Charset.defaultCharset());
        for (NameValuePair nameValuePair : parse) {
            String name = nameValuePair.getName();
            String value = nameValuePair.getValue();
            if ("start".equals(name)) {
                nextUrl = nextUrl + name + "=" + (Integer.parseInt(value) + 20) + "&";
            } else {
                try {
                    nextUrl = nextUrl + name + "=" + URLEncoder.encode(value, "UTF-8") + "&";
                } catch (UnsupportedEncodingException e) {
                    log.error(e.getMessage());
                }
            }
        }
        String hasMore = httpPage.getJson().jsonPath($_type + ".more").get();
        if ("true".equals(hasMore)) {
            nextUrl = nextUrl.substring(0, nextUrl.length() - 1);  // drop trailing '&'
            CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            requestRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
            parsedLinks.add(requestRecord);
        }

        // Each item is an HTML fragment whose first link wraps the real target in a
        // "url=" query parameter of a redirect href.
        List<String> all = httpPage.getJson().jsonPath($_type + ".items").all();
        for (String data : all) {
            Html html = new Html(data);
            String itemUrl = html.xpath("//a/@href").get();
            if (StringUtils.isBlank(itemUrl)) {
                continue;
            }
            try {
                itemUrl = URLDecoder.decode(itemUrl, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                log.error(e.getMessage());
            }
            // Guard both markers: a malformed href previously threw
            // StringIndexOutOfBoundsException when "url=" or "&" was absent.
            int urlIdx = itemUrl.indexOf("url=");
            if (urlIdx < 0) {
                log.error("no url param in search item href : {}", itemUrl);
                continue;
            }
            int ampIdx = itemUrl.indexOf('&', urlIdx);
            itemUrl = ampIdx >= 0 ? itemUrl.substring(urlIdx + 4, ampIdx) : itemUrl.substring(urlIdx + 4);
            String[] split1 = itemUrl.split(":");
            if ("http".equals(split1[0])) {
                itemUrl = "https:" + split1[1];
            }

            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(itemUrl)
                    .recordKey(itemUrl + keyword)
                    .releaseTime(System.currentTimeMillis())
                    .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                    .copyBizTags()
                    .copyResultTags()
                    .needWashed(false)
                    .build();
            itemRecord.setTurnPageFilterInfo(null);
            parsedLinks.add(itemRecord);
        }
    }

    /**
     * Handles an item (note) detail page.
     *
     * Reads the comment total from the page's inline script, applies the record's
     * date-range filter to decide whether the page gets washed, and — when the
     * result tags request comments — queues the highest-offset comment page
     * (commentUrlRecord then pages backwards from there).
     *
     * Side effects on crawlerRequestRecord: toggles needWashPage and sets releaseTime.
     */
    private void itemUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        // The comment total lives in an inline script as "'total': <n>"; when it is
        // missing the download is considered bad and retried.
        String comments = washContent("'total': \\d+", httpPage.getRawText());
        if (StringUtils.isNotBlank(comments)) {
            String[] split = comments.split(":");
            comments = split[1].trim();
        } else {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        // Decide whether this article should be collected, based on its publish date.
        String releaseTime = httpPage.getHtml().xpath("//span[@class=\"pub-date\"]/text()").get();
        long date = 0;
        try {
            date = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
        } catch (ParseException e) {
            log.error(e.getMessage());
            crawlerRequestRecord.setNeedWashPage(false);
        }
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            for (FilterInfo filterInfo : filterInfos) {
                if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                    long[] dateAllowRange = filterInfo.getDateAllowRange();
                    int hourFromNow = filterInfo.getHourFromNow();
                    if (dateAllowRange != null) {
                        // An explicit [start, end] range takes precedence.
                        startTime = dateAllowRange[0];
                        endTime = dateAllowRange[1];
                    }else if(hourFromNow != 0){
                        // Otherwise a rolling "last N hours" window.
                        endTime = System.currentTimeMillis();
                        startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                    }
                }
            }
            // Wash only when a range was resolved, the date parsed, and it falls inside.
            if(startTime != null && date != 0 && startTime <= date && date <= endTime){
                crawlerRequestRecord.setNeedWashPage(true);
            }
        }else{
            // No date filter configured: always wash.
            crawlerRequestRecord.setNeedWashPage(true);
        }

        boolean needWashPage = crawlerRequestRecord.isNeedWashPage();
        if(!needWashPage){
            return;
        }

        crawlerRequestRecord.setReleaseTime(date);
        if (resultTags.hasDataType(comment)) {
            resultTags.getCategoryTag().removeLabelTag("comment");

            CrawlerRequestRecord filterRecord = resultTags.getCategoryTag().getKVTagObjVal("comment_filter_record", CrawlerRequestRecord.class);
            if (filterRecord == null) {
                log.error("filter record can not null !");
                return;
            }

            // Comment count must be > 0; start at the highest 100-sized page offset so
            // the comment pages can be walked in order (backwards) from there.
            int i = Integer.parseInt(comments.trim());
            if (i > 0) {
                int start = i / 100;
                String url = requestUrl + "?start=" + (start * 100);
                CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(url)
                        .releaseTime(System.currentTimeMillis())
                        .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                        .notFilterRecord()
                        .copyBizTags()
                        .resultLabelTag(comment)
                        .build();
                commentRecord.setFilter(filterRecord.getFilter());
                commentRecord.setFilterInfos(filterRecord.getFilterInfos());
                parsedLinks.add(commentRecord);
            }
        }
    }

    /**
     * Handles a comment list page: stashes the parsed comment array into the
     * request extras (consumed by washComment), queues the previous comment page
     * (paging backwards by 100 via "?start="), and queues a reply-list request for
     * every comment that has replies.
     */
    private void commentUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        // Turn page: the comment array is embedded in an inline script as
        // "'comments': [...]"; a parse failure means a bad download -> retry.
        String commentText = washContent("'comments': \\[[\\S\\ ]*\\]", httpPage.getRawText());
        JSONArray commentList = null;
        try {
            commentList = JSONArray.parseArray(commentText.substring(commentText.indexOf(":") + 1).trim());
        } catch (Exception e) {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        // Hand the parsed comments to washComment via the request extras.
        Map<String, Object> extras = new HashMap<>();
        extras.put("commentList", commentList);
        crawlerRequestRecord.getHttpRequest().setExtras(extras);

        // URLs look like ".../?start=<n>": split on '=' and subtract the page size (100).
        String[] split = requestUrl.split("=");
        int currentComment = Integer.parseInt(split[1]);
        String nextCommentUrl = split[0] + "=" + (currentComment - 100);
        if (currentComment > 0) {
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextCommentUrl)
                    .releaseTime(System.currentTimeMillis())
                    .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                    .copyBizTags()
                    .needWashed(true)
                    .copyResultTags()
                    .build();
            parsedLinks.add(commentRecord);
        }

        // Replies of each comment: hit the reply JSON endpoint starting at the highest
        // 50-sized offset (commentReplyUrlRecord then pages backwards).
        if (commentList != null && commentList.size() > 0) {
            for (Object data : commentList) {
                JSONObject jsonObject = (JSONObject) data;
                JSONArray replies = jsonObject.getJSONArray("replies");
                String id = jsonObject.getString("id");
                if (replies != null && replies.size() > 0) {
                    // NOTE(review): "replies" holds only the inlined replies, so
                    // size()/50 may not reflect the true total — confirm vs total_replies.
                    int start = replies.size() / 50;
                    String url = String.format(COMMENT_REPLY_SOURCE_URL, id, (start * 50));
                    CrawlerRequestRecord replyRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRequestRecord)
                            .httpUrl(url)
                            .releaseTime(System.currentTimeMillis())
                            .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                            .notFilterRecord()
                            .copyBizTags()
                            .needWashed(true)
                            .copyResultTags()
                            .build();
                    replyRecord.getHttpRequest().addExtra("commentUrl", requestUrl);
                    parsedLinks.add(replyRecord);
                }
            }
        }
    }

    /**
     * Queues the previous page of comment replies, paging backwards by 50 via the
     * "start" query parameter until start reaches 0. A missing "replies" payload
     * triggers a retry record instead.
     */
    private void commentReplyUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Probe the payload first; a missing "replies" node means a bad download -> retry.
        try {
            httpPage.getJson().jsonPath($_type + ".replies").get();
        } catch (Exception e) {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        // Rebuild the URL with "start" decremented by the page size (50).
        String requestUrl = httpPage.getRequest().getUrl();
        String[] parts = requestUrl.split("\\?");
        StringBuilder nextUrl = new StringBuilder(parts[0]).append('?');
        for (NameValuePair pair : URLEncodedUtils.parse(parts[1], Charset.defaultCharset())) {
            String paramName = pair.getName();
            String paramValue = pair.getValue();
            if ("start".equals(paramName)) {
                // start == 0 is the last page: stop paging entirely.
                if (Integer.parseInt(paramValue) <= 0) {
                    return;
                }
                nextUrl.append(paramName).append('=').append(Integer.parseInt(paramValue) - 50).append('&');
            } else {
                nextUrl.append(paramName).append('=').append(paramValue).append('&');
            }
        }
        nextUrl.setLength(nextUrl.length() - 1);  // drop trailing '&'
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(nextUrl.toString())
                .releaseTime(System.currentTimeMillis())
                .httpHeads(crawlerRequestRecord.getHttpRequest().getHeaders())
                .copyBizTags()
                .needWashed(true)
                .copyResultTags()
                .build();
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        parsedLinks.add(record);
    }


    /**
     * Converts a washed page into crawler data items according to the record's
     * result data types (article / interaction / comment).
     *
     * @param crawlerRecord record being washed
     * @param page          downloaded page
     * @return extracted data items; never contains null entries
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        CrawlerResultTags crawlerResultTags = crawlerRecord.tagsCreator().resultTags();

        if (crawlerResultTags.hasDataType(article)) {
            // washArticle returns null when the release time cannot be parsed; do not
            // let null entries leak into the pipeline.
            CrawlerData articleData = washArticle(crawlerRecord, page);
            if (articleData != null) {
                crawlerDataList.add(articleData);
            }
        }

        if (crawlerResultTags.hasDataType(interaction)) {
            // Same null contract as washArticle.
            CrawlerData interactionData = washInteraction(crawlerRecord, page);
            if (interactionData != null) {
                crawlerDataList.add(interactionData);
            }
        }

        if (crawlerResultTags.hasDataType(comment)) {
            crawlerDataList.addAll(washComment(crawlerRecord, page));
        }

        return crawlerDataList;
    }

    /**
     * Extracts the article body, title, author and release time from an item page.
     *
     * @return the article CrawlerData, or null when the release time cannot be parsed
     */
    private CrawlerData washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        // Item URLs end with ".../<id>/", so the last path segment is the article key.
        String[] split1 = itemUrl.split("/");
        String articleKey = split1[split1.length - 1];

        String title = httpPage.getHtml().xpath("//h1/text()").get();
        String author = httpPage.getHtml().xpath("//a[@class=\"note-author\"]/text()").get();
        String authorId = httpPage.getHtml().xpath("//a[@class=\"note-author\"]/@href").get();
        if (StringUtils.isNotBlank(authorId)) {
            String[] split = authorId.split("/");
            authorId = split[split.length - 1];
        }
        String releaseTime = httpPage.getHtml().xpath("//span[@class=\"pub-date\"]/text()").get();
        List<String> all = httpPage.getHtml().xpath("//div[@class=\"note\"]//text()").all();
        // StringBuilder instead of StringBuffer: no concurrent access in this method.
        StringBuilder contents = new StringBuilder();
        for (String articleText : all) {
            contents.append(articleText).append(" ");
        }
        List<String> tagList = httpPage.getHtml().xpath("//div[@class=\"mod-tags\"]/a/text()").all();
        if (tagList != null && tagList.size() > 0) {
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, tagList);
        }

        CrawlerData crawlerData = null;
        try {
            crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), articleKey))
                    // Use the statically imported constant directly instead of reflective valueOf().
                    .resultLabelTag(article)
                    .url(itemUrl)
                    .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                    .addContentKV(Field_Content, contents.toString())
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .build();
        } catch (ParseException e) {
            // Keep the throwable: e.getMessage() alone loses the stack trace.
            log.error("parse article release time [" + releaseTime + "] failed", e);
        }
        return crawlerData;
    }

    /**
     * Extracts interaction metrics (likes, collections, forwards, comment count)
     * from an item page; missing metrics default to "0".
     *
     * @return the interaction CrawlerData, or null when the release time cannot be parsed
     */
    private CrawlerData washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        String[] split1 = itemUrl.split("/");
        String articleKey = split1[split1.length - 1];

        String releaseTime = httpPage.getHtml().xpath("//span[@class=\"pub-date\"]/text()").get();
        String likes = httpPage.getHtml().xpath("//span[text()='赞']/following-sibling::span/text()").get();
        likes = StringUtils.isNotBlank(likes) ? likes : "0";
        String collections = httpPage.getHtml().xpath("//span[text()='收藏']/following-sibling::span/text()").get();
        collections = StringUtils.isNotBlank(collections) ? collections : "0";
        String forwards = httpPage.getHtml().xpath("//div[@class=\"sharing-douban\"]//span[@class=\"rec-num\"]/text()").get();
        forwards = StringUtils.isNotBlank(forwards) ? forwards : "0";
        // The comment total lives in an inline script as "'total': <n>".
        String comments = washContent("'total': \\d+", httpPage.getRawText());
        if (StringUtils.isNotBlank(comments)) {
            String[] split = comments.split(":");
            comments = split[1].trim();
        } else {
            comments = "0";
        }

        CrawlerData crawlerData = null;
        try {
            crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), articleKey))
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), interaction.enumVal(), articleKey))
                    // Use the statically imported constant directly instead of reflective valueOf().
                    .resultLabelTag(interaction)
                    .url(itemUrl)
                    .releaseTime(DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime())
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .addContentKV(Field_I_Comments, comments)
                    .addContentKV(Field_I_Forwards, forwards)
                    .addContentKV(Field_I_Likes, likes)
                    .addContentKV(Field_I_Collection, collections)
                    .build();
        } catch (ParseException e) {
            // Keep the throwable: e.getMessage() alone loses the stack trace.
            log.error("parse interaction release time [" + releaseTime + "] failed", e);
        }
        return crawlerData;
    }

    /**
     * Converts a comment list (from a comment page or a reply page) into comment
     * CrawlerData items keyed under the owning article.
     *
     * @return one CrawlerData per parsable comment; unparsable entries are skipped
     */
    private List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String requestUrl = httpPage.getRequest().getUrl();
        String url = requestUrl;
        JSONArray commentList;
        if (requestUrl.matches(COMMENT_REPLY_URL)) {
            // Reply pages carry the owning comment page URL in the request extras.
            url = (String) httpPage.getRequest().getExtras().get("commentUrl");
            commentList = JSONArray.parseArray("[" + httpPage.getJson().jsonPath($_type + ".replies").get() + "]");
        } else {
            commentList = (JSONArray) httpPage.getRequest().getExtras().get("commentList");
        }
        // Comment URLs look like ".../<articleId>/?start=n", so the second-to-last
        // slash-separated segment is the article key.
        String[] split = url.split("/");
        String articleKey = split[split.length - 2];

        for (Object data : commentList) {
            JSONObject jsonObject = (JSONObject) data;
            String commentId = jsonObject.getString("id");
            JSONObject author = jsonObject.getJSONObject("author");
            if (author == null) {
                // Guard against a missing author block: previously this NPE'd and
                // aborted the whole wash instead of skipping one comment.
                log.error("comment [{}] has no author, skip", commentId);
                continue;
            }
            String name = author.getString("name");
            String authorId = author.getString("id");
            String releaseTime = jsonObject.getString("create_time");
            String content = jsonObject.getString("text");
            String comments = jsonObject.getString("total_replies");

            try {
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), articleKey))
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), comment.enumVal(), commentId))
                        // Use the statically imported constant directly instead of reflective valueOf().
                        .resultLabelTag(comment)
                        .releaseTime(DateUtils.parseDate(releaseTime.trim(), "yyyy-MM-dd HH:mm:ss").getTime())
                        .url(requestUrl)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .addContentKV(Field_Author, name)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content, content)
                        .addContentKV(Field_I_Comments, comments)
                        .build();
                crawlerDataList.add(crawlerData);
            } catch (ParseException e) {
                // Keep the throwable: e.getMessage() alone loses the stack trace.
                log.error("parse comment [" + commentId + "] release time failed", e);
            }
        }
        return crawlerDataList;
    }

    /**
     * Normalizes a Chinese count string to a plain integer string, expanding the
     * "万" (×10,000) suffix; e.g. "1.5万" -> "15000". Blank/null input yields "0";
     * text without the suffix is returned as-is (lowercased).
     *
     * @param text raw count text, may be null or blank
     * @return the normalized count string
     */
    private static String washNum(String text) {
        if (text == null || text.trim().isEmpty()) {
            return "0";
        }

        text = text.toLowerCase();
        if (text.contains("万")) {
            String[] split = text.split("万");
            // Truncate toward zero with long arithmetic. The previous
            // String.valueOf(double)-then-split-on-'.' approach broke for values
            // >= 1000万, where Double.toString switches to scientific notation
            // ("1.0E7"), making the result "1".
            long num = (long) (Double.parseDouble(split[0].trim()) * 10000);
            return String.valueOf(num);
        }

        return text;
    }

    /**
     * Returns the first substring of {@code input} matching the regex {@code regx},
     * or null when there is no match.
     */
    private static String washContent(String regx, String input) {
        Matcher matcher = Pattern.compile(regx).matcher(input);
        return matcher.find() ? matcher.group() : null;
    }

    /**
     * Post-execution hook from the script base class; intentionally a no-op here.
     */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }

    /**
     * Re-queues a failed request, tracking the attempt count in the business tag
     * {@code douban_request_again}; gives up (emits nothing) after 5 retries.
     *
     * The current count is appended to the record key so the retry is not
     * de-duplicated against the original record. The retry is built as a turn-page
     * or item-page request depending on the original record's request label.
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                // Retry budget exhausted: drop the request.
                log.error(DOMAIN + " download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // The "turn_page_item_request" label decides which builder flavor mirrors the original.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        CrawlerRequestRecord crawlerRequestRecord = null;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // NOTE(review): both branches assign, so this null check is effectively dead.
        if (crawlerRequestRecord == null) {
            return;
        }

        // Carry over everything the retry needs to behave like the original request.
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Builds one search request per keyword found in the keyword source page.
     *
     * The search-URL template is read from the request extras under
     * "searchKwSourceUrl" and formatted with the URL-encoded keyword; each record
     * carries the keyword both as a business tag and as an extra.
     *
     * @param requestRecord       the entrance request record (template + headers source)
     * @param supportSourceRecord auxiliary record whose internal page holds the keywords
     * @param crawlerRecords      output list receiving one record per keyword
     */
    private void initKeyword(CrawlerRequestRecord requestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        String searchSourceUrl = (String) extras.get(SEARCH_KW_SOURCE_URL);
        if (StringUtils.isBlank(searchSourceUrl)) {
            log.error("search kw source url can not null!");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            try {
                String url = String.format(searchSourceUrl, URLEncoder.encode(keyword, "UTF-8"));
                CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(url)
                        .releaseTime(System.currentTimeMillis())
                        .httpHeads(requestRecord.getHttpRequest().getHeaders())
                        .notFilterRecord()
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                turnRecord.tagsCreator().bizTags().addKeywords(keyword);
                turnRecord.getHttpRequest().addExtra("keyword", keyword);
                crawlerRecords.add(turnRecord);
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is a mandated charset so this is effectively unreachable, but
                // keep the full exception so a real failure stays diagnosable.
                log.error("encode keyword [" + keyword + "] failed", e);
            }
        }
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a random User-Agent from the pool.
     *
     * @return one entry of agentList, chosen uniformly at random
     */
    private static String getRandomUA() {
        // RandomUtils.nextInt's upper bound is EXCLUSIVE, so the bound must be
        // size(), not size() - 1 — the old code could never return the last entry.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }


    /**
     * Leftover scratch main, apparently used to sanity-check the comment paging
     * arithmetic (131 / 100 * 100 == 100). Not part of the crawler flow; safe to delete.
     */
    public static void main(String[] args) {
        int m = 131;
        int c = (m / 100) * 100;
        System.out.println(c);
    }
}
