package com.chance.cc.crawler.development.scripts.cnhubei;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Topic_Type;

/**
 * Keyword-search crawler script for the Donghu community forum (东湖社区, bbs.cnhubei.com):
 * drives keyword searches, follows result/thread/comment pagination, and washes
 * article, interaction, and comment data from the downloaded pages.
 *
 * @Author Zhao.Hhuan
 * @Date Create in 2021/3/10 14:02
 **/
public class CnHubeiCrawlerScript extends CrawlerCommonScript {

    private static Logger log = LoggerFactory.getLogger(CnHubeiCrawlerScript.class);
    // Crawler domain identifier for this script.
    private static final String DOMAIN = "cnhubei";
    // Value of the "site" category biz tag that routes records into this script.
    private static final String SITE = "searchKw";
    // Biz-tag key used to count download retry attempts (see requestAgainCrawlerRecord).
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";
    // Biz-tag key holding the search URL template; formatted with (searchid, URL-encoded keyword).
    private static final String SEARCH_KW_SOURCE_URL = "searchKwSourceUrl";
    // HTTP-request extras key holding the POST body template map (values of "%s" get the keyword).
    private static final String HTTP_BODY = "httpBody";

    // Site base URL, prepended to relative links found in pages. (NOTE(review): name is a typo of "PREFIX".)
    private static final String PRIFIX = "http://bbs.cnhubei.com/";
    // URL patterns (full-match regexes used with String.matches) that classify downloaded pages.
    private static final String ENTRANCE_URL = "http://bbs.cnhubei.com/search.php\\?mod=[a-z]*";
    private static final String SEARCH_URL = "http://bbs.cnhubei.com/search.php\\?mod=[a-z]*&searchid=\\d+&orderby=[a-z]+&ascdesc=[a-z]*&searchsubmit=yes&kw=\\S*";
    private static final String SEARCH_TURN_URL = "http://bbs.cnhubei.com/search.php\\?mod=[a-z]*&searchid=\\d+&orderby=[a-z]*&ascdesc=[a-z]*&searchsubmit=yes&page=\\d+";
    private static final String ITEM_URL = "http://bbs.cnhubei.com/forum.php\\?mod=viewthread&tid=\\d+&highlight=[a-zA-Z0-9%]+";
    private static final String COMMENT_URL = "http://bbs.cnhubei.com/forum.php\\?mod=viewthread&tid=\\d+[&extra=]*&highlight=\\S*#comment";

    /**
     * Script domain definition.
     *
     * @return the crawler domain identifier for this script
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers the URL regex patterns whose matches are routed into this script.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(ENTRANCE_URL);
        addUrlRegular(SEARCH_URL);
        addUrlRegular(SEARCH_TURN_URL);
        addUrlRegular(ITEM_URL);
        addUrlRegular(COMMENT_URL);
    }

    /**
     * Input-data check: only records whose "site" category tag equals {@code SITE}
     * enter this script.
     *
     * @param crawlerRequestRecord the incoming request record
     * @return true if this script should process the record
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        String siteTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // Constant-first equals: avoids an NPE when the "site" tag is absent (null).
        return SITE.equals(siteTag);
    }

    /**
     * Pre-request hook: expands the "keys" support source into one search request per
     * keyword, throttles entrance-page requests, and randomizes the User-Agent header.
     *
     * @param requestRecord        the outgoing request record
     * @param supportSourceRecords auxiliary records (keyword lists) downloaded beforehand
     * @return additional records created from the keyword list
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> crawlerRecords = new ArrayList<>();
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (url.contains("keys")) {
                initKeyword(requestRecord, supportSourceRecord, crawlerRecords);
            }
        }

        // Throttle entrance-page requests — presumably to avoid the site's rate limiting.
        if (requestRecord.getHttpRequest().getUrl().matches(ENTRANCE_URL)) {
            try {
                Thread.sleep(20000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can still observe the interruption.
                Thread.currentThread().interrupt();
                log.error("interrupted while throttling entrance request", e);
            }
        }
        requestRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());
        return crawlerRecords;
    }

    /**
     * Link-extraction entry point: validates the download result, then dispatches the
     * page to the handler matching its URL pattern and collects follow-up requests.
     *
     * @param crawlerRequestRecord the record that produced this page
     * @param httpPage             the downloaded page
     * @return follow-up request records parsed from the page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        int statusCode = httpPage.getStatusCode();
        String recordUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        // Download failure (or an unexpected status): schedule a retry, skip washing.
        boolean failed = !httpPage.isDownloadSuccess() || (statusCode != 200 && statusCode != 404);
        if (failed) {
            log.error("{} status code:{}", recordUrl, statusCode);
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        // 404 pages are logged and dropped without a retry.
        if (statusCode == 404) {
            log.error("{} status code:{}", recordUrl, statusCode);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        String requestUrl = httpPage.getRequest().getUrl();
        if (requestUrl.matches(ENTRANCE_URL)) {
            searchEntranceUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(SEARCH_URL) || requestUrl.matches(SEARCH_TURN_URL)) {
            searchUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(ITEM_URL)) {
            itemUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (requestUrl.matches(COMMENT_URL) && requestUrl.contains("page")) {
            commentUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }
        return parsedLinks;
    }

    /**
     * Handles the first search page (entrance URL): extracts the searchid assigned by
     * the site from the next-page link, rebuilds the real search URL from the
     * configured template, and emits it. Retries when the site shows a message box
     * (e.g. rate limiting), stops on zero results, and falls back to direct item
     * extraction when there is no pagination.
     */
    private void searchEntranceUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String nextUrl = StringEscapeUtils.unescapeHtml(httpPage.getHtml().xpath("//a[text()='下一页']/@href").get());
        String keyword = (String) httpPage.getRequest().getExtras().get("keyword");

        // A message box means the site rejected the search — schedule a retry.
        String message = httpPage.getHtml().xpath("//div[@id=\"messagetext\"]/p/text()").get();
        if (StringUtils.isNotBlank(message)) {
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            return;
        }

        // "相关内容 0 个" in the result header means the keyword matched nothing.
        List<String> result = httpPage.getHtml().xpath("//div[@class=\"sttl mbn\"]//text()").all();
        if (result != null && result.size() > 0) {
            for (String s : result) {
                if (s.contains("相关内容 0 个")) {
                    log.error("[{}] search result is 0!", keyword);
                    return;
                }
            }
        }

        // No "next page" link: the single result page can be parsed directly.
        if (StringUtils.isBlank(nextUrl)) {
            getItemRecord(crawlerRequestRecord, httpPage, parsedLinks);
            return;
        }

        String sourceUrl = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(SEARCH_KW_SOURCE_URL);
        if (sourceUrl == null) {
            log.error("search source url does not exist!");
            return;
        }
        crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(SEARCH_KW_SOURCE_URL);

        // The searchid is the value of the second query parameter of the next-page link.
        String searchId = nextUrl.split("&")[1].split("=")[1];
        try {
            String requestUrl = String.format(sourceUrl, searchId, URLEncoder.encode(keyword, "UTF-8"));
            CrawlerRequestRecord searchRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parsedLinks.add(searchRecord);
        } catch (UnsupportedEncodingException e) {
            // Keep the stack trace instead of logging only the message.
            log.error("failed to URL-encode keyword [" + keyword + "]", e);
        }
    }


    /**
     * Handles a search result page: queues the next result page (when present) and
     * then extracts item links from the current page.
     */
    private void searchUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Pagination: follow the "next page" link if the page has one.
        String nextHref = httpPage.getHtml().xpath("//a[text()='下一页']/@href").get();
        if (!StringUtils.isBlank(nextHref)) {
            String nextPageUrl = PRIFIX + StringEscapeUtils.unescapeHtml(nextHref);
            parsedLinks.add(CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build());
        }

        getItemRecord(crawlerRequestRecord, httpPage, parsedLinks);
    }

    /**
     * Parses the result list of a search page into item-page requests.
     * Entries missing a link or a release time are skipped.
     */
    private void getItemRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        List<Selectable> nodes = httpPage.getHtml().xpath("//div[@id=\"threadlist\"]/ul/li").nodes();
        for (Selectable node : nodes) {
            String itemUrl = node.xpath("./h3/a/@href").get();
            if (StringUtils.isBlank(itemUrl)) {
                continue;
            }
            itemUrl = PRIFIX + StringEscapeUtils.unescapeHtml(itemUrl);

            String releaseTime = node.xpath("./p[last()]/span[1]/text()").get();
            if (StringUtils.isBlank(releaseTime)) {
                continue;
            }

            try {
                long releaseTimeToLong = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm").getTime();
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                // Keep the offending value and the stack trace for diagnosis.
                log.error("unparseable release time [" + releaseTime + "] for " + itemUrl, e);
            }
        }
    }

    /**
     * Handles an item (thread) page: when comment collection is requested and the
     * thread has replies, queues the LAST comment page (comments are then walked
     * backwards via commentUrlRecord).
     */
    private void itemUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        if (!resultTags.hasDataType(comment)) {
            return;
        }
        resultTags.getCategoryTag().removeLabelTag("comment");

        CrawlerRequestRecord filterRecord = resultTags.getCategoryTag().getKVTagObjVal("comment_filter_record", CrawlerRequestRecord.class);
        if (filterRecord == null) {
            log.error("filter record must not be null!");
            return;
        }

        String comments = httpPage.getHtml().xpath("//span[text()='回复:']/following-sibling::span[1]/text()").get();
        // Only crawl comments when the reply counter is a positive number. The numeric
        // guard prevents an uncaught NumberFormatException on non-numeric counter text.
        if (StringUtils.isBlank(comments) || !StringUtils.isNumeric(comments.trim())
                || Integer.parseInt(comments.trim()) <= 0) {
            return;
        }

        // Find the last comment page; single-page threads use the item URL itself.
        String lastUrl = httpPage.getHtml().xpath("//div[@class=\"pg\"]/a[last()-1]/@href").get();
        if (StringUtils.isNotBlank(lastUrl)) {
            lastUrl = PRIFIX + StringEscapeUtils.unescapeHtml(lastUrl) + "#comment";
        } else {
            lastUrl = requestUrl + "#comment";
        }
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(lastUrl)
                .releaseTime(System.currentTimeMillis())
                .notFilterRecord()
                .copyBizTags()
                .needWashed(true)
                .resultLabelTag(comment)
                .build();
        commentRecord.setFilter(filterRecord.getFilter());
        commentRecord.setFilterInfos(filterRecord.getFilterInfos());
        commentRecord.getHttpRequest().addExtra("articleUrl", requestUrl);
        parsedLinks.add(commentRecord);
    }

    /**
     * Handles a comment page: walks backwards through comment pages via the "prev"
     * link, carrying the newest comment's timestamp as the record release time.
     */
    private void commentUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Comment pagination goes backwards (last page first) via the "prev" link.
        String url = httpPage.getHtml().xpath("//a[@class=\"prev\"]/@href").get();
        if (StringUtils.isBlank(url)) {
            return;
        }
        url = PRIFIX + StringEscapeUtils.unescapeHtml(url) + "#comment";

        String lastReleaseTime = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"authi\"]/em/text()").get();
        // DateUtils.parseDate(null, ...) throws IllegalArgumentException, which the
        // ParseException catch below would not handle — guard explicitly.
        if (StringUtils.isBlank(lastReleaseTime)) {
            log.error("missing release time on comment page {}", httpPage.getRequest().getUrl());
            return;
        }
        try {
            long longTime = DateUtils.parseDate(lastReleaseTime, "发表于 yyyy-MM-dd HH:mm:ss").getTime();
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(longTime)
                    .copyBizTags()
                    .needWashed(true)
                    .copyResultTags()
                    .build();
            commentRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
            parsedLinks.add(commentRecord);
        } catch (ParseException e) {
            log.error("unparseable release time [" + lastReleaseTime + "]", e);
        }
    }

    /**
     * Wash entry point: produces the data types requested by the record's result tags
     * (article / interaction / comment) from the downloaded page.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        CrawlerResultTags tags = crawlerRecord.tagsCreator().resultTags();
        List<CrawlerData> results = new ArrayList<>();

        if (tags.hasDataType(article)) {
            results.add(washArticle(crawlerRecord, page));
        }
        if (tags.hasDataType(interaction)) {
            results.add(washInteraction(crawlerRecord, page));
        }
        if (tags.hasDataType(comment)) {
            results.addAll(washComment(crawlerRecord, page));
        }
        return results;
    }

    /**
     * Washes the thread's first post into an article record.
     * Returns null when the release time is present but cannot be parsed.
     */
    public CrawlerData washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        // The tid query parameter (second parameter) identifies the thread.
        String articleKey = itemUrl.split("&")[1].split("=")[1];

        String title = httpPage.getHtml().xpath("//a[@id=\"thread_subject\"]/text()").get();
        String author = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//a[@class=\"xw1\"]/text()").get();
        String releaseTime = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"authi\"]/em/text()").get();
        // When the visible text is only the "发表于 " prefix, the timestamp is in the span title.
        releaseTime = "发表于 ".equals(releaseTime) ? httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"authi\"]/em/span/@title").get() : releaseTime;
        List<String> all = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"pct\"]//td[contains(@id,'postmessage_')]//text()").all();
        // Single-threaded assembly — StringBuilder instead of StringBuffer.
        StringBuilder contents = new StringBuilder();
        for (String articleText : all) {
            contents.append(articleText).append(" ");
        }

        List<String> topicTypes = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"ptg mbm mtn\"]/a//text()").all();
        if (topicTypes != null && !topicTypes.isEmpty()) {
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, topicTypes);
        }
        CrawlerData crawlerData = null;
        try {
            long releaseTimeToLong = StringUtils.isNotEmpty(releaseTime) ?
                    DateUtils.parseDate(releaseTime, "发表于 yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss").getTime()
                    : crawlerRequestRecord.getReleaseTime();
            crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), articleKey))
                    // Use the statically imported constant instead of valueOf("article").
                    .resultLabelTag(article)
                    .url(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .addContentKV(Field_Content, contents.toString())
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Author, author)
                    .build();
        } catch (ParseException e) {
            log.error("unparseable release time [" + releaseTime + "] for " + itemUrl, e);
        }
        return crawlerData;
    }

    /**
     * Washes the thread's view/reply counters into an interaction record.
     * Returns null when the release time is present but cannot be parsed.
     */
    public CrawlerData washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        // The tid query parameter (second parameter) identifies the thread.
        String articleKey = itemUrl.split("&")[1].split("=")[1];

        String views = httpPage.getHtml().xpath("//span[text()='查看:']/following-sibling::span[1]/text()").get();
        views = StringUtils.isNotBlank(views) ? views : "0";
        String comments = httpPage.getHtml().xpath("//span[text()='回复:']/following-sibling::span[1]/text()").get();
        comments = StringUtils.isNotBlank(comments) ? comments : "0";
        String releaseTime = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"authi\"]/em/text()").get();
        // When the visible text is only the "发表于 " prefix, the timestamp is in the span title.
        releaseTime = "发表于 ".equals(releaseTime) ? httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')][1]//div[@class=\"authi\"]/em/span/@title").get() : releaseTime;

        try {
            long releaseTimeToLong = StringUtils.isNotEmpty(releaseTime) ?
                    DateUtils.parseDate(releaseTime, "发表于 yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss").getTime()
                    : crawlerRequestRecord.getReleaseTime();
            return CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), articleKey))
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), interaction.enumVal(), articleKey))
                    // Use the statically imported constant instead of valueOf("interaction").
                    .resultLabelTag(interaction)
                    .url(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .addContentKV(Field_I_Comments, comments)
                    .addContentKV(Field_I_Views, views)
                    .build();
        } catch (ParseException e) {
            log.error("unparseable release time [" + releaseTime + "] for " + itemUrl, e);
        }
        return null;
    }

    /**
     * Washes the comment floors on a comment page into comment records, newest first.
     * The thread starter's own floors (marked by the #fj label) are skipped.
     */
    public List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String requestUrl = httpPage.getRequest().getUrl();
        String articleUrl = (String) httpPage.getRequest().getExtras().get("articleUrl");
        String articleKey = articleUrl.split("&")[1].split("=")[1];

        List<Selectable> nodes = httpPage.getHtml().xpath("//div[@id=\"postlist\"]/div[contains(@id,'post_')]").nodes();
        // Iterate bottom-up so newer comments come first.
        for (int i = nodes.size() - 1; i >= 0; i--) {
            Selectable node = nodes.get(i);
            String isCrawler = node.xpath(".//td[@class=\"plc\"]//div[@class=\"pi\"]/div[@id=\"fj\"]/label").get();
            if (StringUtils.isNotBlank(isCrawler)) {
                // This floor was posted by the thread starter — not a comment.
                continue;
            }

            String author = node.xpath(".//a[@class=\"xw1\"]/text()").get();
            List<String> all = node.xpath(".//div[@class=\"pct\"]//td[contains(@id,'postmessage_')]//text()").all();
            StringBuilder contents = new StringBuilder();
            for (String commentText : all) {
                if (StringUtils.isBlank(commentText)) {
                    continue;
                }
                contents.append(commentText).append(" ");
            }
            String releaseTime = node.xpath(".//div[@class=\"authi\"]/em/text()").get();
            // FIX: resolve the fallback timestamp relative to THIS floor's node. The
            // previous code issued the relative xpath against the whole page, which
            // returned the first floor's timestamp for every comment.
            releaseTime = "发表于 ".equals(releaseTime) ? node.xpath(".//div[@class=\"authi\"]/em/span/@title").get() : releaseTime;

            if (StringUtils.isBlank(releaseTime)) {
                continue;
            }
            String floor = node.xpath(".//td[@class=\"plc\"]//div[@class=\"pi\"]/strong//em").get();
            String commentId = node.xpath("./@id").get();
            try {
                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), article.enumVal(), articleKey))
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), comment.enumVal(), commentId))
                        .resultLabelTag(comment)
                        .releaseTime(DateUtils.parseDate(releaseTime.trim(), "发表于 yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss").getTime())
                        .url(requestUrl)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Content, contents.toString())
                        .addContentKV(Field_Floor, floor)
                        .build();
                crawlerDataList.add(crawlerData);
            } catch (Exception e) {
                // Keep the stack trace and the comment id for diagnosis.
                log.error("failed to wash comment [" + commentId + "] on " + requestUrl, e);
            }
        }
        return crawlerDataList;
    }

    /**
     * Post-execution hook — intentionally a no-op for this script.
     */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }

    /**
     * Re-queues a failed request, tracking the attempt count in the
     * {@code REQUEST_AGAIN_TAG} biz tag and giving up after 5 retries. The rebuilt
     * record keeps the original request type (turn-page vs item-page), HTTP method and
     * body (for POST), extras, headers, and wash/parse flags.
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                Map<String, Object> extras = crawlerRecord.getHttpRequest().getExtras();
                String keyword = "";
                if (extras != null && extras.size() > 0) {
                    keyword = (String) extras.get("keyword");
                }
                log.error(DOMAIN + " download page the number of retries exceeds the limit" +
                        ",request url {}，keyword [{}]", requestUrl, keyword);
                return;
            }
        }

        // Rebuild the record with the same request type as the original.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        CrawlerRequestRecord crawlerRequestRecord;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Preserve method and body for POST requests (constant-first equals is null-safe).
        if (HttpConstant.Method.POST.equals(crawlerRecord.getHttpRequest().getMethod())) {
            HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
            httpRequest.setMethod(crawlerRecord.getHttpRequest().getMethod());
            httpRequest.setRequestBody(crawlerRecord.getHttpRequest().getRequestBody());
        }

        // (The previous null-check here was dead code: both branches always assign,
        // and it ran only AFTER the record had already been dereferenced.)
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Parses the keyword list downloaded by the "keys" support source and creates one
     * POST search request per keyword, substituting the keyword into every "%s"
     * placeholder of the configured request-body template.
     */
    private void initKeyword(CrawlerRequestRecord requestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        String requestUrl = requestRecord.getHttpRequest().getUrl();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        Map<String, Object> httpBody = (Map<String, Object>) extras.get(HTTP_BODY);
        if (httpBody == null) {
            log.error("httpBody must not be null!");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(requestRecord)
                    .httpUrl(requestUrl)
                    .recordKey(requestUrl + keyword)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            turnRecord.tagsCreator().bizTags().addKeywords(keyword);
            HttpRequest httpRequest = turnRecord.getHttpRequest();
            httpRequest.setMethod(HttpConstant.Method.POST);
            // Copy the body template, replacing "%s" placeholder values with the keyword.
            // entrySet iteration avoids the extra lookup per key of keySet()+get().
            Map<String, Object> params = new HashMap<>(httpBody.size());
            for (Map.Entry<String, Object> entry : httpBody.entrySet()) {
                params.put(entry.getKey(), "%s".equals(entry.getValue()) ? keyword : entry.getValue());
            }
            httpRequest.setRequestBody(HttpRequestBody.form(params, "UTF-8"));
            httpRequest.setHeaders(requestRecord.getHttpRequest().getHeaders());
            httpRequest.addExtra("keyword", keyword);
            crawlerRecords.add(turnRecord);
        }
    }

    // Pool of desktop browser User-Agent strings used to randomize request headers.
    private static List<String> agentList = new ArrayList<>();

    static {
        String[] userAgents = {
                "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36",
                "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
                "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36",
                "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko",
                "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko",
                "Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)",
                "Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)",
                "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2",
                "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3",
                "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16",
                "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14",
                "Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14",
                "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14",
                "Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02",
                "Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00",
                "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00",
                "Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00",
                "Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00",
                "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1",
                "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0",
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0",
                "Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0",
                "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0",
                "Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0",
                "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36",
                "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400"
        };
        for (String ua : userAgents) {
            agentList.add(ua);
        }
    }

    /**
     * Picks a random User-Agent from the pool.
     * RandomUtils.nextInt's upper bound is EXCLUSIVE, so the bound must be the full
     * list size — the previous {@code size() - 1} bound could never select the last entry.
     */
    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Ad-hoc manual check: prints whether a sample comment-page URL matches COMMENT_URL.
     */
    public static void main(String[] args) {
        String sampleUrl = "http://bbs.cnhubei.com/forum.php?mod=viewthread&tid=4702551&extra=&highlight=%E4%B8%A4%E4%BC%9A&page=5#comment";
        System.out.println(sampleUrl.matches(COMMENT_URL));
    }
}
