package com.chance.cc.crawler.development.scripts.toutiao;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.Header;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.DigestUtils;

import java.io.*;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Field_Author_Follows;
import static org.apache.commons.lang3.time.DateUtils.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-04-07 11:08:09
 * @email okprog@sina.com
 */
public class TTSearchKwCrawlerScript extends CrawlerCommonScript {

    private static Logger logger = LoggerFactory.getLogger(TTSearchKwCrawlerScript.class);

    // Shared rotating-proxy endpoint, applied to article/comment requests in prepareRequest.
    // NOTE(review): credentials are hard-coded in source — move to configuration/secret storage.
    private static Proxy proxy = new Proxy();
    static {
        // Proxy configuration (abuyun dynamic HTTP proxy)
        //HL89Q19E86E2987D
        //71F33D94CE5F7BF2
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    // Crawler identity: site domain and sub-site used for tagging/routing.
    public static final String domain = "toutiao";
    private static final String scriptSite = "searchkw";
    // Marker tag attached to records that are re-queued for another attempt.
    private static final String REQUEST_AGAIN_TAG = domain + "_request_again";
    // Lazily captured from the first CrawlerRecordContext (see beforeDownload);
    // used for out-of-band requests (ttwid register, signature service).
    private Downloader httpDownload;
    private static final Object httpDownloadObj = new Object();
    private static HttpConfig httpConfig = HttpConfig.me("ttwid");
    // Pool of user-agent strings loaded from a file path given in request extras ("ua_path").
    public static LinkedBlockingQueue<String> uaList= new LinkedBlockingQueue<>();
    // Fallback UA when the pool is empty.
    private static final String DEFAULT_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36";

    private static final String IS_FILTER_ARTICLE = "is_filter_article";

    // URL-recognition regexes: each request URL is matched against these to decide
    // which parse/prepare branch handles it.
    public static final String indexRegex = "https://www\\.toutiao\\.com/";
    public static final String keysRegex = "https?://\\S*v1/meta/" + domain + "/keys\\S*";
    public static final String flushListUrlRegex = "https://www\\.toutiao\\.com/api/pc/list/feed\\S*";

    public static final String searchKwListUrlRegex = "https://so.toutiao.com/search\\S*";
    public static final String articleUrlRegex = "https://m\\.toutiaocdn\\.com/i\\d*/info/";
    public static final String commentUrlRegex = "https://www\\.toutiao\\.com/article/v2/tab_comments/\\S*";
    public static final String repliesUrlRegex = "https://www\\.toutiao\\.com/2/comment/v2/reply_list/\\S*";

    // URL templates used when constructing follow-up requests (String.format).
    public static final String searchKwListUrlFormat = "https://so.toutiao.com/search?keyword=%s&pd=information&source=search_subtab_switch&dvpf=pc&aid=4916&page_num=0";
    public static final String flushListUrlFormatStart = "https://www.toutiao.com/api/pc/list/feed?channel_id=%s&min_behot_time=0&refresh_count=1&category=pc_profile_channel";
    public static final String flushListUrlFormat="https://www.toutiao.com/api/pc/list/feed?channel_id=%s&max_behot_time=%s&category=pc_profile_channel";
    public static final String articleUrlFormat = "https://m.toutiaocdn.com/i%s/info/";
    public static final String articlePageUrlFormat = "https://www.toutiao.com/a%s/";
    public static final String commentUrlFormat = "https://www.toutiao.com/article/v2/tab_comments/?aid=24&app_name=toutiao_web&offset=%s&count=20&group_id=%s&item_id=%s";
    public static final String repliesUrlFormat = "https://www.toutiao.com/2/comment/v2/reply_list/?aid=24&app_name=toutiao_web&id=%s&offset=%s&count=20&repost=0";
    public static final String userAgentUrl = "http://fake-useragent.herokuapp.com/browsers/0.1.11";

    /**
     * Pre-processes a request before download.
     * <p>
     * From the support records it expands keys-meta responses into one
     * search-list record per keyword. For the main request it then, depending
     * on URL shape: loads the UA pool (index page), attaches a fresh ttwid
     * cookie (search list, re-queuing a retry record on failure), appends a
     * computed "_signature" (feed list), or routes through the rotating proxy
     * (everything else).
     *
     * @param requestRecord        the request about to be downloaded (mutated in place)
     * @param supportSourceRecords auxiliary records (e.g. keys-meta pages)
     * @return newly generated records to enqueue
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
            String keywordUrl = internalDownloadPage.getRequest().getUrl();
            // Build one search-list record per keyword returned by the keys meta service.
            if (keywordUrl.matches(keysRegex)) {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(internalDownloadPage.getRawText());
                    if (jsonObject.getIntValue("status") == 0) {
                        JSONArray objects = jsonObject.getJSONArray("content");
                        for (Object object : objects) {
                            String keyword = ((JSONObject) object).getString("keyword");
                            String listUrl = String.format(searchKwListUrlFormat, URLEncoder.encode(keyword, "utf-8"));
                            CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                                    .turnPageRequest(requestRecord)
                                    .httpUrl(listUrl)
                                    .recordKey(listUrl)
                                    .releaseTime(System.currentTimeMillis())
                                    .httpHeads(requestRecord.getHttpRequest().getHeaders())
                                    .notFilterRecord()
                                    .copyBizTags()
                                    .build();
                            listRecord.tagsCreator().bizTags().addKeywords(keyword);
                            allItemRecords.add(listRecord);
                        }
                    }
                } catch (Exception e) {
                    // Keep the full stack trace so malformed keys payloads can be diagnosed.
                    logger.error("from keywords init urls failed", e);
                }
            }
        }

        String requestUrl = requestRecord.getHttpRequest().getUrl();
        try{
            // Obtain the ttwid value / signature as required by the target URL.
            if(requestUrl.matches(indexRegex)){
                // Index page: the UA pool file path must be supplied via extras.
                String uaPath = (String)requestRecord.getHttpRequest().getExtras().get("ua_path");
                if(StringUtils.isBlank(uaPath)){
                    logger.error("uaPath can not null!");
                    requestRecord.setNeedParsedPage(false);
                    return allItemRecords;
                }
                getUaList(uaPath);
            } else if(requestUrl.matches(searchKwListUrlRegex)){
                Map<String, String> ttwid = getTTWID();
                if(ttwid == null){
                    // Registration failed: re-queue a retry record and skip this page.
                    CrawlerRequestRecord searchKwRecord = getSearchKwRecord(requestRecord);
                    allItemRecords.add(searchKwRecord);
                    requestRecord.setNeedParsedPage(false);
                    requestRecord.setNeedWashPage(false);
                }else{
                    HttpRequest httpRequest = requestRecord.getHttpRequest();
                    httpRequest.addHeader("cookie",ttwid.get("cookie"));
                }
            } else if(requestUrl.matches(flushListUrlRegex)){
                // Feed list requires a "_signature" computed by the internal signature service;
                // the UA that produced the signature must be sent with the request.
                Map<String, String> signature = getSignature(requestUrl);
                if(signature.size() > 0){
                    requestRecord.getHttpRequest().setUrl(requestUrl + "&_signature="+signature.get("sign"));
                    requestRecord.getHttpRequest().addHeader("User-Agent",signature.get("ua"));
                }else{
                    logger.error("[{}] get signature is null !",requestUrl);
                }
            }else{
                // Article/comment/reply requests go through the rotating proxy.
                HttpConfig httpConfig = requestRecord.getHttpConfig();
                httpConfig.setProxy(proxy);
                httpConfig.setUseProxy(true);
            }
        }catch (Exception e){
            // Original logged only the message; keep the cause for diagnosis.
            logger.error(e.getMessage(), e);
        }
        return allItemRecords;
    }

    /**
     * Dispatches a downloaded page to the URL-specific link parser.
     * <p>
     * 404 pages are dropped. Pages failing {@code doHttpPageCheck} are
     * re-queued (with any "_signature" suffix stripped so a fresh one is
     * computed). Otherwise the URL is matched against the known regexes and
     * handed to the corresponding parse method.
     *
     * @return the follow-up request records; empty when nothing applies
     *         (the original returned null for unmatched URLs — an empty list
     *         is safer for callers iterating the result)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        if (404 == httpPage.getStatusCode()){
            logger.info("status code is 404");
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        if (doHttpPageCheck(crawlerRequestRecord,httpPage)){
            // Page check failed: re-queue the same request for another attempt.
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            // Strip the stale signature so prepareRequest computes a fresh one.
            if(lastRequest.getUrl().contains("&_signature")){
                crawlerRequestRecord.getHttpRequest().setUrl(lastRequest.getUrl().split("&_signature")[0]);
            }
            parsedLinks.add(crawlerRequestRecord);
            return parsedLinks;
        }
        String lastRequestUrl = lastRequest.getUrl();
        if (lastRequestUrl.matches(indexRegex)){
            return parseIndexLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(flushListUrlRegex)){
            return parseFlushListLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(searchKwListUrlRegex)){
            return parseListLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(articleUrlRegex)){
            return parseArticleLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(commentUrlRegex)) {
            return parseCommentLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        if (lastRequestUrl.matches(repliesUrlRegex)){
            return parseReplyLinks(crawlerRequestRecord, httpPage, parsedLinks);
        }
        // Unrecognized URL: return the empty list rather than null so callers
        // never need a null check on the result.
        return parsedLinks;
    }

    /**
     * Lazily captures the framework's page downloader for out-of-band requests
     * (ttwid registration, signature service).
     * <p>
     * The original used double-checked locking on the non-volatile
     * {@code httpDownload} field, which is broken under the Java Memory Model
     * (a racing thread may observe a partially published reference). We always
     * synchronize instead; after initialization the lock is uncontended and
     * effectively free.
     */
    @Override
    public void beforeDownload(CrawlerRecordContext context) {
        synchronized (httpDownloadObj) {
            if (httpDownload == null) {
                httpDownload = context.getPageDownloader();
            }
        }
        super.beforeDownload(context);
    }

    /**
     * POSTs the ttwid union-register payload to ByteDance and returns the raw
     * response page; the ttwid cookie is expected among its Set-Cookie headers.
     */
    private HttpPage downloadRegister(){
        HttpRequest registerRequest = new HttpRequest();
        registerRequest.setUrl("https://ttwid.bytedance.com/ttwid/union/register/");
        registerRequest.setMethod(HttpConstant.Method.POST);
        registerRequest.addHeader("User-Agent",getRandomUA());
        registerRequest.addHeader("content-type","application/json");
        registerRequest.addHeader("origin","https://so.toutiao.com");
        registerRequest.addHeader("referer","https://so.toutiao.com/");
        // JSON body expected by the union-register endpoint.
        Map<String,Object> payload = new HashMap<>();
        payload.put("aid",4916);
        payload.put("service","so.toutiao.com");
        payload.put("unionHost","https://ttwid.bytedance.com");
        payload.put("union",true);
        payload.put("needFid",false);
        registerRequest.setRequestBody(HttpRequestBody.json(JSONObject.toJSONString(payload),"UTF-8"));
        return httpDownload.download(registerRequest, httpConfig);
    }

    /**
     * Registers with the ttwid union endpoint and extracts the ttwid cookie.
     *
     * @return map with "cookie" (the bare "ttwid=..." pair) and "userAgent"
     *         keys, or {@code null} on failure — callers treat null as "retry"
     */
    private Map<String,String> getTTWID(){
        Map<String,String> map = new HashMap<>();
        HttpPage httpPage = downloadRegister();
        // Check whether registration succeeded before scanning headers.
        String rawText = httpPage.getRawText();
        if(StringUtils.isNotBlank(rawText)){
            String message = JSONObject.parseObject(rawText).getString("message");
            // Constant-first equals: "message" may be absent (null) in error
            // payloads — the original form NPE'd in that case.
            if("union register success".equals(message)){
                Header[] responseHeaders = httpPage.getResponseHeaders();
                String cookie = "";
                for (Header responseHeader : responseHeaders) {
                    String name = responseHeader.getName();
                    String value = responseHeader.getValue();
                    if(name.equals("Set-Cookie") && value.contains("ttwid")){
                        cookie = value;
                        break;
                    }
                }
                if(StringUtils.isNotBlank(cookie)){
                    // Keep only the name=value pair, dropping cookie attributes.
                    map.put("cookie", cookie.split(";")[0]);
                    map.put("userAgent",httpPage.getRequest().getHeaders().get("User-Agent"));
                    return map;
                }
            }
        }
        return null;
    }

    /**
     * Requests a "_signature" for the given feed URL from the internal
     * signature service, paired with the UA used to generate it.
     *
     * @param url the feed URL to sign
     * @return map with "sign" and "ua" keys; empty map when all attempts fail
     */
    private Map<String,String> getSignature(String url){
        // Bounded retry. The original retried via a recursive call whose return
        // value was DISCARDED (so retries could never produce a result) and
        // which had no depth limit.
        return getSignature(url, 3);
    }

    /** Worker with an explicit remaining-attempts budget. */
    private Map<String,String> getSignature(String url, int attemptsLeft){
        Map<String,String> result = new HashMap<>();
        try{
            String requestUrl = "http://192.168.1.210:8899/encrypt/toutiao/signature";
            String ua = getOneUa();
            if(StringUtils.isBlank(ua)){
                ua = DEFAULT_USER_AGENT;
            }
            Map<String,Object> body = new HashMap<>();
            body.put("url",url);
            body.put("ua",ua);
            HttpRequest httpRequest = new HttpRequest();
            httpRequest.setUrl(requestUrl);
            httpRequest.setMethod(HttpConstant.Method.POST);
            httpRequest.setRequestBody(HttpRequestBody.form(body,"UTF-8"));
            HttpPage download = httpDownload.download(httpRequest, httpConfig);
            if(download != null){
                result.put("sign",download.getRawText());
                result.put("ua",ua);
                return result;
            }else if(attemptsLeft > 0){
                return getSignature(url, attemptsLeft - 1);
            }
        }catch (Exception e){
            // Keep the stack trace (original logged only the message).
            logger.error("get signature error! [{}]", url, e);
        }
        return result;
    }

    /**
     * Builds a retry record for a failed search-keyword list request: same URL
     * and headers, but the record key is salted with the current time so the
     * retry is not deduplicated against the failed attempt.
     */
    private CrawlerRequestRecord getSearchKwRecord(CrawlerRequestRecord crawlerRequestRecord){
        HttpRequest originalRequest = crawlerRequestRecord.getHttpRequest();
        String url = originalRequest.getUrl();
        return CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(url)
                .recordKey(url + System.currentTimeMillis())
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .httpHeads(originalRequest.getHeaders())
                .notFilterRecord()
                .copyBizTags()
                .build();
    }

    /**
     * Parses a feed ("flush list") JSON page: on success schedules the next
     * page (keyed by the last item's behot_time) and one article request per
     * non-ad item; on failure re-queues the request after a cooldown.
     */
    private List<CrawlerRequestRecord> parseFlushListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
        String message = pageObj.getString("message");
        JSONArray dataNodes = pageObj.getJSONArray("data");
        // Guard dataNodes == null: error payloads may omit "data" entirely
        // (the original NPE'd on dataNodes.size() in that case).
        if(!"success".equals(message) || dataNodes == null || dataNodes.size() < 1){
            logger.error("ua is [{}]",httpPage.getRequest().getHeaders().get("User-Agent") );
            // Cooldown before retrying a likely rate-limited/blocked request.
            try{
                Thread.sleep(10000L);
            }catch (InterruptedException e){
                Thread.currentThread().interrupt(); // preserve interrupt status
                logger.error(e.getMessage(), e);
            }
            // Strip the stale signature so prepareRequest computes a fresh one.
            if(httpRequestUrl.contains("&_signature")){
                crawlerRequestRecord.getHttpRequest().setUrl(httpRequestUrl.split("&_signature")[0]);
            }
            requestAgainCrawlerRecord(parsedLinks,crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        boolean hasMore = pageObj.getBooleanValue("has_more");
        if (null != urlParams && hasMore && dataNodes.size() > 0){
            String channel_id = (String) urlParams.get("channel_id");
            // Next page is keyed by the oldest item's behot_time.
            String maxBeHotTime = dataNodes.getJSONObject(dataNodes.size() - 1).getString("behot_time");
            String nextUrl = String.format(flushListUrlFormat,channel_id,maxBeHotTime);
            CrawlerRequestRecord nextRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .recordKey(nextUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            parsedLinks.add(nextRecord);
        }
        for (Object dataNode : dataNodes) {
            JSONObject dataObj = (JSONObject)dataNode;
            String articleKey = dataObj.getString("item_id");
            String beHotTime = dataObj.getString("behot_time");
            String label = dataObj.getString("label");
            // Skip ads and items missing identity/timestamp.
            if ("广告".equals(label) || StringUtils.isBlank(articleKey) || StringUtils.isBlank(beHotTime)){
                continue;
            }
            // behot_time is in seconds; appending "000" converts to millis.
            long releaseTime = Long.parseLong(beHotTime + "000");
            String articleUrl = String.format(articleUrlFormat,articleKey);
            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(articleUrl)
                    .recordKey(articleUrl)
                    .releaseTime(releaseTime)
                    .copyBizTags()
                    .resultLabelTag(article)
                    .resultLabelTag(interaction)
                    .needParsed(true)
                    .build();
            parsedLinks.add(itemRecord);
        }
        return parsedLinks;
    }

    /**
     * Expands the index-page request into one feed-list start request per
     * configured channel, using the "keysMap" extras (keyword -> channel key).
     * Each new record carries its keyword in the Tag_Site_Info biz tag.
     */
    private List<CrawlerRequestRecord> parseIndexLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        Map<String, Object> extras = crawlerRequestRecord.getHttpRequest().getExtras();
        Map<String, String> keysMap = castMap(extras.get("keysMap"), String.class);
        for (Map.Entry<String, String> keyEntry : keysMap.entrySet()) {
            String keywordName = keyEntry.getKey();
            String channelKey = keyEntry.getValue();
            String feedUrl = String.format(flushListUrlFormatStart, channelKey);
            CrawlerRequestRecord feedRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(feedUrl)
                    .recordKey(feedUrl)
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .build();
            feedRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info, keywordName);
            parsedLinks.add(feedRecord);
        }
        return parsedLinks;
    }

    /**
     * Parses a comment reply-list JSON page; when "has_more", schedules the
     * next reply page with the offset advanced by the page size.
     */
    private List<CrawlerRequestRecord> parseReplyLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        if (null != urlParams){
            JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText()).getJSONObject("data");
            // Guard: "data" can be absent in error payloads — the original NPE'd here.
            if (pageObj != null && pageObj.getBooleanValue("has_more")){
                String commentId = (String) urlParams.get("id");
                int offset = Integer.parseInt((String) urlParams.get("offset"));
                int count = Integer.parseInt((String) urlParams.get("count"));
                offset += count;
                String replyUrl = String.format(repliesUrlFormat,commentId,offset);
                CrawlerRequestRecord replyRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(replyUrl)
                        .recordKey(replyUrl)
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                replyRecord.getHttpRequest().setExtras(copyExtras(httpRequest.getExtras()));
                replyRecord.getHttpRequest().addHeader("Accept-Encoding","gzip");
                replyRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                replyRecord.getHttpRequest().addHeader("Connection","Keep-Alive");
                parsedLinks.add(replyRecord);
            }
        }
        return parsedLinks;
    }

    /**
     * Parses a comment-list JSON page: schedules the next comment page when
     * "has_more", and a first reply-list request for every comment that has
     * replies.
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        if (null != urlParams){
            String articleKey = (String) urlParams.get("group_id");
            JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
            boolean hasMore = pageObj.getBooleanValue("has_more");
            if (hasMore){
                int offset = Integer.parseInt((String) urlParams.get("offset"));
                int count = Integer.parseInt((String) urlParams.get("count"));
                offset += count;
                String commentUrl = String.format(commentUrlFormat,offset,articleKey,articleKey);
                CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(commentUrl)
                        .recordKey(commentUrl)
                        .releaseTime(System.currentTimeMillis())
                        .needWashed(true)
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                commentRecord.getHttpRequest().setExtras(copyExtras(httpRequest.getExtras()));
                commentRecord.getHttpRequest().addHeader("Accept-Encoding","gzip");
                commentRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                commentRecord.getHttpRequest().addHeader("Connection","Keep-Alive");
                parsedLinks.add(commentRecord);
            }
            JSONArray comments = pageObj.getJSONArray("data");
            // Guard: "data" can be absent in error payloads — the original NPE'd here.
            if (comments == null){
                return parsedLinks;
            }
            for (Object comment : comments) {
                JSONObject cmtObj = ((JSONObject)comment).getJSONObject("comment");
                String idStr = cmtObj.getString("id_str");
                String createTime = cmtObj.getString("create_time");
                // create_time is in seconds; appending "000" converts to millis.
                long releaseTime = Long.parseLong(createTime + "000");
                int replyCount = cmtObj.getIntValue("reply_count");
                if (replyCount > 0){
                    String replyUrl = String.format(repliesUrlFormat,idStr,0);
                    CrawlerRequestRecord replyRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRequestRecord)
                            .httpUrl(replyUrl)
                            .recordKey(replyUrl)
                            .releaseTime(releaseTime)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .resultLabelTag(interaction)
                            .copyBizTags()
                            .notFilterRecord()
                            .build();
                    replyRecord.getHttpRequest().setExtras(copyExtras(httpRequest.getExtras()));
                    replyRecord.getHttpRequest().addHeader("Accept-Encoding","gzip");
                    replyRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                    replyRecord.getHttpRequest().addHeader("Connection","Keep-Alive");
                    parsedLinks.add(replyRecord);
                }
            }

        }
        return parsedLinks;
    }


    /**
     * Parses an article info JSON page: records the article key/URL into the
     * request extras for the wash phase, and — when the schedule asks for
     * comments — schedules the first comment-list request.
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        crawlerRequestRecord.setNeedWashPage(true);
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        JSONObject pageObject = JSONObject.parseObject(httpPage.getRawText()).getJSONObject("data");
        String articleKey = pageObject.getString("gid");
        String articleUrl = String.format(articlePageUrlFormat,articleKey);
        int comments = pageObject.getIntValue("comment_count");
        // Micro-post ("thread") payloads nest the counts under thread.thread_base.action.
        if (null != pageObject.getJSONObject("thread")){
            pageObject = JSONObject.parseObject(httpPage.getRawText()).getJSONObject("data").getJSONObject("thread").getJSONObject("thread_base");
            comments = pageObject.getJSONObject("action").getIntValue("comment_count");
        }
        // Pass the article identity on to washPage via extras.
        httpRequest.addExtra("articleKey",articleKey);
        httpRequest.addExtra("articleUrl",articleUrl);
        // Decide whether comments should be collected for this article.
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
        if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null) {
            // Comment collection requires filter info prepared upstream.
            if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                logger.error("toutiao crawler comment need to filter information!");
                return parsedLinks;
            }
            crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(comment);
            crawlerRequestRecord.tagsCreator().resultTags().addResultDataType(interaction);
            KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
            CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
            // First comment page (offset 0) — only when the article has comments.
            if (comments > 0){
                String commentUrl = String.format(commentUrlFormat,0,articleKey,articleKey); //使用时间戳模拟device_id
                CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(commentUrl)
                        .recordKey(commentUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .needWashed(true)
                        .resultLabelTag(comment)
                        .resultLabelTag(interaction)
                        .copyBizTags()
                        .build();
                // Carry the article's filter settings over to the comment record.
                commentRecord.setTurnPageFilterInfo(null);
                commentRecord.getHttpRequest().setExtras(copyExtras(httpRequest.getExtras()));
                commentRecord.setFilter(filterInfoRecord.getFilter());
                commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                commentRecord.getHttpRequest().addHeader("Accept-Encoding","gzip");
                commentRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());
                commentRecord.getHttpRequest().addHeader("Connection","Keep-Alive");
                parsedLinks.add(commentRecord);
            }
        }


        return parsedLinks;
    }

    /**
     * Parses a search-keyword HTML result page: schedules the next result page
     * (when a "下一页" link is present) and one article request per result card.
     * A page carrying the anti-crawl interstitial text is logged; an empty page
     * with no pagination is treated as a soft block and re-queued.
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        // Result cards carry their metadata in a data-log-extra attribute.
        // Evaluated once here (the original ran the same xpath twice).
        List<Selectable> nodes = httpPage.getHtml().xpath("//div[@data-log-extra]").nodes();
        List<String> all = httpPage.getHtml().xpath("//span[@class=\"text-ellipsis text-underline-hover\"]//text()").all();
        if(all != null && all.size() > 0) {
            // Interstitial/anti-crawl message present; log it for diagnosis.
            logger.error("{} url is [{}]", StringUtils.join(all), httpRequestUrl);
        }else{
            String nextPage = httpPage.getHtml().xpath("//span[text()='下一页']").get();
            if(StringUtils.isBlank(nextPage) && nodes.size() < 1){
                // Empty page with no pagination: re-queue for another attempt.
                requestAgainCrawlerRecord(parsedLinks,crawlerRequestRecord);
                // NOTE(review): the original called setNeedWashPage(false) twice in a
                // row here; one call was likely meant to be setNeedParsedPage(false)
                // (cf. the failure path in prepareRequest) — confirm intent.
                crawlerRequestRecord.setNeedWashPage(false);
                return parsedLinks;
            }

            if(StringUtils.isBlank(nextPage)){
                logger.error("url [{}] has not next url!",httpRequestUrl);
            }else{
                String nextUrl = getNextUrl(httpRequestUrl, "keyword", "page_num");
                CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRequestRecord)
                        .httpUrl(nextUrl)
                        .recordKey(nextUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                parsedLinks.add(listRecord);
            }
        }

        for (Selectable node : nodes) {
            String extra = node.xpath("./@data-log-extra").get();
            if(StringUtils.isBlank(extra)){
                continue;
            }
            String releaseTime = node.xpath(".//div[@class=\"cs-view margin-top-3 cs-view-block cs-source\"]//div/span[@class=\"text-ellipsis\"]").get();
            if(StringUtils.isBlank(releaseTime)){
                continue;
            }

            try{
                // The attribute value is HTML-escaped JSON.
                JSONObject jsonObject = JSONObject.parseObject(StringEscapeUtils.unescapeHtml(extra));
                String id = jsonObject.getString("search_result_id");
                String articleUrl = String.format(articleUrlFormat,id);
                Long releaseTimeToLong = washTime(releaseTime);
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(articleUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .resultLabelTag(article)
                        .resultLabelTag(interaction)
                        .build();
                parsedLinks.add(itemRecord);
            }catch (Exception e){
                // Keep the stack trace (original logged only the message).
                logger.error(e.getMessage(), e);
            }
        }
        return parsedLinks;
    }

    /**
     * Washes a downloaded page into {@link CrawlerData} results. Depending on the
     * record's result tags this produces:
     * <ul>
     *   <li>article data (title / author / content / images / follower count),
     *       plus an interaction record (comments / likes) when requested;</li>
     *   <li>comment data parsed from the comment-list or reply-list JSON APIs,
     *       each optionally paired with its own interaction record.</li>
     * </ul>
     *
     * @param crawlerRequestRecord the record that produced this page
     * @param httpPage             downloaded page (JSON body expected)
     * @return washed data items; empty list when the page body is blank
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> extras = httpRequest.getExtras();
        String articleKey = (String) extras.get("articleKey");
        String articleUrl = (String) extras.get("articleUrl");
        String rawText = httpPage.getRawText();
        if (StringUtils.isBlank(rawText)){
            logger.error("httpPage is empty !");
            return crawlerDataList;
        }
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)){
            JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
            JSONObject dataObj = pageObj.getJSONObject("data");
            String title = dataObj.getString("title");
            if (StringUtils.isBlank(title)){
                // No top-level title: fall back to the nested thread.thread_base object.
                dataObj = pageObj.getJSONObject("data").getJSONObject("thread").getJSONObject("thread_base");
                title = dataObj.getString("title");
            }

            String contentStr = dataObj.getString("content");
            StringBuffer sbContent = new StringBuffer();
            StringBuffer sbImage = new StringBuffer();
            if (StringUtils.isNotBlank(contentStr)) {
                // Content is an HTML fragment: collect paragraph text and image URLs.
                Html html = new Html(contentStr);
                List<String> contents = html.xpath("//p//text()").all();
                List<String> images = html.xpath("//img/@src").all();
                for (String content : contents) {
                    sbContent.append(content);
                }
                for (String image : images) {
                    // "//x01" separates image URLs — presumably split downstream; confirm.
                    sbImage.append(image).append("//x01");
                }
            }
            String author = null;
            String authorId = null;
            String follows = null;
            String comments = null;
            String likes = null;
            try {
                // Each field has several possible locations depending on payload variant.
                author = dataObj.getString("detail_source");
                if (StringUtils.isBlank(author)){
                    author = dataObj.getString("source");
                    if (StringUtils.isBlank(author)){
                        author = dataObj.getJSONObject("user").getJSONObject("info").getString("name");
                    }
                }
                authorId = dataObj.getString("media_id");
                if (StringUtils.isBlank(authorId)){
                    authorId = dataObj.getJSONObject("user").getJSONObject("info").getString("user_id");
                }
                follows = dataObj.getString("follower_count");
                if (StringUtils.isBlank(follows)){
                    follows = "0";
                }
                comments = dataObj.getString("comment_count");
                if (StringUtils.isBlank(comments)){
                    comments = dataObj.getJSONObject("action").getString("comment_count");
                }
                likes = dataObj.getString("digg_count");
                if (StringUtils.isBlank(likes)){
                    likes = dataObj.getJSONObject("action").getString("digg_count");
                }
            } catch (Exception e) {
                // Best-effort extraction: any missing intermediate object resets all fields.
                author = "";
                authorId = "";
                follows = "";
                comments = "";
                likes = "";
            }
            String publishTime = dataObj.getString("publish_time");
            if (StringUtils.isBlank(publishTime)){
                publishTime = dataObj.getString("create_time");
            }
            long releaseTime = secondsToMillis(publishTime);
            CrawlerData crawlerAData = CrawlerData.builder()
                    .data(crawlerRequestRecord,httpPage)
                    .url(articleUrl)
                    .dataId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Title,title)
                    .addContentKV(Field_Author,author)
                    .addContentKV(Field_Author_Id,authorId)
                    .addContentKV(Field_Content,sbContent.toString())
                    .addContentKV(Field_Images,sbImage.toString())
                    .addContentKV(Field_Author_Follows,follows)
                    .resultLabelTag(article)
                    .build();
            if (crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag().isContainKVTag(IS_FILTER_ARTICLE)){
                crawlerAData.tagsCreator().requestTags().addRequestType(CrawlerEnum.CrawlerRequestType.filter);
                crawlerAData.tagsCreator().requestTags().addRequestType(CrawlerEnum.CrawlerRequestType.result);
            }
            crawlerDataList.add(crawlerAData);
            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                // Interaction record is parented to the article record via parentId.
                CrawlerData crawlerInteractionData = CrawlerData.builder()
                        .data(crawlerRequestRecord,httpPage)
                        .url(articleUrl)
                        .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),articleKey))
                        .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                        .releaseTime(releaseTime)
                        .addContentKV(Field_I_Comments,comments)
                        .addContentKV(Field_I_Likes,likes)
                        .resultLabelTag(interaction)
                        .build();
                if (crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag().isContainKVTag(IS_FILTER_ARTICLE)){
                    crawlerInteractionData.tagsCreator().requestTags().addRequestType(CrawlerEnum.CrawlerRequestType.filter);
                    crawlerInteractionData.tagsCreator().requestTags().addRequestType(CrawlerEnum.CrawlerRequestType.result);
                }
                crawlerDataList.add(crawlerInteractionData);
            }

        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)){
            JSONObject pageObj = JSONObject.parseObject(httpPage.getRawText());
            if (httpRequestUrl.matches(commentUrlRegex)){
                // Top-level comment list: each entry wraps the comment under "comment".
                JSONArray comments = pageObj.getJSONArray("data");
                for (Object cmt : comments) {
                    JSONObject cmtObj = ((JSONObject) cmt).getJSONObject("comment");
                    String commentId = cmtObj.getString("id_str");
                    String content = cmtObj.getString("text");
                    String author = cmtObj.getString("user_name");
                    String authorId = cmtObj.getString("user_id");
                    String pubTime = cmtObj.getString("create_time");
                    String likes = cmtObj.getString("digg_count");
                    String replies = ((JSONObject) cmt).getString("reply_count");
                    long releaseTime = secondsToMillis(pubTime);
                    CrawlerData crawlerCData = CrawlerData.builder()
                            .data(crawlerRequestRecord,httpPage)
                            .url(articleUrl)
                            .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                            .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                            .releaseTime(releaseTime)
                            .addContentKV(Field_Author,author)
                            .addContentKV(Field_Author_Id,authorId)
                            .addContentKV(Field_Content,content)
                            .resultLabelTag(comment)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .build();
                    crawlerDataList.add(crawlerCData);
                    if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                        CrawlerData crawlerInteractionData = CrawlerData.builder()
                                .data(crawlerRequestRecord,httpPage)
                                .url(articleUrl)
                                .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),commentId))
                                .parentId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                                .releaseTime(releaseTime)
                                .addContentKV(Field_I_Comments,replies)
                                .addContentKV(Field_I_Likes,likes)
                                .resultLabelTag(interaction)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .build();
                        crawlerDataList.add(crawlerInteractionData);
                    }
                }
            }
            if (httpRequestUrl.matches(repliesUrlRegex)){
                // Reply list: entries are the comment objects themselves, nested under data.data.
                JSONArray replies = pageObj.getJSONObject("data").getJSONArray("data");
                for (Object rep : replies) {
                    JSONObject cmtObj = (JSONObject)rep;
                    String commentId = cmtObj.getString("id_str");
                    String content = cmtObj.getString("text");
                    String pubTime = cmtObj.getString("create_time");
                    long releaseTime = secondsToMillis(pubTime);
                    String likes = cmtObj.getString("digg_count");
                    String author = cmtObj.getJSONObject("user").getString("name");
                    String authorId = cmtObj.getJSONObject("user").getString("user_id");
                    CrawlerData crawlerCData = CrawlerData.builder()
                            .data(crawlerRequestRecord,httpPage)
                            .url(articleUrl)
                            .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                            .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),articleKey))
                            .releaseTime(releaseTime)
                            .addContentKV(Field_Author,author)
                            .addContentKV(Field_Author_Id,authorId)
                            .addContentKV(Field_Content,content)
                            .resultLabelTag(comment)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .build();
                    crawlerDataList.add(crawlerCData);
                    if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                        CrawlerData crawlerInteractionData = CrawlerData.builder()
                                .data(crawlerRequestRecord,httpPage)
                                .url(articleUrl)
                                .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),commentId))
                                .parentId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                                .releaseTime(releaseTime)
                                .addContentKV(Field_I_Likes,likes)
                                .resultLabelTag(interaction)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .build();
                        crawlerDataList.add(crawlerInteractionData);
                    }
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Converts a seconds-since-epoch string (as returned by the API) to epoch
     * milliseconds. Equivalent to the old {@code new Long(seconds + "000")}
     * but without the deprecated boxing constructor.
     */
    private static long secondsToMillis(String seconds) {
        return Long.parseLong(seconds) * 1000L;
    }

    /**
     * Registers every URL pattern this script is able to handle.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                indexRegex,
                keysRegex,
                searchKwListUrlRegex,
                articleUrlRegex,
                commentUrlRegex,
                repliesUrlRegex,
                flushListUrlRegex,
                userAgentUrl
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Decides whether this script is responsible for the given record by
     * comparing the record's "site" business tag with this script's site.
     *
     * @param crawlerRequestRecord record to check
     * @return {@code true} when the record's site matches {@code scriptSite}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // Null-safe comparison: a record without a "site" tag simply doesn't match.
        // The original crawlerSite.equalsIgnoreCase(...) threw NPE in that case.
        return StringUtils.equalsIgnoreCase(crawlerSite, scriptSite);
    }

    /**
     * Hook invoked after a record has been executed. Intentionally a no-op for
     * this script — no per-record cleanup or bookkeeping is required here.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * @return the crawler domain identifier for this script (the {@code domain} field)
     */
    @Override
    public String domain() {
        return domain;
    }

    /**
     * Verifies that the downloaded page is complete and usable.
     *
     * @param crawlerRequestRecord the record that produced the request
     * @param httpPage             the downloaded page
     * @return {@code true} when the page is broken (non-200 status, failed
     *         download, or empty body); {@code false} when the page looks fine
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        int code = httpPage.getStatusCode();
        boolean broken = false;
        if (code != 200) {
            logger.error("download page {} error, status code is {}", requestUrl, code);
            broken = true;
        } else if (!httpPage.isDownloadSuccess()) {
            logger.error("download page failed, check your link {}", requestUrl);
            broken = true;
        } else if (StringUtils.isBlank(httpPage.getRawText())) {
            logger.error("download page empty, check your link {}", requestUrl);
            broken = true;
        }
        return broken;
    }

    /**
     * Parses the query string of a URL into a key/value map.
     *
     * @param url e.g. http://*.*.com?aa=11&bb=22&cc=33
     * @return map of parameter name to raw (undecoded) value, or {@code null}
     *         when the URL carries no query string
     */
    private Map<String, Object> getUrlParams(String url) {
        Map<String, Object> map = new HashMap<>();
        // indexOf instead of split("\\?")[1]: the old code threw
        // ArrayIndexOutOfBoundsException on URLs ending with a bare '?'.
        String param = url;
        int q = url.indexOf('?');
        if (q >= 0) {
            param = url.substring(q + 1);
        }
        if (StringUtils.isBlank(param)) {
            // NOTE: callers rely on null (not an empty map) meaning "no query string".
            return null;
        }
        for (String s : param.split("&")) {
            // Split on the first '=' only so that values containing '='
            // (e.g. base64 padding) are no longer silently dropped.
            String[] p = s.split("=", 2);
            if (p.length == 2) {
                map.put(p[0], p[1]);
            }
        }
        return map;
    }

    /**
     * Returns a shallow copy of the given extras map.
     *
     * @param inExtras source map (not modified)
     * @return a new {@link HashMap} holding the same entries
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        // HashMap's copy constructor performs the same entry-by-entry shallow copy
        // the original loop did.
        return new HashMap<>(inExtras);
    }

    /**
     * Computes Toutiao's anti-crawler "as"/"cp" signature pair from the current
     * unix timestamp (upper-case hex) and the MD5 digest of the decimal
     * timestamp, mirroring the site's JavaScript algorithm.
     *
     * @return map with keys "as" and "cp"
     */
    public static Map<String,String> getAsCp(){
        // Fallback values used when the hex timestamp is not 8 characters long.
        String asCode = "479BB4B7254C150";
        String cpCode = "7E0AC8874BB0985";
        int t = (int) (new Date().getTime()/1000);
        String e = Integer.toHexString(t).toUpperCase();
        String i = DigestUtils.md5DigestAsHex(String.valueOf(t).getBytes()).toUpperCase();
        if (e.length()==8) {
            char[] n = i.substring(0,5).toCharArray();
            char[] a = i.substring(i.length()-5).toCharArray();
            StringBuilder s = new StringBuilder();
            StringBuilder r = new StringBuilder();
            for (int o = 0; o < 5; o++) {
                // Interleave digest chars with hex-timestamp chars. Bug fix: the
                // original used e.substring(0, 0+1), always repeating the first
                // hex char instead of walking it with the loop index.
                s.append(n[o]).append(e.substring(o, o+1));
                r.append(e.substring(o+3,o+4)).append(a[o]);
            }
            asCode = "A1" + s + e.substring(e.length()-3);
            cpCode = e.substring(0,3) + r + "E1";
        }
        Map<String,String> map = new HashMap<>();
        map.put("as",asCode);
        map.put("cp",cpCode);
        return map;
    }

    // Pool of mobile (iOS) User-Agent strings used to randomize request headers;
    // populated once at class-load time and read by getRandomUA().
    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) CriOS/72.0.3626.101 Mobile/15E148 Safari/605.1");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/15.0b13894 Mobile/16D57 Safari/605.1.15");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/8.1.1 Mobile/16D57 Safari/605.1.15");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16D57");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) OPiOS/16.0.14.122053 Mobile/16D57 Safari/9537.53");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) OPT/2 Mobile/16D57");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) OPiOS/12.0.5.3 Version/7.0 Mobile/16D57 Safari/9537.53");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 EdgiOS/42.10.3 Mobile/16D57 Safari/605.1.15");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16D57 unknown BingWeb/6.9.8.1");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 MQQBrowser/9.0.3 Mobile/16D57 Safari/604.1 MttCustomUA/2 QBWebViewType/1 WKType/1");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16D57 SearchCraft/3.4.1 (Baidu; P2 12.1.4)");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X; zh-CN) AppleWebKit/537.51.1 (KHTML, like Gecko) Mobile/16D57 UCBrowser/12.3.0.1138 Mobile AliApp(TUnionSDK/0.1.20.3)");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X; zh-cn) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/16D57 Quark/3.0.6.926 Mobile");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16D57 MicroMessenger/7.0.3(0x17000321) NetType/WIFI Language/zh_CN");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16A366 QQ/7.8.8.420 V1_IPH_SQ_7.8.8_1_APP_A Pixel/1125 Core/WKWebView Device/Apple(iPhone X) NetType/4G QBWebViewType/1 WKType/1");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12. Mobile/16D57 Safari/600.1.4 baidubrowser/4.14.1.11 (Baidu; P2 12.1.4)");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16D57 baiduboxapp/11.3.6.10 (Baidu; P2 12.1.4)");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/606.4.5 (KHTML, like Gecko) Mobile/16D57 QHBrowser/317 QihooBrowser/4.0.10");
        agentList.add("Mozilla/5.0 (iPhone; CPU iPhone OS 12_1_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/16D57 Mb2345Browser/5.2.1");
    }

    /**
     * Casts an untyped map-like object to a {@code Map<T,T>}, checking every
     * key and value against {@code clazz}.
     *
     * @param obj   candidate object, expected to be a Map
     * @param clazz runtime type of both keys and values
     * @param <T>   target element type
     * @return a new map with checked entries, or {@code null} when {@code obj}
     *         is not a Map
     * @throws ClassCastException if a key or value is not an instance of {@code clazz}
     */
    public static <T> Map<T,T> castMap(Object obj, Class<T> clazz){
        if (!(obj instanceof Map<?,?>)) {
            // Preserve the original contract: non-map input yields null.
            return null;
        }
        Map<T,T> result = new HashMap<>();
        // (The original also computed an unused keySet; removed.)
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) obj).entrySet()) {
            result.put(clazz.cast(entry.getKey()), clazz.cast(entry.getValue()));
        }
        return result;
    }

    /**
     * @return a User-Agent picked uniformly at random from {@code agentList}
     */
    private static String getRandomUA() {
        // RandomUtils.nextInt's upper bound is exclusive; the original
        // size() - 1 made the last agent in the list unreachable.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Repeatedly HTML-unescapes a string (at most 6 passes) so that doubly or
     * triply escaped entities such as {@code &amp;amp;lt;} are fully resolved.
     *
     * @param str possibly escaped text
     * @return fully unescaped text
     */
    public static String unescapeHtml2J(String str) {
        int times = 0;
        while (str.contains("&") && str.contains(";")) {
            String unescaped = StringEscapeUtils.unescapeHtml(str);
            if (unescaped.equals(str)) {
                // Nothing left to decode (e.g. a literal "a & b;") — stop
                // instead of burning the remaining passes on no-ops.
                break;
            }
            str = unescaped;
            times++;
            if (times > 5) {
                break;
            }
        }
        return str;
    }

    /**
     * Builds the next-page URL: bumps the parameter named {@code page} by one
     * and re-URL-encodes the parameter named {@code keyword}; every other query
     * parameter is passed through unchanged.
     *
     * @param requestUrl current request URL (must contain a query string)
     * @param keyword    name of the keyword parameter to re-encode (may be blank)
     * @param page       name of the page-number parameter to increment (may be blank)
     * @return the rebuilt URL
     */
    private String getNextUrl(String requestUrl, String keyword, String page) {
        String[] parts = requestUrl.split("\\?");
        List<String> pairs = new ArrayList<>();
        for (NameValuePair pair : URLEncodedUtils.parse(parts[1], Charset.defaultCharset())) {
            String name = pair.getName();
            String value = pair.getValue();
            if (StringUtils.isNotBlank(page) && page.equals(name)) {
                pairs.add(name + "=" + (Integer.parseInt(value) + 1));
            } else if (StringUtils.isNotBlank(keyword) && keyword.equals(name)) {
                try {
                    pairs.add(name + "=" + URLEncoder.encode(value, "UTF-8"));
                } catch (UnsupportedEncodingException e) {
                    logger.error(e.getMessage());
                }
            } else {
                pairs.add(name + "=" + value);
            }
        }
        if (pairs.isEmpty()) {
            return parts[0];
        }
        return parts[0] + "?" + String.join("&", pairs);
    }

    /**
     * Normalizes a human-readable Chinese release-time string ("刚刚",
     * "3分钟前", "昨天 08:30", "5月1日", absolute dates, ...) into epoch millis.
     *
     * @param time raw time text scraped from the page
     * @return epoch milliseconds; 0 when {@code time} is blank
     * @throws ParseException when the text matches none of the known formats
     */
    private static long washTime(String time) throws ParseException {
        if (StringUtils.isBlank(time)) {
            return 0;
        }

        long currentTime = System.currentTimeMillis();
        if ("刚刚".equals(time)) {
            return currentTime;
        } else if (time.matches("\\d+秒前")) {
            String num = time.split("秒")[0];
            return currentTime - Integer.parseInt(num.trim()) * MILLIS_PER_SECOND;
        } else if (time.matches("\\d+分钟前")) {
            String num = time.split("分钟")[0];
            return currentTime - Integer.parseInt(num.trim()) * MILLIS_PER_MINUTE;
        } else if (time.matches("\\d+小时前")) {
            String num = time.split("小时")[0];
            return currentTime - Integer.parseInt(num.trim()) * MILLIS_PER_HOUR;
        } else if (time.matches("\\d+天前")) {
            String num = time.split("天前")[0];
            return currentTime - Integer.parseInt(num.trim()) * MILLIS_PER_DAY;
        } else if (time.matches("昨天 \\d+:\\d+")) {
            String m = time.split("昨天")[1];
            time = getDate(-1) + m;
        } else if (time.matches("前天 \\d+:\\d+")) {
            // Bug fix: the pattern used to be the literal "前天 08:30", so any
            // other "前天 HH:mm" value fell through to parseDate and failed.
            String m = time.split("前天")[1];
            time = getDate(-2) + m;
        } else if (time.matches("\\d+月\\d+日") || time.matches("\\d+月\\d+日 \\d+:\\d+")) {
            // Month/day without a year: assume the current year.
            time = LocalDate.now().getYear() + time;
        }
        return DateUtils.parseDate(time, "yyyy-MM-dd HH:mm:ss", "发表于 yyyy/MM/dd HH:mm", "yyyyMM月dd日", "yyyy-MM-dd HH:mm", "yyyyMM月dd日 HH:mm", "日期：yyyy-MM-dd", "yyyy年MM月dd日 HH:mm",
                    "时间：yyyy-MM-dd HH:mm:ss","yyyy年MM月dd日").getTime();
    }

    /**
     * Formats today's date shifted by {@code index} days as yyyy-MM-dd, in the
     * Asia/Shanghai time zone.
     *
     * @param index positive to add days, negative to subtract
     * @return formatted date string
     */
    private static String getDate(int index) {
        TimeZone tz = TimeZone.getTimeZone("Asia/Shanghai");
        // Scope the zone to this calculation instead of mutating the JVM-wide
        // default via TimeZone.setDefault(), which affected every other thread.
        Calendar calendar = Calendar.getInstance(tz);
        calendar.add(Calendar.DAY_OF_MONTH, index);
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        fmt.setTimeZone(tz);
        return fmt.format(calendar.getTime());
    }


    /**
     * Re-schedules a failed record for download, up to 5 attempts. The retry
     * record copies the original request's headers (except for search-list
     * URLs), extras, wash/parse flags and tags, and bumps the retry counter
     * stored under {@code REQUEST_AGAIN_TAG}.
     *
     * @param crawlerRequestRecords output list the retry record is appended to
     * @param crawlerRecord         the record whose download failed
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                logger.error(domain() + " download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // The "turn_page_item_request" label distinguishes item-page requests
        // from turn-page requests; the retry must be rebuilt with the same kind.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        CrawlerRequestRecord crawlerRequestRecord;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }
        // (Dead null-check removed: both branches above always assign the record.)

        // Search-list requests intentionally skip the header copy — presumably
        // their headers are rebuilt per request; confirm against the scheduler.
        if (!requestUrl.matches(searchKwListUrlRegex)) {
            crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        }
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Reloads the in-memory UA queue from a UTF-8 text file (one User-Agent per
     * line; blank lines are skipped).
     *
     * <p>Fixes two defects: the reader was never closed, and the original
     * {@code bufferedReader.lines().count()} pre-check consumed the whole
     * stream, so the subsequent readLine() loop saw EOF immediately — the queue
     * was cleared but never refilled.</p>
     *
     * @param path path of the UA list file
     * @throws IOException when the file cannot be read
     */
    private void getUaList(String path) throws IOException {
        List<String> fresh = new ArrayList<>();
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(path), "UTF-8"))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                if (StringUtils.isBlank(line)) {
                    continue;
                }
                fresh.add(line);
            }
        }
        // Only discard the current entries when the file actually provided new ones.
        if (uaList != null && !uaList.isEmpty() && !fresh.isEmpty()) {
            uaList.clear();
        }
        uaList.addAll(fresh);
    }

    /**
     * Takes one UA from the rotating queue and puts it back at the tail
     * (round-robin rotation).
     *
     * @return a UA string, or {@code null} when the queue stayed empty for 3 ms
     * @throws InterruptedException if interrupted while waiting
     */
    private String getOneUa() throws InterruptedException {
        String ua = uaList.poll(3, TimeUnit.MILLISECONDS);
        // poll() returns null on timeout; re-adding null would make the queue
        // throw NullPointerException, so only rotate a real value back in.
        if (ua != null) {
            uaList.add(ua);
        }
        return ua;
    }

    /** Ad-hoc manual check: prints tomorrow's date (yyyy-MM-dd, Asia/Shanghai). */
    public static void main(String[] args) {
        String tomorrow = getDate(1);
        System.out.println(tomorrow);
    }

}
