package com.chance.cc.crawler.development.scripts.sina.module;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.filter;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Site_Info;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/2/4 11:47
 * @Description 新浪 汽车板块
 **/
public class SinaAutoCrawlerScript extends CrawlerCommonScript {

    private static Logger log = LoggerFactory.getLogger(SinaAutoCrawlerScript.class);
    // Routing identity: the domain/site this script is registered for.
    private static final String DOMAIN = "sina";
    private static final String SITE = "auto";
    // Business tag counting how many times a failed download has been retried.
    private static final String REQUEST_AGAIN_TAG = "sina_request_retry";
    // Extras key holding the search-keyword source url template.
    private static final String SEARCH_KW_SOURCE_URL = "searchKwSourceUrl";
    private static final String DOMAIN_RESULT_JSON_RECORD_TAG = "domain_result_json";// result field of the initial record
    private static final String SYC = "syc";// field marking results to be synchronised

    // "*_SOURCE_URL" are format templates (with %s placeholders) used to BUILD
    // requests; the matching "*_URL" are the regular expressions used to ROUTE
    // incoming urls in initUrlRegulars()/parseLinks().
    private static final String AUTO_ENTRANCE_URL = "http://auto.sina.com.cn/";
    private static final String AUTO_MODULE_URL = "http[s]*://auto.sina.com.cn/[a-zA-Z]*/";
    private static final String AUTO_MODULE_HTML_SOURCE_URL = "http://interface.sina.cn/auto/inner/getAutoSubpageInfo.d.json?cid=%s&pageSize=15&page=%s";
    private static final String AUTO_MODULE_HTML_URL = "http://interface.sina.cn/auto/inner/getAutoSubpageInfo.d.json\\S*";
    private static final String AUTO_MODULE_NEWS_JSON_SOURCE_URL = "https://interface.sina.cn/auto/news/getWapNewsNewBycID.d.json?cid=%s&page=1&limit=20&tagid=0&exceptIDs=%s";
    private static final String AUTO_MODULE_NEWS_JSON_URL = "https://interface.sina.cn/auto/news/getWapNewsNewBycID.d.json\\?cid=\\S*&page=\\d+&limit=\\d+&tagid=0&exceptIDs=\\S*";
    private static final String AUTO_MODULE_INNER_JSON_SOURCE_URL = "https://interface.sina.cn/auto/inner/get%sHot.d.json?page=2";
    private static final String AUTO_MODULE_INNER_JSON_URL = "https://interface.sina.cn/auto/inner/get[a-zA-Z]*Hot.d.json\\S*";
    private static final String AUTO_MODULE_MP_JSON_SOURCE_URL = "https://interface.sina.cn/auto/news/getWapNewsNewBycID.d.json?cid=%s&page=1&limit=20&tagid=%s";
    private static final String AUTO_MODULE_MP_JSON_URL = "https://interface.sina.cn/auto/news/getWapNewsNewBycID.d.json\\?cid=\\S*&page=\\d+&limit=\\d+&tagid=\\d+";

    /**
     * Domain identifier under which this script is registered.
     *
     * @return the constant Sina domain name
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers the url patterns that route requests into this script.
     */
    @Override
    public void initUrlRegulars() {
        String[] patterns = {
                AUTO_ENTRANCE_URL,
                AUTO_MODULE_URL,
                AUTO_MODULE_HTML_URL,
                AUTO_MODULE_INNER_JSON_URL,
                AUTO_MODULE_NEWS_JSON_URL,
                AUTO_MODULE_MP_JSON_URL
        };
        for (String pattern : patterns) {
            addUrlRegular(pattern);
        }
    }

    /**
     * Input gate: a record enters this script only when its "site" tag is blank
     * or equals {@code auto}.
     *
     * @param crawlerRequestRecord incoming request record
     * @return true when the record belongs to this site or carries no site tag
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // Blank/null is checked first: the original order (site.equals(SITE))
        // threw a NullPointerException whenever the "site" tag was absent.
        return StringUtils.isBlank(site) || SITE.equals(site);
    }

    /**
     * Prepares the initial crawl records: expands keyword search urls from the
     * supporting records and emits any pending "syc" item record.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> records = new ArrayList<>();

        for (CrawlerRequestRecord sourceRecord : supportSourceRecords) {
            if (sourceRecord.getHttpRequest().getUrl().contains("keys")) {
                initKeyword(requestRecord, sourceRecord, records);
            }
        }

        getSycItemUrlRecord(requestRecord, records);
        return records;
    }


    /**
     * Parses follow-up links (turn-page and item requests) from a downloaded page.
     * Failed downloads are queued for retry, 404 pages are dropped, and every
     * other page is dispatched to the handler whose url pattern it matches.
     *
     * @param crawlerRequestRecord record that produced this page
     * @param httpPage             downloaded page
     * @return follow-up request records extracted from the page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<CrawlerRequestRecord>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        // Download failed or came back with an unexpected status: retry and
        // skip washing of this page.
        if (!httpPage.isDownloadSuccess() || (httpPage.getStatusCode() != 200 && httpPage.getStatusCode() != 404)) {
            log.error("{} status code : {}", crawlerRequestRecord.getHttpRequest().getUrl(), httpPage.getStatusCode());
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        // 404 pages are dropped without retrying.
        if (httpPage.getStatusCode() == 404) {
            log.error("{} status code : {}", crawlerRequestRecord.getHttpRequest().getUrl(), httpPage.getStatusCode());
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        if (requestUrl.matches(AUTO_ENTRANCE_URL)) {
            // The entrance page itself is not washed for the moment.
        }

        // NOTE(review): these are independent ifs, not else-if — a url can match
        // several patterns (the entrance url also matches AUTO_MODULE_URL since
        // [a-zA-Z]* accepts the empty string). Confirm multiple dispatch is
        // intended before restructuring.
        if (requestUrl.matches(AUTO_MODULE_URL)) {
            autoModuleUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }

        if (requestUrl.matches(AUTO_MODULE_HTML_URL)) {
            autoModuleHtmlUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }

        if (requestUrl.matches(AUTO_MODULE_INNER_JSON_URL)) {
            autoModuleInnerJsonUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }

        if (requestUrl.matches(AUTO_MODULE_NEWS_JSON_URL)) {
            autoModuleNewsJsonUrlRecord(crawlerRequestRecord, httpPage, parsedLinks);
        }

        if(requestUrl.matches(AUTO_MODULE_MP_JSON_URL)){
            autoModuleMpJsonUrlRecord(crawlerRequestRecord,httpPage,parsedLinks);
        }
        return parsedLinks;
    }

    /**
     * Handles an auto-module landing page (e.g. /news/, /mp/, /estation/):
     * builds the module-specific next-page url, then extracts item links for
     * modules that list items directly in the html.
     */
    private void autoModuleUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        // Module name = last path segment (urls match ".../<module>/", and
        // String.split drops the trailing empty segment).
        String[] split = requestUrl.split("/");
        String siteInfo = split[split.length - 1];
        // Turn-page url: each module uses a different JSON/html endpoint.
        String nextUrl = "";
        switch (siteInfo) {
            case "estation":
                nextUrl = getModuleInnerJsonUrl(httpPage);
                // Mark results of this module for synchronisation.
                crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(SYC,"true");
                break;
            case "news":
                nextUrl = getModuleNewsJsonUrl(httpPage);
                break;
            case "mp":
                nextUrl = getModuleMpJsonUrl(httpPage,"pageType = '\\d+'","0");
                crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(SYC,"true");
                break;
            default:
                nextUrl = getModuleHtmlUrl(httpPage, siteInfo);
        }
        if (StringUtils.isNotBlank(nextUrl)) {
            CrawlerRequestRecord nextRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(nextUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            // news/video/mp next-page requests bypass the filter request type.
            if ("news".equals(siteInfo) || "video".equals(siteInfo) || "mp".equals(siteInfo)) {
                nextRecord.tagsCreator().requestTags().removeRequestType(filter);
            }
            parsedLinks.add(nextRecord);
        }

        // news/video/mp list their items through the JSON endpoints above, so
        // no html item extraction is done for them here.
        if ("news".equals(siteInfo) || "video".equals(siteInfo) || "mp".equals(siteInfo)) {
            return;
        }

        // Item pages: default xpaths for generic modules, overridden for estation
        // (which has no visible release time — releaseTimeXpath left blank).
        String nodeXpath = "//div[@class=\"con\"]";
        String itemXpath = ".//h3/a/@href";
        String releaseTimeXpath = ".//span[@class=\"time fL\"]";
        String releaseTimePatterns = "yyyy-MM-dd HH:mm:ss";
        switch (siteInfo) {
            case "estation":
                nodeXpath = "//ul[@class=\"clearfix\"]/li[@class=\"item\"]";
                itemXpath = ".//div[@class=\"tit\"]/a/@href";
                releaseTimeXpath = "";
                releaseTimePatterns = "";
                break;
        }
        getAutoModuleItemUrlRecord(null, nodeXpath, itemXpath, releaseTimeXpath, releaseTimePatterns,
                crawlerRequestRecord, httpPage, parsedLinks);
    }

    /**
     * Handles a module page served by the getAutoSubpageInfo JSON endpoint:
     * queues the next page and extracts item links from the html fragment
     * embedded in the response's "data" field.
     */
    private void autoModuleHtmlUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Turn-page: bump the "page" query parameter by one.
        String pageUrl = getNextUrl(httpPage.getRequest().getUrl(), "", "page");
        parsedLinks.add(CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(pageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build());

        // Items: the JSON "data" field carries the html list markup.
        getAutoModuleItemUrlRecord("data", "//div[@class=\"con\"]", ".//h3/a/@href",
                ".//span[@class=\"time fL\"]", "yyyy-MM-dd HH:mm:ss",
                crawlerRequestRecord, httpPage, parsedLinks);

    }

    /**
     * Handles an "inner hot" JSON list page: queues the next page and extracts
     * item links straight from the JSON (no release time available).
     */
    private void autoModuleInnerJsonUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Turn-page: bump the "page" query parameter by one.
        String pageUrl = getNextUrl(httpPage.getRequest().getUrl(), "", "page");
        parsedLinks.add(CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(pageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build());

        // Items: each element of "data" holds the item url in its "url" field.
        getAutoModuleJsonItemUrlRecord("data", "url", null, null, crawlerRequestRecord, httpPage, parsedLinks);
    }

    /**
     * Handles a "news" JSON list page: queues the next page (re-encoding the
     * exceptIDs parameter) and extracts item links with their release times.
     */
    private void autoModuleNewsJsonUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // Turn-page: bump "page" and keep "exceptIDs" url-encoded.
        String pageUrl = getNextUrl(httpPage.getRequest().getUrl(), "exceptIDs", "page");
        parsedLinks.add(CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(pageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build());

        // Items: "pc_url" is the item link, "cTime" the release time.
        getAutoModuleJsonItemUrlRecord("data", "pc_url", "cTime", "yyyy-MM-dd HH:mm:ss", crawlerRequestRecord, httpPage, parsedLinks);
    }

    /**
     * Handles an "mp" JSON list page: queues the next page and extracts item
     * links with their release times.
     */
    private void autoModuleMpJsonUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks){
        // Turn-page: bump the "page" query parameter; no keyword re-encoding.
        String pageUrl = getNextUrl(httpPage.getRequest().getUrl(), null, "page");
        parsedLinks.add(CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(pageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build());

        // Items: "pc_url" is the item link, "cTime" the release time.
        getAutoModuleJsonItemUrlRecord("data", "pc_url", "cTime", "yyyy-MM-dd HH:mm:ss", crawlerRequestRecord, httpPage, parsedLinks);
    }

    /**
     * Extracts an id/channel value from the raw page text using {@code regx}.
     * The matched fragment may wrap the value in single quotes, double quotes,
     * or a "key = value" assignment; the bare value is returned, or null when
     * the pattern does not match.
     */
    private String getChannelOrId(HttpPage httpPage, String regx) {
        String match = washContent(regx, httpPage.getRawText());
        if (StringUtils.isBlank(match)) {
            return null;
        }
        if (match.contains("'")) {
            return match.substring(match.indexOf("'") + 1, match.lastIndexOf("'"));
        }
        if (match.contains("\"")) {
            return match.substring(match.indexOf("\"") + 1, match.lastIndexOf("\""));
        }
        if (match.contains("=")) {
            return match.split("=")[1].trim();
        }
        return match;
    }

    /**
     * Builds the JSON endpoint for the next page of the "news" module.
     * The cid (pageType) and exceptIDs are scraped from the raw page source.
     *
     * @param httpPage current module page
     * @return the next-page url, or null when either value is missing or
     *         url-encoding fails
     */
    private String getModuleNewsJsonUrl(HttpPage httpPage) {
        String cid = washContent("pageType = \"\\d+\"", httpPage.getRawText());
        String exceptIDs = washContent("exceptIDs='\\S+'", httpPage.getRawText());
        if (StringUtils.isBlank(cid) || StringUtils.isBlank(exceptIDs)) {
            log.error("cid or exceptIDs can not null!");
            return null;
        }
        // Strip the surrounding quotes of the matched fragments.
        cid = cid.substring(cid.indexOf("\"") + 1, cid.lastIndexOf("\""));
        exceptIDs = exceptIDs.substring(exceptIDs.indexOf("'") + 1, exceptIDs.lastIndexOf("'"));
        try {
            return String.format(AUTO_MODULE_NEWS_JSON_SOURCE_URL, cid, URLEncoder.encode(exceptIDs, "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            // Log the exception itself; the previous log.error(e.getMessage())
            // discarded the stack trace.
            log.error("failed to url-encode exceptIDs", e);
        }
        return null;
    }

    /**
     * Builds the JSON endpoint for the next page of the "mp" module.
     *
     * @param httpPage current module page, whose raw text contains the cid
     * @param cidRegx  regex locating the cid in the page source
     * @param tagid    tagid query parameter (the "mp" entry passes "0")
     * @return the next-page url, or null when no cid can be found
     */
    private String getModuleMpJsonUrl(HttpPage httpPage,String cidRegx,String tagid) {
        String cid = getChannelOrId(httpPage,cidRegx);
        if (StringUtils.isBlank(cid)) {
            // Guard added: without it a url containing the literal "null" cid
            // was produced when the regex did not match.
            log.error("cid can not null!");
            return null;
        }
        return String.format(AUTO_MODULE_MP_JSON_SOURCE_URL, cid,tagid);
    }

    /**
     * Builds the subpage endpoint for a generic module. The cid comes from the
     * pageType variable embedded in the page source; the "video" module starts
     * at page 1, all others at page 2. Returns "" when no pageType is found.
     */
    private String getModuleHtmlUrl(HttpPage httpPage, String siteInfo) {
        String pageType = washContent("pageType = \"\\d+\"", httpPage.getRawText());
        if (StringUtils.isBlank(pageType)) {
            log.error("pageType can not null!");
            return "";
        }
        // Strip the surrounding double quotes from the matched fragment.
        pageType = pageType.substring(pageType.indexOf("\"") + 1, pageType.lastIndexOf("\""));
        int startPage = "video".equals(siteInfo) ? 1 : 2;
        return String.format(AUTO_MODULE_HTML_SOURCE_URL, pageType, startPage);
    }

    /**
     * Builds the "inner hot" JSON endpoint for the current module. The last
     * path segment of the request url (split drops the trailing empty segment
     * after the final "/") is capitalised and spliced into the api name.
     */
    private String getModuleInnerJsonUrl(HttpPage httpPage) {
        String[] segments = httpPage.getRequest().getUrl().split("/");
        String module = segments[segments.length - 1];
        String capitalised = module.substring(0, 1).toUpperCase() + module.substring(1);
        return String.format(AUTO_MODULE_INNER_JSON_SOURCE_URL, capitalised);
    }

    /**
     * Extracts item-page records from html list markup.
     *
     * @param getHtml             when non-blank, JSON field whose value is the html
     *                            fragment to parse (the page itself is JSON);
     *                            when blank, the page's own html is parsed
     * @param nodeXpath           xpath selecting one node per list entry
     * @param itemXpath           xpath (relative to a node) for the item url
     * @param releaseTimeXpath    xpath for the release time; blank means the
     *                            entry has none and the record skips washing
     * @param releaseTimePatterns date pattern for parsing the release time
     */
    private void getAutoModuleItemUrlRecord(String getHtml, String nodeXpath, String itemXpath, String releaseTimeXpath, String releaseTimePatterns,
                                            CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        if (StringUtils.isNotBlank(getHtml)) {
            // Replace the page html with the fragment embedded in the JSON field.
            httpPage.setHtml(new Html(httpPage.getJson().jsonPath($_type + "." + getHtml).get()));
        }
        List<Selectable> nodes = httpPage.getHtml().xpath(nodeXpath).nodes();
        for (Selectable node : nodes) {
            String itemUrl = node.xpath(itemXpath).get();
            if (StringUtils.isBlank(itemUrl)) {
                continue;
            } else {
                // Urls in the markup may carry html entities (&amp; etc.).
                itemUrl = StringEscapeUtils.unescapeHtml(itemUrl);
            }

            try {
                long releaseTimeToLong = System.currentTimeMillis();
                if (StringUtils.isNotBlank(releaseTimeXpath)) {
                    String releaseTime = node.xpath(releaseTimeXpath).get();
                    if (StringUtils.isBlank(releaseTime)) {
                        continue;
                    }
                    releaseTimeToLong = DateUtils.parseDate(releaseTime, releaseTimePatterns).getTime();
                }
                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                if (StringUtils.isBlank(releaseTimeXpath)) {
                    // No release time: the record is neither washed nor filtered.
                    // NOTE(review): the JSON variant below only clears the wash
                    // flag, not the filter info — confirm whether that
                    // difference is intentional.
                    itemRecord.setNeedWashPage(false);
                    itemRecord.setTurnPageFilterInfo(null);
                }

                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                log.error(e.getMessage());
            }
        }
    }

    /**
     * Extracts item-page records from a JSON list response.
     *
     * @param jsonPath            JSON field holding the list of entries
     * @param itemXPath           key of the item url inside each entry
     * @param releaseTimeXpath    key of the release time; null/blank means the
     *                            entry has none and the record skips washing
     * @param releaseTimePatterns date pattern for parsing the release time
     */
    private void getAutoModuleJsonItemUrlRecord(String jsonPath, String itemXPath, String releaseTimeXpath, String releaseTimePatterns,
                                                CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        List<String> all = httpPage.getJson().jsonPath($_type + "." + jsonPath).all();
        for (String s : all) {
            JSONObject jsonObject = JSONObject.parseObject(s);
            String itemUrl = jsonObject.getString(itemXPath);
            if (StringUtils.isBlank(itemUrl)) {
                continue;
            }

            long releaseTimeToLong = System.currentTimeMillis();
            try {
                if (StringUtils.isNotBlank(releaseTimeXpath)) {
                    String releaseTime = jsonObject.getString(releaseTimeXpath);
                    if (StringUtils.isBlank(releaseTime)) {
                        continue;
                    }
                    releaseTimeToLong = DateUtils.parseDate(releaseTime, releaseTimePatterns).getTime();
                }

                CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                if (StringUtils.isBlank(releaseTimeXpath)) {
                    // No release time available: skip washing for this record.
                    itemRecord.setNeedWashPage(false);
                }

                parsedLinks.add(itemRecord);
            } catch (ParseException e) {
                // Log the exception itself; the previous log.error(e.getMessage())
                // discarded the stack trace.
                log.error("failed to parse release time for {}", itemUrl, e);
            }
        }
    }

    /**
     * Page washing is not implemented by this script; returning null leaves
     * result extraction to other components.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        return null;
    }

    /**
     * Expands keyword search requests: reads keywords from the supporting
     * record's internally downloaded JSON page, substitutes each keyword into
     * the searchKwSourceUrl template (from the request extras) and queues one
     * turn-page record per keyword, tagged with the keyword as site info.
     */
    private void initKeyword(CrawlerRequestRecord requestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        if (extras == null) {
            return;
        }
        String searchKwSourceUrl = (String) extras.get(SEARCH_KW_SOURCE_URL);
        if (StringUtils.isBlank(searchKwSourceUrl)) {
            // Message fixed: previously read "can not is null".
            log.error("searchKw source url can not be null!");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            try {
                String itemUrl = String.format(searchKwSourceUrl, URLEncoder.encode(keyword, "UTF-8"));
                CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                turnRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info, keyword);
                crawlerRecords.add(turnRecord);
            } catch (UnsupportedEncodingException e) {
                // Log the exception itself; the previous log.error(e.getMessage())
                // discarded the stack trace.
                log.error("failed to url-encode keyword {}", keyword, e);
            }
        }
    }

    /**
     * Post-execution hook; no-op — this script performs no cleanup here.
     */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }

    /**
     * Queues a retry record for a failed download, giving up after 5 attempts.
     * The retry count lives in the REQUEST_AGAIN_TAG business tag; the retry
     * keeps the original record's headers, extras, wash/parse flags and tags.
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            // Give up once the retry budget is exhausted.
            if (count >= 5) {
                log.error("sina download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        CrawlerRequestRecord crawlerRequestRecord = null;
        // Distinguish turn-page records from item-page records so the retry is
        // rebuilt with the same request kind as the original.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {
            // Appending count to the record key makes each retry's key unique
            // so it is not deduplicated against the failed attempt.
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Carry over the failed record's processing flags and http settings.
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Emits an item record carried over from a previous run via the
     * domain_result_json business tag ("syc" synchronisation). The tag value is
     * a CrawlerDomainUrls whose url field is itself JSON holding the item url
     * and release time; records outside the configured date range are dropped.
     */
    private void getSycItemUrlRecord(CrawlerRequestRecord requestRecord,List<CrawlerRecord> crawlerRecords){
        if (requestRecord.tagsCreator().bizTags().hasKVTag(DOMAIN_RESULT_JSON_RECORD_TAG)) {
            KVTag domainResultJson = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(DOMAIN_RESULT_JSON_RECORD_TAG);
            CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
            String url = crawlerDomainUrls.getUrl();
            // The url field is nested JSON: {"itemUrl": ..., "releaseTimeToLong": ...}.
            Json urlJson = new Json(url);
            String itemUrl = urlJson.jsonPath($_type + ".itemUrl").get();
            long releaseTimeToLong = Long.parseLong(urlJson.jsonPath($_type + ".releaseTimeToLong").get());
            if(!isDateRange(requestRecord,releaseTimeToLong)){
                return ;
            }
            // Consume the tag so the record is not re-emitted on later passes.
            requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(DOMAIN_RESULT_JSON_RECORD_TAG);

            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(requestRecord)
                    .httpUrl(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            crawlerRecords.add(itemRecord);
        }
    }

    /**
     * Returns the first match of {@code regx} in {@code input}, or null when
     * the pattern does not occur.
     *
     * @param regx  regular expression to search for
     * @param input text to scan
     * @return the first matched fragment, or null
     */
    private static String washContent(String regx, String input) {
        Matcher matcher = Pattern.compile(regx).matcher(input);
        // A plain conditional replaces the original while-loop, which always
        // returned on its first iteration.
        return matcher.find() ? matcher.group(0) : null;
    }

    /**
     * Checks whether a release time falls inside the record's configured date
     * range. Records whose filter is neither keyOrDateRange nor dateRange are
     * always accepted; otherwise the range comes from the record's dateRange
     * filter infos (explicit [start, end] window, or the last hourFromNow
     * hours counted back from now).
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis
     * @return true when the time is inside the range (or no range applies)
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        boolean isRange = false;
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
            List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
            Long startTime = null;
            Long endTime = null;
            // If several dateRange filter infos exist, the last one wins.
            for (FilterInfo filterInfo : filterInfos) {
                if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                    long[] dateAllowRange = filterInfo.getDateAllowRange();
                    int hourFromNow = filterInfo.getHourFromNow();
                    if (dateAllowRange != null) {
                        startTime = dateAllowRange[0];
                        endTime = dateAllowRange[1];
                    } else if (hourFromNow != 0) {
                        endTime = System.currentTimeMillis();
                        startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                    }
                }
            }
            // startTime and endTime are always set together above, so checking
            // startTime alone is sufficient; a zero release time never matches.
            if (startTime != null && releaseTimeToLong != 0 && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime) {
                isRange = true;
            }
        } else {
            isRange = true;
        }
        return isRange;
    }

    /**
     * Rebuilds {@code requestUrl} with its numeric {@code page} parameter
     * incremented by one; the parameter named {@code keyword} is re-url-encoded
     * and every other parameter is copied verbatim.
     *
     * @param requestUrl url expected to contain a query string
     * @param keyword    name of a parameter to re-encode (may be blank/null)
     * @param page       name of the numeric page parameter to increment
     * @return the next-page url, or the input unchanged when it has no query
     */
    private String getNextUrl(String requestUrl, String keyword, String page) {
        String[] parts = requestUrl.split("\\?");
        if (parts.length < 2) {
            // Guard added: the original indexed parts[1] unconditionally and
            // threw ArrayIndexOutOfBoundsException on urls without a query.
            return requestUrl;
        }
        StringBuilder nextUrl = new StringBuilder(parts[0]).append('?');
        for (NameValuePair pair : URLEncodedUtils.parse(parts[1], Charset.defaultCharset())) {
            String name = pair.getName();
            String value = pair.getValue();
            if (StringUtils.isNotBlank(page) && page.equals(name)) {
                nextUrl.append(name).append('=').append(Integer.parseInt(value) + 1).append('&');
            } else if (StringUtils.isNotBlank(keyword) && keyword.equals(name)) {
                try {
                    nextUrl.append(name).append('=').append(URLEncoder.encode(value, "UTF-8")).append('&');
                } catch (UnsupportedEncodingException e) {
                    // Log the exception itself; log.error(e.getMessage()) lost the trace.
                    log.error("failed to url-encode parameter {}", name, e);
                }
            } else {
                nextUrl.append(name).append('=').append(value).append('&');
            }
        }
        // Drop the trailing '&' appended after the last parameter.
        return nextUrl.substring(0, nextUrl.length() - 1);
    }

    /** Ad-hoc local check of trailing-path-segment extraction; prints "19656553/". */
    public static void main(String[] args) {
        String url = "https://a.sina.cn/t/author/19656553/";
        // Drop the trailing slash before locating the final separator, then
        // take everything after it (the trailing slash is kept in the result).
        String lastSegment = url.substring(url.substring(0, url.length() - 1).lastIndexOf("/") + 1);
        System.out.println(lastSegment);
    }
}
