package com.chance.cc.crawler.development.scripts.zhihu.article;

import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.Downloader;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * Crawler script for zhihu topic articles: walks a topic's "newest" timeline,
 * emits article / interaction / comment data, and signs API requests with the
 * zhihu x-zse-96 header scheme via an external signature service.
 *
 * @author ding
 * @since 2021/9/1
 */
public class ZHArticleCrawlerScript extends CrawlerCommonScript {
    private static Logger log = LoggerFactory.getLogger(ZHArticleCrawlerScript.class);
    // Crawler identity: domain is reported by domain(), side is matched in crawlerCheck().
    private static final String domain = "zhihu";
    private static final String side = "article";
    // Biz-tag key that counts how many times a failed request has been re-queued.
    private static final String RECORD_AGAIN_REQUEST = "record_again_request";

    private static final String URL = "https://www.zhihu.com/";

    // API templates: %s placeholders are answer id (comments) and topic id / offset (timeline).
    private static final String CommentUrl = "https://www.zhihu.com/api/v4/answers/%s/root_comments?limit=20&offset=0&order=normal&status=open";
    private static final String list = "https://www.zhihu.com/api/v5.1/topics/%s/feeds/timeline_activity?offset=%s&limit=10";
    // Query fragments requesting content/vote/comment fields from the timeline endpoint.
    private static final String include = "&include=data[?(target.type=topic_sticky_module)].target.data[?(target.type=answer)].target.content,relationship.is_authorized,is_author,voting,is_thanked,is_nothelp&data[?(target.type=topic_sticky_module)].target.data[?(target.type=answer)].target.is_normal,comment_count,voteup_count,content,relevant_info,excerpt.author.badge[?(type=best_answerer)].topics";
    private static final String include1 = "&data[?(target.type=topic_sticky_module)].target.data[?(target.type=answer)].target.is_normal,comment_count,voteup_count,content,relevant_info,excerpt.author.badge[?(type=best_answerer)].topics&include=data[?(target.type=topic_sticky_module)].target.data[?(target.type=answer)].target.content,relationship.is_authorized,is_author,voting,is_thanked,is_nothelp";
    // URL-shape regexes used to dispatch in parseLinks()/washPage().
    private static final String listUrl = "https://www.zhihu.com/topic/\\S*/newest";
    private static final String ListUrl = "https://www.zhihu.com/api/v5.1/topics\\S*";
    private static final String commentUrl = "https://www.zhihu.com/api/v4/answers/\\S*/root_comments\\?limit\\S*";


    // Local signature service endpoint used by downloadSign().
    private static final String signHostPrefix = "http://192.168.1.210:8899/encrypt/zhihu";
    // Captured lazily in beforeDownload(); reused for signature-service calls.
    private Downloader downloader;
    // Shared anti-crawler headers; mutated by genSign86Headers().
    // NOTE(review): static mutable state — not safe under concurrent page parsing; confirm
    // this script is executed single-threaded.
    private static final Map<String,String> headMap = new HashMap<>();

    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script is willing to handle.
        for (String regular : new String[]{URL, listUrl, ListUrl, commentUrl}) {
            addUrlRegular(regular);
        }
    }

    /**
     * Turns every keyword delivered by the "keys" support source into a topic
     * "newest" start URL. Returns an empty list when no support records exist
     * or when a keyword page failed to download.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> records = new ArrayList<>();
        if (supportSourceRecords == null) {
            return records;
        }
        for (CrawlerRequestRecord supportRecord : supportSourceRecords) {
            if (!supportRecord.getHttpRequest().getUrl().contains("keys")) {
                continue;
            }
            HttpPage keywordPage = supportRecord.getInternalDownloadPage();
            Json json = keywordPage.getJson();
            if (!"success".equals(json.jsonPath($_type + ".msg").get())) {
                log.error("keyword page [{}] download error!", keywordPage.getRequest().getUrl());
                return records;
            }
            for (String item : json.jsonPath($_type + ".content").all()) {
                String keyword = JSONObject.parseObject(item).getString("keyword");
                records.add(CrawlerRequestRecord.builder()
                        .itemPageRequest(requestRecord)
                        .httpUrl(URL + "topic/" + keyword + "/newest")
                        .releaseTime(System.currentTimeMillis())
                        .copyResultTags()
                        .copyBizTags()
                        .build());
            }
        }
        return records;
    }

    @Override
    public void beforeDownload(CrawlerRecordContext context) {
        // Lazily capture the shared page downloader; it is reused by downloadSign().
        if (downloader == null) {
            downloader = context.getPageDownloader();
        }
        super.beforeDownload(context);
    }

    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 ||  !page.isDownloadSuccess()){
            log.error("page == [{}] || statusCode != 200 and error page = "+ page.getStatusCode());
            if (page.getStatusCode() != 404){
                recordAgainRequest(crawlerRecord,parseLinks);
                crawlerRecord.setNeedParsedPage(false);
                crawlerRecord.setNeedWashPage(false);
                return  parseLinks;
            }else{
                crawlerRecord.setNeedParsedPage(false);
                crawlerRecord.setNeedWashPage(false);
                return parseLinks;
            }

        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(listUrl)){
            this.getListArticle(crawlerRecord,page,parseLinks);
        }
        if (url.matches(ListUrl)){
            this.getPageListArticle(crawlerRecord,page,parseLinks);
        }

        return parseLinks;
    }
    /**
     * Parses one page of a topic timeline: emits a root-comments request per
     * answer (tagged with the answer id) and a signed request for the next
     * timeline page.
     */
    private void getPageListArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String s : all) {
            JSONObject target = JSONObject.parseObject(s).getJSONObject("target");
            // The creation timestamp lives under different keys depending on content type.
            // getString() converts numeric JSON values safely; a raw (String) cast throws
            // ClassCastException when fastjson parses the timestamp as a number.
            String createTime = target.getString("created_time");
            if (createTime == null) {
                createTime = target.getString("created_at");
            }
            if (createTime == null) {
                createTime = target.getString("updated");
            }
            if (createTime == null) {
                // No usable timestamp: skip instead of throwing NumberFormatException
                // on the "null000" string the original code would have built.
                continue;
            }
            createTime = createTime + "000"; // seconds -> milliseconds
            if (!isDateRange(crawlerRecord, Long.valueOf(createTime))) {
                // Entries are newest-first; once out of range, stop paging entirely.
                return;
            }
            String id = target.getString("id");
            String commentsUrl = String.format(CommentUrl, id);
            KVTag commentFilterTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("commont_filter_tag");
            CrawlerRecord filterRecord = JSONObject.parseObject((String) commentFilterTag.getVal(), CrawlerRecord.class);
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(commentsUrl)
                    .releaseTime(Long.valueOf(createTime))
                    .copyRequestTags()
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .copyResultTags()
                    .notFilterRecord()
                    .needWashed(true)
                    .copyBizTags()
                    .build();
            record.tagsCreator().bizTags().addCustomKV("seriesId", id);
            record.setFilter(filterRecord.getFilter());
            record.setFilterInfos(filterRecord.getFilterInfos());
            parseLinks.add(record);
        }
        // Queue the next timeline page with freshly signed headers.
        JSONObject paging = JSONObject.parseObject(page.getJson().jsonPath($_type + ".paging").get());
        String nextPageUrl = paging.getString("next");
        if (StringUtils.isBlank(nextPageUrl)) {
            // Guard: the original would have passed null into genSign86Headers() and NPE'd.
            return;
        }
        genSign86Headers(nextPageUrl);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyRequestTags()
                .copyResultTags()
                .copyBizTags()
                .build();
        record.getHttpRequest().setHeaders(headMap);
        parseLinks.add(record);
    }

    /**
     * Converts a topic "newest" page URL into the first (offset 0) timeline
     * API request, signed and carrying the anti-crawler headers.
     */
    private void getListArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String url = crawlerRecord.getHttpRequest().getUrl();
        // The topic id is the LAST run of digits in the URL.
        String seriesId = null;
        Matcher mat = Pattern.compile("\\d+").matcher(url);
        while (mat.find()) {
            seriesId = mat.group(0);
        }
        if (seriesId == null) {
            // Fixed message: the original said "vehicle id" (copy-paste from another script)
            // and concatenated the url instead of using a placeholder.
            log.error("The topic id was not obtained, url = {}", url);
            return;
        }
        int pageSize = 0;
        String listParameter = String.format(list, seriesId, pageSize) + include1;
        genSign86Headers(listParameter);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(listParameter)
                .copyBizTags()
                .copyResultTags()
                .build();
        record.getHttpRequest().setHeaders(headMap);
        record.tagsCreator().bizTags().addCustomKV("pageSize", pageSize);
        parseLinks.add(record);
    }

    /**
     * Computes the x-zse-96 signature header for a zhihu API url and stores it
     * in {@code headMap}. The md5 source is "x-zse-93 + api path + d_c0 cookie
     * + x-zst-81" joined with '+', as required by the zhihu signing scheme.
     */
    private void genSign86Headers(String listParameter) {
        String api = removeOrigin(listParameter);
        // d_c0 cookie value; must stay in sync with the "cookie" header set in the static block.
        String dc0 = "\"APDdLoQDqBOPThRvoqfcqQ2B3rku5G3xIfU=|1630472725\"";
        String xZse93 = headMap.get("x-zse-93");
        String xZst81 = headMap.get("x-zst-81");
        String needEncodeStr = StringUtils.joinWith("+", xZse93, api, dc0, xZst81);
        try {
            // Explicit charset: a bare getBytes() depends on the platform default encoding.
            String md5Code = md5(needEncodeStr.getBytes(StandardCharsets.UTF_8));
            headMap.put("x-zse-96", "2.0_" + downloadSign(md5Code));
        } catch (NoSuchAlgorithmException e) {
            // Log with the stack trace instead of printStackTrace(); previously a failure
            // still called downloadSign(null) and installed a bogus header.
            log.error("md5 of sign source failed", e);
        }
    }

    /**
     * Calls the local signature service for the given md5 code.
     *
     * @return the "sign" field of the service response, or an empty string when
     *         the call fails.
     */
    private String downloadSign(String md5Code) {
        String sign86 = "";
        String url = signHostPrefix + "?code=" + md5Code;
        HttpConfig httpConfig = HttpConfig.me("sign");
        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(url);
        try {
            HttpPage httpPage = downloader.download(httpRequest, httpConfig);
            sign86 = JSONObject.parseObject(httpPage.getRawText()).getString("sign");
        } catch (Exception e) {
            // Keep the stack trace; e.getMessage() alone can even be null (e.g. NPE).
            log.error("download sign failed, url = " + url, e);
        }
        return sign86;
    }

    /**
     * Strips the scheme and host from an absolute zhihu URL, returning only the
     * path + query part. Returns an empty string for null input or for URLs
     * that do not contain "zhihu.com".
     */
    private String removeOrigin(String url) {
        String api = "";
        if (url != null && url.startsWith("http")) {
            // indexOf instead of split: split() treats "zhihu.com" as a REGEX ('.' matches
            // any character) and a non-matching URL would make split return a single-element
            // array, so indexing [1] threw ArrayIndexOutOfBoundsException.
            int idx = url.indexOf("zhihu.com");
            if (idx >= 0) {
                api = url.substring(idx + "zhihu.com".length());
            }
        }
        return api;
    }

    // Hard-coded zhihu anti-crawler headers shared by all requests.
    // NOTE(review): x-zst-81 and the d_c0 cookie are session tokens captured around
    // 2021-09-01 and will expire; confirm they are refreshed elsewhere before relying
    // on this script in production.
    static{
       // headMap.put("x-zst-81","3_2.0aR_sn77yn6O92wOB8hPZnQr0EMYxc4f18wNBUgpTk7tuoMYqK6P0EH9y-LS9-hp1DufI-we8gGHPgJO1xuPZ0GxCTJHR7820XM20cLRGDJXfgGCBxupMuD_Ie8FL7AtqM6O1VDQyQ6nxrRPCHukMoCXBEgOsiRP0XL2ZUBXmDDV9qhnyTXFMnXcTF_ntRueThT2LocSLnh2sogFqs_Ymo6rBobXKrDx9KGpYwgN8UUcmxwxCqCXmS43mzqCPvXHCkwFBb4uyU9psk0LC88wqtcV9r0wOjreC80wmT9xqfhVBWhVLyhNfPcV0aBw_xgwfvGc1xhU8cHc1TDwBHutCAvuqqgpsS_x90CgMTUcB6Le1CGN95qfzBrxOoUwBo8YBYLw0cDr0Cqo_VuwGDh30XGX0CUNMqBYyNuwmscwB9hwKVG3MxUx9FhpOCgH9bXxGKXLYP9NY6hxsoefftCwq6uLBOvw9kho15wHL-JrC");
        headMap.put("x-zst-81","3_2.0aR_sn77yn6O92wOB8hPZnQr0EMYxc4f18wNBUgpTr7tuSRFqK6P0E69y-LS9-hp1DufI-we8gGHPgJO1xuPZ0GxCTJHR7820XM20cLRGDJXfgGCBxupMuD_Ie8FL7AtqM6O1VDQyQ6nxrRPCHukMoCXBEgOsiRP0XL2ZUBXmDDV9qhnyTXFMnXcTF_ntRueTh7gfBqcCoDoOWgw0WBOGUhr_TBtLrUHOYiVfYU3L_wF_6HFL5gOCDrLCkHCPvRFKGwtqzbpGLJeO_9O889o_2Dc0IDxYnwV_D9g99UCm1cX9eXXOzgSGBUpLJ0N8frxOUBeCCuXLnhLsIhL81GxCbgHmJUcsPggqTbOBcDoqeJO18CL0vqfz6RgKk72u9B31MXoYirUMivHXQ9F_RqOOccg_fwYYkbxmBbx0qhOBe92MYcHqBCXCO9efww3xa9XO6Gc9IJLMaUC1NrSCXCC_WJH8kqcu_utmuhLCurNC");
        headMap.put("x-zse-93","101_3_2.0");
        headMap.put("cookie","d_c0=\"APDdLoQDqBOPThRvoqfcqQ2B3rku5G3xIfU=|1630472725\"");
    }
    /**
     * Washes a downloaded page into CrawlerData items: timeline API pages yield
     * article bodies plus interaction counters, root-comments pages yield comments.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> listCrawlerData = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(ListUrl)) {
            // The original evaluated the identical regex match twice in two separate ifs.
            this.washArticle(crawlerRecord, page, listCrawlerData);
            this.washInteraction(crawlerRecord, page, listCrawlerData);
        }
        if (url.matches(commentUrl)) {
            this.washComment(crawlerRecord, page, listCrawlerData);
        }
        return listCrawlerData;
    }

    /**
     * Extracts comments (plus a per-comment interaction record carrying the like
     * count) from a root_comments response. The parent article id is taken from
     * the "seriesId" biz tag set in getPageListArticle().
     */
    private void washComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listCrawlerData) {
        String seriesId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("seriesId");
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String s : all) {
            JSONObject jsonObject = JSONObject.parseObject(s);
            // getString() converts numeric JSON values safely; the original raw (String)
            // casts would throw ClassCastException on numbers like vote_count.
            String id = jsonObject.getString("id");                // comment id
            String content = jsonObject.getString("content");      // comment body
            String createdTime = jsonObject.getString("created_time");
            if (createdTime == null) {
                // No timestamp -> cannot range-check; skip instead of building "null000".
                continue;
            }
            createdTime = createdTime + "000"; // seconds -> milliseconds
            if (!isDateRange(crawlerRecord, Long.valueOf(createdTime))) {
                continue;
            }
            JSONObject member = jsonObject.getJSONObject("author").getJSONObject("member");
            String authorId = member.getString("id");
            String authorName = member.getString("name");
            String voteCount = jsonObject.getString("vote_count"); // comment like count
            CrawlerData commentData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, id))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, seriesId))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(Long.valueOf(createdTime))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .addContentKV(AICCommonField.Field_Content, content)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .flowInPipelineTag("kafka")
                    .build();
            commentData.setFilterPipelineResult(true);
            listCrawlerData.add(commentData);
            CrawlerData interactionData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction, id))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, id))
                    .releaseTime(Long.valueOf(createdTime))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                    .addContentKV(AICCommonField.Field_I_Likes, voteCount)
                    .addContentKV(AICCommonField.Field_I_Comments, "0")
                    .flowInPipelineTag("kafka")
                    .build();
            interactionData.setFilterPipelineResult(true);
            listCrawlerData.add(interactionData);
        }
    }

    /**
     * Extracts per-answer interaction counters (likes and comment count) from a
     * timeline page.
     */
    private void washInteraction(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listCrawlerData) {
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String s : all) {
            JSONObject target = JSONObject.parseObject(s).getJSONObject("target");
            // getString() converts numeric JSON values safely; the original raw (String)
            // casts would throw ClassCastException on numeric timestamps/counters.
            String createTime = target.getString("created_time");
            if (createTime == null) {
                createTime = target.getString("created_at");
            }
            if (createTime == null) {
                createTime = target.getString("updated");
            }
            if (createTime == null) {
                // No timestamp at all: skip instead of NumberFormatException on "null000".
                continue;
            }
            createTime = createTime + "000"; // seconds -> milliseconds
            if (!isDateRange(crawlerRecord, Long.valueOf(createTime))) {
                // Entries are newest-first; stop once out of the configured date range.
                return;
            }
            String id = target.getString("id");          // article id
            JSONObject question = target.getJSONObject("question");
            String type = target.getString("type");      // answer type
            String url;
            if (question == null) {
                url = target.getString("url");
            } else {
                // Rebuild a browsable URL from the api question URL ("http" -> "https" prefix).
                url = "https" + question.getString("url").substring(4) + "/" + type + "/" + id;
            }
            String commentCount = target.getString("comment_count");
            String voteupCount = target.getString("voteup_count");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction, id))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                    .releaseTime(Long.valueOf(createTime))
                    .url(url)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                    .addContentKV(AICCommonField.Field_I_Likes, voteupCount)
                    .addContentKV(AICCommonField.Field_I_Comments, commentCount)
                    .flowInPipelineTag("kafka")
                    .build();
            listCrawlerData.add(crawlerData);
        }
    }

    /**
     * Extracts article/answer bodies from a timeline page. The raw HTML content
     * is split into plain paragraph text and image URLs (separated by the
     * literal "\x01" marker) before being emitted as article CrawlerData.
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> listCrawlerData) {
        KVTag commentFilterTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("commont_filter_tag");
        CrawlerRecord filterRecord = JSONObject.parseObject((String) commentFilterTag.getVal(), CrawlerRecord.class);
        crawlerRecord.setFilter(filterRecord.getFilter());
        crawlerRecord.setFilterInfos(filterRecord.getFilterInfos());
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String s : all) {
            JSONObject target = JSONObject.parseObject(s).getJSONObject("target");
            // getString() converts numeric JSON values safely; raw (String) casts CCE on numbers.
            String createTime = target.getString("created_time");
            if (createTime == null) {
                createTime = target.getString("updated");
            }
            if (createTime == null) {
                createTime = target.getString("created_at");
            }
            if (createTime == null) {
                // No timestamp: skip instead of NumberFormatException on "null000".
                continue;
            }
            createTime = createTime + "000"; // seconds -> milliseconds
            if (!isDateRange(crawlerRecord, Long.valueOf(createTime))) {
                continue;
            }
            String id = target.getString("id");           // article id
            String content = target.getString("content"); // raw HTML body
            StringBuilder text = new StringBuilder();
            StringBuilder images = new StringBuilder();
            if (content != null) {
                page.setHtml(Html.create(content));
                List<String> allText = page.getHtml().xpath("//p//text()").all();
                if (!allText.isEmpty()) {
                    for (String t : allText) {
                        text.append(t);
                    }
                } else {
                    // No <p> nodes: fall back to the raw content.
                    text.append(content);
                }
                for (String img : page.getHtml().xpath("//img/@src").all()) {
                    images.append(img).append("\\x01"); // separator between image urls
                }
            }
            // When content is null the text stays empty; the original appended the
            // literal string "null" here.
            JSONObject author = target.getJSONObject("author");
            // Author id is the last path segment of the author's profile URL.
            String[] segments = author.getString("url").split("/");
            String authorId = segments[segments.length - 1];
            String authorName = author.getString("name");
            JSONObject question = target.getJSONObject("question");
            String type = target.getString("type");       // answer type
            String title;
            String url;
            if (question == null) {
                title = target.getString("excerpt_title");
                url = target.getString("url");
            } else {
                title = question.getString("title");
                // Rebuild a browsable URL from the api question URL.
                url = "https" + question.getString("url").substring(4) + "/" + type + "/" + id;
                url = url.replaceAll("api", "www").replaceAll("questions", "question");
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, id))
                    .url(url)
                    .releaseTime(Long.valueOf(createTime))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Content, text.toString())
                    .addContentKV(AICCommonField.Field_Images, images.toString())
                    .addContentKV(AICCommonField.Field_Urls, url)
                    .addContentKV("articleType", type)
                    .flowInPipelineTag("kafka")
                    .build();
            listCrawlerData.add(crawlerData);
        }
    }


    /**
     * Re-queues a failed request, tracking the retry count in the
     * RECORD_AGAIN_REQUEST biz tag. Gives up after 10 attempts — the original
     * logged the "excessive downloads" error but kept re-queuing forever.
     */
    private static void recordAgainRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseList) {
        int count;
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(RECORD_AGAIN_REQUEST)) {
            count = Integer.parseInt(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(RECORD_AGAIN_REQUEST)) + 1;
            if (count >= 10) {
                log.error("url excessive number of repeated downloads this url = {}", url);
                return; // drop the request instead of retrying indefinitely
            }
        } else {
            count = 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .releaseTime(System.currentTimeMillis())
                .httpUrl(url)
                // Distinct record key per attempt so the retry is not deduplicated away.
                .recordKey(crawlerRequestRecord.getRecordKey() + count)
                .copyRequestTags()
                .copyResultTags()
                .copyBizTags()
                .build();
        // Reuse the failed request's transport state so the retry is identical.
        record.getHttpRequest().setCookies(crawlerRequestRecord.getHttpRequest().getCookies());
        record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        record.getHttpRequest().setHeaders(crawlerRequestRecord.getHttpRequest().getHeaders());
        record.setNeedParsedPage(crawlerRequestRecord.isNeedParsedPage());
        record.setNeedWashPage(crawlerRequestRecord.isNeedWashPage());
        record.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count);
        parseList.add(record);
    }
    /**
     * Checks whether a release timestamp (milliseconds) falls inside the
     * record's configured date-range filter.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in ms; 0 is treated as "unknown" and rejected
     * @return true when no date filter applies, or when the timestamp is inside the range
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            // Records without a date filter always pass.
            return true;
        }
        List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
        if (filterInfos == null) {
            // Defensive: a date filter is requested but no filter info is present
            // (the original would have thrown NullPointerException here).
            return false;
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : filterInfos) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {
                // Explicit [start, end] window.
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {
                // Rolling window: (now - 1min - hourFromNow hours, now - 1min].
                endTime = System.currentTimeMillis() - 60000;
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        return startTime != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }

    /**
     * A record belongs to this script when its biz "site" tag equals "article".
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Constant-first equals: returns false instead of NPE when the record has no site tag.
        return side.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: this script needs no per-record post-processing.
    }

    /** @return the crawler domain identifier for this script ("zhihu"). */
    @Override
    public String domain() {
        return domain;
    }
}
