package com.chance.cc.crawler.development.scripts.weibo.tie;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.tags.KVTag;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Random;

import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * Weibo post crawler script: given an article URL it fetches the article body,
 * its interaction counts (reposts/comments/likes), its comment pages and the
 * reply pages under each comment through the weibo.com ajax endpoints.
 *
 * @author songding
 * @since 2021/10/26
 */
public class WeiboTeCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(WeiboTeCrawlerScript.class);

    private static final String domain = "weibo";
    private static final String site = "tie";

    /** Article detail endpoint; %s = encoded article id. */
    private static final String articleUrl = "https://weibo.com/ajax/statuses/show?id=%s";
    /** First page of comments; args: article mid, author uid. */
    private static final String commentUrl = "https://weibo.com/ajax/statuses/buildComments?flow=1&is_reload=1&id=%s&is_show_bulletin=2&is_mix=0&count=20&uid=%s";
    /** Later comment pages; args: article mid, max_id cursor, author uid. */
    private static final String turnCommentUrl = "https://weibo.com/ajax/statuses/buildComments?flow=1&is_reload=1&id=%s&is_show_bulletin=2&is_mix=0&max_id=%s&count=20&uid=%s";
    /** First page of replies under one comment; args: comment mid, author uid. */
    private static final String replyCommentUrl = "https://weibo.com/ajax/statuses/buildComments?flow=1&is_reload=1&id=%s&is_show_bulletin=2&is_mix=1&fetch_level=1&max_id=0&count=20&uid=%s";
    /** Later reply pages; args: comment mid, max_id cursor, author uid. */
    private static final String turnPageReplyCommentUrl = "https://weibo.com/ajax/statuses/buildComments?flow=1&is_reload=1&id=%s&is_show_bulletin=2&is_mix=1&fetch_level=1&max_id=%s&count=20&uid=%s";

    // URL patterns used to route records in parseLinks()/washPage().
    private static final String weiboUrlRegulars = "http[s]*://weibo.com/\\d+/[a-zA-Z0-9\\?\\_\\=]*";
    private static final String articleRegulars = "https://weibo.com/ajax/statuses/show\\?id=\\S*";
    private static final String commentRegulars = "https://weibo.com/ajax/statuses/buildComments\\?flow=1&is_reload=1&id=\\S*&is_show_bulletin=2&is_mix=0&count=20&uid=\\S*";
    private static final String turnPageCommentRegulars = "https://weibo.com/ajax/statuses/buildComments\\?flow=1&is_reload=1&id=\\S*&is_show_bulletin=2&is_mix=0&max_id=\\S*&count=20&uid=\\S*";
    private static final String replyCommentRegulars = "https://weibo.com/ajax/statuses/buildComments\\?flow=1&is_reload=1&id=\\S*&is_show_bulletin=2&is_mix=1&fetch_level=1&max_id=0&count=20&uid=\\S*";
    private static final String turnPageReplyCommentRegulars = "https://weibo.com/ajax/statuses/buildComments\\?flow=1&is_reload=1&id=\\S*&is_show_bulletin=2&is_mix=1&fetch_level=1&max_id=\\S*&count=20&uid=\\S*";

    /** Lock serializing the global 414 (rate-limit) backoff across crawler threads. */
    private static final Object obj = new Object();

    @Override
    public void initUrlRegulars() {
        addUrlRegular(weiboUrlRegulars);
        addUrlRegular(articleRegulars);
        addUrlRegular(commentRegulars);
        addUrlRegular(turnPageCommentRegulars);
        addUrlRegular(replyCommentRegulars);
        addUrlRegular(turnPageReplyCommentRegulars);
    }

    /**
     * Random backoff in milliseconds, 30s..60s inclusive, used after an
     * HTTP 414 (rate-limited) response.
     *
     * <p>Fix: the original loop broke when the candidate fell OUTSIDE the
     * 30-60s window (inverted condition) and never advanced its counter;
     * the wanted range is now generated directly.
     */
    private int times() {
        return (30 + new Random().nextInt(31)) * 1000;
    }

    /**
     * Routes a downloaded page. On a failed download the request is re-queued
     * (with a global backoff when the server answered 414); on success,
     * follow-up requests for comments and comment replies are extracted.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        if (page.getStatusCode() != 200 || !page.isDownloadSuccess()) {
            log.error("download error or page != 200  code={}", page.getStatusCode());
            if (page.getStatusCode() == 414) {
                // 414 means we are being rate limited: every thread queues up
                // behind the same lock so the whole crawler backs off at once.
                synchronized (obj) {
                    try {
                        int time = times();
                        log.warn("crawler thread sleep={}s", time / 1000);
                        Thread.sleep(time);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag; still re-queue below.
                        Thread.currentThread().interrupt();
                        log.warn("backoff sleep interrupted", e);
                    }
                }
            }
            this.recordAgainDownload(crawlerRecord, page, parseLinks);
            crawlerRecord.setNeedWashPage(false);
            return parseLinks;
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (url.matches(weiboUrlRegulars)) {
            // Article page: schedule the first comment page.
            this.parseCommentUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(commentRegulars) || url.matches(turnPageCommentRegulars)) {
            // Comment page (first or paged): schedule replies and the next page.
            this.turnCommentUrl(crawlerRecord, page, parseLinks);
        }
        if (url.matches(replyCommentRegulars) || url.matches(turnPageReplyCommentRegulars)) {
            // Reply page (first or paged): schedule the next reply page.
            this.turnReplyCommentUrl(crawlerRecord, page, parseLinks);
        }
        return parseLinks;
    }

    /**
     * Schedules the next page of replies under one comment. A max_id of "0"
     * or an empty data array marks the last page.
     */
    private void turnReplyCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String max_id = page.getJson().jsonPath($_type + ".max_id").get();
        if (max_id.equals("0")) {
            return;
        }
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        if (all.isEmpty()) {
            return;
        }
        String uid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("uid");
        String commentMid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("commentMid");
        String nextPageUrl = String.format(turnPageReplyCommentUrl, commentMid, max_id, uid);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        // Re-attach the tags the next page needs to build its own successor URL.
        record.tagsCreator().bizTags().addCustomKV("commentMid", commentMid);
        record.tagsCreator().bizTags().addCustomKV("uid", uid);
        parseLinks.add(record);
    }

    /**
     * From one comment page, schedules a reply request for every comment that
     * advertises more replies ("more_info" present) and, unless this is the
     * last page (max_id == "0"), the next comment page.
     */
    private void turnCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String max_id = page.getJson().jsonPath($_type + ".max_id").get();
        String uid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("id");
        String mid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("mid");
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        if (all.isEmpty()) {
            return;
        }
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            // "more_info" is only present when the comment has more replies than
            // the ones embedded inline -> fetch them through the reply endpoint.
            if (jsonObject.getString("more_info") != null) {
                String commentMid = jsonObject.getString("mid");
                String replyComment = String.format(replyCommentUrl, commentMid, uid);
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(replyComment)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                record.tagsCreator().bizTags().addCustomKV("commentMid", commentMid);
                record.tagsCreator().bizTags().addCustomKV("uid", uid);
                parseLinks.add(record);
            }
        }
        // max_id == "0" means there is no next page.
        if (max_id.equals("0")) {
            return;
        }
        String nextPageUrl = String.format(turnCommentUrl, mid, max_id, uid);
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        parseLinks.add(commentRecord);
    }

    /**
     * Parses Weibo's "EEE MMM dd HH:mm:ss z yyyy" timestamp (e.g.
     * "Tue Oct 26 22:57:00 +0800 2021") into epoch milliseconds.
     *
     * @return epoch millis, or 0L when the value cannot be parsed
     */
    private Long getReleaseTime(String dt) {
        // SimpleDateFormat is not thread-safe, hence a fresh instance per call.
        // The original code re-formatted and re-parsed through a second pattern
        // in the default time zone, which yields the same epoch value; the
        // round trip has been removed.
        SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy", Locale.ENGLISH);
        try {
            return sdf.parse(dt).getTime();
        } catch (ParseException e) {
            log.warn("unparseable created_at value: {}", dt, e);
            return 0L;
        }
    }

    /**
     * From an article URL of the form https://weibo.com/{uid}/{eid}, builds the
     * request for the first comment page and tags it with uid/mid/eid.
     */
    private void parseCommentUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String url = crawlerRecord.getHttpRequest().getUrl();
        String[] split = url.split("/");
        String eid = split[split.length - 1]; // encoded article id (last path segment)
        String id = split[split.length - 2];  // author uid (second-to-last segment)
        String mid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("mid");
        String firstPageUrl = String.format(commentUrl, mid, id);
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(firstPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        commentRecord.tagsCreator().bizTags().addCustomKV("id", id);
        commentRecord.tagsCreator().bizTags().addCustomKV("mid", mid);
        commentRecord.tagsCreator().bizTags().addCustomKV("eid", eid);
        parseLinks.add(commentRecord);
    }

    /**
     * Builds the article-detail ajax request from a weibo.com article URL.
     * Currently not wired into parseLinks(); kept for the article/interaction
     * wash path which consumes articleRegulars URLs.
     */
    private void parseArticleUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String url = crawlerRecord.getHttpRequest().getUrl();
        String[] split = url.split("/");
        String eid = split[split.length - 1];
        String id = split[split.length - 2];
        String detailUrl = String.format(articleUrl, eid);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(detailUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.tagsCreator().bizTags().addCustomKV("id", id);
        record.tagsCreator().bizTags().addCustomKV("urls", url);
        record.tagsCreator().bizTags().addCustomKV("eid", eid);
        parseLinks.add(record);
    }

    /**
     * Dispatches a downloaded page to the wash routine matching both the
     * requested data type (article / interaction / comment) and the URL shape.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> dataList = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)
                && url.matches(articleRegulars)) {
            this.washArticle(crawlerRecord, page, dataList);
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.interaction)
                && url.matches(articleRegulars)) {
            this.washInteraction(crawlerRecord, page, dataList);
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment)) {
            if (url.matches(commentRegulars) || url.matches(turnPageCommentRegulars)) {
                this.washComment(crawlerRecord, page, dataList);
            }
            if (url.matches(replyCommentRegulars) || url.matches(turnPageReplyCommentRegulars)) {
                this.washReplyComment(crawlerRecord, page, dataList);
            }
        }
        return dataList;
    }

    /**
     * Washes one page of replies under a comment into comment CrawlerData rows.
     * The article's date filter is restored from the
     * "comment_record_filter_info" tag before range checking each reply.
     */
    private void washReplyComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String mid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("mid");
        String urls = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("urls");
        KVTag comment_record_filter_info = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
        CrawlerRecord filterRecord = JSON.parseObject((String) comment_record_filter_info.getVal(), CrawlerRecord.class);
        crawlerRecord.setFilter(filterRecord.getFilter());
        crawlerRecord.setFilterInfos(filterRecord.getFilterInfos());
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            Long releaseTime = getReleaseTime(jsonObject.getString("created_at"));
            if (!isDateRange(crawlerRecord, releaseTime)) {
                continue;
            }
            String id = jsonObject.getString("mid");
            JSONObject user = jsonObject.getJSONObject("user");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, id))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, mid))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Author_Id, user.getString("id"))
                    .addContentKV(Field_Author, user.getString("name"))
                    .addContentKV(Field_Content, jsonObject.getString("text_raw"))
                    .addContentKV(Field_I_Likes, jsonObject.getString("like_counts"))
                    .addContentKV(Field_I_Comments, "0") // replies carry no sub-reply count
                    .addContentKV(Field_Urls, urls)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .build();
            dataList.add(crawlerData);
        }
    }

    /**
     * Washes the article detail page into one interaction row
     * (reposts / comments / likes counts).
     */
    private void washInteraction(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String eid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("eid");
        Long releaseTime = getReleaseTime(page.getJson().jsonPath($_type + ".created_at").get());
        if (!isDateRange(crawlerRecord, releaseTime)) {
            return;
        }
        String reposts_count = page.getJson().jsonPath($_type + ".reposts_count").get();
        String comments_count = page.getJson().jsonPath($_type + ".comments_count").get();
        String attitudes_count = page.getJson().jsonPath($_type + ".attitudes_count").get();
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRecord, page)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction, eid))
                .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, eid))
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(releaseTime)
                .addContentKV(Field_I_Comments, comments_count)
                .addContentKV(Field_I_Likes, attitudes_count)
                .addContentKV(Field_I_Forwards, reposts_count)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .build();
        dataList.add(crawlerData);
    }

    /**
     * Washes the article detail page into one article row (author + text).
     */
    private void washArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String eid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("eid");
        Long releaseTime = getReleaseTime(page.getJson().jsonPath($_type + ".created_at").get());
        if (!isDateRange(crawlerRecord, releaseTime)) {
            return;
        }
        String content = page.getJson().jsonPath($_type + ".text_raw").get();
        JSONObject user = JSONObject.parseObject(page.getJson().jsonPath($_type + ".user").get());
        String screen_name = user.getString("screen_name");
        CrawlerData crawlerData = CrawlerData.builder()
                .data(crawlerRecord, page)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, eid))
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(releaseTime)
                .addContentKV(Field_Author, screen_name)
                .addContentKV(Field_Content, content)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .build();
        dataList.add(crawlerData);
    }

    /**
     * Washes one comment page into comment rows. Comments without "more_info"
     * embed all their replies inline in "comments", which are washed here as
     * well (otherwise a dedicated reply request was scheduled in parseLinks).
     */
    private void washComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> dataList) {
        String mid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("mid");
        String urls = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("urls");
        KVTag comment_record_filter_info = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
        CrawlerRecord filterRecord = JSON.parseObject((String) comment_record_filter_info.getVal(), CrawlerRecord.class);
        crawlerRecord.setFilter(filterRecord.getFilter());
        crawlerRecord.setFilterInfos(filterRecord.getFilterInfos());
        List<String> all = page.getJson().jsonPath($_type + ".data").all();
        for (String str : all) {
            JSONObject jsonObject = JSONObject.parseObject(str);
            Long releaseTime = getReleaseTime(jsonObject.getString("created_at"));
            if (!isDateRange(crawlerRecord, releaseTime)) {
                continue;
            }
            String commentId = jsonObject.getString("mid");
            if (jsonObject.getString("more_info") == null) {
                JSONArray comments = jsonObject.getJSONArray("comments");
                for (int i = 0; comments != null && i < comments.size(); i++) {
                    // Fix: elements of the fastjson array are JSONObjects, not
                    // Strings -- the original (String) cast would throw CCE.
                    JSONObject replyJson = comments.getJSONObject(i);
                    Long replyReleaseTime = getReleaseTime(replyJson.getString("created_at"));
                    if (!isDateRange(crawlerRecord, replyReleaseTime)) {
                        continue;
                    }
                    String replyId = replyJson.getString("mid");
                    JSONObject replyUser = replyJson.getJSONObject("user");
                    CrawlerData replyData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, replyId))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, mid))
                            .url(crawlerRecord.getHttpRequest().getUrl())
                            // Fix: use the reply's own timestamp, not the parent comment's.
                            .releaseTime(replyReleaseTime)
                            .addContentKV(Field_Author_Id, replyUser.getString("id"))
                            .addContentKV(Field_Author, replyUser.getString("name"))
                            .addContentKV(Field_Content, replyJson.getString("text_raw"))
                            .addContentKV(Field_I_Likes, replyJson.getString("like_count"))
                            .addContentKV(Field_I_Comments, "0")
                            .addContentKV(Field_Urls, urls)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .build();
                    dataList.add(replyData);
                }
            }
            JSONObject user = jsonObject.getJSONObject("user");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment, commentId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article, mid))
                    .url(crawlerRecord.getHttpRequest().getUrl())
                    .releaseTime(releaseTime)
                    .addContentKV(Field_Author_Id, user.getString("id"))
                    .addContentKV(Field_Author, user.getString("name"))
                    .addContentKV(Field_Content, jsonObject.getString("text_raw"))
                    .addContentKV(Field_I_Likes, jsonObject.getString("like_counts"))
                    .addContentKV(Field_I_Comments, jsonObject.getString("total_number"))
                    .addContentKV(Field_Urls, urls)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .build();
            dataList.add(crawlerData);
        }
    }

    /** Accepts only records tagged with this script's site ("tie"). */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        return crawlerRecord.tagsCreator().bizTags().site().equals(site);
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing needed for this script.
    }

    @Override
    public String domain() {
        return domain;
    }

    /**
     * Re-queues the failed request, giving up after 10 attempts. The attempt
     * counter travels in the "count" biz tag; recordKey includes the counter
     * so the retry is not deduplicated against the original request.
     */
    private void recordAgainDownload(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String count = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("count");
        int integerCount;
        if (count == null) {
            integerCount = 1;
        } else {
            integerCount = Integer.parseInt(count);
            if (integerCount >= 10) {
                log.error("The number of downloads exceeds the limit");
                return;
            }
            integerCount += 1;
        }
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        record.setRecordKey(crawlerRecord.getHttpRequest().getUrl() + integerCount);
        // Store the counter as a String so getKVTagStrVal() reads it back
        // consistently (the original stored an Integer; NOTE(review): confirm
        // the KV tag API coerces non-String values -- if so both forms work).
        record.tagsCreator().bizTags().addCustomKV("count", String.valueOf(integerCount));
        parseLinks.add(record);
    }

    /**
     * Returns true when the record's filter accepts the given release time.
     * Records without a date-range filter always pass; a dateRange filter
     * passes only when startTime <= releaseTimeToLong <= endTime.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    epoch millis of the item (0 = unparseable)
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true;
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                long[] dateAllowRange = filterInfo.getDateAllowRange();
                int hourFromNow = filterInfo.getHourFromNow();
                if (dateAllowRange != null) {
                    startTime = dateAllowRange[0];
                    endTime = dateAllowRange[1];
                } else if (hourFromNow != 0) {
                    // Window ends one minute before "now" to avoid racing the clock.
                    endTime = System.currentTimeMillis() - 60000;
                    startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                }
            }
        }
        // Null-guard endTime as well (the original only checked startTime).
        return startTime != null && endTime != null
                && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong
                && releaseTimeToLong <= endTime;
    }
}
