package com.chance.cc.crawler.development.scripts.tianya;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;

/**
 * Crawler script for the "carSeries" site of Tianya BBS (bbs.tianya.cn):
 * recognizes board list pages and post detail pages, queues follow-up
 * requests, and washes articles, replies and interaction counts out of
 * the downloaded HTML.
 */
public class TianyaCarSeriesCrawlerScript extends CrawlerCommonScript {
    // NOTE(review): loggers are conventionally private static final — confirm before changing.
    private Logger log = LoggerFactory.getLogger(TianyaCarSeriesCrawlerScript.class);// logger
    private static final String DOMAIN = "tianya";
    private static final String SITE = "carSeries";
    // Biz-tag key that carries the re-download attempt counter for a failed request.
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";


    //    http://bbs.tianya.cn/list-cars-1.shtml
    private static final String CommentRegulars = "http://bbs.tianya.cn/list-\\S*-1.shtml";// first page of a comment board
    //    http://bbs.tianya.cn/list.jsp?item=cars&order=1
    private static final String ArticleRegulars = "http://bbs.tianya.cn/list.jsp\\?item=\\S*&order=1";// first page of an article board

    //    http://bbs.tianya.cn/list.jsp?item=cars&nextid=1628808109000
    private static final String CommentNextRegulars = "http://bbs.tianya.cn/list.jsp\\?item=\\S*&nextid=\\d*";// comment-board paging link
    //    http://bbs.tianya.cn/list.jsp?item=cars&order=1&nextid=488398
    private static final String ArticleNextRegulars = "http://bbs.tianya.cn/list.jsp\\?item=\\S*&order=1&nextid=\\d*";// article-board paging link
    //    http://bbs.tianya.cn/post-cars-461651-2.shtml
    private static final String ContentRegulars = "http://bbs.tianya.cn/post-\\S*-\\S*-\\S*.shtml";// post detail page

    /** @return the crawler domain identifier handled by this script ("tianya"). */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers every URL pattern this script can handle: board first pages,
     * board paging links and post detail pages.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                CommentRegulars,
                ArticleRegulars,
                CommentNextRegulars,
                ArticleNextRegulars,
                ContentRegulars,
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Accepts only records whose business "site" tag equals {@code SITE}
     * ("carSeries"); null-safe because the constant is on the left.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        String siteTag = crawlerRecord.tagsCreator()
                .bizTags()
                .getCategoryTag()
                .getKVTagStrVal("site");
        return SITE.equals(siteTag);
    }

    /**
     * Extracts follow-up requests from a downloaded page.
     * <p>
     * Failed downloads (non-200 and not a 404) are re-queued via
     * {@link #requestAgainCrawlerRecord}. List pages yield detail-page requests;
     * detail pages yield their own "next page" request.
     *
     * @param crawlerRecord the request that produced {@code page}
     * @param page          the downloaded page
     * @return the newly discovered requests (possibly empty, never null)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // was: raw "new ArrayList" — use the diamond to keep the generic type
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // Verify the page actually downloaded before parsing it.
        if (page.getStatusCode() != 200 || (!page.isDownloadSuccess())) {
            // BUG FIX: the placeholder was written as "{null}", so requestUrl was
            // never substituted into the log message.
            log.error("download page url == {} error status is {}", requestUrl, page.getStatusCode());
            if (page.getStatusCode() != 404) {// not a hard miss: schedule a retry
                this.requestAgainCrawlerRecord(parsedLinks, crawlerRecord);
                crawlerRecord.setNeedWashPage(false);
                return parsedLinks;
            }
        }
        if (requestUrl.matches(CommentRegulars) || requestUrl.matches(ArticleRegulars)
                || requestUrl.matches(CommentNextRegulars) || requestUrl.matches(ArticleNextRegulars)) {
            contentRequest(crawlerRecord, page, parsedLinks);// list page: extract detail links
        } else if (requestUrl.matches(ContentRegulars)) {
            contentNextRequest(crawlerRecord, page, parsedLinks);// detail page: extract next page
        }
        return parsedLinks;
    }

    /**
     * Parses a board list page: queues the board's "next page" link plus one
     * detail-page request per article row whose publish time passes the date
     * filter. Per-row views/replies/pubTime metadata is stashed on the shared
     * biz tags so the detail-page wash can pick it up.
     *
     * @param crawlerRecord the list-page request being processed
     * @param page          the downloaded list page
     * @param parsedLinks   output list the new requests are appended to
     */
    private void contentRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        // next page, e.g. http://bbs.tianya.cn/list.jsp?item=cars&nextid=1628808109000
        String url = page.getHtml().xpath("//a[./text()=\"下一页\"]/@href").get();
        if (StringUtils.isNotBlank(url)) {// was: url != null — also rejects a blank href
            // hrefs come back HTML-escaped ("&amp;"); strip the "amp;" residue
            String nextPageUrl = "http://bbs.tianya.cn" + url.replaceAll("amp;", "");
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parsedLinks.add(turnPageRequest);
        }


        // detail rows
        List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"mt5\"]//tbody//tr").nodes();// all article rows
        for (Selectable node : nodes) {
            try {
                String contentUrl = node.xpath(".//td[@class=\"td-title faceblue\"]//a/@href").get();// detail link
                if (StringUtils.isBlank(contentUrl)) {
                    continue;
                }
                contentUrl = "http://bbs.tianya.cn" + contentUrl;
                String views = node.xpath("./td[3]/text()").get();// view count
                String comments = node.xpath("./td[4]/text()").get();// reply count
                String pubTime = node.xpath("./td[5]/@title").get();// publish time
                // NOTE(review): these tags are overwritten on every row; correctness
                // relies on copyBizTags() snapshotting them into the request built
                // below — confirm that is the framework contract.
                crawlerRecord.tagsCreator().bizTags().getCategoryTag().addKVTag(AICCommonField.Field_I_Views, views);
                crawlerRecord.tagsCreator().bizTags().getCategoryTag().addKVTag(AICCommonField.Field_I_Comments, comments);
                crawlerRecord.tagsCreator().bizTags().getCategoryTag().addKVTag("pubTime", pubTime);
                long releaseTimeToLong = DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm").getTime();
                if (!isDateRange(crawlerRecord, releaseTimeToLong)) {
                    continue;// outside the configured date window
                }

                // Propagate the originally configured filter onto the detail request.
                KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
                CrawlerRequestRecord contentRecord = CrawlerRequestRecord.builder()// detail-page request
                        .itemPageRequest(crawlerRecord)
                        .recordKey(contentUrl)
                        .httpUrl(contentUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .needWashed(true)
                        .notFilterRecord()
                        .build();
                contentRecord.setFilter(filterInfoRecord.getFilter());
                contentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                parsedLinks.add(contentRecord);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }


    /**
     * Parses a post detail page and queues its "next page" link (if any),
     * propagating the original comment-record filter settings onto the
     * new request.
     */
    private void contentNextRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        // e.g. http://bbs.tianya.cn/post-cars-461651-2.shtml
        try {
            String nextHref = page.getHtml().xpath("//a[./text()=\"下页\"]/@href").get();
            if (nextHref == null) {
                return;// last page of the thread
            }
            String nextPageUrl = "http://bbs.tianya.cn" + nextHref;
            KVTag filterTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
            CrawlerRecord filterRecord = JSON.parseObject((String) filterTag.getVal(), CrawlerRecord.class);
            CrawlerRequestRecord nextPageRecord = CrawlerRequestRecord.builder()// next-page request
                    .itemPageRequest(crawlerRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .needWashed(true)
                    .notFilterRecord()
                    .build();
            nextPageRecord.setFilter(filterRecord.getFilter());
            nextPageRecord.setFilterInfos(filterRecord.getFilterInfos());
            parsedLinks.add(nextPageRecord);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }

    /**
     * Washes a downloaded post detail page into {@link CrawlerData} records,
     * one wash pass per result data type requested on the record's tags.
     *
     * @return the washed records (possibly empty, never null)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String sitBiz = crawlerRecord.tagsCreator().bizTags().siteBiz();
        if (crawlerRecord.getHttpRequest().getUrl().matches(ContentRegulars)) {// post detail page
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {// article body
                crawlerDataList.addAll(articleWash(crawlerRecord, page));
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {// article interaction counts
                crawlerDataList.addAll(articleInteractionWash(crawlerRecord, page));
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {// replies (floor posts)
                crawlerDataList.addAll(commentWash(crawlerRecord, page));
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {// comments attached to replies
                crawlerDataList.addAll(commentAnotherWash(crawlerRecord, page));
            }
            // BUG FIX: the original compared the strings with "==", which tests
            // reference identity and (barring interning luck) is always false,
            // silently skipping the reply-interaction wash.
            if ("autoAge-comment".equals(sitBiz) || "purchase-comment".equals(sitBiz)) {
                if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {// reply interaction counts
                    crawlerDataList.addAll(commentInteractionWash(crawlerRecord, page));
                }
            }
        }

        return crawlerDataList;
    }


    /**
     * Washes the article (first post) out of a detail page. Reads the
     * pubTime metadata stashed on the biz tags by {@link #contentRequest}.
     *
     * @return a single-element list on success, empty on parse failure
     */
    public List<CrawlerData> articleWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        log.info("清洗文章");
        String url = page.getRequest().getUrl();
        String title = page.getHtml().xpath("//h1[@class=\"atl-title\"]//span//text()").get();// title
        String authorName = page.getHtml().xpath("//a[@class=\"js-vip-check\"]/text()").get();// author name
        String authorId = page.getHtml().xpath("//a[@class=\"js-vip-check\"]/@uid").get();// author id
        List<String> texts = page.getHtml().xpath("//div[@class=\"bbs-content clearfix\"]//text()").all();// body text fragments
        // StringBuilder: single-threaded use, no need for StringBuffer's locking
        StringBuilder contents = new StringBuilder();
        for (String text : texts) {
            contents.append(text).append(" ");
        }
        List<String> picture = page.getHtml().xpath("//div[@class=\"bbs-content clearfix\"]//img/@src").all();// image urls
        StringBuilder pictures = new StringBuilder();
        for (String text : picture) {
            // NOTE(review): "\0x1" is a NUL char followed by literal "x1" — if the
            // intended separator was byte 0x01 it should read "\u0001". Left
            // unchanged to keep the downstream data format stable; confirm.
            pictures.append(text).append("\0x1");
        }
        String pubTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("pubTime");
        // All digit runs in the URL; the second-to-last run is the post id.
        String[] articleId = Pattern.compile("[^0-9]").matcher(url).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");
        try {
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[articleId.length-2]))
                    .resultLabelTag(article)
                    .url(url)
                    .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm").getTime())
                    .addContentKV(AICCommonField.Field_Content, contents.toString().replaceAll("&#xD;","").trim())
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .addContentKV(AICCommonField.Field_Images, pictures.toString().trim())
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }


    /**
     * Washes the article's interaction counts (views and reply count) that
     * {@link #contentRequest} stashed on the biz tags, emitting an
     * interaction record parented to the article record.
     */
    public List<CrawlerData> articleInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        log.info("清洗文章互动量");
        try {
            String pageUrl = page.getRequest().getUrl();// detail page url
            String viewCount = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_I_Views);
            String replyCount = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_I_Comments);
            String publishTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("pubTime");
            // digit runs in the url; the second-to-last run is the post id
            String[] idParts = Pattern.compile("[^0-9]").matcher(pageUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");
            String postId = idParts[idParts.length - 2];
            long releaseMillis = DateUtils.parseDate(publishTime, "yyyy-MM-dd HH:mm").getTime();

            CrawlerData interactionData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), interaction.enumVal(), postId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), postId))
                    .resultLabelTag(interaction)
                    .url(pageUrl)
                    .releaseTime(releaseMillis)
                    .flowInPipelineTag("kafka")
                    .addContentKV(AICCommonField.Field_I_Views, viewCount)
                    .addContentKV(AICCommonField.Field_I_Comments, replyCount)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .build();
            interactionData.setFilterPipelineResult(true);
            results.add(interactionData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return results;
    }

    /**
     * Washes the replies (floor posts) of a detail page, one comment record
     * per {@code div.atl-item}, each parented to the article record.
     */
    private List<CrawlerData> commentWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        log.info("清洗回复");
        String url = page.getRequest().getUrl();
        // The article id derives from the page URL, which is constant for every
        // row — hoisted out of the loop (the original recompiled the pattern and
        // re-split the url once per row). Second-to-last digit run is the post id.
        String[] articleId = Pattern.compile("[^0-9]").matcher(url).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ");
        List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"atl-item\"]").nodes();
        for (Selectable node : nodes) {
            try {
                List<String> contentParts = node.xpath(".//div[@class=\"bbs-content\"]//text()").all();// reply text fragments
                // StringBuilder: single-threaded use, no need for StringBuffer's locking
                StringBuilder content = new StringBuilder();
                for (String text : contentParts) {
                    content.append(text).append(" ");
                }
                String releaseTime = node.xpath("./@js_restime").get();// reply time
                long time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                String user = node.xpath("./@host").get();// reply author
                String userId = node.xpath("./@hostid").get();// reply author id
                String commentId = node.xpath("./@replyid").get();// reply id
                String floor = node.xpath("./@id").get();// floor id

                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), articleId[articleId.length-2]))
                        .resultLabelTag(comment)
                        .url(url)
                        .releaseTime(time)
                        .addContentKV(AICCommonField.Field_Content, content.toString().replaceAll("&#xD;", "").trim())
                        .addContentKV(AICCommonField.Field_Author, user)
                        .addContentKV(AICCommonField.Field_Author_Id, userId)
                        .addContentKV("comment_id", commentId)
                        .addContentKV(AICCommonField.Field_Floor, floor)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes the per-reply interaction count (number of sub-comments),
     * emitting an interaction record parented to the reply's comment record.
     * A missing or non-numeric count defaults to "0".
     */
    private List<CrawlerData> commentInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("清洗回复互动量");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        String url = page.getRequest().getUrl();
        List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"atl-item\"]").nodes();
        for (Selectable node : nodes) {
            try {
                // NOTE(review): "//a[...]" looks like a root-anchored path — confirm
                // the selector library scopes it to the current node.
                String rawCount = node.xpath("//a[@class=\"a-link-2 ir-remark\"]").get();// sub-comment count text
                String commentId = node.xpath("./@replyid").get();// reply id
                // BUG FIX: the original called matcher(commentNum) BEFORE its null
                // check, so a missing count node threw an NPE and the whole row was
                // silently dropped by the catch block. Strip non-digits only when
                // present and default to "0".
                String commentNum = "0";
                if (rawCount != null) {
                    String digits = Pattern.compile("[^0-9]").matcher(rawCount).replaceAll("").trim();
                    if (!digits.isEmpty()) {
                        commentNum = digits;
                    }
                }
                String releaseTime = node.xpath("./@js_restime").get();// reply time
                long time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                // (removed an articleId computation that was never used)

                CrawlerData crawlerData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), interaction.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                        .resultLabelTag(interaction)
                        .url(url)
                        .releaseTime(time)
                        .addContentKV(AICCommonField.Field_I_Comments, commentNum)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerData.setFilterPipelineResult(true);
                crawlerArticleDataList.add(crawlerData);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
        return crawlerArticleDataList;
    }

    /**
     * Washes the sub-comments nested under each reply ({@code div.ir-list li}),
     * each parented to its enclosing reply's comment record.
     */
    private List<CrawlerData> commentAnotherWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        log.info("清洗评论");
        String url = page.getRequest().getUrl();
        List<Selectable> parents = page.getHtml().xpath("//div[@class=\"atl-item\"]").nodes();
        for (Selectable parent : parents) {
            String parentCommentId = parent.xpath("./@replyid").get();// enclosing reply id
            List<Selectable> nodes = parent.xpath(".//div[@class=\"ir-list\"]//li").nodes();
            for (Selectable node : nodes) {
                try {
                    String content = node.xpath(".//span[@class=\"ir-content\"]//text()").get();// comment text
                    String releaseTime = node.xpath("./@replytime").get();// comment time
                    long time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                    String user = node.xpath("./@username").get();// comment author
                    String userId = node.xpath("./@userid").get();// comment author id
                    String commentId = node.xpath("./@rid").get();// comment id
                    // (removed an articleId computation that was never used)

                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), parentCommentId))
                            .resultLabelTag(comment)
                            .url(url)
                            .releaseTime(time)
                            .addContentKV(AICCommonField.Field_Content, content)
                            .addContentKV(AICCommonField.Field_Author, user)
                            .addContentKV(AICCommonField.Field_Author_Id, userId)
                            .addContentKV("comment_id", commentId)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    crawlerArticleDataList.add(crawlerData);
                } catch (Exception e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
        return crawlerArticleDataList;
    }

    /** No post-execution cleanup is needed for this script; intentionally empty. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {
    }

    /**
     * Re-queues a request whose download failed, up to 10 attempts. The attempt
     * count rides on the {@code REQUEST_AGAIN_TAG} biz tag, and the record key
     * is suffixed with it so the retry is not deduplicated away.
     *
     * @param crawlerRequestRecords output list the retry request is appended to
     * @param crawlerRecord         the failed request
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 10) {
                // BUG FIX: the message said "pcauto" — copied from another script;
                // this is the tianya domain.
                log.error("{} download page the number of retries exceeds the limit" +
                        ",request url {}", DOMAIN, crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        CrawlerRequestRecord crawlerRequestRecord;
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {// retry as a turn-page request
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {// retry as an item-page request
            // NOTE(review): unlike the branch above, this one does not call
            // notFilterRecord() — confirm the asymmetry is intentional.
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Carry over flags, headers and extras, then bump the retry counter.
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    // Pool of desktop browser user-agent strings used to vary request headers.
    private static final List<String> agentList = new ArrayList<>();

    static {
        String[] userAgents = {
                "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36",
                "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36",
                "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36",
                "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko",
                "Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko",
                "Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)",
                "Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)",
                "Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)",
                "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2",
                "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3",
                "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16",
                "Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14",
                "Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14",
                "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14",
                "Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02",
                "Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00",
                "Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00",
                "Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00",
                "Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00",
                "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1",
                "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0",
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0",
                "Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0",
                "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0",
                "Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0",
                "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36",
                "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400",
        };
        for (String ua : userAgents) {
            agentList.add(ua);
        }
    }

    /**
     * @return a user-agent string chosen uniformly at random from {@code agentList}
     */
    private static String getRandomUA() {
        // BUG FIX: commons-lang3 RandomUtils.nextInt(start, end) treats "end" as
        // EXCLUSIVE, so the previous bound "size() - 1" could never select the
        // last entry of the pool.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }


    /**
     * Checks whether a release timestamp falls inside the record's configured
     * date-range filter. Records with no date-range filtering are always in
     * range.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis (0 means unknown)
     * @return true when in range or no date filter applies
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange
                && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true;// no date filtering configured
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {// explicit [start, end] window
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {// rolling window ending one minute ago
                endTime = System.currentTimeMillis() - 60000;
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        return startTime != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }

}
