package com.chance.cc.crawler.development.scripts.autohome.chejiahao;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Field_Author_Follows;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Tag_Field_Topic_Type;

/**
 * @author lt
 * @version 1.0
 * @date 2021-07-07 13:20:05
 * @email okprog@sina.com
 */
public class AutoHomeCJHCrawlerScript extends CrawlerCommonScript {

    // SLF4J logger; static final per convention — one logger per class, not per instance.
    private static final Logger logger = LoggerFactory.getLogger(AutoHomeCJHCrawlerScript.class);


    /** Crawler domain identifier used in data ids and log messages. */
    private static final String DOMAIN = "autohome";
    // Site labels distinguishing chejiahao article vs video content.
    private static final String Article_Site = "che_article";
    private static final String Video_Site = "che_video";
    /** Biz-tag key carrying the scheduled list category (info type). */
    private static final String InfoType = "info_type";

    // URL templates: list paging, comment list, comment count, like count and view count endpoints.
    private static final String listUrlFormat = "https://chejiahao.autohome.com.cn/Default/IndexMore?infotype=%s&pageId=%s&pageIdentity=1";
    private static final String commentUrlFormat = "https://reply.autohome.com.cn/api/comments/show.json?count=50&appid=21&replydata=1&order=0&page=%s&id=%s";
    private static final String commentCountUrlFormat = "https://reply.autohome.com.cn/api/QueryComment/CountsByObjIds?_appid=cms&appid=21&dataType=json&objids=%s";
    // Likes endpoint keeps its query pre-URL-encoded; "#objId" is substituted with the article key.
    private static final String likesUrlFormat = "https://reply.autohome.com.cn/api/like/listcount.json?_appid=m&appid=21&liketype=1&ids=%5B%7B%22objid%22%3A#objId%2C%22secobj%22%3A%22%22%7D%5D&datatype=json";
    private static final String viewsUrlFormat = "https://chejiahao.autohome.com.cn/Infos/GetGetSeeNumber?infoId=%s";

    // Regexes used by parseLinks to recognize which handler a downloaded URL belongs to.
    private static final String indexRegex = "https?://chejiahao\\.autohome\\.com\\.cn/\\?infotype=\\d*";
    private static final String listUrlRegex = "https?://chejiahao\\.autohome\\.com\\.cn/Default/IndexMore\\?infotype=\\d*&pageId=\\d*&pageIdentity=\\d*";
    private static final String articleUrlRegex = "https?://chejiahao\\.autohome\\.com\\.cn/info/\\d*";
    private static final String commentRegex = "https://reply\\.autohome\\.com\\.cn/api/comments/show\\.json\\?count=50&appid=21&replydata=1&order=0&page=\\d*&id=\\d*";
    private static final String commentCountUrlRegex = "https://reply\\.autohome\\.com\\.cn/api/QueryComment/CountsByObjIds\\?_appid=cms&appid=21&dataType=json&objids=\\d*";
    private static final String likesUrlRegex = "https://reply\\.autohome\\.com\\.cn/api/like/listcount\\.json\\?\\S*";
    private static final String viewsUrlRegex = "https://chejiahao\\.autohome\\.com\\.cn/Infos/GetGetSeeNumber\\?infoId=\\d*";

    // JSONP comment endpoint (response is wrapped in jsonpCallback3(...)) and its matching regex.
    private static final String COMMENT = "https://reply.autohome.com.cn/api/comments/show.json?id=%s&page=%s&appid=21&count=20&replydata=1&datatype=jsonp&callback=jsonpCallback3&_=%s";
    private static final String COMMENTRegulars = "https://reply.autohome.com.cn/api/comments/show.json\\S*";
    private static final String CJH_BASE_URL = "https://chejiahao.autohome.com.cn";
    /** Per-infoType set of article ids already seen in the current crawl round (intra-run dedup). */
    private static ConcurrentHashMap<String,Set<String>> idsByInfoType = new ConcurrentHashMap<>(2);
    /** Guards compound reset/check-and-add operations on {@link #idsByInfoType}. */
    private static final Object lock = new Object();


    /**
     * Pre-processes a scheduled request. When the record carries a
     * {@code Tag_Field_Domain_Result_Json} tag (a redispatched article produced by an
     * earlier crawl round), the embedded JSON is unpacked to restore the article URL,
     * site and release time onto the record before delegating to the base class.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        HttpRequest httpRequest = requestRecord.getHttpRequest();
        if (requestRecord.tagsCreator().bizTags().hasKVTag(Tag_Field_Domain_Result_Json)) {
            KVTag resultJsonTag = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(Tag_Field_Domain_Result_Json);
            CrawlerDomainUrls domainUrls = JSON.parseObject(String.valueOf(resultJsonTag.getVal()), CrawlerDomainUrls.class);
            // The "url" field itself holds a JSON payload with the real url/site/releaseTime.
            JSONObject payload = JSONObject.parseObject(domainUrls.getUrl());
            String articleUrl = payload.getString(Field_Urls);
            String site = payload.getString("site");
            requestRecord.setReleaseTime(payload.getLong("releaseTime"));
            requestRecord.setNeedParsedPage(true);
            requestRecord.setNeedWashPage(true);
            requestRecord.tagsCreator().resultTags().addResultDataType(article);
            requestRecord.tagsCreator().resultTags().addResultDataType(interaction);
            httpRequest.setUrl(articleUrl);
            requestRecord.setRecordKey(articleUrl);
            requestRecord.tagsCreator().bizTags().addSite(site);
            // Drop the tag so the payload is not re-processed on later rounds.
            requestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(Tag_Field_Domain_Result_Json);
        }
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Routes a downloaded page to the matching link-extraction handler based on
     * the shape of the URL that produced it. A failed download is requeued for
     * retry; an unrecognized URL yields {@code null} (no links).
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> extractedLinks = new ArrayList<>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        // Download failure: requeue for retry, skip washing this page.
        if (doHttpPageCheck(crawlerRequestRecord, httpPage)) {
            logger.error("{} request download has error, status code {},error info [{}] ,will retry", DOMAIN,
                    httpPage.getStatusCode(), httpPage.getRawText());
            addCrawlerRecords(extractedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false); // download failed, nothing to wash
            return extractedLinks;
        }
        if (requestUrl.matches(indexRegex) || requestUrl.matches(listUrlRegex)) {
            return parseListLinks(crawlerRequestRecord, httpPage, extractedLinks);
        }
        if (requestUrl.matches(articleUrlRegex)) {
            return parseArticleLinks(crawlerRequestRecord, httpPage, extractedLinks);
        }
        if (requestUrl.matches(commentRegex)) {
            return parseCommentLinks(crawlerRequestRecord, httpPage, extractedLinks);
        }
        if (requestUrl.matches(COMMENTRegulars)) {
            return getCommentTurn(crawlerRequestRecord, httpPage, extractedLinks);
        }
        return null;
    }

    /**
     * After a JSONP comment page is processed, queues the next page of the same
     * article's comment list. The response body arrives wrapped as
     * {@code jsonpCallback3(...)}; the wrapper is stripped in place so the JSON
     * selector can read the comment list. An empty list (or a parse failure)
     * ends the pagination.
     */
    private List<CrawlerRequestRecord> getCommentTurn(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        try {
            String rawText = httpPage.getRawText();
            // Strip the "jsonpCallback3(" prefix (15 chars) and the trailing ")".
            rawText = rawText.substring(15, rawText.length() - 1);
            httpPage.setRawText(rawText);
            List<String> page = httpPage.getJson().jsonPath($_type + ".commentlist").all();
            if (page.isEmpty()) {
                return parsedLinks; // reached the last comment page
            }
        } catch (Exception e) {
            crawlerRequestRecord.setNeedWashPage(false);
            // Preserve the stack trace instead of silently dropping the cause.
            logger.error("no page", e);
            return parsedLinks;
        }
        String articleKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleKey");
        KVTag pageId = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("pageId");
        Integer val = (Integer) pageId.getVal();
        val = val + 1;
        String commentUrl = String.format(COMMENT, articleKey, val, System.currentTimeMillis());
        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .recordKey(commentUrl)
                .httpUrl(commentUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .resultLabelTag(comment)
                .resultLabelTag(interaction)
                .needWashed(true)
                .build();
        commentRecord.tagsCreator().bizTags().addCustomKV("articleKey", articleKey);
        commentRecord.tagsCreator().bizTags().addCustomKV("pageId", val);
        parsedLinks.add(commentRecord);

        return parsedLinks;
    }

    /**
     * Computes the next comment-list page from the JSON payload of the current
     * one (50 comments per page) and queues it while pages remain.
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String articleKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleKey");
        JSONObject payload = JSONObject.parseObject(httpPage.getRawText());
        int page = payload.getIntValue("page");
        int total = payload.getIntValue("commentcount");
        int lastPage = (int) Math.ceil(total / 50.0); // endpoint serves 50 comments per page
        if (page >= lastPage) {
            return parsedLinks; // no further pages
        }
        String nextUrl = String.format(commentUrlFormat, (page + 1), articleKey);
        parsedLinks.add(CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .recordKey(nextUrl)
                .httpUrl(nextUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .resultLabelTag(comment)
                .resultLabelTag(interaction)
                .needWashed(true)
                .copyBizTags()
                .build());

        return parsedLinks;
    }

    /**
     * Extracts follow-up links from an article detail page: the internal
     * likes/views/comment-count endpoints, plus (when the record asks for
     * comments) the first page of the JSONP comment list.
     */
    private List<CrawlerRequestRecord> parseArticleLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String articleUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        // 404 pages come back as a normal HTML page carrying this notice.
        if (httpPage.getRawText().contains("您要浏览的页面暂时无法访问或不存在")) {
            logger.info("{} article page [{}] is 404 not found", DOMAIN, articleUrl);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        // Article key is the last path segment of the article URL.
        String articleKey = articleUrl.substring(articleUrl.lastIndexOf("/") + 1);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("articleKey", articleKey);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("articleUrl", articleUrl);
        // Internal download links (likes / views / comment count).
        genInternalRecords(crawlerRequestRecord, parsedLinks, articleKey);

        // Comment-list link, only when comments were requested for this record.
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)) {
            if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                logger.error("{} crawler comment need to filter information!", DOMAIN);
                return parsedLinks;
            }
            KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
            CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
            // First page of the outer comment loop (JSONP endpoint).
            String commentUrl = String.format(COMMENT, articleKey, 1, System.currentTimeMillis());
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .recordKey(commentUrl)
                    .httpUrl(commentUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .resultLabelTag(comment)
                    .resultLabelTag(interaction)
                    .needWashed(true)
                    .build();
            commentRecord.tagsCreator().bizTags().addCustomKV("urls", crawlerRequestRecord.getHttpRequest().getUrl());
            commentRecord.tagsCreator().bizTags().addCustomKV("articleKey", articleKey);
            commentRecord.tagsCreator().bizTags().addCustomKV("pageId", 1);
            parsedLinks.add(commentRecord);
        }
        return parsedLinks;
    }

    /**
     * Queues the three internal-download requests attached to an article page:
     * like count, view count and comment count.
     *
     * @param crawlerRequestRecord the article page request these belong to
     * @param parsedLinks          output list the records are appended to
     * @param articleKey           numeric article id used in the endpoint URLs
     */
    private void genInternalRecords(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parsedLinks, String articleKey) {
        // Likes endpoint keeps its query pre-encoded; only the object id is substituted.
        parsedLinks.add(buildInternalRecord(crawlerRequestRecord, likesUrlFormat.replace("#objId", articleKey)));
        // View-count endpoint.
        parsedLinks.add(buildInternalRecord(crawlerRequestRecord, String.format(viewsUrlFormat, articleKey)));
        // Comment-count endpoint.
        parsedLinks.add(buildInternalRecord(crawlerRequestRecord, String.format(commentCountUrlFormat, articleKey)));
    }

    /** Builds one internal-download record for the given URL, keyed by that URL. */
    private CrawlerRequestRecord buildInternalRecord(CrawlerRequestRecord parent, String url) {
        return CrawlerRequestRecord.builder()
                .itemPageRequest(parent)
                .httpUrl(url)
                .recordKey(url)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
    }

    /**
     * Extracts article item links and the next list-page link from an index /
     * "IndexMore" response. An index page starts a fresh crawl round and resets
     * the per-infoType dedup set; each article id seen in the round is recorded
     * so duplicates across list pages are skipped.
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String infoType = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(InfoType);
        List<Selectable> selectables = httpPage.getHtml().xpath("//div[@pageid]").nodes();
        String lastRequestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        String keyName = "ids_" + infoType;
        if (lastRequestUrl.matches(indexRegex)) {
            // Index page marks the start of a crawl round: reset this infoType's dedup set.
            synchronized (lock) {
                idsByInfoType.put(keyName, new HashSet<>());
            }
        }

        if (selectables.isEmpty()) {
            logger.error("page download error : page is not full");
            logger.error(httpPage.getRawText());
            return parsedLinks;
        }

        // The pageid of the last item drives the next "IndexMore" page request.
        Selectable lastNode = selectables.get(selectables.size() - 1);
        String pageId = lastNode.xpath("./@pageid").get();

        String nextPageUrl = String.format(listUrlFormat, infoType, pageId);
        CrawlerRequestRecord listItemRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .recordKey(nextPageUrl)
                .httpUrl(nextPageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        HttpRequest httpRequest = listItemRecord.getHttpRequest();
        httpRequest.addHeader("origin", CJH_BASE_URL);
        httpRequest.addHeader("referer", String.format("%s/?infotype=%s", CJH_BASE_URL, infoType));
        httpRequest.addHeader("user-agent", getRandomUA());
        parsedLinks.add(listItemRecord);

        for (Selectable selectable : selectables) {
            String articleKey = selectable.xpath("./@data-infoid").get();
            String itemUrl = CJH_BASE_URL + "/info/" + articleKey;
            String pubTime = selectable.xpath("./div[@class=\"userInfo\"]/div[@class=\"info fn-right\"]/span[4]/text()").get();
            synchronized (lock) {
                // computeIfAbsent guards against a list page being handled before the
                // index page initialized the set (the original code NPE'd here).
                Set<String> ids = idsByInfoType.computeIfAbsent(keyName, k -> new HashSet<>());
                if (!ids.add(articleKey)) {
                    continue; // duplicate within this crawl round
                }
                try {
                    // Recent items show "MM-dd HH:mm" without a year; assume the current year.
                    String regex_1 = "\\d{2}-\\d{2} \\d{2}:\\d{2}";
                    if (pubTime.matches(regex_1)) {
                        int year = Calendar.getInstance().get(Calendar.YEAR);
                        pubTime = year + "-" + pubTime;
                    }
                    long releaseTime = DateUtils.parseDate(pubTime, "yyyy-MM-dd HH:mm").getTime();
                    CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRequestRecord)
                            .recordKey(itemUrl)
                            .httpUrl(itemUrl)
                            .releaseTime(releaseTime)
                            .resultLabelTag(article)
                            .resultLabelTag(interaction)
                            .copyBizTags()
                            .build();
                    itemRecord.getHttpRequest().addHeader("user-agent", getRandomUA());
                    parsedLinks.add(itemRecord);
                } catch (Exception e) {
                    // Original call passed e.getMessage() as the format string; log properly.
                    logger.error("parse date error, pubTime [{}]", pubTime, e);
                }
            }
        }
        return parsedLinks;
    }

    /**
     * Collects the results of the internal likes / comment-count / view-count
     * downloads and stores them as biz tags on the article record. Each metric
     * falls back to "0" on any parse failure — deliberately best-effort.
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        for (CrawlerRequestRecord downloadRecord : internalDownloadRecords) {
            String downloadUrl = downloadRecord.getHttpRequest().getUrl();
            HttpPage downloadPage = downloadRecord.getInternalDownloadPage();
            // Like count: {"returncode":0,"result":[{"count":...}]}
            if (downloadUrl.matches(likesUrlRegex)) {
                try {
                    JSONObject body = JSONObject.parseObject(downloadPage.getRawText());
                    if (body.getIntValue("returncode") == 0) {
                        JSONObject first = (JSONObject) body.getJSONArray("result").get(0);
                        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("likes", first.getString("count"));
                    }
                } catch (Exception e) {
                    crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("likes", "0");
                }
            }
            // Comment count: {"returncode":0,"result":{"sum":{"sumreplycount":...}}}
            if (downloadUrl.matches(commentCountUrlRegex)) {
                try {
                    JSONObject body = JSONObject.parseObject(downloadPage.getRawText());
                    if (body.getIntValue("returncode") == 0) {
                        String comments = body.getJSONObject("result").getJSONObject("sum").getString("sumreplycount");
                        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("comments", comments);
                    }
                } catch (Exception e) {
                    crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("comments", "0");
                }
            }
            // View count: response appears to be a bare quoted number — strip the quotes.
            if (downloadUrl.matches(viewsUrlRegex)) {
                try {
                    String views = downloadPage.getRawText().replaceAll("\"", "");
                    crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("views", views);
                } catch (Exception e) {
                    crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("views", "0");
                }
            }
        }
    }

    /**
     * Dispatches washing of a downloaded page to the type-specific washers,
     * driven by the result data-type tags carried on the record and the shape
     * of the request URL.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> results = new ArrayList<>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)
                && requestUrl.matches(articleUrlRegex)) {
            results.addAll(washArticle(crawlerRequestRecord, httpPage));
        }

        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)) {
            results.addAll(washInteraction(crawlerRequestRecord, httpPage));
        }

        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)
                && requestUrl.matches(COMMENTRegulars)) {
            this.washCommentNEW(crawlerRequestRecord, httpPage, results);
        }

        return results;
    }

    /**
     * Washes one JSONP comment page into {@link CrawlerData} items. Timestamps
     * arrive in the .NET JSON form "/Date(millis+zone)/"; the epoch millis are
     * cut out of the middle of that string. Comments outside the configured
     * date range are dropped.
     */
    private void washCommentNEW(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerData> crawlerDataList) {
        String articleKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleKey");
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String urls = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("urls");
        if (urls == null) {
            // No article URL tag (e.g. direct dispatch): fall back to the request URL.
            urls = crawlerRequestRecord.getHttpRequest().getUrl();
        }
        List<String> commentJsonList = httpPage.getJson().jsonPath($_type + ".commentlist").all();
        for (String commentStr : commentJsonList) {
            JSONObject commentObj = JSONObject.parseObject(commentStr);
            String replyId = commentObj.getString("ReplyId");
            String memberId = commentObj.getString("RMemberId");
            String memberName = commentObj.getString("RMemberName");
            String content = commentObj.getString("RContent");
            String replyDate = commentObj.getString("RReplyDate");
            // "/Date(1600000000000+0800)/" -> keep only the epoch-millis segment.
            Long time = Long.valueOf(replyDate.substring(6, replyDate.length() - 7));
            if (!isDateRange(crawlerRequestRecord, time)) {
                continue; // outside the configured date window
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), comment, replyId))
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(urls)
                    .releaseTime(time)
                    .addContentKV(Field_Author, memberName)
                    .addContentKV(Field_Author_Id, memberId)
                    .addContentKV(Field_Content, content)
                    .resultLabelTag(comment)
                    .flowInPipelineTag("kafka")
                    .flowInPipelineTag("console")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerDataList.add(crawlerData);
        }
    }
    /**
     * Checks whether a release timestamp falls inside the record's configured
     * date-range filter.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis; may be null or 0 when unknown
     * @return true when no date-range filtering applies, or the timestamp lies
     *         within the configured window; false otherwise
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord, Long releaseTimeToLong) {
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            return true; // no date filtering configured for this record
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {
                // Explicit [start, end] window.
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {
                // Rolling window ending one minute before now.
                endTime = System.currentTimeMillis() - 60000;
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        // Null guards: the original unboxed releaseTimeToLong and would NPE on null.
        return startTime != null && endTime != null
                && releaseTimeToLong != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }
    /**
     * Washes an article detail page into two data items: the article record
     * (kafka/file/console pipelines) and a redis redispatch record carrying
     * url/releaseTime/site for later interaction refreshes.
     */
    private List<CrawlerData> washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String author = httpPage.getHtml().xpath("//div[@class=\"articleTag\"]/span[1]/text()").get();
        String follows = httpPage.getHtml().xpath("//div[@class=\"num\"]/span[1]/text()").get();
        if (StringUtils.isBlank(follows)) {
            follows = "0";
        }
        if (follows.endsWith("关注")) {
            follows = follows.replace("关注", "");
        }
        if (follows.contains("万")) {
            // "1.5万" -> 15000. Cast to long so the count has no trailing ".0"
            // (the original emitted e.g. "15000.0").
            follows = follows.split("万")[0];
            follows = String.valueOf((long) (Double.parseDouble(follows) * 10000));
        }

        String authorUrl = httpPage.getHtml().xpath("//div[@class=\"name text-overflow\"]/a/@href").get();
        String authorId = "";
        // Guard against author URLs without the expected "Authors/" segment
        // (the original split would throw ArrayIndexOutOfBoundsException).
        if (StringUtils.isNotBlank(authorUrl) && authorUrl.contains("Authors/")) {
            authorId = authorUrl.split("Authors/")[1].split("#")[0];
        }

        String title = httpPage.getHtml().xpath("//div[@class=\"title\"]/text() | //div[@class=\"title \"]/text()").get();
        // Body text and image URLs are flattened into single delimited strings.
        StringBuilder sbContent = new StringBuilder();
        for (String s : httpPage.getHtml().xpath("//div[@class=\"introduce\"]//p//text()").all()) {
            sbContent.append(s).append(" ");
        }
        StringBuilder sbImages = new StringBuilder();
        for (String s : httpPage.getHtml().xpath("//div[@class=\"introduce\"]//img/@src").all()) {
            sbImages.append(s).append(",");
        }
        List<String> categories = httpPage.getHtml().xpath("//div[@class=\"tagBot\"]//span/text()").all();
        if (null != categories && !categories.isEmpty()) {
            crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Field_Topic_Type, categories);
        }
        String articleKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleKey");
        try {
            String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(lastRequest.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Author_Follows, follows)
                    .addContentKV(Field_Title, title == null ? "" : title)
                    .addContentKV(Field_Content, sbContent.toString())
                    .addContentKV(Field_Images, sbImages.toString())
                    .resultLabelTag(article)
                    .flowInPipelineTag("console")
                    .flowInPipelineTag("file")
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerArticleData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerArticleData);

            // Redispatch payload stored in redis for later interaction refreshes.
            CrawlerData crawlerRedisData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(lastRequest.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Urls, lastRequest.getUrl())
                    .addContentKV("releaseTime", String.valueOf(crawlerRequestRecord.getReleaseTime()))
                    .addContentKV("site", site)
                    .resultLabelTag(article)
                    .flowInPipelineTag("console")
                    .flowInPipelineTag("file_redis")
                    .flowInPipelineTag("redis")
                    .build();
            crawlerRedisData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerRedisData);
        } catch (Exception e) {
            // Original call passed e.getMessage() as the format string; log properly.
            logger.error("wash data error for article [{}]", articleKey, e);
        }
        return crawlerArticleDataList;
    }

    /**
     * Builds interaction data (view / comment / like counters) from either an
     * article detail page or a comment-list JSON response, dispatching on the
     * shape of the last request URL.
     *
     * @param crawlerRequestRecord last request record carrying the biz tags
     *                             (site, articleKey, counters, articleUrl)
     * @param httpPage             downloaded page (raw JSON for the comment branch)
     * @return interaction data list, possibly empty
     */
    private List<CrawlerData> washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerInteractionDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = lastRequest.getUrl();
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");

        if (lastRequestUrl.matches(articleUrlRegex)) {
            String articleKey = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleKey");
            // View counts may be abbreviated with the Chinese unit "万" (x10,000).
            String views = normalizeViews(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("views"));
            String comments = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("comments");
            String likes = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("likes");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), articleKey))
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), articleKey))
                    .url(lastRequest.getUrl())
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_I_Comments, comments)
                    .addContentKV(Field_I_Views, views)
                    .addContentKV(Field_I_Likes, likes)
                    .resultLabelTag(interaction)
                    .flowInPipelineTag("console")
                    .flowInPipelineTag("file")
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerInteractionDataList.add(crawlerData);
        }
        if (lastRequestUrl.matches(commentRegex)) {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            JSONArray commentList = jsonObject.getJSONArray("commentlist");
            for (Object o : commentList) {
                JSONObject commentJson = (JSONObject) o;
                String commentId = commentJson.getString("ReplyId");
                String likes = commentJson.getString("RUp");
                // ".NET JSON date", e.g. "/Date(1625631360000+0800)/": keep only
                // the epoch-millis digits before the time-zone offset.
                String pubTime = commentJson.getString("RReplyDate");
                pubTime = pubTime.split("\\(")[1];
                if (pubTime.contains("+")) {
                    pubTime = pubTime.split("\\+")[0];
                }
                if (pubTime.contains("-")) {
                    pubTime = pubTime.split("-")[0];
                }

                try {
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRequestRecord, httpPage)
                            .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                            .url(crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleUrl"))
                            // Long.parseLong: the Long(String) constructor is deprecated.
                            .releaseTime(Long.parseLong(pubTime))
                            .addContentKV(Field_I_Likes, likes)
                            .resultLabelTag(interaction)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .requestLabelTag(result)
                            .flowInPipelineTag("console")
                            .flowInPipelineTag("file")
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    crawlerInteractionDataList.add(crawlerData);
                } catch (Exception e) {
                    // Fixed SLF4J usage: previously e.getMessage() was passed as
                    // the format string and the stack trace was lost.
                    logger.error("parse date error, url {}", lastRequestUrl, e);
                }
            }
        }
        return crawlerInteractionDataList;
    }

    /**
     * Expands a view counter abbreviated with the Chinese unit "万" (x10,000),
     * e.g. "1.5万" -> "15000", "3万" -> "30000"; other values pass through
     * unchanged. Numeric parsing fixes the old string-splicing logic, which
     * mis-scaled counts with more than one decimal digit ("1.25万" produced
     * "125000" instead of "12500").
     */
    private static String normalizeViews(String views) {
        if (views == null || !views.contains("万")) {
            return views;
        }
        double value = Double.parseDouble(views.replace("万", "").trim());
        return String.valueOf(Math.round(value * 10000));
    }

    /**
     * Builds comment data records from autohome's comment-list JSON response
     * ("commentlist" array in the page body).
     *
     * @param crawlerRequestRecord last request record carrying the biz tags
     *                             (site, articleKey)
     * @param httpPage             downloaded JSON page
     * @return comment data list, possibly empty
     */
    private List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerCommentDataList = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
        JSONArray commentList = jsonObject.getJSONArray("commentlist");
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");


        for (Object o : commentList) {
            JSONObject commentJson = (JSONObject) o;
            String commentId = commentJson.getString("ReplyId");
            String author = commentJson.getString("RMemberName");
            String authorId = commentJson.getString("RMemberId");
            String content = commentJson.getString("RContent");
            String floor = commentJson.getString("RFloor");
            // ".NET JSON date", e.g. "/Date(1625631360000+0800)/": keep only the
            // epoch-millis digits before the time-zone offset.
            String pubTime = commentJson.getString("RReplyDate");
            pubTime = pubTime.split("\\(")[1];
            if (pubTime.contains("+")) {
                pubTime = pubTime.split("\\+")[0];
            }
            if (pubTime.contains("-")) {
                pubTime = pubTime.split("-")[0];
            }

            try {
                CrawlerData crawlerCommentData = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, article.enumVal(), crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleKey")))
                        .url(lastRequest.getUrl())
                        // Long.parseLong: the Long(String) constructor is deprecated.
                        .releaseTime(Long.parseLong(pubTime))
                        .addContentKV(Field_Author, author)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Content, content)
                        .addContentKV(Field_Floor, floor)
                        .resultLabelTag(comment)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(result)
                        .flowInPipelineTag("console")
                        .flowInPipelineTag("file")
                        .flowInPipelineTag("kafka")
                        .build();
                crawlerCommentData.setFilterPipelineResult(true);
                crawlerCommentDataList.add(crawlerCommentData);
            } catch (Exception e) {
                // Fixed SLF4J usage: previously e.getMessage() was passed as the
                // format string and the stack trace was lost.
                logger.error("parse date error, url {}", lastRequest.getUrl(), e);
            }
        }
        return crawlerCommentDataList;
    }


    /**
     * Re-enqueues a failed request as a retry record, tracking the attempt
     * count in the "download_retry_count" biz tag and giving up after 20 tries.
     *
     * @param crawlerRequestRecords target list the retry record is appended to
     * @param crawlerRecord         the record whose download failed
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {

        CrawlerBusinessTags bizTags = crawlerRecord.tagsCreator().bizTags();
        int retries = 1;
        if (bizTags.hasKVTag("download_retry_count")) {
            retries = (int) bizTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            // Too many attempts already — drop the request instead of retrying.
            if (retries >= 20) {
                logger.error("{} request download he number of retries exceeds the limit" +
                        ",request url {}", DOMAIN, crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        // Record the upcoming attempt before the tags are copied into the retry.
        bizTags.addCustomKV("download_retry_count", retries + 1);

        CrawlerRequestRecord retryRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .notFilterRecord()
                .build();
        crawlerRequestRecords.add(retryRecord);

        // Preserve the turn-page-item request type on the retry record.
        if (crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)) {
            retryRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            retryRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        retryRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        retryRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
    }

    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script is able to handle.
        String[] regulars = {indexRegex, listUrlRegex, articleUrlRegex, commentRegex, COMMENTRegulars};
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        // Only article and video sites are handled by this script.
        String crawlerSite = crawlerRequestRecord.tagsCreator().bizTags().site();
        if (crawlerSite.equalsIgnoreCase(Article_Site)) {
            return true;
        }
        return crawlerSite.equalsIgnoreCase(Video_Site);
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No-op: this script performs no post-execution processing.
    }

    /**
     * @return the crawler domain identifier ("autohome")
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Checks whether the page download failed or is incomplete.
     *
     * @param crawlerRequestRecord last record
     * @param httpPage             page
     * @return true when the download is broken (unexpected status, failed
     *         download, empty body) and a retry is warranted; false when the
     *         page is usable or is a genuine 404 that must not be retried
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        String lastRequestUrl = lastRequest.getUrl();
        int statusCode = httpPage.getStatusCode();
        if (statusCode != 200) {
            if (statusCode == 404) {
                // A genuine "page not found" from autohome carries this apology
                // text; the URL is dead, so retrying is pointless.
                if (httpPage.getRawText().contains("很抱歉，您访问的页面不存在")) {
                    logger.error("this page {} is right 404", lastRequestUrl);
                    return false;
                }
                // Fix: the "{}" placeholder previously received the status code
                // even though the message reads "this page {}" — log the URL and
                // include the status code separately.
                logger.error("this page {} is not a right 404, status code is {}", lastRequestUrl, statusCode);
                return true;
            }
            logger.error("download page {} error, status code is {}", lastRequestUrl, statusCode);
            return true;
        }
        if (!httpPage.isDownloadSuccess()) {
            logger.error("download page failed, check your link {}", lastRequestUrl);
            return true;
        }
        if (StringUtils.isBlank(httpPage.getRawText())) {
            logger.error("download page empty, check your link {}", lastRequestUrl);
            return true;
        }
        return false;
    }

    // Pool of desktop browser user-agent strings; one is picked at random per
    // request (via getRandomUA) to vary the User-Agent header across downloads.
    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Picks a uniformly random user-agent string from {@code agentList}.
     *
     * @return one of the configured user-agent strings
     */
    private static String getRandomUA() {
        // Fix: RandomUtils.nextInt's upper bound is exclusive, so the bound must
        // be size(), not size() - 1 — otherwise the last agent is never chosen.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }
}
