package com.chance.cc.crawler.development.scripts.xcar.carseries.forum;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.Downloader;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.record.CrawlerResultRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.meta.core.bean.common.MetaResponse;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.google.common.collect.Maps;
import jdk.nashorn.api.scripting.NashornScriptEngineFactory;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.jsoup.Jsoup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.script.*;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.result;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
/**
 * Crawler script for xcar.com.cn car-series forums: starting from a car-series
 * landing page it discovers the series' BBS forum, pages through the thread-list
 * API, washes thread (article) pages floor-by-floor, and pages through the
 * per-post comment API, emitting article / comment / interaction records.
 */
public class XCarSeriesForumCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XCarSeriesForumCrawlerScript.class);

    // Site / business identifiers stamped onto every emitted CrawlerData record.
    public static final String site= "forum";
    public static final String site_biz= "forum";

    // Car-series landing page, e.g. https://newcar.xcar.com.cn/<seriesId>/
    public static final String carSeriesUrlRegular = "https://newcar.xcar.com.cn/\\d*/";
    public static final String carSeriesUrlFormat = "https://newcar.xcar.com.cn/%s/";

    // Forum thread-list API (paged): fid=<forumId>, orderby=<dateline|lastpost>,
    // page=<n>, _=<timestamp cache-buster>.
    public static final String carSeriesForumUrlRegular = "https://www.xcar.com.cn/bbs/xbbsapi/forumdisplay/get_thread_list.php\\S*";
    public static final String carSeriesForumeUrlFormat = "https://www.xcar.com.cn/bbs/xbbsapi/forumdisplay/get_thread_list.php" +
            "?fid=%s&orderby=%s&filter=&ondigest=0&page=%d&_=%s";

    // Single thread (article) page, keyed by tid.
    public static final String carSeriesForumArticleUrlRegular = "https://www.xcar.com.cn/bbs/viewthread.php\\S*";
    public static final String carSeriesForumArticleUrlFormat = "https://www.xcar.com.cn/bbs/viewthread.php?tid=%s";

    // Comment ("getmore") API: tid=<thread>, page=<n>, pid=<parent post>, 10 per page.
    public static final String carSeriesForumCommentUrlRegular = "https://www.xcar.com.cn/bbs/xbbsapi/viewthread/getmore.php\\S*";
    public static final String carSeriesForumCommentUrlFormat = "https://www.xcar.com.cn/bbs/xbbsapi/viewthread/getmore.php" +
            "?tid=%s&page=%d&pid=%s&isReply=0&pageSize=10&order=2&_=%s";

    // Prefix used in every log line emitted by this script.
    public static final String xcarLogPrefix= "xcar series forum";

    // Compiled JS snippets keyed by name; populated by initCompileScript() (defined
    // elsewhere in this file) -- presumably used for the site's 521 cookie challenge.
    public Map<String,CompiledScript> compiledScriptMap;
    public ScriptEngine scriptEngine;

    /**
     * Builds the initial crawl records. Attaches the locally cached auth cookie (if
     * any) to the request, and when a keyword support-source record
     * (/v1/meta/&lt;domain&gt;/keys) is present, compiles the helper scripts and expands
     * it into per-keyword records; otherwise delegates to the parent implementation.
     *
     * @param requestRecord        the seed request record
     * @param supportSourceRecords auxiliary records; may be null or empty
     * @return the records to schedule (empty when no keyword source was found)
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();

        // Attach the cached auth cookie so downstream downloads are authenticated.
        String authInfo = getLocalAuthInfo();
        if (StringUtils.isNotBlank(authInfo)) {
            requestRecord.getHttpRequest().addHeader("cookie", authInfo);
        }

        if (supportSourceRecords == null || supportSourceRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }

        // Find the support record that carries the keyword list for this domain.
        CrawlerRequestRecord keywordSupportSourceRecord = null;
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            if (supportSourceRecord.getHttpRequest().getUrl().contains("/v1/meta/" + domain() + "/keys")) {
                keywordSupportSourceRecord = supportSourceRecord;
                break;
            }
        }

        if (keywordSupportSourceRecord == null) {
            logger.error("{} start urls cant be empty!", xcarLogPrefix);
            return allItemRecords;
        }

        try {
            initCompileScript();
        } catch (FileNotFoundException | ScriptException e) {
            // Log the full throwable: the bare message alone made compile failures
            // impossible to diagnose.
            logger.error("init compile scripts error", e);
        }

        // cookies / userAgents must be initialized before the records are built.
        initAllCrawlerRecordByKeyword(requestRecord, keywordSupportSourceRecord, allItemRecords);

        return allItemRecords;
    }

    /**
     * Routes a downloaded page to the matching link-extraction handler based on its
     * URL, retrying on download failure and on the site's 521 anti-bot status.
     *
     * @param context holds the downloaded page and the record that requested it
     * @return follow-up requests discovered on the page (possibly a retry of the same request)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRecordContext context) {
        HttpPage page = context.getPage();
        CrawlerRequestRecord crawlerRecord = context.getCrawlerRecord();
        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();

        if (!page.isDownloadSuccess()) {
            logger.error("{} download proxy has error ,will retry", xcarLogPrefix);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
            crawlerRecord.setNeedWashPage(false); // download failed: nothing to wash
            return crawlerRequestRecords;
        }

        // 521 is the site's JS cookie challenge: refresh the cookie, then retry.
        if (page.getStatusCode() == 521) {
            logger.error("{} download status code: 521 ,will retry", xcarLogPrefix);
            cookieUpdate(context.getPageDownloader(), crawlerRecord);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
            crawlerRecord.setNeedWashPage(false); // download failed: nothing to wash
            return crawlerRequestRecords;
        }

        // Dispatch by URL shape: series page -> forum list -> article -> comments.
        String url = page.getRequest().getUrl();
        if (isUrlMatch(url, carSeriesUrlRegular)) {
            carSeriesParselinks(crawlerRequestRecords, crawlerRecord, page);
        } else if (isUrlMatch(url, carSeriesForumUrlRegular)) {
            carSeriesForumParselinks(crawlerRequestRecords, crawlerRecord, page);
        } else if (isUrlMatch(url, carSeriesForumArticleUrlRegular)) {
            carSeriesForumArticleParselinks(crawlerRequestRecords, crawlerRecord, page);
        } else if (isUrlMatch(url, carSeriesForumCommentUrlRegular)) {
            carSeriesForumCommentParselinks(crawlerRequestRecords, crawlerRecord, page);
        }

        return crawlerRequestRecords;
    }

    /**
     * Dispatches a downloaded page to the matching wash handler. Only article and
     * comment pages yield structured data; every other URL returns an empty list.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        String requestUrl = page.getRequest().getUrl();
        if (isUrlMatch(requestUrl, carSeriesForumArticleUrlRegular)) {
            carSeriesForumArticleWashPage(results, crawlerRecord, page);
        } else if (isUrlMatch(requestUrl, carSeriesForumCommentUrlRegular)) {
            carSeriesForumCommentWashPage(results, crawlerRecord, page);
        }
        return results;
    }

    /**
     * From a car-series landing page, finds the series' BBS forum link in the top
     * navigation and queues page 1 of that forum's thread-list API, carrying the
     * brand/series identity from the page's embedded data blob in the request extras.
     * On a bad download (missing data blob) the original request is re-queued.
     */
    private void carSeriesParselinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {

        // Fetch the embedded data blob once (the original evaluated it twice).
        String dataInfo;
        try {
            dataInfo = articleDataInfo(page);
        } catch (Exception e) {
            logger.error("{} download car series {} page has error ,will retry", xcarLogPrefix, page.getRequest().getUrl(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
            return;
        }
        if (StringUtils.isBlank(dataInfo)) {
            // Page rendered without its data blob: treat as a bad download and retry.
            logger.error("{} download car series {} page has error: article data info is null!,will retry", xcarLogPrefix, page.getRequest().getUrl());
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
            return;
        }

        String order = (String) crawlerRecord.tagsCreator().scheduleTags().getCategoryTag().getKVTag("order").getVal();
        List<String> urls = page.getHtml().xpath("//ul[@class=\"tt_nav\"]/li/a/@href").all();
        for (String url : urls) {
            if (isUrlMatch(url, "//www.xcar.com.cn/bbs/forumdisplay.php\\?fid=\\d*")) {

                Json dataInfoJson = new Json(dataInfo);
                String bbsFid = url.substring(url.indexOf("=") + 1); // fid query parameter
                String carSeriesForumUrl = String.format(carSeriesForumeUrlFormat, bbsFid, order, 1, System.currentTimeMillis());
                CrawlerRequestRecord turnPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(carSeriesForumUrl)
                        .httpHeads(page.getRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .build();
                // Carry brand/series identity forward so wash steps can tag the data.
                turnPageRecord.getHttpRequest().addExtra("brand", dataInfoJson.jsonPath($_type + ".brand").get());
                turnPageRecord.getHttpRequest().addExtra("series", dataInfoJson.jsonPath($_type + ".series").get());
                turnPageRecord.getHttpRequest().addExtra("series_id", dataInfoJson.jsonPath($_type + ".series_id").get());
                turnPageRecord.getHttpRequest().addExtra("forum_fid", bbsFid);
                turnPageRecord.getHttpRequest().addExtra("forum_article_list_current_number", "1");
                turnPageRecord.getHttpRequest().addExtra("forum_list_order", order);
                crawlerRequestRecords.add(turnPageRecord);
                break; // only the first matching forum link is needed
            }
        }
    }

    /**
     * Parses one page of the forum thread-list API: queues the next list page while
     * the API reports more data (is_loading == "1"), and one item request per thread
     * found. Each item's release time comes from "dateline" (post time) or
     * "lastpost" (last-reply epoch seconds) depending on the configured list order.
     * Any parse failure re-queues the original request for retry.
     */
    private void carSeriesForumParselinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {
        try {
            Json rawTextJson = new Json(page.getRawText());
            List<String> dataList = rawTextJson.jsonPath($_type + ".data.data.thread_list").all();
            String isLoading = rawTextJson.jsonPath($_type + ".data.data.is_loading").get();
            // Forum name is optional in the payload; tolerate its absence.
            String forumName = null;
            try {
                forumName = rawTextJson.jsonPath($_type + ".data.data.forum_info.name").get();
            } catch (Exception e) {
                logger.warn("not forum name");
            }

            // More pages remain: queue the next thread-list page.
            if ("1".equals(isLoading)) {
                String forumFid = (String) page.getRequest().getExtras().get("forum_fid");
                String order = (String) page.getRequest().getExtras().get("forum_list_order");
                int currentNumber = Integer.parseInt(String.valueOf(page.getRequest().getExtras().get("forum_article_list_current_number")));
                String carSeriesForumUrl = String.format(carSeriesForumeUrlFormat, forumFid, order, currentNumber + 1, System.currentTimeMillis());
                CrawlerRequestRecord turnPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(carSeriesForumUrl)
                        .httpHeads(page.getRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                turnPageRecord.getHttpRequest().setExtras(Maps.newHashMap(page.getRequest().getExtras()));
                turnPageRecord.getHttpRequest().addExtra("forum_article_list_current_number", String.valueOf(currentNumber + 1));
                crawlerRequestRecords.add(turnPageRecord);
            }

            if (dataList != null && !dataList.isEmpty()) {
                for (String data : dataList) {
                    Json dataJson = new Json(data);
                    String tid = dataJson.jsonPath($_type + ".tid").get();
                    String carSeriesForumArticleUrl = String.format(carSeriesForumArticleUrlFormat, tid);

                    KVTag orderTag = crawlerRecord.tagsCreator().scheduleTags().getCategoryTag().getKVTag("order");
                    long releaseTime = 0;
                    if ("dateline".equals(orderTag.getVal())) {
                        releaseTime = dateToTimestamp(dataJson.jsonPath($_type + ".dateline").get());
                    }
                    if ("lastpost".equals(orderTag.getVal())) {
                        // lastpost is epoch seconds; convert to milliseconds.
                        releaseTime = Long.parseLong(dataJson.jsonPath($_type + ".lastpost").get()) * 1000;
                    }

                    CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                            .itemPageRequest(crawlerRecord)
                            .httpUrl(carSeriesForumArticleUrl)
                            .httpHeads(page.getRequest().getHeaders())
                            .releaseTime(releaseTime)
                            .copyBizTags()
                            .build();

                    itemRecord.getHttpRequest().setExtras(Maps.newHashMap(page.getRequest().getExtras()));
                    itemRecord.getHttpRequest().addExtra("forum_tid", tid);
                    itemRecord.getHttpRequest().addExtra(Tag_Field_Forum_Name, forumName);
                    crawlerRequestRecords.add(itemRecord);
                }
            }

        } catch (Exception e) {
            // Log the full throwable (the bare message dropped the stack trace), then retry.
            logger.error("{} request url {} download page error", xcarLogPrefix, page.getRequest().getUrl(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
        }
    }

    /**
     * From a thread (article) page, queues page 1 of the comment API for every floor
     * on the page, provided comments are scheduled for this crawl and the thread has
     * at least one reply. On a bad download (missing data blob) the original request
     * is re-queued for retry.
     */
    private void carSeriesForumArticleParselinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {

        // Fetch the embedded data blob once (the original evaluated it twice).
        String dataInfo;
        try {
            dataInfo = articleDataInfo(page);
        } catch (Exception e) {
            logger.error("{} download forum article {} page has error ,will retry", xcarLogPrefix, page.getRequest().getUrl(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
            return;
        }
        if (StringUtils.isBlank(dataInfo)) {
            logger.error("{} download forum article {} page has error: article data info is null!,will retry", xcarLogPrefix, page.getRequest().getUrl());
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
            return;
        }

        String forumTid = (String) page.getRequest().getExtras().get("forum_tid");
        CategoryTag categoryTag = crawlerRecord.tagsCreator().scheduleTags().getCategoryTag();
        if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) == null) {
            return; // comments are not scheduled for this crawl
        }
        if (!crawlerRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
            logger.error("{} crawler comment need to filter information!", xcarLogPrefix);
            return;
        }

        String comments = page.getHtml().xpath("//div[@class=\"commentNum\" or @class=\"fl commentNum\"]/span/text()").get();
        if (StringUtils.isBlank(comments)) {
            logger.error("article request url {}  comments xpath is null!", page.getRequest().getUrl());
            return;
        }
        if ("0".equals(comments)) {
            return; // thread has no replies, nothing to page through
        }

        List<Selectable> nodes =
                page.getHtml().xpath("//div[@class=\"floor_div\" or @class=\"floor_div floor_div_e\"]").nodes();

        for (Selectable node : nodes) {
            String dataPid;
            // NOTE(review): the single-node branch uses an absolute xpath with a trailing
            // space in the attribute name ("@data-pid ") -- preserved as-is; confirm the
            // selector library tolerates it.
            if (nodes.size() == 1) {
                dataPid = node.xpath("//div[@class=\"comment\"]/@data-pid ").get();
            } else {
                dataPid = node.xpath("./div[@class=\"comment\"]/@data-pid").get();
            }
            String carSeriesForumCommentUrl = String.format(carSeriesForumCommentUrlFormat, forumTid, 1, dataPid, System.currentTimeMillis());
            CrawlerRequestRecord commentTurnPageRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(carSeriesForumCommentUrl)
                    .httpHeads(page.getRequest().getHeaders())
                    .releaseTime(System.currentTimeMillis())
                    .needWashed(true)
                    .copyBizTags()
                    .notFilterRecord()
                    .build();
            commentTurnPageRecord.getHttpRequest().setExtras(Maps.newHashMap(page.getRequest().getExtras()));
            commentTurnPageRecord.getHttpRequest().addExtra("forum_comment_list_current_number", "1");
            commentTurnPageRecord.getHttpRequest().addExtra("data_pid", dataPid);
            crawlerRequestRecords.add(commentTurnPageRecord);
        }
    }

    /**
     * Pages through a post's comment ("getmore") API: queues the next page while
     * more comments remain for the parent post identified by data_pid. The API
     * serves 10 comments per page. Any failure re-queues the request for retry.
     */
    private void carSeriesForumCommentParselinks(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord, HttpPage page) {

        try {
            String dataPid = (String) crawlerRecord.getHttpRequest().getExtras().get("data_pid");
            String forumTid = (String) page.getRequest().getExtras().get("forum_tid");
            Json rawTextJson = new Json(page.getRawText());
            int pcount = Integer.parseInt(rawTextJson.jsonPath($_type + ".data.data.pcount." + dataPid).get());
            int currentNumber = Integer.parseInt(String.valueOf(page.getRequest().getExtras().get("forum_comment_list_current_number")));

            if (pcount == 0) {
                // No comments under this post: skip the wash step for this page.
                crawlerRecord.setNeedWashPage(false);
            }

            int pages = (int) Math.ceil((double) pcount / 10); // 10 comments per API page
            // NOTE(review): when pages == 1 this still requests page 2 even though only
            // one page exists -- looks intentional for this API, but worth confirming.
            if (pages == 1 || currentNumber < pages) {
                String carSeriesForumCommentUrl = String.format(carSeriesForumCommentUrlFormat, forumTid, currentNumber + 1, dataPid, System.currentTimeMillis());
                CrawlerRequestRecord turnPageRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(carSeriesForumCommentUrl)
                        .httpHeads(page.getRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .needWashed(true)
                        .copyBizTags()
                        .build();
                turnPageRecord.getHttpRequest().setExtras(Maps.newHashMap(page.getRequest().getExtras()));
                turnPageRecord.getHttpRequest().addExtra("forum_comment_list_current_number", String.valueOf(currentNumber + 1));
                crawlerRequestRecords.add(turnPageRecord);
            }
        } catch (Exception e) {
            // Log the full throwable (the bare message dropped the stack trace), then retry.
            logger.error("{} download forum comments {} page has error ,will retry", xcarLogPrefix, page.getRequest().getUrl(), e);
            addCrawlerRecords(crawlerRequestRecords, crawlerRecord, page);
        }
    }

    /**
     * Washes a forum thread (article) page: emits one article record per floor,
     * tagged with the brand/series/forum metadata carried in the request extras,
     * plus an interaction record for the main floor (floor 1).
     *
     * @param crawlerDatas  output list the washed records are appended to
     * @param crawlerRecord request record; its filter info may be replaced from the
     *                      "custom_record_filter_info" biz tag
     * @param page          the downloaded article page
     */
    private void carSeriesForumArticleWashPage(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // Restore the filter that was stashed on the biz tags so isDateRange()
        // evaluates the intended date window for this crawl.
        KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("custom_record_filter_info");
        if(filterInfoTag !=null ){
            CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(),CrawlerRecord.class);
            crawlerRecord.setFilter(filterInfoRecord.getFilter());
            crawlerRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
            crawlerRecord.setTurnPageFilterInfo(filterInfoRecord.getTurnPageFilterInfo());
        }
        Html responseHtml = page.getHtml();

        // Author identity, title and elite badge are page-level, shared by every floor.
        // NOTE(review): .get().trim() chains throw NPE if an xpath misses; the per-floor
        // try/catch below does not cover these page-level extractions.
        String autorName = responseHtml.xpath("//div[@class=\"user\"]//span[@class=\"name\"]/text()").get().trim();
        String authorUrl = responseHtml.xpath("//div[@class=\"user\"]/a/@href").get().trim();
        String authorId = authorUrl.substring(authorUrl.indexOf("=") + 1); // uid query parameter
        String title = responseHtml.xpath("//h1/pre/text() | //h2/span/text() | //h1/span/text()").get().trim();
        // Either badge icon marks the thread as "elite" (featured).
        String is_elite = responseHtml.xpath("//i[@class=\"h2-bg icon-details-Title-bg2x icon-details-Title-bg\"]/@class | //i[@class=\"icon-youji-jh-bg2x icon-youji-jh-bg\"]/@class").get();
        boolean isElite= false;

        if (StringUtils.isNotBlank(is_elite)){
            isElite = true;
        }

        // Brand/series context propagated from the series landing-page request extras.
        String brand = (String) page.getRequest().getExtras().get("brand");
        String series = (String) page.getRequest().getExtras().get("series");
        String series_id = (String) page.getRequest().getExtras().get("series_id");
        String forumName = (String) page.getRequest().getExtras().get(Tag_Field_Forum_Name);

        String province = responseHtml.xpath("//span[@class=\"fl province\"]/text()").get();
        String city = responseHtml.xpath("//span[@class=\"fl city\"]/text()").get();

        // One article record per floor (post) on the page.
        List<Selectable> floorNodes = responseHtml.xpath("//div[@class=\"floor_div\" or @class=\"floor_div floor_div_e\"]").nodes();
        for(Selectable floorNode : floorNodes){

            try {
                String floorNum = floorNode.xpath(".//div[@class=\"fl floorNum\"]/span/text()").get();
                if (StringUtils.isBlank(floorNum)){
                    // The main floor carries no floor-number badge; default it to "1".
                    floorNum = "1";
                }
                // Keep only images hosted on the site's own image CDN, normalized to https.
                List<String> imgs = floorNode.xpath("./div[@class=\"content\"]//img/@src").all();
                List<String> xcarImgs = new ArrayList<>();
                for (String img : imgs) {
                    if (img.startsWith("//image.xcar.com.cn")){
                        xcarImgs.add("https:"+img);
                    }
                    if (img.startsWith("https://image.xcar.com.cn")){
                        xcarImgs.add(img);
                    }
                }
                // \x01 is the downstream field separator for multi-valued image URLs.
                String imgUrls = StringUtils.joinWith("\\x01",xcarImgs.toArray());
                List<String> contents =  floorNode.xpath("./div[@class=\"content\"]//text()").all();
                String content = StringEscapeUtils.unescapeHtml(StringUtils.joinWith("",contents.toArray()).trim());
                String content_id = floorNode.xpath("./div[@class=\"content\"]/@data-pid").get().trim();
                String releaseTime = floorNode.xpath(".//span[@class=\"time\"]/text() | //div[@class=\"fl time\"]/text()").get().trim();
                if (!isDateRange(crawlerRecord,dateToTimestamp(releaseTime))){
                    // NOTE(review): return (not continue) skips all remaining floors once one
                    // falls outside the date window -- presumably floors are time-ordered;
                    // confirm before changing.
                    return;
                }
                CrawlerData crawlerArticleData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), content_id))
                        .releaseTime(dateToTimestamp(releaseTime))
                        .addContentKV(Field_Author, autorName)
                        .addContentKV(Field_Author_Id, authorId)
                        .addContentKV(Field_Title, title)
                        .addContentKV(Field_Content,content)
                        .addContentKV(Field_Floor,floorNum)
                        .addContentKV(Field_Images,imgUrls)
                        .resultLabelTag(article)
                        .build();

                // Series identity is attached as a single-element list of maps.
                Map<String, String> seriesMap = new HashMap<>();
                seriesMap.put("series_name",series);
                seriesMap.put("series_url","https://newcar.xcar.com.cn/"+series_id);
                seriesMap.put("series_id",series_id);
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Series,Arrays.asList(seriesMap));
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Brand,brand);
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Forum_Name,forumName);

                Map<String, String> addr = new HashMap<>();
                addr.put("province",province);
                addr.put("city",city);
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info,addr);

                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite,isElite);
                crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
                crawlerArticleData.tagsCreator().bizTags().addSite(site);
                crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);

                if ("1".equals(floorNum)){ // only the main floor carries interaction metrics
                    carSeriesForumArticleInteractionWashPage(crawlerDatas,crawlerRecord,page,content_id);
                }

                crawlerDatas.add(crawlerArticleData);
            } catch (Exception e) {
                // A bad floor is skipped; the rest of the page is still washed.
                logger.error("{} parse crawler data error: {},stackTrace: {}",xcarLogPrefix,e.getMessage(),e.getStackTrace()[0]);
            }

        }
    }

    /**
     * Washes one page of the comment API into CrawlerData records (one per comment
     * under the parent post identified by data_pid), then extracts per-comment
     * praise counts as interaction records. Comments outside the configured date
     * window are skipped; a bad comment is logged and skipped without aborting the
     * rest of the page.
     */
    private void carSeriesForumCommentWashPage(List<CrawlerData> crawlerDatas, CrawlerRequestRecord crawlerRecord, HttpPage page) {
        Json dataInfoJson = new Json(page.getRawText());
        String dataPid = (String) crawlerRecord.getHttpRequest().getExtras().get("data_pid");
        List<String> dataList = dataInfoJson.jsonPath($_type + ".data.data.postlist." + dataPid).all();

        // Restore the filter stashed on the biz tags so isDateRange() evaluates the
        // intended date window.
        KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
        if (filterInfoTag != null) {
            CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
            crawlerRecord.setFilter(filterInfoRecord.getFilter());
            crawlerRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
            crawlerRecord.setTurnPageFilterInfo(filterInfoRecord.getTurnPageFilterInfo());
        }

        for (String data : dataList) {
            Json dataJson = new Json(data);
            String pid = dataJson.jsonPath($_type + ".pid").get();
            String tid = dataJson.jsonPath($_type + ".tid").get();
            // pppid is the id of the floor (article record) this comment replies to.
            String pppid = dataJson.jsonPath($_type + ".pppid").get();
            try {
                Long time = dateToTimestamp(dataJson.jsonPath($_type + ".dateline").get());
                if (!isDateRange(crawlerRecord, time)) {
                    continue; // outside the configured date window
                }
                CrawlerData commentData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, comment.enumVal(), pid))
                        .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, article.enumVal(), pppid))
                        .url("http://www.xcar.com.cn/bbs/viewthread.php?tid=" + tid)
                        .releaseTime(time) // reuse the timestamp parsed above instead of re-parsing
                        .addContentKV(Field_Author, dataJson.jsonPath($_type + ".author").get())
                        .addContentKV(Field_Author_Id, dataJson.jsonPath($_type + ".authorid").get())
                        .addContentKV(Field_Content, Jsoup.parse(dataJson.jsonPath($_type + ".message").get().trim()).text())
                        .addContentKV(Field_Floor, dataJson.jsonPath($_type + ".floor").get())
                        .resultLabelTag(comment)
                        .requestLabelTag(result)
                        .build();
                commentData.tagsCreator().bizTags().addDomain(domain());
                commentData.tagsCreator().bizTags().addSite(site);
                commentData.tagsCreator().bizTags().addSiteBiz(site_biz);
                crawlerDatas.add(commentData);
            } catch (Exception e) {
                // Log the full throwable (the bare message dropped the stack trace).
                logger.error("{} comment wash data error", xcarLogPrefix, e);
            }
        }

        carSeriesForumCommentInteractionWashPage(crawlerDatas, crawlerRecord, page, dataInfoJson);
    }

    /**
     * Emits one interaction record per comment carrying its praise ("like") count.
     * The API returns "praises" as a list of JSON objects keyed by comment pid, each
     * value an object with a "praisenum" field. A malformed entry is logged and
     * skipped without aborting the rest.
     */
    private void carSeriesForumCommentInteractionWashPage(List<CrawlerData> crawlerDatas, CrawlerRequestRecord crawlerRecord, HttpPage page, Json dataInfoJson) {
        List<String> praises = dataInfoJson.jsonPath($_type + ".data.data.praises").all();
        if (praises == null || praises.isEmpty()) {
            return;
        }
        for (String praise : praises) {
            try {
                Map<String, Object> praiseMap = JSON.parseObject(praise, Map.class);
                for (String key : praiseMap.keySet()) {
                    // (removed leftover System.out.println debug statements)
                    String praiseNum = String.valueOf(((Map) praiseMap.get(key)).get("praisenum"));
                    CrawlerData crawlerInteractionData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, comment.enumVal(), interaction.enumVal(), key))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), site, comment.enumVal(), key))
                            .addContentKV(Field_I_Likes, praiseNum)
                            .resultLabelTag(interaction)
                            .build();
                    crawlerInteractionData.tagsCreator().bizTags().addDomain(domain());
                    crawlerInteractionData.tagsCreator().bizTags().addSite(site);
                    crawlerInteractionData.tagsCreator().bizTags().addSiteBiz(site_biz);
                    crawlerDatas.add(crawlerInteractionData);
                }
            } catch (Exception e) {
                // Log with context and stack trace instead of the bare message.
                logger.error("{} comment interaction wash error", xcarLogPrefix, e);
            }
        }
    }

    /**
     * Extracts thread-level interaction metrics (comments, views, collections,
     * likes) and author stats (followers, topic count) from the article page,
     * appending one interaction record whose parent is the main-floor article
     * record identified by contentId.
     *
     * @param crawlerDatas  output list the interaction record is appended to
     * @param crawlerRecord the request record the data is attributed to
     * @param page          the downloaded article page
     * @param contentId     data-pid of the main floor, used for dataId/parentId
     */
    private void carSeriesForumArticleInteractionWashPage(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord,
                                                   HttpPage page, String contentId){
        Html responseHtml = page.getHtml();

        // NOTE(review): .get().trim() throws NPE when an xpath misses; the caller's
        // per-floor try/catch is assumed to absorb that -- confirm.
        String comments = responseHtml.xpath("//div[@class=\"commentNum\" or @class=\"fl commentNum\"]/span/text()").get().trim();
        String views = responseHtml.xpath("//div[@class=\"preview\" or @class=\"fl preview\"]/text()").get().trim();
        // The view counter text is space-separated; the first token is the number.
        String[] viewArrays = views.split(" ");
        views = viewArrays[0];
        String collections = responseHtml.xpath("//a[@class=\"fl details_btn details_btn_hover details_btn_like collection\"]/span/text()").get();
        String like = responseHtml.xpath("//button[@class=\"fl riokin_btn recommend\"]/div[@class=\"clearfix btn_txt\"]/span[@class=\"fl btn_num\"]/text()").get();
        // Author stats are labeled spans; match by label text ("粉丝" = followers,
        // "内容" = posted content count). Either may remain null if absent.
        String authorFollowers = null;
        String authorTopics = null;
        List<Selectable> autorInfoNodes = responseHtml.xpath("//div[@class=\"fl\"]").nodes();
        for (Selectable autorInfoNode : autorInfoNodes) {
            if ("粉丝".equals(autorInfoNode.xpath("./span[@class=\"text\"]").get())){
                authorFollowers = autorInfoNode.xpath("./span[@class=\"num\"]").get();
            }
            if ("内容".equals(autorInfoNode.xpath("./span[@class=\"text\"]").get())){
                authorTopics = autorInfoNode.xpath("./span[@class=\"num\"]").get();
            }
        }

        CrawlerData crawlerInteractionData = CrawlerData.builder()
                .data(crawlerRecord, page)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, interaction.enumVal(), contentId))
                .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), contentId))
                .addContentKV(Field_I_Comments, comments)
                .addContentKV(Field_I_Views, views)
                .addContentKV(Field_I_Collection, collections)
                .addContentKV(Field_I_Likes,like)
                .addContentKV(Field_Author_Topic_Count,authorTopics)
                .addContentKV(Field_Author_Follows,authorFollowers)
                .resultLabelTag(interaction)
                .build();
        crawlerInteractionData.tagsCreator().bizTags().addDomain(domain());
        crawlerInteractionData.tagsCreator().bizTags().addSite(site);
        crawlerInteractionData.tagsCreator().bizTags().addSiteBiz(site_biz);
        crawlerDatas.add(crawlerInteractionData);
    }
    /**
     * Checks whether a record's release time falls inside the request's
     * configured date range.
     *
     * <p>Records whose filter is neither {@code keyOrDateRange} nor
     * {@code dateRange} always pass. The range comes either from an explicit
     * {@code dateAllowRange} pair or from {@code hourFromNow} relative to the
     * current clock.
     *
     * @param crawlerRequestRecord request carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis; {@code null} or 0
     *                             means unknown and is treated as out of range
     *                             (the original unboxed {@code null} and threw NPE)
     * @return true when the release time is inside the range (or no range applies)
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord,Long releaseTimeToLong){
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        if (filter != CrawlerEnum.CrawlerRecordFilter.keyOrDateRange && filter != CrawlerEnum.CrawlerRecordFilter.dateRange) {
            // no date filtering configured — always in range
            return true;
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {
                endTime = System.currentTimeMillis() - 60000; // system time minus one minute
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        return startTime != null && endTime != null
                && releaseTimeToLong != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }
    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script handles.
        String[] regulars = {
                carSeriesUrlRegular,
                carSeriesForumUrlRegular,
                carSeriesForumArticleUrlRegular,
                carSeriesForumCommentUrlRegular
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Validates that an incoming request record belongs to this site and
     * carries a usable "order" schedule tag (comment-page URLs are exempt).
     *
     * @param crawlerRecord the record to validate
     * @return true when the record may be crawled by this script
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Reject records tagged for another site.
        String siteTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        if (!site.equals(siteTag)){
            return false;
        }

        KVTag orderTag = crawlerRecord.tagsCreator().scheduleTags().getCategoryTag().getKVTag("order");
        // Comment-page URLs are accepted regardless of the order tag.
        if (isUrlMatch(crawlerRecord.getHttpRequest().getUrl(),carSeriesForumCommentUrlRegular)){
            return true;
        }
        // Everything else needs an order tag of "dateline" or "lastpost".
        Object orderVal = orderTag == null ? null : orderTag.getVal();
        boolean validOrder = "dateline".equals(orderVal) || "lastpost".equals(orderVal);
        if (!validOrder){
            logger.error(xcarLogPrefix + "crawler need order regular!");
            return false;
        }
        return true;
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: this script performs no post-execution work.
    }

    /**
     * Business domain identifier for this crawler script.
     *
     * @return the fixed domain key {@code "xcar"}
     */
    @Override
    public String domain() {
        return "xcar";
    }

    /**
     * Re-enqueues a request for another download attempt, tracking the attempt
     * count in the "download_retry_count" business tag and giving up after the
     * retry limit is reached.
     *
     * @param crawlerRequestRecords output list the retry record is appended to
     * @param crawlerRecord         the record whose download should be retried
     * @param page                  the failed page (raw text is logged when the limit is hit)
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        final int maxRetries = 20; // hard cap on re-download attempts
        int count = 1;
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        if (crawlerBusinessTags.hasKVTag("download_retry_count")){
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            if (count >= maxRetries){
                // fixed garbled message ("download he number of retries ...")
                logger.error("xcar {} download: the number of retries exceeds the limit, request url {}, page rawText [{}]",
                        xcarLogPrefix, crawlerRecord.getHttpRequest().getUrl(),page.getRawText());
                return;
            }
        }

        count++;
        crawlerBusinessTags.addCustomKV("download_retry_count",count);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        crawlerRequestRecord.setNeedWashPage(true);
        // the retry must not be dropped by the filter stage
        crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Builds one car-series request record per keyword found in the internal
     * keyword-source download and appends them to {@code allItemRecords}.
     *
     * @param requestRecord       originating request (headers and tags are reused)
     * @param supportSourceRecord record whose internal download page holds the keyword list
     * @param allItemRecords      output list the new records are appended to
     */
    private void initAllCrawlerRecordByKeyword(CrawlerRequestRecord requestRecord,
                                               CrawlerRequestRecord supportSourceRecord,
                                               List<CrawlerRecord> allItemRecords){
        try {
            HttpPage keywordPage = supportSourceRecord.getInternalDownloadPage();
            MetaResponse metaResponse = JSON.parseObject(keywordPage.getRawText(), MetaResponse.class);
            if (metaResponse.getStatus() != 0 || metaResponse.getContent() == null){
                return;
            }
            for (String content : (List<String>) metaResponse.getContent()) {
                CrawlerDomainKeys domainKeys = JSON.parseObject(content, CrawlerDomainKeys.class);
                String keyword = domainKeys.getKeyword();
                CrawlerRequestRecord seriesRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(String.format(carSeriesUrlFormat,keyword))
                        .httpHeads(requestRecord.getHttpRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .notFilterRecord()
                        .build();
                seriesRecord.getHttpRequest().addExtra("xcar_series",keyword);
                allItemRecords.add(seriesRecord);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Locates the Oracle Nashorn engine and pre-compiles the cipher JavaScript
     * files (md5/sha1/sha256) into {@code compiledScriptMap}.
     *
     * @throws FileNotFoundException when a cipher js file is missing
     * @throws ScriptException       when compilation fails or Nashorn is unavailable
     */
    private void initCompileScript() throws FileNotFoundException, ScriptException {
        if (compiledScriptMap == null){
            compiledScriptMap = new HashMap<>();
        }

        ScriptEngineManager sm = new ScriptEngineManager();
        NashornScriptEngineFactory factory = null;
        for (ScriptEngineFactory f : sm.getEngineFactories()) {
            if (f.getEngineName().equalsIgnoreCase("Oracle Nashorn")) {
                factory = (NashornScriptEngineFactory)f;
                break;
            }
        }
        // Fail fast with a clear message; the original fell through to an NPE below.
        if (factory == null) {
            throw new ScriptException("Oracle Nashorn script engine factory not found");
        }
        // -doe: dump JS exceptions; --global-per-engine: share one global scope per engine
        String[] engineOptions = ArrayUtils.toArray("-doe", "--global-per-engine");
        scriptEngine = factory.getScriptEngine(engineOptions);
        List<String> ciphers = Arrays.asList("md5", "sha1", "sha256");
        String filePathPrefix = "/data/chance_crawler_runner/domain/xcar/cipher_js/";

        for (String cipher : ciphers) {
            String filePath = filePathPrefix+cipher+".js";
            logger.info("xcar cipher js file path {}",filePath);
            // NOTE(review): FileReader uses the platform default charset — confirm the js files are ASCII.
            final CompiledScript compiled = ((Compilable)scriptEngine).compile(new FileReader(filePath));
            compiledScriptMap.put(cipher,compiled);
        }
    }

    /**
     * Extracts the "data: {...}" object from the inline "key: '...', data: {...}"
     * script fragment on an article page.
     *
     * @param page the downloaded article page
     * @return the text following "data: " within the matched fragment, or null when absent
     */
    private String articleDataInfo(HttpPage page){
        final String marker = "data: ";
        Matcher matcher = Pattern.compile("key: '\\S*',\\s*data:\\s*\\{([^}])*\\}").matcher(page.getRawText());
        if (!matcher.find()){
            return null;
        }
        String fragment = matcher.group();
        return fragment.substring(fragment.indexOf(marker) + marker.length());
    }

    /**
     * Refreshes the site's anti-crawl "__jsl_clearance_s" cookie by evaluating
     * the two JavaScript challenges the server returns, then publishes the final
     * cookie via {@code updateLocalAuthInfo}. Best-effort: failures are logged
     * and swallowed.
     *
     * @param downloader           downloader used for the two challenge requests
     * @param crawlerRequestRecord request whose headers are reused; its "cookie"
     *                             header is mutated in place with the first value
     */
    private void cookieUpdate(Downloader downloader,CrawlerRequestRecord crawlerRequestRecord){
        // 1. Generate the first encrypted cookie: evaluate the "(...)" argument of the challenge script.
        HttpPage httpPage = downloader.download(crawlerRequestRecord.getHttpRequest(), crawlerRequestRecord.getHttpConfig());
        String js = httpPage.getRawText().substring(httpPage.getRawText().indexOf("("), httpPage.getRawText().lastIndexOf(")")+1);
        Map<String,String> cookieMap = new HashMap<>();
        try {
            String eval = (String) scriptEngine.eval(js);
            // keep only the raw cookie value between '=' and ';'
            eval =  eval.substring(eval.indexOf("=")+1, eval.indexOf(";"));
            cookieMap.put("__jsl_clearance_s",eval);
            logger.info("xcar cookie second update result __jsl_clearance_s={}",eval);
            crawlerRequestRecord.getHttpRequest().addHeader("cookie",processCookie(cookieMap));

            // 2. Generate the second encrypted cookie: re-request with the first cookie set,
            //    then run the compiled cipher script selected by the challenge payload.
            httpPage = downloader.download(crawlerRequestRecord.getHttpRequest(), crawlerRequestRecord.getHttpConfig());
            Matcher mtAuthor = Pattern.compile("};go\\((.*?)\\)</script>").matcher(httpPage.getRawText());
            if (mtAuthor.find()){
                String cookieGen = mtAuthor.group(1);
                Json cookieGenJson = new Json(cookieGen);
                // "ha" selects the compiled cipher script (keys: md5/sha1/sha256 — see initCompileScript)
                String ha = cookieGenJson.jsonPath($_type + ".ha").get();
                logger.info("xcar cookie cipher code {}",ha);
                compiledScriptMap.get(ha).eval();
                Invocable invocable = (Invocable) compiledScriptMap.get(ha).getEngine();
                String result = (String) invocable.invokeFunction("go",JSON.parseObject(cookieGen, Map.class));
                result =  result.substring(result.indexOf("=")+1, result.indexOf(";"));
                cookieMap.put("__jsl_clearance_s",result);
                result = processCookie(cookieMap);
                updateLocalAuthInfo(result);
                logger.info("xcar cookie second update result {}",result);
            }

        } catch (Exception e) {
            // NOTE(review): only the message is logged; consider passing `e` for the stack trace.
            logger.error("xcar cookie update error {}",e.getMessage());
        }
    }

    /**
     * Serializes a cookie map into a "k1=v1;k2=v2" header string.
     *
     * <p>Fix: the original built trailing-';' text and stripped it with
     * {@code substring(0, lastIndexOf(";"))}, which threw
     * StringIndexOutOfBoundsException for an empty map; an empty map now
     * yields "". Also switched StringBuffer to the unsynchronized StringBuilder.
     *
     * @param cookieMap cookie name/value pairs
     * @return the joined cookie header value ("" when the map is empty)
     */
    private String processCookie(Map<String,String> cookieMap){
        StringBuilder cookie = new StringBuilder();
        for (Map.Entry<String, String> entry : cookieMap.entrySet()) {
            if (cookie.length() > 0) {
                cookie.append(';');
            }
            cookie.append(entry.getKey())
                    .append('=')
                    .append(entry.getValue());
        }
        return cookie.toString();
    }

    /**
     * Converts a Chinese relative or absolute time string scraped from the page
     * (e.g. "刚刚" = "just now", "5分钟前" = "5 minutes ago", "今天 12:30",
     * "2021-01-02") into epoch milliseconds.
     *
     * <p>Relative forms are resolved against the current clock; day-words
     * ("今天"/"昨天"/"前天") and year-less dates are normalized into absolute
     * strings and parsed against all patterns in {@link TimeForamtEnum}.
     *
     * @param dataStr the raw time text
     * @return epoch milliseconds
     * @throws ParseException when the (normalized) string matches no known format
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        String regEx="[^0-9]+"; // matches runs of non-digit characters; splitting on it yields the numbers
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){ // "just now"
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){ // "N seconds ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){ // "N minutes ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){ // "N hours ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){ // "N hours M minutes ago"
            int hourNumber = Integer.valueOf(pattern.split(dataStr)[0]);
            int secondNumber = Integer.valueOf(pattern.split(dataStr)[1]); // NOTE: this is the minutes part despite the name
            long totalMillis = hourNumber*60*60*1000L + secondNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){ // "today HH:mm" -> replace with today's date
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){ // "yesterday ..." -> now minus 24h, date part only
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){ // "the day before yesterday ..." -> now minus 48h
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){ // "MM月dd[日]" without a year -> prepend current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){ // "MM-dd" without a year -> prepend current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        // strict parse against every supported pattern; throws ParseException on no match
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Date/time patterns observed on xcar pages. {@link #allFormats()} returns
     * them in declaration order for bulk parsing via
     * {@code DateUtils.parseDateStrictly}. (Name keeps the historical "Foramt"
     * typo for caller compatibility.)
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        /** The SimpleDateFormat-style pattern string; immutable. */
        private final String format;

        TimeForamtEnum(String format) {
            this.format = format;
        }

        /**
         * @return every pattern string, in declaration order
         */
        public static String[] allFormats() {
            return Arrays.stream(values())
                    .map(TimeForamtEnum::getFormat)
                    .toArray(String[]::new);
        }

        public String getFormat() {
            return format;
        }
    }

    /**
     * Overrides crawlerProcess so this script can drive its own internal
     * downloads during link parsing.
     *
     * @param context the per-record crawl context
     */
    @Override
    public void crawlerProcess(CrawlerRecordContext context) {
        CrawlerRequestRecord record = context.getCrawlerRecord();

        // Extract new request links when the page still needs parsing.
        if (record.isNeedParsedPage()) {
            parsePage(context);
        }

        // Wash the page into result records when requested.
        if (record.isNeedWashPage()) {
            washResult(context);
        }
    }

    /**
     * Parses the page into request links, performs any internal downloads,
     * and registers the remaining links on the context.
     *
     * <p>Fix: a null link list is now caught up front — previously it was only
     * checked after {@code internalDownload}, which would already have thrown
     * an NPE iterating it.
     *
     * @param context the per-record crawl context
     */
    private void parsePage(CrawlerRecordContext context){
        //parse page links
        List<CrawlerRequestRecord> links = parseLinks(context);
        if (links == null){
            logger.warn("record [{}] parsed links is null!",context.getCrawlerRecord());
            return;
        }
        //internal download (removes internal-download links from `links`)
        internalDownload(context,links);
        //after internal download
        if (context.hasInternalDownloadLinks()){
            afterInternalDownload(context.getCrawlerRecord(),context.getInternalDownloadLinks(),links);
        }
        //add to context parsed links
        if (!links.isEmpty()){
            for (CrawlerRequestRecord requestRecord : links){
                context.addCrawlerRecord(requestRecord);
            }
        }else{
            logger.warn("record [{}] parsed links is null!",context.getCrawlerRecord());
        }
    }

    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        // Unused stub: link parsing goes through parseLinks(context) inside the overridden crawlerProcess.
        // NOTE(review): returns null rather than an empty list — any caller must null-check.
        return null;
    }

    /**
     * Pulls the links tagged {@code internalDownload} out of {@code links},
     * downloads each one synchronously, and registers them on the context.
     * The tagged links are removed from {@code links} as a side effect.
     *
     * <p>Fix: the null check on the locally-constructed list after iterating
     * {@code links} was dead code; the real hazard — {@code links} itself being
     * null — is now guarded at entry.
     *
     * @param context the per-record crawl context (supplies the downloader)
     * @param links   parsed request links; mutated in place
     */
    private void internalDownload(CrawlerRecordContext context,List<CrawlerRequestRecord> links){
        if (links == null || links.isEmpty()){
            return;
        }
        //extract internal download links
        List<CrawlerRequestRecord> internalDownloadLinks = new ArrayList<>();
        for (CrawlerRequestRecord requestRecord : links){
            if (requestRecord.tagsCreator().requestTags().hasRequestType(CrawlerEnum.CrawlerRequestType.internalDownload)){
                internalDownloadLinks.add(requestRecord);
            }
        }
        if (internalDownloadLinks.isEmpty()){
            return;
        }
        links.removeAll(internalDownloadLinks);
        //download links synchronously and attach each page to its record
        for (CrawlerRequestRecord requestRecord : internalDownloadLinks){
            HttpPage page = context.getPageDownloader()
                    .download(requestRecord.getHttpRequest(),requestRecord.getHttpConfig());
            requestRecord.setInternalDownloadPage(page);
        }
        //set to context
        context.setInternalDownloadLinks(internalDownloadLinks);
    }

    /**
     * Washes the downloaded page into CrawlerData items and converts each one
     * into a CrawlerResultRecord, which is then set on the context.
     *
     * @param context the per-record crawl context
     */
    private void washResult(CrawlerRecordContext context){
        CrawlerRequestRecord requestRecord = context.getCrawlerRecord();
        HttpPage httpPage = context.getPage();

        List<CrawlerResultRecord> resultRecords = new ArrayList<>();
        List<CrawlerData> washedData = washPage(requestRecord, httpPage);
        if (washedData != null && !washedData.isEmpty()){
            for (CrawlerData data : washedData){
                // build a result record from the washed data
                CrawlerResultRecord resultRecord = new CrawlerResultRecord();
                if (data.getFilter() != null){
                    // carry the filter configuration through
                    resultRecord.setFilter(data.getFilter());
                    resultRecord.setFilterInfos(data.getFilterInfos());
                }
                resultRecord.setRecordKey(data.getDataId());
                resultRecord.setReleaseTime(data.getReleaseTime());
                // propagate request tags
                resultRecord.setTags(data.getTags());
                // propagate the filter-pipeline flag
                resultRecord.setFilterPipelineResult(data.isFilterPipelineResult());
                resultRecord.setJsonStr(JSON.toJSONString(data));
                resultRecords.add(resultRecord);
            }
        }
        context.setRequestResults(resultRecords);
    }

}
