package com.chance.cc.crawler.development.scripts.xcar.xcarVideo;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.http.HttpClientDownloader;
import com.chance.cc.crawler.core.downloader.http.HttpClientGenerator;
import com.chance.cc.crawler.core.downloader.proxy.SiteConfigProxyProvider;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainUrls;
import jdk.nashorn.api.scripting.NashornScriptEngineFactory;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.script.*;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * @ClassName XCarVideoCrawlerScript
 * @Description 爱卡汽车车系页视频板块爬虫脚本（文章、回复、互动量）
 * @Author songding
 * @Date 2021/8/18 9:31
 * @Version 1.0
 *
 * 需要保存品牌信息（品牌名称、品牌id、品牌地址）、车系（车系名称、车系地址、车系id）、文章标题、作者名字
 * 车系视频板块
 * 开发 爱卡 站点车系页的视频板块累计文章链接
 * 开发爱卡站点车系页的视频板块能够采集文章、回复、互动量。
 * 能够单独以视频为入口采集文章、回复、互动量
 **/
public class XCarVideoCrawlerScript extends CrawlerCommonScript {
    private Logger log = LoggerFactory.getLogger(XCarVideoCrawlerScript.class);
    // Domain key and site tag this script is registered under (see domain()/crawlerCheck()).
    private static final String DOMAIN = "xcar";
    private static final String SIDE = "Video";


    private static final String RECORD_AGAIN_REQUEST = "record_again_request";// marks a record that should be re-downloaded
    private static final String TURN_PAGE_ITEM_REQUEST = "turn_page_item_request";// marks a turn-page item request

    private static final String DOMAIN_RESULT_JSON_RECORD_TAG = "domain_result_json";// tag carrying the initial record's result JSON
    private static final String XcarUrl = "https://newcar.xcar.com.cn";
    private static final String xcarUrlNew = "https://newcar.xcar.com.cn/";

    // URL patterns (regular expressions) for every page type this script handles.
    private static final String EntranceUrl = "https://newcar.xcar.com.cn/\\S*/";// series entrance page
    private static final String VideoListUrl = "https://newcar.xcar.com.cn/\\S*/video.htm";// video list, first page
    private static final String VideoListUrlPage = "https://newcar.xcar.com.cn/\\S*/video\\S*.htm";// video list, subsequent pages
    private static final String VideoUrl = "https://xtv.xcar.com.cn/show/\\S*/id_\\S*.html";// video article page
    private static final String CommentUrl = "https://comment.xcar.com.cn/interface/index.php\\?iact=CommentLevel&" +
            "cid=\\S*&action=getNewsComment&sort=time&ctype=5&page=\\S*&limit=25&_=\\S*";// comment API

    // Script-engine state for the HTTP 521 anti-bot challenge; presumably populated
    // by initCompileScript() (defined elsewhere in this file) — TODO confirm.
    public Map<String, CompiledScript> compiledScriptMap;
    public ScriptEngine scriptEngine;

    /** @return the crawler domain key ("xcar") this script handles. */
    @Override
    public String domain() {
        return DOMAIN;
    }
    /**
     * Accepts only records tagged with this script's site ("Video").
     *
     * @param crawlerRecord the candidate record
     * @return true when the record's site tag equals {@link #SIDE}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // FIX: constant-first comparison — site() may be null for records from
        // other pipelines, which previously threw an NPE instead of returning false.
        return SIDE.equals(crawlerRecord.tagsCreator().bizTags().site());
    }
    /**
     * Registers every URL pattern this script may crawl, in the original order:
     * site root, series entrance, video list (first page + paging), video
     * article page and comment API.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                xcarUrlNew,
                EntranceUrl,
                VideoListUrl,
                VideoUrl,
                VideoListUrlPage,
                CommentUrl,
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }
    /**
     * Builds the seed requests for one crawl round: expands keyword ("keys")
     * support records into per-series entrance URLs and, for trace (backfill)
     * records, replays the item URL stored in the record's domain-result JSON.
     *
     * @param crawlerRequestRecord the triggering record
     * @param supportSourceRecords auxiliary records (keyword list responses)
     * @return the prepared requests (possibly empty)
     */
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords){
        List<CrawlerRecord> prepareLinks = new ArrayList<>();
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords){
            String url = supportSourceRecord.getHttpRequest().getUrl();
            if (url.contains("keys")){
                this.initKeyWord(crawlerRequestRecord,supportSourceRecord,prepareLinks);
            }
        }
        // trace-back (backfill) collection: only records tagged trace=trace go further
        if (!crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("trace").equals("trace")){
            return prepareLinks;
        }
        // replay links previously stored in the database
        // NOTE(review): matches() treats xcarUrlNew as a regex ('.' matches any char);
        // presumably intended as an exact-URL check — confirm.
        if(requestUrl.matches(xcarUrlNew)){
            if (crawlerRequestRecord.tagsCreator().bizTags().hasKVTag(DOMAIN_RESULT_JSON_RECORD_TAG)) {
                KVTag domainResultJson = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag(DOMAIN_RESULT_JSON_RECORD_TAG);
                CrawlerDomainUrls crawlerDomainUrls = JSON.parseObject(String.valueOf(domainResultJson.getVal()), CrawlerDomainUrls.class);
                String url = crawlerDomainUrls.getUrl();
                Json urlJson = new Json(url);
                String itemUrl = urlJson.jsonPath($_type + ".url").get();
                long releaseTimeToLong = Long.parseLong(urlJson.jsonPath($_type + ".releaseTime").get());
                // drop stored links that fall outside the record's date filter
                if (!isDateRange(crawlerRequestRecord, releaseTimeToLong)) {
                    return prepareLinks;
                }
                // consume the tag so the replayed request does not carry it forward
                crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().removeLabelTag(DOMAIN_RESULT_JSON_RECORD_TAG);
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRequestRecord)
                        .httpUrl(itemUrl)
                        .releaseTime(releaseTimeToLong)
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                prepareLinks.add(record);

            }
        }
        return prepareLinks;
    }

    /**
     * Expands a keyword support page: every "keyword" entry in the JSON content
     * identifies a car series, and becomes an entrance URL
     * ("https://newcar.xcar.com.cn/&lt;keyword&gt;/") queued into prepareLinks.
     */
    private void initKeyWord(CrawlerRequestRecord crawlerRequestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> prepareLinks) {
        if (supportSourceRecord == null){return;}
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type+".msg").get();
        // anything other than "success" means the keyword page failed to download
        if (!"success".equals(msg)){
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }
        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all){
            JSONObject jsonObject = JSONObject.parseObject(data);
            String  keyword = (String) jsonObject.get("keyword");
            String url = "https://newcar.xcar.com.cn/"+keyword+"/";
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(url)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            prepareLinks.add(record);
        }
    }


    /**
     * Routes a downloaded page to the matching parser by URL pattern and
     * collects the follow-up requests. Failed downloads are re-queued; HTTP 521
     * additionally triggers the anti-bot cookie refresh.
     *
     * @param crawlerRecord the record whose page was downloaded
     * @param page          the downloaded page
     * @return follow-up requests discovered on this page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {

        List<CrawlerRequestRecord> parseLinks = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if ((!page.isDownloadSuccess()) || page.getStatusCode() != 200) {
            // FIX: the original format string had no {} placeholders, so the url and
            // status code passed as arguments were never written to the log.
            log.error("download page failed or status != 200, url [{}], status [{}]", url, page.getStatusCode());
            if (page.getStatusCode() == 521) {
                initCompileScript();// 521 = JS anti-bot challenge: initialize the decode script engine
                this.cookieUpdate(page,crawlerRecord,parseLinks);
            }
            this.recordAgainRequest(crawlerRecord, parseLinks);
            crawlerRecord.setNeedWashPage(false);// download failed: nothing to wash
            return parseLinks;
        }
        if (url.matches(EntranceUrl)){
            this.parseVideoListUrl(crawlerRecord,page,parseLinks);
        }
        if (url.matches(VideoListUrl) || url.matches(VideoListUrlPage)){
            this.parseVideoUrl(crawlerRecord,page,parseLinks);
        }
        if (url.matches(VideoUrl)){
            this.parseVideoTime(crawlerRecord,page,parseLinks);
        }
        if (url.matches(CommentUrl)){// comment API response
            this.judgeCommon(crawlerRecord,page,parseLinks);// decide whether the next comment page is needed
        }

        return parseLinks;
    }
    /**
     * Inspects one comment-API response and, when the comments on this page are
     * still inside the record's date filter, queues the request for the next
     * comment page (page = cup + 1). Responses flagged as failed/empty or
     * carrying the anti-crawler sentinel count "247" stop the paging.
     */
    private void judgeCommon(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String url = crawlerRecord.getHttpRequest().getUrl();// NOTE(review): unused local
        String info = page.getJson().jsonPath($_type + ".info").get();
        JSONObject jsonObject = JSONObject.parseObject(info);
        String message = (String) jsonObject.get("message");
        // "获取失败" = fetch failed, "无评论" = no comments: nothing to page through
        if (message.equals("获取失败") || message.equals("无评论")){
            crawlerRecord.setNeedWashPage(false);
            return;
        }
        String config = page.getJson().jsonPath($_type+".config").get();
        JSONObject jsonObject1 = JSONObject.parseObject(config);
        String count = (String) jsonObject1.get("count");// comment count
        // count "247" is the site's anti-crawler sentinel: the data is fake, stop paging
        if (count.equals("247")){
            log.error("Article comments cannot be displayed have Anti - crawler technology error 247  Don't parse turnPage");
            return;
        }
        List<String> all = page.getJson().jsonPath($_type + ".list").all();
        for (String s : all) {
            JSONObject jsonObjectTime = JSONObject.parseObject(s);
            String pub_time = (String) jsonObjectTime.get("pub_time");// comment publish time
            Long releaseTime = 0l;
            if (pub_time.contains("昨天")) {// "yesterday" -> now - 24h
                releaseTime = System.currentTimeMillis();
                releaseTime = releaseTime - 86400000l;
            }
            if (pub_time.contains("分钟前") || pub_time.contains("小时")) {// "N minutes/hours ago" -> now - 24h
                releaseTime = System.currentTimeMillis();
                releaseTime = releaseTime - 86400000l;
            }
            if (pub_time.contains("前天")) {// "day before yesterday" -> now - 48h
                releaseTime = System.currentTimeMillis();
                releaseTime = releaseTime - 172800000l;
            }
            try {
                if (releaseTime == 0l) {
                    if (pub_time.contains("年")) {
                        releaseTime = DateUtils.parseDate(pub_time, "yyyy年MM月dd日").getTime();
                    } else {
                        // NOTE(review): year hard-coded to 2021 for "MM月dd日" values — confirm still desired
                        releaseTime = DateUtils.parseDate("2021年" + pub_time, "yyyy年MM月dd日").getTime();
                    }
                }
                // if any comment on this page falls outside the date filter,
                // do not request the next comment page
                if(this.isDateRange(crawlerRecord,releaseTime) ==false){
                    return;
                };
            } catch (ParseException e) {
                log.error("get article time fail");
            }
        }

        String cup = (String) jsonObject1.get("cup");// current page number
        String news = page.getJson().jsonPath($_type + ".news").get();
        JSONObject news1 = JSONObject.parseObject(news);
        String cid = (String) news1.get("cid");
        String pageUrl ="https://comment.xcar.com.cn/interface/index.php?iact=CommentLevel&cid="+cid+"&action=getNewsComment&sort=time&ctype=5&page="+(Integer.valueOf(cup)+1)+"&limit=25&_="+System.currentTimeMillis();
        // carry the original record's filter configuration onto the turn-page request
        KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
        CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(),CrawlerRecord.class);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .recordKey(pageUrl)
                .httpUrl(pageUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .copyResultTags()
                .build();
        record.setFilter(filterInfoRecord.getFilter());
        record.setFilterInfos(filterInfoRecord.getFilterInfos());
        record.setTurnPageFilterInfo(filterInfoRecord.getTurnPageFilterInfo());
        parseLinks.add(record);
    }
    /**
     * Checks whether a release timestamp falls inside the record's configured
     * date-range filter. Records without a date-range filter always pass.
     *
     * @param crawlerRequestRecord record carrying the filter configuration
     * @param releaseTimeToLong    release time in epoch millis (0 = unknown)
     * @return true when the timestamp is acceptable for this record
     */
    private boolean isDateRange(CrawlerRequestRecord crawlerRequestRecord,Long releaseTimeToLong){
        CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
        boolean dateFiltered = filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange
                || filter == CrawlerEnum.CrawlerRecordFilter.dateRange;
        if (!dateFiltered) {
            return true;
        }
        Long startTime = null;
        Long endTime = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange) {
                continue;
            }
            long[] dateAllowRange = filterInfo.getDateAllowRange();
            int hourFromNow = filterInfo.getHourFromNow();
            if (dateAllowRange != null) {
                // explicit [start, end] window
                startTime = dateAllowRange[0];
                endTime = dateAllowRange[1];
            } else if (hourFromNow != 0) {
                // rolling window anchored one minute before "now"
                endTime = System.currentTimeMillis() - 60000;
                startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
            }
        }
        return startTime != null && releaseTimeToLong != 0
                && startTime <= releaseTimeToLong && releaseTimeToLong <= endTime;
    }
    /**
     * Reads the publish time from a video article page, stores it on the record
     * and marks the page for washing; when interaction data is requested, also
     * queues the first comment-API page for this article.
     */
    private void parseVideoTime(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {

        String time = page.getHtml().xpath("/html/body/div[3]/div[2]/div[5]/p[2]/span[1]/text()").get();
        Long releaseTime = 0l;
        try {
            // FIX: use the DateUtils already imported in this file instead of the
            // fully-qualified name — same class, consistent with the other methods.
            releaseTime = DateUtils.parseDate(time, "yyyy-MM-dd HH:mm:ss").getTime();
        } catch (ParseException e) {
            log.error("Article time transcription error");
        }
       if (releaseTime == 0l){
           // fall back to "now" rather than keeping a zero timestamp
           log.error("video url releaseTime get error,Set to the current time");
           releaseTime = System.currentTimeMillis();
       }
       crawlerRecord.tagsCreator().bizTags().addCustomKV("releaseTime",releaseTime);
       crawlerRecord.setNeedWashPage(true);

       if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.interaction)){
           String comments = page.getHtml().xpath("/html/body/div[3]/div[2]/div[6]/span[2]").get();
           String url = crawlerRecord.getHttpRequest().getUrl();
           String[] split = url.split("/");
           // last URL segment is "id_<cid>.html"; store it, then strip the
           // "id_" prefix (3 chars) and ".html" suffix (5 chars) to get the cid
           String articleId = split[split.length-1];
           crawlerRecord.tagsCreator().bizTags().addCustomKV("articleId",articleId);
           articleId = articleId.substring(3,articleId.length()-5);
           // "247条" is the site's anti-crawler sentinel: the count is fake
           if (comments.equals("247条")){
               log.error("comments is false data");
               return;
           }
           // first page of the comment API
           String commentUrl ="https://comment.xcar.com.cn/interface/index.php?" +
                   "iact=CommentLevel&cid="+articleId+"&action=getNewsComment&sort=time&ctype=5&page=1&limit=25&_="+System.currentTimeMillis();
           // carry the original record's filter configuration onto the comment request
           KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
           CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(),CrawlerRecord.class);
           CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                   .itemPageRequest(crawlerRecord)
                   .httpUrl(commentUrl)
                   .releaseTime(System.currentTimeMillis())
                   .notFilterRecord()
                   .copyBizTags()
                   .copyResultTags()
                   .build();
           record.setFilter(filterInfoRecord.getFilter());
           record.setFilterInfos(filterInfoRecord.getFilterInfos());
           //record.setTurnPageFilterInfo(filterInfoRecord.getTurnPageFilterInfo());
           parseLinks.add(record);
       }

    }

    /**
     * Parses a video-list page: queues the next list page (if any), stores the
     * brand/series breadcrumb info on the record, and queues every video
     * article link found on the page.
     */
    private void parseVideoUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        List<Selectable> nodes = page.getHtml().xpath("/html/body/div[6]/div[2]/div/div/a").nodes();
        if (nodes.size() != 0){// pager exists
            // FIX: evaluate the pager href once instead of re-running the xpath, and
            // only build the URL when it is a real link ("javascript:void(0);" marks
            // the last page). Constant-first equals is also null-safe.
            String href = nodes.get(nodes.size() - 1).xpath("./@href").get();
            if (!"javascript:void(0);".equals(href)){
                String pageUrl = XcarUrl + href;
                CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                        .itemPageRequest(crawlerRecord)
                        .httpUrl(pageUrl)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                parseLinks.add(record);
            }
        }

        // breadcrumb: brand name / relative url / id (second path segment of the href)
        String brandName = page.getHtml().xpath("/html/body/div[4]/div[1]/a[2]/text()").get();
        String brandUrl = page.getHtml().xpath("/html/body/div[4]/div[1]/a[2]/@href").get();
        String[] split = brandUrl.split("/");
        String brandId = split[2];
        brandUrl = XcarUrl + brandUrl;

        // breadcrumb: series ("motorcycle" in this codebase's naming) name / url / id
        String motorcycleName = page.getHtml().xpath("/html/body/div[4]/div[1]/a[4]/text()").get();
        String motorcycleUrl = page.getHtml().xpath("/html/body/div[4]/div[1]/a[4]/@href").get();
        String[] split1 = motorcycleUrl.split("/");
        String motorcycleId = split1[1];
        motorcycleUrl = XcarUrl + motorcycleUrl;
        Map<String,String> map = new HashMap<>();
        map.put("brandName",brandName);
        map.put("brandUrl",brandUrl);
        map.put("brandId",brandId);
        map.put("motorcycleName",motorcycleName);
        map.put("motorcycleUrl",motorcycleUrl);
        map.put("motorcycleId",motorcycleId);
        crawlerRecord.tagsCreator().bizTags().addCustomKV("brandMessage",map);
        String videoUrl = null;
        // queue every video article link on this list page
        List<Selectable> nodes1 = page.getHtml().xpath("/html/body/div[6]/div[2]/ul/li").nodes();
        for (Selectable node : nodes1){
            videoUrl = node.xpath("./a/@href").get();
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(videoUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parseLinks.add(record);
        }


    }

    /**
     * Queues the video-list page ("&lt;series url&gt;video.htm") derived from a
     * series entrance URL.
     */
    private void parseVideoListUrl(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parseLinks) {
        String listUrl = crawlerRecord.getHttpRequest().getUrl() + "video.htm";
        parseLinks.add(CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(listUrl)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build());
    }

    /**
     * Dispatches a downloaded page to the matching wash routine based on the
     * record's requested data types and the page URL.
     *
     * @return the washed results collected by the routines (possibly empty)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerData = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.article)){
            if (url.matches(VideoUrl)){
                // FIX: constant-first equals — getKVTagStrVal may return null when the
                // "total" tag is absent, which previously threw an NPE.
                if("total".equals(crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("total"))){
                    this.washVideoUrlAdd(crawlerRecord,page,crawlerData);// accumulate video article links
                }
                // FIX: the article data type is already guaranteed by the enclosing
                // branch; only the interaction check adds information here.
                if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.interaction)){
                    this.washVideoArticle(crawlerRecord,page,crawlerData);// wash video article content
                }
            }
        }

        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.interaction)){
            if (url.matches(VideoUrl)){
                this.washVideoInteraction(crawlerRecord,page,crawlerData);
            }
        }
        if (crawlerRecord.tagsCreator().resultTags().hasDataType(CrawlerEnum.CrawlerDataType.comment)){
            if (url.matches(CommentUrl)){
                this.washVideoComment(crawlerRecord,page,crawlerData);// wash comments
            }
        }
        return crawlerData;
    }
    /**
     * Washes article comments from the comment-API JSON. For every top-level
     * comment and every child reply it emits two CrawlerData results: an
     * interaction record (likes) and a comment record (author/content/images).
     * Relative Chinese timestamps (昨天/前天/分钟前/小时) are approximated
     * against the current clock; absolute ones are parsed as yyyy年MM月dd日.
     */
    private void washVideoComment(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> crawlerData) {
        String url = crawlerRecord.getHttpRequest().getUrl();
        String articleId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        String config = page.getJson().jsonPath($_type+".config").get();
        JSONObject jsonObject0 = JSONObject.parseObject(config);
        String count = (String) jsonObject0.get("count");// total comment count
        // count "247" is the site's anti-crawler sentinel: the data is fake
        if (count.equals("247")){
            log.error("Article comments cannot be displayed have Anti - crawler technology error 247  Don't wash comment");
            return;
        }
        List<String> all = page.getJson().jsonPath($_type + ".list").all();
        for (String s : all){
            JSONObject jsonObject = JSONObject.parseObject(s);
            String dataId = (String) jsonObject.get("id");// comment id
            String user_id = (String) jsonObject.get("user_id");// author id
            String user_name = (String) jsonObject.get("user_name");// author name
            String pub_time = (String) jsonObject.get("pub_time");// comment publish time
            String img_path = (String) jsonObject.get("img_path");// comment images
            Long releaseTime = 0l;
            if(pub_time.contains("昨天")){// "yesterday" -> now - 24h
                releaseTime = System.currentTimeMillis();
                releaseTime = releaseTime - 86400000l;
            }
            if(pub_time.contains("分钟前") || pub_time.contains("小时")){// "N minutes/hours ago" -> now - 24h1min
                releaseTime = System.currentTimeMillis();
                releaseTime = releaseTime - 86460000l;
            }
            if(pub_time.contains("前天")){// "day before yesterday" -> now - 48h
                releaseTime = System.currentTimeMillis();
                releaseTime = releaseTime - 172800000l;
            }
            try {
                if (releaseTime == 0l){
                    if (pub_time.contains("年")) {
                        releaseTime = DateUtils.parseDate(pub_time, "yyyy年MM月dd日").getTime();
                    }else{
                        // NOTE(review): year hard-coded to 2021 for "MM月dd日" values — confirm still desired
                        releaseTime = DateUtils.parseDate("2021年"+pub_time, "yyyy年MM月dd日").getTime();
                    }
                }
                if (this.isDateRange(crawlerRecord,releaseTime)==false){// skip comments outside the date filter
                    continue;
                }

            } catch (ParseException e) {
                log.error("get article time fail");
            }
            // child replies (replies to this comment)
            JSONObject child = jsonObject.getJSONObject("child");
            if(child != null){
                JSONArray list = child.getJSONArray("list");
                for (Object obj :list){
                    // NOTE(review): assumes every list element is a JSON string — confirm against the API payload
                    JSONObject jsonObject1 = JSONObject.parseObject((String) obj);
                    String id = (String) jsonObject1.get("id");
                    String user_id1 = (String) jsonObject1.get("user_id");
                    String user_name1 = (String) jsonObject1.get("user_name");
                    String pub_time1 = (String) jsonObject1.get("pub_time");
                    String conts1 = (String) jsonObject1.get("conts");
                    String ups1 = (String) jsonObject1.get("ups");
                    String img_path1 = (String) jsonObject1.get("img_path");// reply images
                    Long releaseTime1 = 0l;
                    if(pub_time1.contains("昨天")){
                        releaseTime1 = System.currentTimeMillis();
                        releaseTime1 = releaseTime1 - 86400000l;
                    }
                    // FIX: was pub_time.contains("小时") — a copy-paste bug that read the
                    // PARENT comment's timestamp while classifying the child reply.
                    if(pub_time1.contains("分钟前") || pub_time1.contains("小时")){
                        releaseTime1 = System.currentTimeMillis();
                        releaseTime1 = releaseTime1 - 86460000l;
                    }
                    if(pub_time1.contains("前天")){
                        releaseTime1 = System.currentTimeMillis();
                        releaseTime1 = releaseTime1 - 172800000l;
                    }
                    try {
                        if (releaseTime1==0l){
                            if (pub_time1.contains("年")){
                                releaseTime1 = DateUtils.parseDate(pub_time1, "yyyy年MM月dd日").getTime();
                            }else{
                                releaseTime1 = DateUtils.parseDate("2021年"+pub_time1, "yyyy年MM月dd日").getTime();
                            }
                        }
                    } catch (ParseException e) {
                        log.error("get article time fail");
                    }
                    // gate on the PARENT comment's time, matching the releaseTime the
                    // interaction record below carries
                    if (this.isDateRange(crawlerRecord,releaseTime)==false){
                        continue;
                    }
                    CrawlerData crawlerData2 = CrawlerData.builder()
                            .data(crawlerRecord,page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction.enumVal(), id))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment.enumVal(), id))
                            .url(url)
                            .releaseTime(releaseTime)
                            .addContentKV(AICCommonField.Field_I_Likes,ups1)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData2.setFilterPipelineResult(true);
                    crawlerData.add(crawlerData2);
                    if (!isDateRange(crawlerRecord,releaseTime1)){
                        continue;
                    }
                    CrawlerData crawlerData1 = CrawlerData.builder()
                            .data(crawlerRecord,page)
                            .releaseTime(releaseTime1)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment.enumVal(), id))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), articleId))
                            .url(url)
                            .addContentKV(AICCommonField.Field_Author,user_name1)
                            .addContentKV(AICCommonField.Field_Author_Id,user_id1)
                            .addContentKV(AICCommonField.Field_Content,conts1)
                            .addContentKV(AICCommonField.Field_Images,img_path1)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData1.setFilterPipelineResult(true);
                    crawlerData.add(crawlerData1);

                }
            }

            String ups = (String) jsonObject.get("ups");// like count
            String conts = (String) jsonObject.get("conts");// comment content
            CrawlerData crawlerData2 = CrawlerData.builder()
                    .data(crawlerRecord,page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.interaction.enumVal(), dataId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment.enumVal(), dataId))
                    .url(url)
                    .releaseTime(releaseTime)
                    .addContentKV(AICCommonField.Field_I_Likes,ups)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData2.setFilterPipelineResult(true);

            crawlerData.add(crawlerData2);

            if (!isDateRange(crawlerRecord,releaseTime)){
                continue;
            }
            CrawlerData crawlerData1 = CrawlerData.builder()
                    .data(crawlerRecord,page)
                    .releaseTime(releaseTime)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.comment.enumVal(), dataId))
                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), articleId))
                    .url(url)
                    .addContentKV(AICCommonField.Field_Author,user_name)
                    .addContentKV(AICCommonField.Field_Author_Id,user_id)
                    .addContentKV(AICCommonField.Field_Content,conts)
                    .addContentKV(AICCommonField.Field_Images,img_path)
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData1.setFilterPipelineResult(true);
            crawlerData.add(crawlerData1);


        }

    }

    /**
     * Washes the article interaction metrics (play count + comment count) into
     * a single interaction CrawlerData attached to the current article.
     */
    private void washVideoInteraction(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> crawlerData) {
        String articleId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        String comments = page.getHtml().xpath("/html/body/div[3]/div[2]/div[6]/span[2]/text()").get();// comment count, e.g. "12条"
        comments = comments.substring(0,comments.length()-1);// strip the trailing "条" unit character
        String amount = page.getHtml().xpath("/html/body/div[3]/div[2]/div[6]/span[1]/text()").get();// play count; "w" = 万 (10,000)
        if (amount.contains("w+")){// "3w+" -> "30000+"
            amount = amount.replace("w+","0000+");
        }else if (amount.contains("w")){
            try {
                // FIX: "1.2w" previously became "120000" (the dot was simply dropped,
                // inflating the value 10x); scale the numeric part by 10,000 instead,
                // so "1.2w" -> "12000" while "12w" still -> "120000".
                amount = String.valueOf((long) (Double.parseDouble(amount.replace("w", "")) * 10000));
            } catch (NumberFormatException e) {
                // unexpected format: fall back to the legacy transformation
                amount = amount.replace(".","").replace("w","0000");
            }
        }else{// play count below 10k: no exact number shown, use a random placeholder < 10000
            amount = String.valueOf(new Random().nextInt(10000));
        }
        CrawlerData crawlerData1 = CrawlerData.builder()
                .data(crawlerRecord,page)
                // NOTE(review): other dataIds use CrawlerDataType...enumVal(); here the raw
                // enum is joined — confirm the intended id format before changing it.
                .dataId(StringUtils.joinWith("-",crawlerRecord.getDomain(),CrawlerEnum.CrawlerDataType.interaction,new Random().nextInt(10000000)+System.currentTimeMillis()))
                .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(),articleId))
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .url(crawlerRecord.getHttpRequest().getUrl())
                .releaseTime(System.currentTimeMillis())
                .addContentKV(AICCommonField.Field_I_Views,amount)
                .addContentKV(AICCommonField.Field_I_Comments,comments)
                .flowInPipelineTag("kafka")
                .build();
        crawlerData1.setFilterPipelineResult(true);
        crawlerData.add(crawlerData1);
    }

    /**
     * Washes the video article itself: title, author, body text and publish
     * time, emitted as a single article CrawlerData.
     */
    private void washVideoArticle(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> crawlerData) {
        String title = page.getHtml().xpath("/html/body/div[3]/div[2]/div[5]/h1/text()").get();
        String time = page.getHtml().xpath("/html/body/div[3]/div[2]/div[5]/p[2]/span[1]/text()").get();
        String content = page.getHtml().xpath("/html/body/div[3]/div[2]/div[5]/p[1]/text()").get();
        String url = crawlerRecord.getHttpRequest().getUrl();
        Long releaseTime = 0l;
        try {
            // FIX: use the DateUtils already imported in this file instead of the
            // fully-qualified name — same class, consistent with the other methods.
            releaseTime = DateUtils.parseDate(time, "yyyy-MM-dd HH:mm:ss").getTime();
        } catch (ParseException e) {
            log.error("Article time transcription error");
        }
        if (releaseTime == 0l){
            // fall back to "now" rather than publishing a zero timestamp
            log.error("video url releaseTime get error,Set to the current time");
            releaseTime = System.currentTimeMillis();
        }
        String authorName = page.getHtml().xpath("/html/body/div[3]/div[2]/div[5]/p[2]/span[2]/text()").get();
        String articleId = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("articleId");
        CrawlerData crawlerData1 = CrawlerData.builder()
                .data(crawlerRecord,page)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(),articleId))
                .releaseTime(releaseTime)
                .url(url)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .addContentKV(AICCommonField.Field_Title,title)
                .addContentKV(AICCommonField.Field_Author,authorName)
                .addContentKV(AICCommonField.Field_Content,content)
                .flowInPipelineTag("kafka")
                .build();
        crawlerData1.setFilterPipelineResult(true);
        crawlerData.add(crawlerData1);
    }

    /*
     * Accumulate a discovered video article link: unpack the brand/series info
     * stashed on the record's biz tags and emit an "article" CrawlerData record
     * into the redis pipeline for later detail crawling.
     */
    private void washVideoUrlAdd(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerData> crawlerData) {
        String url = crawlerRecord.getHttpRequest().getUrl();
        String releaseTime = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("releaseTime");
        KVTag brandMessage = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("brandMessage");
        // Clear the temporary tags so they do not leak into downstream records.
        crawlerRecord.tagsCreator().bizTags().addCustomKV("releaseTime", null);
        crawlerRecord.tagsCreator().bizTags().addCustomKV("brandMessage", null);
        if (brandMessage == null) {
            // Without the brand/series payload the record is unusable — skip instead of NPE.
            log.error("xcar video brandMessage tag missing, url {}", url);
            return;
        }
        @SuppressWarnings("unchecked") // payload assumed written as Map<String,String> by the link parser — TODO confirm
        Map<String, String> map = (Map<String, String>) brandMessage.getVal();
        String brandName = map.get("brandName");
        String brandUrl = map.get("brandUrl");
        String brandId = map.get("brandId");
        String motorcycleName = map.get("motorcycleName");
        String motorcycleUrl = map.get("motorcycleUrl");
        String motorcycleId = map.get("motorcycleId");
        long releaseTimeMillis;
        try {
            releaseTimeMillis = Long.parseLong(releaseTime);
        } catch (NumberFormatException e) {
            // Null or malformed releaseTime tag: fall back to now rather than crashing.
            log.error("xcar video releaseTime tag invalid ({}), fallback to current time", releaseTime, e);
            releaseTimeMillis = System.currentTimeMillis();
        }
        // Brand-level fields (brandName/brandUrl/brandId) are intentionally not emitted;
        // only series-level info is forwarded downstream.
        CrawlerData crawlerData1 = CrawlerData.builder()
                .data(crawlerRecord, page)
                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(),
                        java.util.concurrent.ThreadLocalRandom.current().nextInt(10000000) + System.currentTimeMillis()))
                .url(url)
                .releaseTime(releaseTimeMillis)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .addContentKV(AICCommonField.Tag_Site_Info, "视频")
                .addContentKV("url", url)
                .addContentKV("releaseTime", releaseTime)
                .addContentKV(AutoVMCommonField.Field_Series_name, motorcycleName)
                .addContentKV(AutoVMCommonField.Field_Series_url, motorcycleUrl)
                .addContentKV(AutoVMCommonField.Field_Series_id, motorcycleId)
                .flowInPipelineTag("redis")
                .build();
        crawlerData1.setFilterPipelineResult(true);
        crawlerData.add(crawlerData1);
    }


    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: this script requires no post-execution cleanup.
    }


    /*
     * Re-enqueue a failed download for retry, giving up after 5 attempts.
     * The attempt count is carried on the record's biz tags under
     * RECORD_AGAIN_REQUEST; turn-page item requests additionally keep their
     * request tags so pagination state survives the retry.
     */
    private void recordAgainRequest(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> linksRecords) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(RECORD_AGAIN_REQUEST)) {// has this record been retried before?
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(RECORD_AGAIN_REQUEST).getVal();
            if (count >= 5) {
                log.error(DOMAIN + " download page the number of retries exceeds the limit,request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }
        String url = crawlerRecord.getHttpRequest().getUrl();
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get(TURN_PAGE_ITEM_REQUEST);
        CrawlerRequestRecord crawlerRequestRecord = null;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .releaseTime(System.currentTimeMillis())
                    .httpUrl(url)
                    .recordKey(crawlerRecord.getRecordKey() + count)// unique key per attempt
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .releaseTime(System.currentTimeMillis())
                    .httpUrl(url)
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyRequestTags()
                    .build();
        }
        if (crawlerRequestRecord == null) {
            return;
        }
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        // BUG FIX: the original used post-increment (count++), which stored the OLD
        // value — the counter never advanced, so the 5-retry cap was never reached.
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(RECORD_AGAIN_REQUEST, count + 1);
        linksRecords.add(crawlerRequestRecord);
    }
    /*
     * Refresh the site's anti-crawler cookie (__jsl_clearance_s) by running the
     * two-stage JS challenge: (1) eval the JSONP-wrapped script from the first
     * response to obtain a provisional cookie, then (2) re-download with it,
     * extract the go({...}) payload, run the matching precompiled cipher script
     * (md5/sha1/sha256, keyed by the "ha" field) and persist the final cookie.
     */
    private void cookieUpdate(HttpPage page, CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> parseLinks){
        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setClientGenerator(new HttpClientGenerator());
        downloader.setProxyProvider(new SiteConfigProxyProvider());
        HttpPage httpPage = downloader.download(crawlerRequestRecord.getHttpRequest(), crawlerRequestRecord.getHttpConfig());
        // Strip the JSONP wrapper: keep everything from the first '(' to the last ')'.
        String js = httpPage.getRawText().substring(httpPage.getRawText().indexOf("("), httpPage.getRawText().lastIndexOf(")")+1);
        Map<String,String> cookieMap = new HashMap<>();
        try {
            // 1. Generate the first (provisional) encrypted cookie.
            String eval = (String) scriptEngine.eval(js);
            eval =  eval.substring(eval.indexOf("=")+1, eval.indexOf(";"));// value between '=' and ';'
            cookieMap.put("__jsl_clearance_s",eval);
            log.info("xcar cookie second update result __jsl_clearance_s={}",eval);
            crawlerRequestRecord.getHttpRequest().addHeader("cookie",processCookie(cookieMap));

            // 2. Generate the second (final) encrypted cookie.
            httpPage = downloader.download(crawlerRequestRecord.getHttpRequest(), crawlerRequestRecord.getHttpConfig());
            Matcher mtAuthor = Pattern.compile("};go\\((.*?)\\)</script>").matcher(httpPage.getRawText());
            if (mtAuthor.find()){
                String cookieGen = mtAuthor.group(1);
                Json cookieGenJson = new Json(cookieGen);
                String ha = cookieGenJson.jsonPath($_type + ".ha").get();// which cipher script to use
                log.info("xcar cookie cipher code {}",ha);
                compiledScriptMap.get(ha).eval();
                Invocable invocable = (Invocable) compiledScriptMap.get(ha).getEngine();
                String result = (String) invocable.invokeFunction("go",JSON.parseObject(cookieGen, Map.class));
                result =  result.substring(result.indexOf("=")+1, result.indexOf(";"));
                cookieMap.put("__jsl_clearance_s",result);
                result = processCookie(cookieMap);
                updateLocalAuthInfo(result);
                log.info("xcar cookie second update result {}",result);
            }

        } catch (Exception e) {
            // Pass the exception itself so the stack trace is logged, not just the message.
            log.error("xcar cookie update error", e);
        }

    }
    /**
     * Serializes a cookie map into a "key=value;key=value" header string
     * (no trailing ';').
     *
     * @param cookieMap cookie names to values; may be empty
     * @return the joined cookie header value, or "" for an empty map
     */
    private String processCookie(Map<String, String> cookieMap) {
        // StringBuilder (unsynchronized) instead of StringBuffer; joining with a
        // leading separator also fixes the original's substring(0, -1) crash on
        // an empty map.
        StringBuilder cookieSB = new StringBuilder();
        for (Map.Entry<String, String> entry : cookieMap.entrySet()) {
            if (cookieSB.length() > 0) {
                cookieSB.append(";");
            }
            cookieSB.append(entry.getKey()).append("=").append(entry.getValue());
        }
        return cookieSB.toString();
    }
    /*
     * Compiles the site's cipher JS files (md5/sha1/sha256) once with the
     * Nashorn engine and caches them in compiledScriptMap for cookieUpdate.
     */
    private void initCompileScript() {
        if (compiledScriptMap == null) {
            compiledScriptMap = new HashMap<>();
        }

        ScriptEngineManager sm = new ScriptEngineManager();
        NashornScriptEngineFactory factory = null;
        for (ScriptEngineFactory f : sm.getEngineFactories()) {
            if (f.getEngineName().equalsIgnoreCase("Oracle Nashorn")) {
                factory = (NashornScriptEngineFactory) f;
                break;
            }
        }
        if (factory == null) {
            // Fail with a clear message instead of a bare NPE on the next line
            // (Nashorn is absent on JDK 15+).
            throw new IllegalStateException("Oracle Nashorn script engine is not available in this JVM");
        }
        // --global-per-engine shares one global scope across compiled scripts;
        // -doe presumably enables dump-on-error — TODO confirm against Nashorn docs.
        String[] stringArray = ArrayUtils.toArray("-doe", "--global-per-engine");
        scriptEngine = factory.getScriptEngine(stringArray);
        List<String> ciphers = Arrays.asList("md5", "sha1", "sha256");
        String filePathPrefix = "/data/chance_crawler_runner/domain/xcar/cipher_js/";
        for (String cipher : ciphers) {
            String filePath = filePathPrefix + cipher + ".js";
            log.info("xcar cipher js file path {}", filePath);
            // try-with-resources: the original leaked the FileReader.
            try (FileReader reader = new FileReader(filePath)) {
                compiledScriptMap.put(cipher, ((Compilable) scriptEngine).compile(reader));
            } catch (ScriptException | java.io.IOException e) {
                // Log via SLF4J instead of printStackTrace so failures reach the log files.
                log.error("xcar cipher js compile failed, path {}", filePath, e);
            }
        }
    }
}
