package com.chance.cc.crawler.development.scripts.sina.carSeriesInformation;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.development.scripts.allfeild.AICCommonField;
import com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;

public class SinaCarSeriesInformationCrawlerScript extends CrawlerCommonScript {
    private Logger log = LoggerFactory.getLogger(SinaCarSeriesInformationCrawlerScript.class);// logger
    private static final String DOMAIN = "sina";
    private static final String SITE = "carSeriesInformation";
    // NOTE(review): not referenced in the visible part of this file — confirm it is used elsewhere.
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";

    private static final String HomeRegulars = "https://db.auto.sina.com.cn";// car-series dynamics home page

    // news-list page, e.g. https://db.auto.sina.com.cn/news/982/?page=1
    private static final String InformationRegulars = "https://db.auto.sina.com.cn/news/\\S*/\\?page=\\d*";// news-list url pattern
    // article detail page samples:
    //    https://k.sina.com.cn/article_5771693183_158050c7f00100umds.html?from=auto
    //    https://t.cj.sina.com.cn/articles/view/6393807823/m17d19c3cf00101162w?from=auto
    //    https://auto.sina.com.cn/zz/hy/2021-08-12/detail-ikqciyzm0946824.shtml
    //    https://k.sina.com.cn/article_5662793920_1518760c002001cc93.html?from=auto&amp;subch=oauto
    private static final String ContentRegulars = "https://k.sina.com.cn/article_\\S*_\\S*.html\\?from=auto";// article detail url
    private static final String ContentTwoRegulars = "https://t.cj.sina.com.cn/articles/view/\\d*/\\S*\\?from=auto";// article detail url (t.cj host)
    private static final String ContentThreeRegulars = "https://auto.sina.com.cn/\\S*/\\S*/\\S*/\\S*.shtml";// article detail url (shtml)
    private static final String ContentFourRegulars = "https://k.sina.com.cn/article_\\S*_\\S*.html\\?from=auto\\S*";// article detail url with extra query params
    // comment-API samples:
    //    https://comment.sina.com.cn/page/info?channel=mp&newsid=6594024191-18908d2ff00100yqq0
    //    https://comment.sina.com.cn/page/info?channel=qc&newsid=comos-kqciyzi8214999
    //    https://comment.sina.com.cn/page/info?channel=mp&newsid=1916059077-7234bdc500100spki
    //    https://comment.sina.com.cn/page/info?channel=mp&newsid=2426410481-90a015f100100z2kj
    //    https://comment.sina.com.cn/page/info?channel=mp&newsid=5089299669-12f588cd50010193uk
    private static final String CommentRegulars = "https://comment.sina.com.cn/page/info\\?channel=\\S*&newsid=\\S*";// comment-API url
    // author weibo page, e.g. https://weibo.com/u/1735546403 (currently disabled)
//private static final String AuthorRegulars = "https://weibo.com/u/\\S*";// author page url

    @Override
    public String domain() {
        // Identifies which crawler domain this script belongs to ("sina").
        return DOMAIN;
    }

    @Override
    public void initUrlRegulars() {
        // Register every url pattern this script is allowed to process,
        // in the same order the original registrations were made.
        String[] regulars = {
                HomeRegulars,
                InformationRegulars,
                ContentRegulars,
                ContentTwoRegulars,
                ContentThreeRegulars,
                ContentFourRegulars,
                CommentRegulars,
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Accept only records whose "site" business tag matches this script's site.
        String siteTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        return StringUtils.equals(SITE, siteTag);
    }

    // Pre-request hook: harvest car-series keywords from every support record whose url
    // contains "keys" and stash them on the main record. Never emits extra links itself.
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord crawlerRequestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        supportSourceRecords.stream()
                .filter(support -> support.getHttpRequest().getUrl().contains("keys"))
                .forEach(support -> initKeyWord(crawlerRequestRecord, support));
        return new ArrayList<>();
    }

    // Extracts the per-car-series keys from a pre-downloaded keyword page and stores
    // them on the main record under the "keys" business tag.
    private void initKeyWord(CrawlerRequestRecord crawlerRequestRecord, CrawlerRequestRecord supportSourceRecord) {
        HttpPage keywordPage = supportSourceRecord.getInternalDownloadPage();
        Json json = keywordPage.getJson();
        // The API reports success via a "msg" field; anything else means the download failed.
        if (!"success".equals(json.jsonPath($_type + ".msg").get())) {
            log.error("keyword page [{}] download error!", keywordPage.getRequest().getUrl());
            return;
        }
        List<String> keys = new ArrayList<>();
        for (String entry : json.jsonPath($_type + ".content").all()) {
            // Each content entry is a JSON object carrying one series keyword.
            keys.add(JSONObject.parseObject(entry).getString("keyword"));
        }
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("keys", keys);
    }

    /**
     * Routes a downloaded page to the matching link parser.
     * Home page -> per-series list requests; news-list page -> detail/comment requests.
     * On a failed download, schedules a retry and suppresses washing.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>(); // was a raw ArrayList
        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // Verify the page actually loaded before parsing anything.
        if (page.getStatusCode() != 200 || (!page.isDownloadSuccess())) {
            log.error("download page url == {} error status is {}", requestUrl, page.getStatusCode());
            // Re-queue the record for another download attempt.
            this.requestAgainCrawlerRecord(parsedLinks, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        if (requestUrl.matches(HomeRegulars)) {
            homeRequest(crawlerRecord, page, parsedLinks);// parse car-series dynamics home page
        } else if (requestUrl.matches(InformationRegulars)) {
            informationRequest(crawlerRecord, page, parsedLinks);// parse the article list page
        }
        return parsedLinks;
    }


    // Parses the car-series dynamics home page: builds one news-list request (page 1)
    // per car-series key previously stashed by initKeyWord().
    private void homeRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {
        KVTag keys = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("keys");
        List<String> name = (List<String>) keys.getVal();
        for (int i = 0; i < name.size(); i++) {//name.size()
            String url = "https://db.auto.sina.com.cn/news/" + name.get(i).trim()+ "/?page=1";// first news-list page of this series
            // NOTE(review): these tags are rewritten on the shared parent record every iteration,
            // so each copyBizTags() below snapshots the current series — confirm this is intended.
            crawlerRecord.tagsCreator().bizTags().addCustomKV("carSeriesUrl",url);
            crawlerRecord.tagsCreator().bizTags().addCustomKV("keyUrl",name.get(i).trim());
            CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .releaseTime(Long.valueOf(System.currentTimeMillis()))
                    .copyBizTags()
                    .copyResultTags()
                    .httpHead("Connection", "keep-alive")
                    .httpHead("Upgrade-Insecure-Requests", "1")
                    .httpHead("User-Agent", getRandomUA())
                    .httpHead("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9")
                    .httpHead("Referer", url)
                    .httpHead("Accept-Language", "zh-CN,zh;q=0.9")
                    .httpHead("Cookie", "SINAGLOBAL=180.164.54.239_1626685948.636541; name=sinaAds; post=massage; page=23333; Apache=180.164.52.239_1630392143.142991; ULV=1630392176155:4:2:2:180.164.52.239_1630392143.142991:1630392142856; UM_distinctid=17b9af31c4b3bd-0ae049c4650c0c-6950762d-144000-17b9af31c4c2f0; __gads=ID=4ed28e14974db8fe-22db5a013ccb0075:T=1630392230:RT=1630392230:S=ALNI_Ma6OpsRdt7lLkyVG6naN-tJJZq74A; FSINAGLOBAL=180.164.54.239_1626685948.636541; __gpi=00000000-0000-0000-0000-000000000000&c2luYS5jb20uY24=&Lw==; UOR=,cj.sina.com.cn,spr_auto_trackid_1e17744b130ea936:1630425828532; U_TRS1=000000ef.b1724961.612ede0c.055fc7cd; U_TRS2=000000ef.b1864961.612ede0c.16eb677e; SGUID=1630481829906_54077859; ULOGIN_IMG=gz-6864520c90be563e84c3cdaab3b59f309083; SCF=Ajdx7oujlT4HS1U9AjavkE08oZYWs9_6O_Z06xYdvqY8opS6PpV2fQOwMBZBh2RptA_eza0fm9kv2XItNsU5nvY.; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9WWJxB3wLxgJvq5ugU3n4Xai5NHD95Qceh.EShM71hepWs4DqcjVTCH8SCHWSF-RxbH8SCHW1F-ReCH8SCHWSFHFSEH8SC-4BEHFeFH8SE-4SE-4xBtt; historyNum=4373; NowDate=Thu Sep 23 2021 10:32:01 GMT+0800 (ä¸\u00ADå›½æ ‡å‡†æ—¶é—´); ALF=1663900418; SUB=_2A25MT5fUDeRhGeBO4lAV9y7EyD2IHXVvszmcrDV_PUJbm9AfLWPkkW1NRYLLCV2s4gAAe7AqAGIbNuaxthjtT3_z")
                    .httpHead("Cache-Control", "max-age=0")
                    .build();
            parsedLinks.add(record);
        }

    }


    // Parses a news-list page: schedules the next list page (turn-page request), one
    // detail-page request per article, and a comment-API request derived from each
    // article url pattern.
    private void informationRequest(CrawlerRequestRecord crawlerRecord, HttpPage page, List<CrawlerRequestRecord> parsedLinks) {

        if (page.getRequest().getUrl().matches(InformationRegulars)) {
            String url = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("carSeriesUrl");
            String keyUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("keyUrl");
            if (keyUrl==null){
                // Tags missing (record not built by homeRequest): recover the series key
                // from the request url itself — ".../news/<key>/?page=N".
                url=page.getRequest().getUrl();
                String[] split = url.split("/");
                keyUrl =split[split.length-2];
            }
            String[] uid = url.replaceAll("page=","/").split("/");// last segment is the current page number
            String nextPageUrl = "https://db.auto.sina.com.cn/news/" + keyUrl + "/?page=" + (Integer.parseInt(uid[uid.length-1])+1);
            CrawlerRequestRecord turnPageRequest = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .recordKey(nextPageUrl)
                    .httpUrl(nextPageUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .copyResultTags()
                    .httpHead("authority", "db.auto.sina.com.cn")
                    .httpHead("cache-control", "max-age=0")
                    .httpHead("sec-ch-ua", "\"Google Chrome\";v=\"93\", \" Not;A Brand\";v=\"99\", \"Chromium\";v=\"93\"")
                    .httpHead("sec-ch-ua-mobile", "?0")
                    .httpHead("upgrade-insecure-requests", "1")
                    .httpHead("user-agent", getRandomUA())
                    .httpHead("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9")
                    .httpHead("sec-fetch-site", "none")
                    .httpHead("sec-fetch-mode", "navigate")
                    .httpHead("sec-fetch-user", "?1")
                    .httpHead("sec-fetch-dest", "document")
                    .httpHead("accept-language", "zh-CN,zh;q=0.9")
                    .httpHead("cookie", "SINAGLOBAL=180.164.54.239_1626685948.636541; name=sinaAds; post=massage; page=23333; Apache=180.164.52.239_1630392143.142991; ULV=1630392176155:4:2:2:180.164.52.239_1630392143.142991:1630392142856; UM_distinctid=17b9af31c4b3bd-0ae049c4650c0c-6950762d-144000-17b9af31c4c2f0; __gads=ID=4ed28e14974db8fe-22db5a013ccb0075:T=1630392230:RT=1630392230:S=ALNI_Ma6OpsRdt7lLkyVG6naN-tJJZq74A; FSINAGLOBAL=180.164.54.239_1626685948.636541; U_TRS1=000000ef.b1724961.612ede0c.055fc7cd; U_TRS2=000000ef.b1864961.612ede0c.16eb677e; SGUID=1630481829906_54077859; ULOGIN_IMG=gz-6864520c90be563e84c3cdaab3b59f309083; SCF=Ajdx7oujlT4HS1U9AjavkE08oZYWs9_6O_Z06xYdvqY8opS6PpV2fQOwMBZBh2RptA_eza0fm9kv2XItNsU5nvY.; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9WWJxB3wLxgJvq5ugU3n4Xai5NHD95Qceh.EShM71hepWs4DqcjVTCH8SCHWSF-RxbH8SCHW1F-ReCH8SCHWSFHFSEH8SC-4BEHFeFH8SE-4SE-4xBtt; ALF=1663900418; SUB=_2A25MT5fUDeRhGeBO4lAV9y7EyD2IHXVvszmcrDV_PUJbm9AfLWPkkW1NRYLLCV2s4gAAe7AqAGIbNuaxthjtT3_z; __gpi=00000000-0000-0000-0000-000000000000; UOR=,cj.sina.com.cn,; historyNum=4373; NowDate=Fri Oct 15 2021 16:01:17 GMT+0800 (ä¸\u00ADå›½æ ‡å‡†æ—¶é—´)")
                    .build();
            parsedLinks.add(turnPageRequest);
        }

//        Detail-page parsing: every article card on the list page.
        List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"article_imgstr\"]").nodes();// all article cards
        for (Selectable node : nodes) {
            String contentUrl = node.xpath(".//div[@class=\"article_tit newtit\"]//a/@href").get();// detail-page url
            // NOTE(review): this overwrites Field_Urls on the shared record each iteration —
            // the comment washers later read it back; confirm copyBizTags() snapshots it in time.
            crawlerRecord.tagsCreator().bizTags().addCustomKV(AICCommonField.Field_Urls, contentUrl);
            String pubTime = node.xpath(".//span[@class=\"d\"]/text()").get();// publish date (yyyy-MM-dd)

            try {
                CrawlerRequestRecord contentRecord = CrawlerRequestRecord.builder()// request for the article detail page
                        .itemPageRequest(crawlerRecord)
                        .recordKey(contentUrl)
                        .httpUrl(contentUrl)
                        .releaseTime(DateUtils.parseDate(pubTime, "yyyy-MM-dd").getTime())
                        .copyBizTags()
                        .copyResultTags()
                        .needWashed(true)
                        .build();
                parsedLinks.add(contentRecord);

                // Derive the comment-API url from whichever detail-url pattern matched.
                String[] urlId = new String[2];
                String commetUrl = null;
                if (contentUrl.matches(ContentRegulars) || contentUrl.matches(ContentFourRegulars)) {
                    urlId = contentUrl.replaceAll("https://k.sina.com.cn/article_", "").replaceAll(".html\\?from=auto", "_").trim().split("_");// id segments of the url
                    commetUrl = "https://comment.sina.com.cn/page/info?channel=mp&newsid=" + urlId[0] + "-" + urlId[1];
                }
                if (contentUrl.matches(ContentTwoRegulars)) {
                    urlId = contentUrl.replaceAll("https://t.cj.sina.com.cn/articles/view/", "").replaceAll("\\?from=auto", "").trim().split("/");
                    commetUrl = "https://comment.sina.com.cn/page/info?channel=mp&newsid=" + urlId[0] + "-" + urlId[1];
                }

                if (contentUrl.matches(ContentThreeRegulars)) {
                    urlId = contentUrl.replaceAll("detail-i", " ").replaceAll(".shtml", "").trim().split(" ");
                    commetUrl = "https://comment.sina.com.cn/page/info?channel=qc&newsid=comos-" + urlId[1];

                }
//                e.g. https://comment.sina.com.cn/page/info?channel=mp&newsid=6594024191-18908d2ff00100yqq0
                    if (commetUrl != null) {
                        // Carry the comment-filter info over to the comment request.
                        KVTag filterInfoTag = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
                        CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
                        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()// request for the comment API
                                .itemPageRequest(crawlerRecord)
                                .recordKey(commetUrl)
                                .httpUrl(commetUrl)
                                .releaseTime(System.currentTimeMillis())
                                .notFilterRecord()
                                .copyBizTags()
                                .copyResultTags()
                                .needWashed(true)
                                .build();
                        commentRecord.setFilter(filterInfoRecord.getFilter());
                        commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
                        parsedLinks.add(commentRecord);
                    }
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Dispatches a downloaded page to the matching washers.
     * Article detail pages produce article data plus an article-link record;
     * comment-API pages produce comments, replies, and interaction counts.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> washed = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();
        boolean isArticlePage = url.matches(ContentRegulars)
                || url.matches(ContentTwoRegulars)
                || url.matches(ContentThreeRegulars)
                || url.matches(ContentFourRegulars);
        if (isArticlePage && crawlerRecord.tagsCreator().resultTags().hasDataType(article)) {
            washed.addAll(articleWash(crawlerRecord, page));     // full article body
            washed.addAll(articleListWash(crawlerRecord, page)); // article link record
        }
        if (url.matches(CommentRegulars)) {
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(comment)) {
                washed.addAll(commentWash(crawlerRecord, page)); // first-level comments
                washed.addAll(replyWash(crawlerRecord, page));   // replies to comments
            }
            if (crawlerRecord.tagsCreator().resultTags().hasDataType(interaction)) {
                washed.addAll(commentInteractionWash(crawlerRecord, page)); // like counts
            }
        }
        return washed;
    }

    // Washes an article detail page into a lightweight "article link" record routed to
    // the "redis" pipeline (the full article body goes to kafka via articleWash).
    public List<CrawlerData> articleListWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        log.info("清洗链接");
        String url = page.getRequest().getUrl();// article url
        // Id segments of the url; regex dots escaped so ".html" is matched literally.
        String[] urlId = url.replaceAll("https://k\\.sina\\.com\\.cn/article_", "").replaceAll("\\.html\\?from=auto&amp;subch=oauto", "").replaceAll("\\.html\\?from=auto", "").trim().split("_");
        try {
            long time = 0;
            String releaseTime = page.getHtml().xpath("//span[@class=\"date\"]").get();// publish time
            if (releaseTime == null) {
                // weibo-style layout: "<weekday> yyyy/MM/dd HH:mm" — keep the date and time parts.
                String weiboTime = page.getHtml().xpath("//span[@class=\"weibo_time_day\"]").get();
                if (weiboTime != null) {
                    String[] parts = weiboTime.split(" ");
                    releaseTime = parts[1] + " " + parts[2];
                }
                if (releaseTime == null) {
                    // No timestamp anywhere on the page: bail out instead of NPE-ing in parseDate.
                    log.error("no release time found for url {}", url);
                    return crawlerArticleDataList;
                }
                time = DateUtils.parseDate(releaseTime, "yyyy/MM/dd HH:mm").getTime();
            } else {
                if (url.matches(ContentTwoRegulars)) {
                    // t.cj.sina.com.cn pages carry a Chinese-format date. The old code parsed the
                    // numeric format first, which always threw and silently dropped these pages.
                    time = DateUtils.parseDate(releaseTime, "yyyy年MM月dd日 HH:mm").getTime();
                    urlId = url.replaceAll("https://t\\.cj\\.sina\\.com\\.cn/articles/view/", "").replaceAll("\\.html\\?from=auto&amp;subch=oauto", "").replaceAll("\\?from=auto", "").trim().split("/");
                } else {
                    time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                }
                if (url.matches(ContentThreeRegulars)) {
                    urlId = url.replaceAll("\\.shtml", "/").trim().split("/");
                }
            }
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), urlId[urlId.length - 1]))
                    .resultLabelTag(article)
                    .url(url)
                    .releaseTime(time)
                    .addContentKV("releaseTimeToLong", time + "")
                    .addContentKV("articleUrl", url)
                    .addContentKV(AICCommonField.Field_Author, "资讯")
                    .flowInPipelineTag("redis")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }

    // Washes an article detail page (title, author, tags, body text, images, publish
    // time) into an article CrawlerData routed to the "kafka" pipeline.
    public List<CrawlerData> articleWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        log.info("清洗文章");
        try {
            String url = page.getRequest().getUrl();
            String title = page.getHtml().xpath("//h1/text()").get();// article title
            // Author name/url: regular article layout first, weibo-style layout as fallback.
            String authorName = page.getHtml().xpath("//a[@class=\"source ent-source\"]/text()").get();
            if (authorName == null) {
                authorName = page.getHtml().xpath("//h2[@class=\"weibo_user\"]/text()").get();
            }
            String authorUrl = page.getHtml().xpath("//a[@class=\"source ent-source\"]/@href").get();
            if (authorUrl == null) {
                authorUrl = page.getHtml().xpath("//a[@class=\"weibo_info look_info\"]/@href").get();
            }
            // Guard: some layouts expose no author link at all — previously this NPE'd and the
            // whole article was dropped; now the article is kept with an empty author id.
            String authorId = "";
            if (authorUrl != null) {
                // First digit-run in the author url is the numeric author id.
                authorId = Pattern.compile("[^0-9]").matcher(authorUrl).replaceAll(" ").replaceAll("\\s+", " ").trim().split(" ")[0];
            }

            List<String> tag = page.getHtml().xpath("//div[@class=\"keywords\"]//a/text()").all();// keyword tags
            StringBuilder tags = new StringBuilder();// space-joined tags
            for (String text : tag) {
                tags.append(text).append(" ");
            }
            List<String> texts = page.getHtml().xpath("//div[@class=\"article clearfix\"]//p//text()").all();// body paragraphs
            StringBuilder contents = new StringBuilder();// space-joined body text
            for (String text : texts) {
                contents.append(text).append(" ");
            }
            List<String> picture = page.getHtml().xpath("//div[@class=\"img_wrapper\"]//img/@src").all();// image urls
            StringBuilder pictures = new StringBuilder();
            for (String text : picture) {
                // NOTE(review): "\0x1" is NUL + "x1" — looks like it was meant to be "\u0001".
                // Kept byte-identical because downstream consumers may rely on this separator.
                pictures.append(text).append("\0x1");
            }

            // Id segments of the url; regex dots escaped so ".html" is matched literally.
            String[] urlId = url.replaceAll("https://k\\.sina\\.com\\.cn/article_", "").replaceAll("\\.html\\?from=auto&amp;subch=oauto", "").replaceAll("\\.html\\?from=auto", "").trim().split("_");
            long time = 0;
            String releaseTime = page.getHtml().xpath("//span[@class=\"date\"]").get();// publish time
            if (releaseTime == null) {
                // weibo-style layout: "<weekday> yyyy/MM/dd HH:mm" — keep the date and time parts.
                String weiboTime = page.getHtml().xpath("//span[@class=\"weibo_time_day\"]").get();
                if (weiboTime != null) {
                    String[] parts = weiboTime.split(" ");
                    releaseTime = parts[1] + " " + parts[2];
                }
                if (releaseTime == null) {
                    // No timestamp anywhere on the page: bail out instead of NPE-ing in parseDate.
                    log.error("no release time found for url {}", url);
                    return crawlerArticleDataList;
                }
                time = DateUtils.parseDate(releaseTime, "yyyy/MM/dd HH:mm").getTime();
            } else {
                if (url.matches(ContentTwoRegulars)) {
                    // t.cj.sina.com.cn pages carry a Chinese-format date. The old code parsed the
                    // numeric format first, which always threw and silently dropped these articles.
                    time = DateUtils.parseDate(releaseTime, "yyyy年MM月dd日 HH:mm").getTime();
                    urlId = url.replaceAll("https://t\\.cj\\.sina\\.com\\.cn/articles/view/", "").replaceAll("\\.html\\?from=auto&amp;subch=oauto", "").replaceAll("\\?from=auto", "").trim().split("/");
                } else {
                    time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                }
                if (url.matches(ContentThreeRegulars)) {
                    urlId = url.replaceAll("\\.shtml", "/").trim().split("/");
                }
            }

            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), urlId[urlId.length - 1]))
                    .resultLabelTag(article)
                    .url(url)
                    .releaseTime(time)
                    .addContentKV(AICCommonField.Field_Content, contents.toString().trim())
                    .addContentKV(AICCommonField.Field_Title, title)
                    .addContentKV(AICCommonField.Field_Author, authorName)
                    .addContentKV(AICCommonField.Field_Author_Id, authorId)
                    .addContentKV(AICCommonField.Field_Images, pictures.toString().trim())
                    .addContentKV(AutoVMCommonField.Tag_Field_Topic_Type, tags.toString().trim())
                    .flowInPipelineTag("kafka")
                    .build();
            crawlerData.setFilterPipelineResult(true);
            crawlerArticleDataList.add(crawlerData);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return crawlerArticleDataList;
    }

    // Washes first-level comments from the comment-API JSON into CrawlerData records
    // parented to the article derived from the stashed article url.
    private Collection<? extends CrawlerData> commentWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> washedComments = new ArrayList<>();
        log.info("清洗评论");
        String totalStr = page.getJson().jsonPath($_type + "..total").get();
        int total = Integer.parseInt(totalStr == null ? "0" : totalStr);
        if (total > 0) {
            for (Selectable node : page.getJson().jsonPath($_type + "..cmntlist[*]").nodes()) {
                try {
                    Json nodeJson = new Json(node.get());
                    String content = nodeJson.jsonPath(".content").get();  // comment text
                    String releaseTime = nodeJson.jsonPath(".time").get(); // post time
                    long time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                    String user = nodeJson.jsonPath(".nick").get();        // commenter nickname
                    String userId = nodeJson.jsonPath(".uid").get();       // commenter id
                    String commentId = nodeJson.jsonPath(".mid").get();    // comment id

                    // Article url carried over from the list page; its id segment is the parent id.
                    String url = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
                    String[] urlId = url.replaceAll("https://k.sina.com.cn/article_", "").replaceAll(".html\\?from=auto&amp;subch=oauto", "").replaceAll(".html\\?from=auto", "").trim().split("_");
                    if (url.matches(ContentTwoRegulars)) {
                        urlId = url.replaceAll("https://t.cj.sina.com.cn/articles/view/", "").replaceAll(".html\\?from=auto&amp;subch=oauto", "").replaceAll("\\?from=auto", "").trim().split("/");
                    }
                    if (url.matches(ContentThreeRegulars)) {
                        urlId = url.replaceAll(".shtml", "/").trim().split("/");
                    }
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), urlId[urlId.length - 1]))
                            .resultLabelTag(comment)
                            .url(url)
                            .releaseTime(time)
                            .addContentKV(AICCommonField.Field_Content, content)
                            .addContentKV(AICCommonField.Field_Author, user)
                            .addContentKV(AICCommonField.Field_Author_Id, userId)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    washedComments.add(crawlerData);
                } catch (Exception e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
        return washedComments;
    }

    // Washes per-comment like counts ("rank") into interaction CrawlerData records,
    // each parented to the comment it belongs to.
    private Collection<? extends CrawlerData> commentInteractionWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("清洗互动量");
        List<CrawlerData> washedInteractions = new ArrayList<>();
        String totalStr = page.getJson().jsonPath($_type + "..total").get();
        int total = Integer.parseInt(totalStr == null ? "0" : totalStr);
        if (total > 0) {
            for (Selectable node : page.getJson().jsonPath($_type + "..cmntlist[*]").nodes()) {
                try {
                    Json nodeJson = new Json(node.get());
                    String like = nodeJson.jsonPath(".rank").get();        // like count
                    String releaseTime = nodeJson.jsonPath(".time").get(); // comment time
                    long time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                    String commentId = nodeJson.jsonPath(".mid").get();    // comment id
                    // Article url stashed on the record by the list-page parser.
                    String articleUrl = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);

                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), interaction.enumVal(), commentId))
                            .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                            .resultLabelTag(interaction)
                            .url(articleUrl)
                            .releaseTime(time)
                            .addContentKV(AICCommonField.Field_I_Likes, like)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .flowInPipelineTag("kafka")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    washedInteractions.add(crawlerData);
                } catch (Exception e) {
                    log.error(e.getMessage(), e);
                }
            }
        }
        return washedInteractions;
    }

    //  Wash comment replies (评论回复): extract reply records from the cmntlist payload.
    private Collection<? extends CrawlerData> replyWash(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        log.info("清洗回复");
        List<CrawlerData> crawlerArticleDataList = new ArrayList<>();
        // Total reply count; a missing field is treated as zero so we skip the node scan.
        String totalStr = page.getJson().jsonPath($_type + "..total").get();
        if (totalStr == null) {
            totalStr = "0";
        }
        int total = Integer.parseInt(totalStr);
        if (total > 0) {
            List<Selectable> nodes = page.getJson().jsonPath($_type + "..cmntlist[*]").nodes();
            for (Selectable node : nodes) {
                // Parse each node's JSON once instead of re-parsing it for every field read.
                Json nodeJson = new Json(node.get());
                String userId2 = nodeJson.jsonPath(".parent_uid").get();// id of the user whose comment is being replied to
                // Only entries with a parent_uid are replies; plain comments are washed elsewhere.
                if (StringUtils.isNotEmpty(userId2)) {
                    try {
                        String content1 = nodeJson.jsonPath(".content").get();// reply content
                        String releaseTime = nodeJson.jsonPath(".time").get();// reply time
                        long time = DateUtils.parseDate(releaseTime, "yyyy-MM-dd HH:mm:ss").getTime();
                        String user1 = nodeJson.jsonPath(".nick").get();// replying user
                        String userId1 = nodeJson.jsonPath(".uid").get();// replying user id
                        String commentId1 = nodeJson.jsonPath(".mid").get();// reply comment id
                        String commentId2 = nodeJson.jsonPath(".parent").get();// original comment id
                        String commentId = commentId1 + commentId2;// composite id: reply id + parent id
                        String user2 = nodeJson.jsonPath(".parent_nick").get();// user of the original comment
                        String content = "回复：" + content1;
                        String user = "回复：" + user1 + "原评论：" + user2;
                        String userId = "回复：" + userId1 + "原评论：" + userId2;

                        // The article url comes from the record's business tags, not from the reply node.
                        String url = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(AICCommonField.Field_Urls);
                        // Strip the host/suffix parts and split to recover the article key; the last
                        // segment is used as the parent id below.
                        // NOTE(review): these replaceAll patterns leave '.' unescaped, so ".html" also
                        // matches any-char + "html"; appears harmless for these fixed urls but worth confirming.
                        String[] urlId = url.replaceAll("https://k.sina.com.cn/article_", "").replaceAll(".html\\?from=auto&amp;subch=oauto", "").replaceAll(".html\\?from=auto", "").trim().split("_");
                        if (url.matches(ContentTwoRegulars)) {
                            urlId = url.replaceAll("https://t.cj.sina.com.cn/articles/view/", "").replaceAll(".html\\?from=auto&amp;subch=oauto", "").replaceAll("\\?from=auto", "").trim().split("/");
                        }
                        if (url.matches(ContentThreeRegulars)) {
                            urlId = url.replaceAll(".shtml", "/").trim().split("/");
                        }
                        CrawlerData crawlerData = CrawlerData.builder()
                                .data(crawlerRecord, page)
                                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), comment.enumVal(), commentId))
                                .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(), article.enumVal(), urlId[urlId.length - 1]))
                                .resultLabelTag(comment)
                                .url(url)
                                .releaseTime(time)
                                .addContentKV(AICCommonField.Field_Content, content)
                                .addContentKV(AICCommonField.Field_Author, user)
                                .addContentKV(AICCommonField.Field_Author_Id, userId)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .flowInPipelineTag("kafka")
                                .build();
                        crawlerData.setFilterPipelineResult(true);
                        crawlerArticleDataList.add(crawlerData);
                    } catch (Exception e) {
                        // Skip a malformed reply node but keep washing the rest.
                        log.error(e.getMessage(), e);
                    }
                }
            }
        }
        return crawlerArticleDataList;
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally empty: this script needs no post-execution cleanup or bookkeeping.
    }

    /*
     * Re-enqueue a failed record for another download attempt, giving up after 50 retries.
     * The retry count is carried on the record's business tags under REQUEST_AGAIN_TAG.
     * */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 50) {
                // Was "autohome" — copy/paste from another script; this is the sina crawler.
                log.error("sina download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        CrawlerRequestRecord crawlerRequestRecord = null;
        // The label decides whether this record re-enters as a turn-page or an item-page request.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)// suffix with the pre-increment retry count to keep keys distinct per attempt
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .recordKey(crawlerRecord.getRecordKey() + count)// suffix with the pre-increment retry count to keep keys distinct per attempt
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Carry over wash/parse flags and the original request's headers/extras unchanged.
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        // Store the incremented retry count on the new record's tags.
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    // Pool of User-Agent strings for request rotation. final: the reference must never
    // be reassigned; presized to the number of entries added below.
    private static final List<String> agentList = new ArrayList<>(33);

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Returns a User-Agent chosen uniformly at random from {@code agentList}.
     *
     * <p>Fix: commons-lang3 {@code RandomUtils.nextInt(start, end)} treats {@code end}
     * as exclusive, so the previous bound of {@code agentList.size() - 1} could never
     * select the last entry in the pool.
     */
    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

}
