package com.chance.cc.crawler.development.scripts.baidu;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.CategoryTag;
import com.chance.cc.crawler.core.tags.KVTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.ParseException;
import java.util.*;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;

/**
 * @author lt
 * @version 1.0
 * @date 2021-04-13 16:47:52
 * @email okprog@sina.com
 */
public class BaiDuYDZXCrawlerScript extends CrawlerCommonScript {

    private static Logger logger = LoggerFactory.getLogger(BaiDuYDZXCrawlerScript.class);

    // Crawler domain identifier; also embedded in the keyword-service URL pattern below.
    public static final String domain = "baidu";
    // Site tag this script serves; compared case-insensitively in crawlerCheck().
    private static final String scriptSite = "yidianzixun";

    // URL patterns recognised by this script; all are registered in initUrlRegulars().
    public static final String indexRegex = "https://www\\.baidu\\.com/";
    public static final String keysRegex = "https?://\\S*v1/meta/" + domain + "/keys\\S*";
    public static final String searchKwListUrlRegex = "https://www\\.baidu\\.com/s.*";
    public static final String BaiDuItemUrlRegex = "https?://www\\.baidu\\.com/link\\S*";
    // NOTE(review): the comment endpoints live on coral.qq.com (Tencent's comment service);
    // confirm this is intentional for yidianzixun articles reached through Baidu search.
    public static final String commentUrlRegex = "https://coral\\.qq\\.com/article/\\d*/comment/v2\\S*";

    // Template for Baidu site-search list pages; #keyword/#pn/#sTime/#eTime are placeholders
    // substituted in prepareRequest()/parseListLinks(). Search is restricted to yidianzixun.com.
    public static final String searchKwListUrlFormat = "https://www.baidu.com/s?wd=#keyword&oq=#keyword&pn=#pn&ct=2097152&si=yidianzixun.com&gpc=stf=#sTime%2C#eTime%7Cstftype=2";
    // Template for comment-list pages: %s slots are (article/comment id, page cursor).
    public static final String commentUrlFormat = "https://coral.qq.com/article/%s/comment/v2?orinum=20&oriorder=u&pageflag=%s&scorecursor=0&source=73";

    /**
     * Build Baidu site-search list requests from the keyword service response.
     * <p>
     * When the first supporting record is a keyword-service URL ({@link #keysRegex}), every
     * keyword in its JSON payload is expanded into a search-list URL restricted to
     * yidianzixun.com and to the configured dateRange window. Falls back to the superclass
     * default when no keywords can be derived.
     *
     * @param requestRecord        triggering record; supplies the dateRange filter and tags
     * @param supportSourceRecords supporting records; index 0 is expected to carry the
     *                             downloaded keyword-service payload
     * @return one list record per keyword, or the superclass default when none were built
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.size() < 1) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        CrawlerRequestRecord keywordRecord = supportSourceRecords.get(0);
        String keywordUrl = keywordRecord.getHttpRequest().getUrl();
        if (keywordUrl.matches(keysRegex)) {
            try {
                int hour = 0;
                List<FilterInfo> filterInfos = requestRecord.getFilterInfos();
                for (FilterInfo filterInfo : filterInfos) {
                    if (filterInfo.getFilter().equals(CrawlerEnum.CrawlerRecordFilter.dateRange)){
                        hour = filterInfo.getHourFromNow();
                    }
                }
                if (hour == 0){
                    logger.error("get hour from now is empty, can not init search url");
                    return super.prepareRequest(requestRecord, supportSourceRecords);
                }
                long timeMillis = System.currentTimeMillis();
                // 60L forces long arithmetic: "hour * 60 * 60 * 1000" overflowed int once hour >= 597.
                long sTimeMillis = timeMillis - (hour * 60L * 60 * 1000);
                // Baidu's stf filter expects unix SECONDS; dividing is equivalent to (and safer
                // than) taking substring(0, 10) of the 13-digit millisecond string.
                String startTime = String.valueOf(sTimeMillis / 1000);
                String endTime = String.valueOf(timeMillis / 1000);
                JSONObject jsonObject = JSONObject.parseObject(keywordRecord.getInternalDownloadPage().getRawText());
                if (jsonObject.getIntValue("status") == 0) {
                    JSONArray objects = jsonObject.getJSONArray("content");
                    for (Object object : objects) {
                        String keyword = ((JSONObject) object).getString("keyword");
                        // replace(), not replaceAll(): keywords may contain regex/replacement
                        // metacharacters ('$', '\') that would corrupt the URL or throw.
                        // String.replace still substitutes every "#keyword" occurrence (wd and oq).
                        String listUrl = searchKwListUrlFormat.replace("#keyword",keyword).replace("#pn","0").replace("#sTime",startTime).replace("#eTime",endTime);
                        CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                                .turnPageRequest(requestRecord)
                                .httpUrl(listUrl)
                                .recordKey(listUrl)
                                .releaseTime(System.currentTimeMillis())
                                .notFilterRecord()
                                .copyBizTags()
                                .build();
                        listRecord.tagsCreator().bizTags().addKeywords(keyword);
                        // startTime/endTime ride along in extras so parseListLinks can rebuild
                        // the next page URL with the same time window.
                        listRecord.getHttpRequest().addExtra("startTime",startTime);
                        listRecord.getHttpRequest().addExtra("endTime",endTime);
                        listRecord.getHttpRequest().addHeader("Host","www.baidu.com");
                        listRecord.getHttpRequest().addHeader("User-Agent",getRandomUA());

                        allItemRecords.add(listRecord);
                    }
                }
            } catch (Exception e) {
                logger.error("from keywords init urls failed");
                logger.error(e.getMessage(), e);
            }
        }
        if (allItemRecords.isEmpty()) {
            return super.prepareRequest(requestRecord, supportSourceRecords);
        }
        return allItemRecords;
    }

    /**
     * Route a downloaded page to the matching link parser, or re-queue it on failure.
     * <p>
     * Failed downloads are retried up to 10 times, counted via a "downloadTimes" KV tag on the
     * record's business tags; the counter is cleared once a download succeeds.
     *
     * @param crawlerRequestRecord the record whose request produced {@code httpPage}
     * @param httpPage             the downloaded page
     * @return follow-up request records; never null
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        HttpRequest lastRequest = crawlerRequestRecord.getHttpRequest();
        if (404 == httpPage.getStatusCode()){
            logger.info("status code is 404");
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        if (doHttpPageCheck(crawlerRequestRecord,httpPage)){
            // Broken download: bump the retry counter and re-queue this same record.
            CrawlerBusinessTags crawlerBusinessTags = crawlerRequestRecord.tagsCreator().bizTags();
            String downloadTimes = crawlerBusinessTags.getCategoryTag().getKVTagStrVal("downloadTimes");
            if (StringUtils.isBlank(downloadTimes)){
                crawlerBusinessTags.addCustomKV("downloadTimes",1);
            }else {
                int times = Integer.parseInt(downloadTimes);
                crawlerBusinessTags.addCustomKV("downloadTimes",times + 1);
                if (times > 10){
                    logger.error("link download too many times");
                    return parsedLinks;
                }
            }
            crawlerRequestRecord.setNeedWashPage(false);
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
            parsedLinks.add(crawlerRequestRecord);
            return parsedLinks;
        }
        // Successful download: reset the retry counter before dispatching.
        crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove("downloadTimes");

        String lastRequestUrl = lastRequest.getUrl();
        if (lastRequestUrl.matches(searchKwListUrlRegex)){
            return parseListLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(BaiDuItemUrlRegex)){
            return parseItemLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        if (lastRequestUrl.matches(commentUrlRegex)){
            return parseCommentLinks(crawlerRequestRecord,httpPage,parsedLinks);
        }
        // Unrecognised URL: return the empty list rather than null so callers can iterate
        // safely (all other exit paths of this method already return a list).
        return parsedLinks;
    }

    /**
     * Parse a comment-API response and, when a full page (20 comments) came back, queue the
     * next comment page by advancing the "pageflag" cursor by 20.
     *
     * @param crawlerRequestRecord record for the comment request just downloaded
     * @param httpPage             downloaded JSON page
     * @param parsedLinks          accumulator for follow-up records (returned as-is)
     * @return {@code parsedLinks}, possibly extended with the next comment-page record
     */
    private List<CrawlerRequestRecord> parseCommentLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        String rawText = httpPage.getRawText();
        JSONObject pageObj = JSONObject.parseObject(rawText);
        try {
            JSONObject dataObj = pageObj.getJSONObject("data");
            int commentCount = dataObj.getIntValue("oriretnum");
            if (commentCount > 0 && null != urlParams){
                crawlerRequestRecord.setNeedWashPage(true);
                JSONArray jsonArray = dataObj.getJSONArray("oriCommList");
                // A full page of 20 implies there may be more; advance the cursor.
                if (jsonArray.size() >= 20){
                    int pageflag = Integer.parseInt((String) urlParams.get("pageflag"));
                    pageflag += 20;
                    String commentId = dataObj.getString("targetid");
                    String commentUrl = String.format(commentUrlFormat,commentId,pageflag);
                    CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRequestRecord)
                            .httpUrl(commentUrl)
                            .recordKey(commentUrl)
                            .releaseTime(System.currentTimeMillis())
                            .copyBizTags()
                            .resultLabelTag(comment)
                            .resultLabelTag(interaction)
                            .build();
                    // Deep-copy extras so the paged record does not share mutable state.
                    commentRecord.getHttpRequest().setExtras(copyExtras(httpRequest.getExtras()));
                    parsedLinks.add(commentRecord);
                }
            }
        } catch (Exception e) {
            // Keep the stack trace: e.getMessage() alone hides where the JSON broke.
            logger.error(e.getMessage(), e);
        }

        return parsedLinks;
    }

    /**
     * Parse an article page: record article identifiers in the request extras, queue an
     * internal-download request for the comment count, and — when the schedule asks for
     * comment data — queue a real comment-crawl record that inherits the stored filter info.
     *
     * @param crawlerRequestRecord record for the article page just downloaded
     * @param httpPage             downloaded HTML page
     * @param parsedLinks          accumulator for follow-up records (returned as-is)
     * @return {@code parsedLinks} with comment-related follow-up records appended
     */
    private List<CrawlerRequestRecord> parseItemLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        crawlerRequestRecord.setNeedWashPage(true);
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        String httpRequestUrl = httpRequest.getUrl();
        String rawText = httpPage.getRawText();
        if (rawText.contains("内容被删除")){
            logger.info("page {} is 404 not found", httpRequestUrl);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }
        Html html = httpPage.getHtml();
        // NOTE(review): commentId may be null when the page carries no #commentData node;
        // the formatted URL would then contain the literal "null" — confirm upstream filtering.
        String articleKey = html.xpath("//meta[@name=\"article-id\"]/@content").get();
        String commentId = html.xpath("//div[@id=\"commentData\"]/@targetid").get();
        httpRequest.addExtra("articleKey",articleKey);
        httpRequest.addExtra("articleUrl",httpRequestUrl);

        // Internal download of page 1 of comments, used only to fetch the comment count.
        String commentUrl = String.format(commentUrlFormat,commentId,1);
        CrawlerRequestRecord commentsRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRequestRecord)
                .httpUrl(commentUrl)
                .recordKey(commentUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        parsedLinks.add(commentsRecord);

        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().scheduleTags().getCategoryTag();
        if (categoryTag.getLabelTag(CrawlerEnum.CrawlerDataType.comment.enumVal()) != null) {
            if (!crawlerRequestRecord.tagsCreator().bizTags().hasKVTag("comment_record_filter_info")) {
                // Message previously said "qqhao crawler" — a copy/paste leftover from another script.
                logger.error("baidu yidianzixun crawler comment need to filter information!");
                return parsedLinks;
            }

            KVTag filterInfoTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTag("comment_record_filter_info");
            CrawlerRecord filterInfoRecord = JSON.parseObject((String) filterInfoTag.getVal(), CrawlerRecord.class);
            CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(commentUrl)
                    .recordKey(commentUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .notFilterRecord()
                    .resultLabelTag(comment)
                    .resultLabelTag(interaction)
                    .build();

            // Carry the deserialized filter config over to the comment record.
            commentRecord.setFilter(filterInfoRecord.getFilter());
            commentRecord.setFilterInfos(filterInfoRecord.getFilterInfos());
            HttpRequest commentRequest = commentRecord.getHttpRequest();
            commentRequest.addExtra("articleKey",articleKey);
            commentRequest.addExtra("commentId",commentId);
            commentRequest.addExtra("articleUrl",httpRequestUrl);
            parsedLinks.add(commentRecord);
        }

        return parsedLinks;
    }

    /**
     * Parse a Baidu search-result list page: queue the next list page (pn += 10) while results
     * remain, and queue one item record per search hit with its release time parsed from the
     * abstract snippet.
     *
     * @param crawlerRequestRecord record for the list page just downloaded
     * @param httpPage             downloaded HTML page
     * @param parsedLinks          accumulator for follow-up records (returned as-is)
     * @return {@code parsedLinks} with the next-page and item records appended
     */
    private List<CrawlerRequestRecord> parseListLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String httpRequestUrl = httpRequest.getUrl();

        Map<String, Object> urlParams = getUrlParams(httpRequestUrl);
        Html html = httpPage.getHtml();
        List<Selectable> itemNodes = html.xpath("//div[@id=\"content_left\"]/div[contains(@class,\"result\")]").nodes();
        if (null != urlParams && itemNodes.size() > 0){
            // NOTE(review): "wd" comes straight back out of the request URL and may still be
            // percent-encoded — verify the rebuilt next-page URL matches the original encoding.
            String keyword = (String) urlParams.get("wd");
            String startTime = (String) extras.get("startTime");
            String endTime = (String) extras.get("endTime");
            int pn = Integer.parseInt((String) urlParams.get("pn"));
            pn += 10;
            // replace(), not replaceAll(): the keyword may contain regex/replacement
            // metacharacters ('$', '\') that would corrupt the URL or throw.
            String listUrl = searchKwListUrlFormat.replace("#keyword",keyword).replace("#pn",String.valueOf(pn)).replace("#sTime",startTime).replace("#eTime",endTime);

            CrawlerRequestRecord listRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRequestRecord)
                    .httpUrl(listUrl)
                    .recordKey(listUrl)
                    .releaseTime(System.currentTimeMillis())
                    .copyBizTags()
                    .build();
            listRecord.getHttpRequest().setExtras(extras);
            parsedLinks.add(listRecord);
        }
        for (Selectable itemNode : itemNodes) {
            String itemUrl = itemNode.xpath("./h3/a/@href").get();
            String timeStr = itemNode.xpath("./div[@class=\"c-abstract\"]/span/text()").get();
            long releaseTime = 0;
            try {
                releaseTime = cleanTime(unescapeHtml2J(timeStr), "yyyy年MM月dd日");
            } catch (ParseException e) {
                // Unparseable date: skip the item rather than emit a bogus release time.
                logger.error("parse item date error : {}",timeStr);
                continue;
            }
            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(itemUrl)
                    .recordKey(itemUrl)
                    .releaseTime(releaseTime)
                    .copyBizTags()
                    .needParsed(true)
                    .needWashed(true)
                    .resultLabelTag(article)
                    .resultLabelTag(interaction)
                    .build();
            parsedLinks.add(itemRecord);
        }
        return parsedLinks;
    }

    /**
     * Post-process internal downloads: copy the comment count ("oriretnum") from the comment
     * API response into the triggering record's extras. When any internal download failed,
     * the triggering record is re-queued (filter tag removed) and processing stops.
     *
     * @param crawlerRecord           the record whose internal downloads completed
     * @param internalDownloadRecords the completed internal-download records
     * @param links                   output list; a failed download re-queues crawlerRecord here
     */
    @Override
    public void afterInternalDownload(CrawlerRequestRecord crawlerRecord, List<CrawlerRequestRecord> internalDownloadRecords, List<CrawlerRequestRecord> links) {
        for (CrawlerRequestRecord downloaded : internalDownloadRecords) {
            HttpPage page = downloaded.getInternalDownloadPage();
            boolean broken = !page.isDownloadSuccess() || page.getStatusCode() != 200;
            if (broken) {
                // Failed internal download: push the original record back for a retry and
                // stop examining the remaining internal downloads.
                crawlerRecord.setNeedWashPage(false);
                crawlerRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
                links.add(crawlerRecord);
                break;
            }
            String downloadedUrl = downloaded.getHttpRequest().getUrl();
            if (!downloadedUrl.matches(commentUrlRegex)) {
                continue;
            }
            try {
                JSONObject body = JSONObject.parseObject(page.getRawText());
                String comments = body.getJSONObject("data").getString("oriretnum");
                crawlerRecord.getHttpRequest().addExtra("comments", comments);
            } catch (Exception e) {
                // Malformed comment payload: fall back to a zero count.
                crawlerRecord.getHttpRequest().addExtra("comments", "0");
            }
        }
    }

    /**
     * Wash a downloaded page into structured {@link CrawlerData} rows.
     * <p>
     * Article pages yield one article row (title/author/content/images) plus an optional
     * interaction row carrying the comment count gathered by afterInternalDownload().
     * Comment-API pages yield one comment row per entry plus an optional interaction row
     * (likes) per comment.
     *
     * @param crawlerRequestRecord the washed record (result tags decide which rows to emit)
     * @param httpPage             the downloaded page (HTML for articles, JSON for comments)
     * @return the extracted data rows; empty when the page is blank or deleted
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
        Map<String, Object> extras = httpRequest.getExtras();
        String requestUrl = httpRequest.getUrl();
        String topicId = (String) extras.get("articleKey");
        String rawText = httpPage.getRawText();
        if (StringUtils.isBlank(rawText) || rawText.contains("内容被删除")){
            logger.error("httpPage is empty !");
            return crawlerDataList;
        }
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(article)){
            Html html = httpPage.getHtml();
            String title = html.xpath("//h1[@class=\"title\"]/text()").get();
            String author = html.xpath("//span[@class=\"author\"]/text()").get();
            List<String> contents = html.xpath("//section/p//text()").all();
            List<String> images = html.xpath("//section//img/@src").all();
            // StringBuilder: these are method-local buffers, StringBuffer's locking is wasted here.
            StringBuilder sbContent = new StringBuilder();
            for (String content : contents) {
                sbContent.append(content.trim());
            }
            StringBuilder sbImage = new StringBuilder();
            for (String image : images) {
                // NOTE(review): "\\x01" is the literal four characters backslash-x-0-1, not the
                // 0x01 control byte — downstream presumably splits on that marker; confirm.
                sbImage.append(image).append("\\x01");
            }
            String comments = (String) extras.get("comments");
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord,httpPage)
                    .url(requestUrl)
                    .dataId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                    .releaseTime(crawlerRequestRecord.getReleaseTime())
                    .addContentKV(Field_Title,unescapeHtml2J(title))
                    .addContentKV(Field_Author,author)
                    .addContentKV(Field_Content,sbContent.toString())
                    .addContentKV(Field_Images,sbImage.toString())
                    .resultLabelTag(article)
                    .build();
            crawlerDataList.add(crawlerData);
            if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                // Article-level interaction row: comment count, parented to the article row.
                CrawlerData crawlerInteractionData = CrawlerData.builder()
                        .data(crawlerRequestRecord,httpPage)
                        .url(requestUrl)
                        .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),topicId))
                        .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                        .releaseTime(crawlerRequestRecord.getReleaseTime())
                        .addContentKV(Field_I_Comments,comments)
                        .resultLabelTag(interaction)
                        .build();
                crawlerDataList.add(crawlerInteractionData);
            }

        }
        if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(comment)){
            String articleUrl = (String)extras.get("articleUrl");
            try {
                JSONObject pageObj = JSONObject.parseObject(rawText);
                JSONObject dataObj = pageObj.getJSONObject("data");
                JSONArray commentList = dataObj.getJSONArray("oriCommList");
                JSONObject userList = dataObj.getJSONObject("userList");
                for (Object cmt : commentList) {
                    JSONObject cmtObj = (JSONObject)cmt;
                    String commentId = cmtObj.getString("id");
                    String authorId = cmtObj.getString("userid");
                    String author = userList.getJSONObject(authorId).getString("nick");
                    String content = cmtObj.getString("content");
                    String likes = cmtObj.getString("up");
                    String createTime = cmtObj.getString("time");
                    // "time" is unix seconds; appending "000" converts to milliseconds.
                    // Long.parseLong replaces the deprecated new Long(...) boxing constructor.
                    long releaseTime = Long.parseLong(createTime + "000");
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRequestRecord,httpPage)
                            .url(articleUrl)
                            .dataId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                            .parentId(StringUtils.joinWith("-",domain(),site,article.enumVal(),topicId))
                            .releaseTime(releaseTime)
                            .addContentKV(Field_Author,author)
                            .addContentKV(Field_Author_Id,authorId)
                            .addContentKV(Field_Content,content)
                            .resultLabelTag(comment)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .build();
                    crawlerDataList.add(crawlerData);
                    if (crawlerRequestRecord.tagsCreator().resultTags().hasDataType(interaction)){
                        // Comment-level interaction row: likes, parented to the comment row.
                        CrawlerData crawlerInteractionData = CrawlerData.builder()
                                .data(crawlerRequestRecord,httpPage)
                                .url(articleUrl)
                                .dataId(StringUtils.joinWith("-",domain(),site,interaction.enumVal(),commentId))
                                .parentId(StringUtils.joinWith("-",domain(),site,comment.enumVal(),commentId))
                                .releaseTime(releaseTime)
                                .addContentKV(Field_I_Likes,likes)
                                .resultLabelTag(interaction)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .build();
                        crawlerDataList.add(crawlerInteractionData);
                    }
                }
            } catch (Exception e) {
                // Attach the exception so the failing field/record is diagnosable from logs.
                logger.error("parse comment json object failed ,{}",articleUrl, e);
            }
        }
        return crawlerDataList;
    }

    /**
     * Register every URL pattern this script claims, in routing order.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {indexRegex, keysRegex, searchKwListUrlRegex, BaiDuItemUrlRegex, commentUrlRegex};
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Decide whether this script should handle the record: true only when the record's
     * "site" business tag matches {@link #scriptSite} (case-insensitive).
     *
     * @param crawlerRequestRecord record to inspect
     * @return true when the record belongs to the yidianzixun site
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        CategoryTag categoryTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag();
        String crawlerSite = categoryTag.getKVTagStrVal("site");
        // Compare from the non-null constant: a record with no "site" tag previously
        // threw NullPointerException here instead of simply being rejected.
        return scriptSite.equalsIgnoreCase(crawlerSite);
    }

    /**
     * Post-execution hook; this script needs no cleanup, so the body is intentionally empty.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * @return the crawler domain identifier ("baidu") used in data IDs and routing.
     */
    @Override
    public String domain() {
        return domain;
    }


    /**
     * Verify that a downloaded page is usable: HTTP 200, marked successful, non-blank body.
     *
     * @param crawlerRequestRecord record whose request produced the page (used for log context)
     * @param httpPage             the downloaded page
     * @return true when the page is BROKEN and must be retried; false when it is usable
     */
    private boolean doHttpPageCheck(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String url = crawlerRequestRecord.getHttpRequest().getUrl();
        int code = httpPage.getStatusCode();
        if (code != 200) {
            logger.error("download page {} error, status code is {}", url, code);
            return true;
        }
        if (!httpPage.isDownloadSuccess()) {
            logger.error("download page failed, check your link {}", url);
            return true;
        }
        if (StringUtils.isBlank(httpPage.getRawText())) {
            logger.error("download page empty, check your link {}", url);
            return true;
        }
        return false;
    }

    /**
     * Parse the query string of a URL into a key/value map.
     * Example: {@code http://a.b.com?aa=11&bb=22} yields {aa=11, bb=22}.
     * Pairs without exactly one '=' are skipped; values are left URL-encoded.
     *
     * @param url full URL (or a bare query string)
     * @return parameter map, or null when there is no query content — callers test for
     *         null to decide whether paging parameters are available
     */
    private Map<String, Object> getUrlParams(String url) {
        Map<String, Object> map = new HashMap<String, Object>(0);
        String param = url;
        if (url.contains("?")) {
            // substring, not split("\\?")[1]: split drops trailing empty tokens, so a URL
            // ending in "?" used to throw ArrayIndexOutOfBoundsException here.
            param = url.substring(url.indexOf('?') + 1);
        }
        if (StringUtils.isBlank(param)) {
            return null;
        }
        String[] params = param.split("&");
        for (String s : params) {
            String[] p = s.split("=");
            if (p.length == 2) {
                map.put(p[0], p[1]);
            }
        }
        return map;
    }

    /**
     * Shallow defensive copy of a request-extras map, so paged records do not share a
     * mutable extras instance with the record they were derived from.
     *
     * @param inExtras source map; must not be null
     * @return a new, independent HashMap containing the same entries
     */
    public static Map<String, Object> copyExtras(Map<String, Object> inExtras) {
        // HashMap's copy constructor performs the same shallow copy the old manual loop did.
        return new HashMap<>(inExtras);
    }

    // Pool of desktop browser User-Agent strings; getRandomUA() picks one per list request
    // to reduce the chance of Baidu blocking a single repeated UA.
    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Pick a uniformly random User-Agent from the pool.
     *
     * @return one entry of {@code agentList}
     */
    private static String getRandomUA() {
        // RandomUtils.nextInt's upper bound is EXCLUSIVE, so the previous "size() - 1"
        // bound could never select the last entry of the pool.
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Repeatedly HTML-unescape a string until it stabilises, handling doubly-encoded
     * entities such as {@code &amp;amp;nbsp;}. Capped at 6 passes as a safety net.
     *
     * @param str possibly HTML-escaped text; null is returned unchanged
     * @return the fully unescaped text
     */
    public static String unescapeHtml2J(String str) {
        if (str == null) {
            // Previously threw NullPointerException on null input.
            return null;
        }
        int times = 0;
        while (str.contains("&") && str.contains(";")) {
            String unescaped = StringEscapeUtils.unescapeHtml(str);
            if (unescaped.equals(str)) {
                // Fixed point reached: '&' and ';' are plain text, not entities — stop
                // instead of running the remaining no-op passes.
                break;
            }
            str = unescaped;
            times++;
            if (times > 5) {
                break;
            }
        }
        return str;
    }

    /**
     * Normalise a Chinese relative/absolute time snippet (from Baidu search abstracts) to
     * epoch milliseconds. Handles "just now/today", "N units ago", "yesterday/the day
     * before HH:mm", "M月D日", "Y年M月D日", and finally the caller-supplied pattern.
     *
     * @param timeStr raw time text, e.g. "3小时前", "昨天 12:30", "2021年3月1日"
     * @param pattern fallback DateUtils pattern for strings containing ':'
     * @return epoch millis, or 0 when no branch matches
     * @throws ParseException when a date-pattern branch fails to parse
     */
    private long cleanTime(String timeStr,String pattern) throws ParseException {
        // Strip a "发布：" (published:) prefix if present.
        if (timeStr.contains("发布")){
            timeStr = timeStr.split("：")[1];
        }
        if (timeStr.contains("刚刚") || timeStr.contains("今天") || timeStr.contains("1天内")){
            // "just now" / "today" / "within 1 day": approximate as five minutes ago.
            return System.currentTimeMillis() - 5 * 60 * 1000;
        }else if (timeStr.endsWith("前")){
            // "... ago" forms are delegated to timeBefore().
            return timeBefore(timeStr);
        }else if (timeStr.contains("昨天") || timeStr.contains("前天")){
            // "yesterday HH:mm" / "day-before-yesterday HH:mm".
            int amount = 0;
            if (timeStr.startsWith("昨")){
                amount = -1;
            }
            if (timeStr.startsWith("前")){
                amount = -2;
            }
            try {
                String time = timeStr.split("天")[1];
                int hour = Integer.parseInt(time.split(":")[0]);
                int minute = Integer.parseInt(time.split(":")[1]);
                Calendar calendar = Calendar.getInstance();
                calendar.add(Calendar.DATE, amount);
                calendar.set(Calendar.HOUR_OF_DAY,hour);
                calendar.set(Calendar.MINUTE,minute);
                return calendar.getTimeInMillis();
            } catch (NumberFormatException e) {
                // No usable HH:mm part: fall back to "this time yesterday".
                Calendar calendar = Calendar.getInstance();
                calendar.add(Calendar.DATE,-1);
                return calendar.getTimeInMillis();
            }
        }else if (timeStr.matches("\\d*月\\d*日")){
            // "M月D日" without a year: assume the current year.
            String[] strings = timeStr.split("月");
            int month = Integer.parseInt(strings[0]);
            int day = Integer.parseInt(strings[1].split("日")[0]);
            Calendar calendar = Calendar.getInstance();
            calendar.set(Calendar.MONTH,month - 1);
            calendar.set(Calendar.DAY_OF_MONTH,day);
            return calendar.getTimeInMillis();

        }else if (timeStr.matches("\\d*年\\d*月\\d*日")){
            return DateUtils.parseDate(timeStr,"yyyy年MM月dd日").getTime();
        }else if (timeStr.contains(":")){
            // Anything with a clock component: try the caller-supplied pattern.
            return DateUtils.parseDate(timeStr,pattern).getTime();
        }else {
            // Unrecognised format: 0 signals "no release time".
            return 0;
        }

    }

    // Millisecond lengths of common units, used by timeBefore() for relative-time arithmetic.
    private static final long ONE_SECOND = 1000L;
    private static final long ONE_MINUTE = 60000L;
    private static final long ONE_HOUR = 3600000L;
    private static final long ONE_DAY = 86400000L;

    /**
     * Convert a Chinese "N units ago" snippet (天/秒/分钟/小时/周/个月, including the compound
     * 分钟+秒 and 小时+分钟 forms) to epoch milliseconds relative to now.
     * A month is approximated as 31 days. Returns 0 for unrecognised formats.
     *
     * @param timeStr e.g. "3天前", "5分钟30秒前", "2小时前"
     * @return epoch millis of the described instant, or 0 when no pattern matches
     */
    private long timeBefore(String timeStr) {
        // \d+ (not \d*) everywhere: \d* also matched digit-less strings like "天前",
        // which then crashed in Integer.parseInt("") — such inputs now fall through to 0.
        if (timeStr.matches("\\d+天前")){
            int timeNum = Integer.parseInt(timeStr.split("天")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_DAY);

        }else if (timeStr.matches("\\d+秒前")){
            int timeNum = Integer.parseInt(timeStr.split("秒")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_SECOND);

        }else if (timeStr.matches("\\d+分钟前")){
            int timeNum = Integer.parseInt(timeStr.split("分钟")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_MINUTE);

        }else if (timeStr.matches("\\d+分钟\\d+秒前")) {
            String[] split = timeStr.split("分钟");
            int minutes = Integer.parseInt(split[0]);
            int seconds = Integer.parseInt(split[1].split("秒")[0]);
            long times = (minutes * ONE_MINUTE) + (seconds * ONE_SECOND);
            return System.currentTimeMillis() - times;

        }else if (timeStr.matches("\\d+小时前")){
            int timeNum = Integer.parseInt(timeStr.split("小时")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_HOUR);

        }else if (timeStr.matches("\\d+小时\\d+分钟前")){
            String[] split = timeStr.split("小时");
            int hours = Integer.parseInt(split[0]);
            int minutes = Integer.parseInt(split[1].split("分钟")[0]);
            long times = (hours * ONE_HOUR) + (minutes * ONE_MINUTE);
            return System.currentTimeMillis() - times;

        }else if (timeStr.matches("\\d+周前")){
            int timeNum = Integer.parseInt(timeStr.split("周")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_DAY * 7);

        }else if (timeStr.matches("\\d+个月前")){
            // A month is approximated as 31 days.
            int timeNum = Integer.parseInt(timeStr.split("个")[0]);
            return System.currentTimeMillis() - (timeNum * ONE_DAY * 31);
        }else {
            return 0;

        }
    }


    /**
     * Ad-hoc check that DateUtils parses the full Chinese date-time format.
     * NOTE(review): a JUnit {@code @Test} inside a production crawler script drags the
     * junit dependency into runtime scope — consider moving this into a test source set.
     */
    @Test
    public void test() throws ParseException {
        String date = "2021年3月21日 14时2分5秒";
        long time = DateUtils.parseDate(date, "yyyy年MM月dd日 HH时mm分ss秒").getTime();
        System.out.println(time);
    }
}
