package com.chance.cc.crawler.development.scripts.xiaohongshu;

import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.proxy.Proxy;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPage;
import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPageItem;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Keword;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
public class XHSArticleCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XHSArticleCrawlerScript.class);

    // Site / site-biz tags used for record routing and data-id composition.
    public static final String site= "xhs-article";
    public static final String site_biz= "search";

    // Seed URL only used to route scheduler records into this script; never actually fetched.
    public static final String searchStartUrl = "https://start/api/sns/v10/search/notes";
    // Matches the native-app search endpoint sorted by time descending.
    public static final String appSearchForTimeDescendingUrlRegular = "https://\\S*/api/sns/v10/search/notes\\?\\S*sort=time_descending\\S*";

    // WeChat mini-program endpoints: note detail regex plus URL templates for the
    // auxiliary requests issued to mimic a real client session.
    public static final String wxappArticleUrlRegular = "https://\\S*/fe_api/burdock/weixin/v2/note/\\S*/single_feed";
    public static final String wxappCommentRealTimeUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/realTime/%s";
    public static final String wxappArticleUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/single_feed";
    public static final String wxappArticleBannerUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/banner?noteId=%s&platform=weixin";
    public static final String wxappArticleRelatedUrlFormat = "https://www.xiaohongshu.com/fe_api/burdock/weixin/v2/note/%s/related?openId=%s&pageSize=8&page=1&noteType=1&needGifCover=true";
    public static final String wxappArticleActivityBannerUrlFormat = "https://pages.xiaohongshu.com/data/sns/mp_activity_banner?sid=%s";

    // Pool of serialized OAuth credential JSON strings, filled lazily in prepareRequest().
    private LinkedBlockingQueue<String> authorInfos;
    // NOTE(review): declared but never referenced in this file — confirm before removing.
    private Set<String> customKeywords;
    private Object lock = new Object();        // guards lazy init of authorInfos
    private Object keyLock = new Object();     // NOTE(review): unused in this file — confirm before removing

    // Keys expected inside each serialized oauthInfo JSON object.
    public static final String headerReferer = "referer";
    public static final String headerAuthorization = "authorization";
    public static final String headerDeviceFingerprint = "device-fingerprint";
    public static final String authorOpenId = "openId";
    public static final String authorSid = "sid";
    public static final String authorType = "type"; // credential type; only "article" entries are used

    private static Proxy proxy = new Proxy();

    static {
        // Proxy configuration.
        // SECURITY(review): credentials are hardcoded in source — move to external
        // configuration / secret storage and rotate these values.
        proxy.setHost("http-dyn.abuyun.com");
        proxy.setPort(9020);
        proxy.setUsername("HL89Q19E86E2987D");
        proxy.setPassword("71F33D94CE5F7BF2");
    }

    /**
     * Link-extraction entry point. Failed downloads are re-queued for retry;
     * successful note-detail pages spawn the follow-up "camouflage" requests.
     *
     * @param crawlerRecord the record whose download produced {@code page}
     * @param page          the downloaded HTTP page
     * @return follow-up requests to schedule (possibly empty)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> followUps = new ArrayList<>();

        if (!page.isDownloadSuccess()){
            logger.error("xhs article request download has error, status code {},error info [{}] ,will retry",
                    page.getStatusCode(), page.getRawText());
            // Re-queue the same request and skip washing the broken page.
            addCrawlerRecords(followUps, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
            return followUps;
        }

        String requestUrl = page.getRequest().getUrl();
        if (isUrlMatch(requestUrl, wxappArticleUrlRegular)){
            parseArticleLinks(followUps, crawlerRecord, page);
        }

        return followUps;
    }

    /**
     * Parses a note-detail response and, on success, queues the auxiliary requests
     * (banner, related notes, activity banner, author avatar) that a genuine
     * mini-program client would issue. Any parse/queueing failure re-queues the
     * original record for retry.
     */
    private void parseArticleLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        try {
            Json articleResultJson = new Json(page.getRawText());

            // Non-retryable API error: stop here, do not wash the page.
            if (isReturn(articleResultJson,page)){
                crawlerRecord.setNeedWashPage(false);
                return;
            }

            String dataId = articleResultJson.jsonPath($_type + ".data.id").get();
            String image = articleResultJson.jsonPath($_type + ".data.user.image").get();

            // Simulate a real browsing flow to avoid triggering the slider captcha.
            internalDownloadBanner(crawlerRequestRecords,crawlerRecord,dataId);
            internalDownloadRelated(crawlerRequestRecords,crawlerRecord,dataId);
            internalDownloadActivityBanner(crawlerRequestRecords,crawlerRecord,dataId);
            internalDownloadCommon(crawlerRequestRecords,crawlerRecord,image);

        } catch (Exception e) {
            // Log auth context for debugging bans, then re-queue for retry.
            logger.error("xhs article detail download error,header oauthor info {}, thread local author info {},request url: {}," +
                            "page rawText {},error info :{},will retry",
                    page.getRequest().getHeaders().get(headerAuthorization),getLocalAuthInfo(),page.getRequest().getUrl(),page.getRawText(),e.getMessage());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Decides whether processing of this response should stop.
     * Returns {@code true} only for non-zero API codes whose message is NOT one of
     * the known account-level errors ("Spam", expired login, banned account) —
     * those known errors fall through so the retry path can handle them.
     *
     * @return true when this response should be abandoned with a warning
     */
    private boolean isReturn(Json articleResultJson,HttpPage page){

        String code = articleResultJson.jsonPath($_type + ".code").get();
        if (!"0".equals(code)){
            String msg = articleResultJson.jsonPath($_type + ".msg").get();
            // Bug fix: msg can be null when the response carries no ".msg" field;
            // the original msg.contains("封号") then threw an NPE that was caught
            // upstream and misclassified as a download error. A null/unknown
            // message is treated like any other unknown error: stop with a warning.
            if(!"Spam".equals(msg) && !"登录已过期".equals(msg) && (msg == null || !msg.contains("封号"))){
                logger.warn("xhs note url {} download error: {}",page.getRequest().getUrl(),msg);
                return true;
            }
        }
        return false;
    }

    /** Queues the banner endpoint for the given note id as a signed internal download. */
    private void internalDownloadBanner(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        String bannerUrl = String.format(wxappArticleBannerUrlFormat, dataId);
        internalDownload(crawlerRequestRecords, crawlerRecord, bannerUrl);
    }

    /**
     * Queues the "related notes" endpoint, which requires the openId of the
     * thread-local OAuth credential in addition to the note id.
     */
    private void internalDownloadRelated(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        Map oauthInfoMap = new Json(fillThreadLocalAuthorInfo()).toObject(Map.class);
        String openId = oauthInfoMap.get(authorOpenId).toString();
        internalDownload(crawlerRequestRecords, crawlerRecord, String.format(wxappArticleRelatedUrlFormat, dataId, openId));
    }

    /**
     * Queues the activity-banner endpoint, keyed by the credential's sid.
     * NOTE(review): dataId is unused here — the URL depends only on the account
     * sid; the parameter is kept for call-site symmetry with the sibling helpers.
     */
    private void internalDownloadActivityBanner(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String dataId) throws NoSuchAlgorithmException {
        Map oauthInfoMap = new Json(fillThreadLocalAuthorInfo()).toObject(Map.class);
        String bannerUrl = String.format(wxappArticleActivityBannerUrlFormat, oauthInfoMap.get(authorSid).toString());

        // Unlike internalDownload(), this endpoint is not on /fe_api/ and needs no x-sign header.
        CrawlerRequestRecord activityBannerRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(bannerUrl)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(activityBannerRecord);
    }

    /**
     * Queues a plain (unsigned) internal download for the given URL, typically the
     * author's avatar image. Blank URLs are skipped silently.
     */
    private void internalDownloadCommon(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url) throws NoSuchAlgorithmException {
        // Guard clause instead of wrapping the whole body in the condition.
        if (StringUtils.isBlank(url)){
            return;
        }
        CrawlerRequestRecord internalRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        // Binary/raw content: do not wrap the response body in generated HTML.
        internalRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        crawlerRequestRecords.add(internalRecord);
    }

    /**
     * Queues a signed internal download. The x-sign header is "X" + md5(path + "WSUDD"),
     * computed over the URL path starting at "/fe_api/", mirroring the mini-program client.
     */
    private void internalDownload(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url) throws NoSuchAlgorithmException {
        String signedPath = url.substring(url.indexOf("/fe_api/"));
        String signature = "X" + md5(signedPath + "WSUDD");

        CrawlerRequestRecord signedRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                // Reuse the parent request's headers (auth, fingerprint, …) and add the signature.
                .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                .httpHead("x-sign", signature)
                .httpHead("content-type","application/json")
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        crawlerRequestRecords.add(signedRecord);
    }

    /**
     * Wash entry point: only note-detail responses produce data; every other
     * matched URL yields an empty result list.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        String requestUrl = page.getRequest().getUrl();
        if (isUrlMatch(requestUrl, wxappArticleUrlRegular)){
            washArticle(results, crawlerRecord, page);
        }
        return results;
    }

    /**
     * Extracts the article and emits four CrawlerData records routed to different
     * pipelines: the full article ("article_result"), an accumulating comment-count
     * record ("article_accumulate"), a release-time record ("article_time"), and an
     * id-list record ("article_ids"). Extraction failures are logged and swallowed —
     * a partially washed page never fails the crawl.
     */
    private void washArticle(List<CrawlerData> crawlerDatas,CrawlerRequestRecord crawlerRecord, HttpPage page){
        Json articleJson = new Json(page.getRawText());
        // article fields
        String releaseTime = articleJson.jsonPath($_type + ".data.time").get();
        String title = articleJson.jsonPath($_type + ".data.title").get();
        String content = articleJson.jsonPath($_type + ".data.desc").get();
        String contentId = articleJson.jsonPath($_type + ".data.id").get();
        String article_url = "https://www.xiaohongshu.com/discovery/item/"+contentId;
        String keyword = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal(Tag_Field_Keword);

        // author (user) fields
        String author = articleJson.jsonPath($_type + ".data.user.nickname").get();
        String authorId = articleJson.jsonPath($_type + ".data.user.id").get();
        String redId = articleJson.jsonPath($_type + ".data.user.red_id").get();
        String location = articleJson.jsonPath($_type + ".data.user.location").get();
        String desc = articleJson.jsonPath($_type + ".data.user.desc").get();
        String fans = articleJson.jsonPath($_type + ".data.user.fans").get();
        String follows = articleJson.jsonPath($_type + ".data.user.follows").get();
        String collected = articleJson.jsonPath($_type + ".data.user.collected").get();
        String liked = articleJson.jsonPath($_type + ".data.user.liked").get();
        String gender = articleJson.jsonPath($_type + ".data.user.gender").get();
        String notes = articleJson.jsonPath($_type + ".data.user.notes").get();
        String profile_url = "https://www.xiaohongshu.com/user/profile/"+authorId;

        // interaction counters
        String like = articleJson.jsonPath($_type + ".data.likes").get();
        String collects = articleJson.jsonPath($_type + ".data.collects").get();
        String shareCount = articleJson.jsonPath($_type + ".data.shareCount").get();
        String comments = articleJson.jsonPath($_type + ".data.comments").get();

        try {
            // Main article record: carries the raw JSON body as content.
            CrawlerData crawlerArticleData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), contentId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .content(page.getRawText())
                    .url(article_url)
                    .resultLabelTag(article)
//                    .requestLabelTag(result)
//                    .requestLabelTag(filter)
                    .flowInPipelineTag("article_result")
                    .isEndTimeFromSchedulerTime(false)
                    .build();
            crawlerArticleData.setFilterPipelineResult(true);
            crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Keword,keyword);
            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
            crawlerArticleData.tagsCreator().bizTags().addSite(site);
            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
            crawlerDatas.add(crawlerArticleData);

//            CrawlerData crawlerArticleInteractionData = CrawlerData.builder()
//                    .data(crawlerRecord, page)
//                    .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, interaction.enumVal(), contentId))
//                    .parentId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), contentId))
//                    .releaseTime(dateToTimestamp(releaseTime))
//                    .addContentKV(Field_I_Likes,like)
//                    .addContentKV(Field_I_Collection,collects)
//                    .addContentKV(Field_I_Forwards,shareCount)
//                    .addContentKV(Field_I_Comments,comments)
//                    .url(article_url)
//                    .resultLabelTag(interaction)
//                    .isEndTimeFromSchedulerTime(false)
//                    .flowInPipelineTag("article_result")
//                    .build();
//            crawlerArticleInteractionData.setFilterPipelineResult(true);
//            crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
//            crawlerArticleData.tagsCreator().bizTags().addSite(site);
//            crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
//            crawlerDatas.add(crawlerArticleInteractionData);

            // Main post comment count -- accumulated over time (dataId is date-prefixed).
            String time = DateFormatUtils.format(dateToTimestamp(releaseTime), TimeForamtEnum.format6.getFormat());
            CrawlerData crawlerArticleAccumulateData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", time,crawlerRecord.getDomain(), article.enumVal(), contentId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .addContentKV("currentComments",comments)
                    .addContentKV("articleId",contentId)
                    .addContentKV("releaseTime",releaseTime)
                    .url(article_url)
                    .resultLabelTag(article)
                    .flowInPipelineTag("article_accumulate")
                    .build();
            crawlerArticleAccumulateData.setFilterPipelineResult(true);
            crawlerDatas.add(crawlerArticleAccumulateData);

            // Main post release time -- accumulated (dataId is the bare contentId).
            CrawlerData crawlerArticleTimeData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(contentId)
                    .releaseTime(dateToTimestamp(releaseTime))
                    .addContentKV("releaseTime",releaseTime)
                    .url(article_url)
                    .resultLabelTag(article)
                    .flowInPipelineTag("article_time")
                    .build();
            crawlerArticleTimeData.setFilterPipelineResult(true);
            crawlerDatas.add(crawlerArticleTimeData);

            // Comment-record id list, consumed downstream for comment crawling.
            CrawlerData crawlerArticleIdListData = CrawlerData.builder()
                    .data(crawlerRecord, page)
                    .dataId(StringUtils.joinWith("-", time,crawlerRecord.getDomain(), article.enumVal(), contentId))
                    .releaseTime(dateToTimestamp(releaseTime))
                    .addContentKV("articleId",contentId)
                    .url(article_url)
                    .resultLabelTag(article)
                    .flowInPipelineTag("article_ids")
                    .build();
            crawlerArticleIdListData.setFilterPipelineResult(true);
            crawlerDatas.add(crawlerArticleIdListData);


        } catch (Exception e) {
            logger.error("xhs article wash data {} has error {}",page.getRawText(),e.getMessage());
        }
    }


    /** Top-level domain tag for all records produced by this script. */
    @Override
    public String domain() {
        return "xhs";
    }

    /** Registers the URL patterns this script claims from the scheduler. */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(searchStartUrl); // seed URL -- only routes records into this script, never fetched
        addUrlRegular(appSearchForTimeDescendingUrlRegular); // native-app search endpoint
        addUrlRegular(wxappArticleUrlRegular); // WeChat mini-program article entry
    }

    /**
     * Accepts only records tagged with this script's site ("xhs-article").
     *
     * @param crawlerRecord the candidate record
     * @return true when the record's site tag matches {@link #site}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Idiom fix: return the comparison directly instead of if/return true/return false.
        String recordSite = crawlerRecord.tagsCreator().bizTags().site();
        return site.equals(recordSite);
    }

    /** Deliberate no-op: this script needs no post-execution cleanup. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Pre-flight hook: lazily initializes the OAuth credential pool from the
     * support-source response, throttles the request with a randomized sleep,
     * then stamps the request with thread-local credentials and proxy settings.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {

        if (supportSourceRecords != null && !supportSourceRecords.isEmpty()) {
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord.getHttpRequest().getUrl().contains("/crawler/oauth/api/v1/xhs/userOauthInfos")) {
                    // Bug fix: the original used double-checked locking on the
                    // non-volatile field authorInfos, which is unsafe under the Java
                    // memory model (a thread could observe a non-null but not yet
                    // initialized queue). Do the null/empty check entirely under the
                    // lock; the lock cost is negligible next to the network work.
                    synchronized (lock) {
                        if (authorInfos == null || authorInfos.isEmpty()) {
                            authorInfos = new LinkedBlockingQueue<>();
                            initAuthorInfos(supportSourceRecord); // seed the credential pool
                        }
                    }
                }
            }
        }

        // Randomized pause to mimic human pacing.
        downloadSleep();

        // Attach credentials and proxy to the outgoing request.
        String authorInfo = fillThreadLocalAuthorInfo();
        fillCurrentRecordRequestHeader(requestRecord, authorInfo);
        logger.info("xhs current thread local author info {}", authorInfo);
        requestRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Parses the userOauthInfos support response and enqueues every credential of
     * type "article" into {@link #authorInfos}. Any failure is logged and swallowed,
     * leaving the pool empty — downstream fillThreadLocalAuthorInfo() then fails fast.
     */
    private void initAuthorInfos(CrawlerRequestRecord supportSourceRecord){
        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            Json rawText = new Json(httpPage.getRawText());
            String status = rawText.jsonPath($_type + ".status").get();
            List<String> contents = rawText.jsonPath($_type + ".content").all();

            // status "0" means success; content[0] holds the JSON array of credentials.
            if ("0".equals(status) && contents != null && contents.size() > 0){

                List<Map> userOauthInfos = new Json(contents.get(0)).toList(Map.class);
                for (Map userOauthInfo : userOauthInfos) {
                    String oauthInfo = String.valueOf(userOauthInfo.get("oauthInfo"));
                    Map oauthInfoMap = new Json(oauthInfo).toObject(Map.class);
                    // Only "article"-typed credentials belong to this script's pool.
                    if ("article".equals(oauthInfoMap.get(authorType).toString())){
                        authorInfos.put(oauthInfo);
                    }
                }
            }

        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Sleeps for a random 3–5 seconds (rand in [1.5, 2.5) × 2000 ms) between
     * downloads to mimic human pacing and reduce anti-bot triggers.
     */
    private void downloadSleep(){
        Random rand = new Random();
        long sleepTime = (long)((rand.nextFloat()+1.5)*2000L);
        logger.info("xhs download sleep time {}",sleepTime);
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // Bug fix: restore the interrupt flag so the owning thread/pool can
            // observe the cancellation instead of it being silently swallowed.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage());
        }
    }

    /**
     * Returns the thread-local credential JSON, pulling one from the shared pool
     * (waiting up to 3 s) on first use. Throws if the pool yields nothing, since
     * the crawl cannot proceed unauthenticated.
     *
     * @return the credential JSON bound to the current thread
     * @throws RuntimeException when no credential can be obtained within the timeout
     */
    private String fillThreadLocalAuthorInfo(){
        String authorInfo = getLocalAuthInfo();
        if (StringUtils.isBlank(authorInfo)){
            try {
                authorInfo = authorInfos.poll(3, TimeUnit.SECONDS);
                if (StringUtils.isBlank(authorInfo)){
                    logger.error("Unable to obtain authentication information. The current task will not continue!");
                    throw new RuntimeException("Unable to obtain authentication information. The current task will not continue!");
                }
                updateLocalAuthInfo(authorInfo);
            } catch (InterruptedException e) {
                // Bug fix: re-assert the interrupt flag so cancellation is visible
                // to the caller instead of being swallowed (original only logged).
                Thread.currentThread().interrupt();
                logger.error(e.getMessage());
            }
        }
        return authorInfo;
    }

    /**
     * Marks a search record as parse-only: no download, no wash, parse enabled.
     * NOTE(review): not referenced anywhere in this file — confirm external usage
     * (or Groovy dynamic dispatch) before removing.
     */
    private void fillSearchCurrentRecord(CrawlerRequestRecord requestRecord){
        requestRecord.setReleaseTime(System.currentTimeMillis());
        requestRecord.setDownload(false);
//        requestRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
//        requestRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(7*24,null));
        requestRecord.setNeedWashPage(false);
        requestRecord.setNeedParsedPage(true);
    }

    /**
     * Overwrites the record's HTTP headers with the credential-specific values
     * (referer, device fingerprint, authorization) plus a fixed WeChat mini-program
     * User-Agent, and enables the dynamic proxy.
     * NOTE(review): headers are only written when the record already carries a
     * non-empty header map — records with null/empty headers go out unauthenticated;
     * confirm that is intentional.
     */
    private void fillCurrentRecordRequestHeader(CrawlerRequestRecord requestRecord,String authorInfo){
        Map authorInfoMap = new Json(authorInfo).toObject(Map.class);
        Map<String, String> headers = requestRecord.getHttpRequest().getHeaders();
        if (headers != null && headers.size() > 0){
            headers.put("charset", "utf-8");
            headers.put("Accept-Encoding", "gzip");
            headers.put("referer", authorInfoMap.get(headerReferer).toString());
            headers.put("device-fingerprint",authorInfoMap.get(headerDeviceFingerprint).toString());
            headers.put("authorization", authorInfoMap.get(headerAuthorization).toString());
            headers.put("content-type", "application/json");
            headers.put("User-Agent", "Mozilla/5.0 (Linux; Android 6.0; DIG-AL00 Build/HUAWEIDIG-AL00; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/55.0.2883.91 Mobile Safari/537.36 MicroMessenger/7.0.3.1400(0x27000334) Process/appbrand0 NetType/WIFI Language/zh_CN");
            headers.put("Host", "www.xiaohongshu.com");
            headers.put("Connection", "Keep-Alive");
        }
        // Route this request through the shared dynamic proxy.
        requestRecord.getHttpConfig().setUseProxy(true);
        requestRecord.getHttpConfig().setProxy(proxy);
    }

    /** Empty entry point, presumably kept for ad-hoc local testing — confirm before removing. */
    public static void main(String[] args) {
    }

    /**
     * Re-queues a failed request as a fresh turn-page record, copying business tags,
     * parse/wash flags, and (when present) the turnPageItem request type. There is
     * currently no retry cap — the commented-out block below was a capped-retry
     * implementation; records retry indefinitely until they succeed.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){

//        int count = 1;
//        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
//        if (crawlerBusinessTags.hasKVTag("download_retry_count")){
//            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
//            if (count >= 100){
//                logger.error("xhs search for time descending download he number of retries exceeds the limit" +
//                        ",request url {},download detail {}",crawlerRecord.getHttpRequest().getUrl());
//                return;
//            }
//        }
//
//        count++;
//        crawlerBusinessTags.addCustomKV("download_retry_count",count);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .build();
        crawlerRequestRecords.add(crawlerRequestRecord);

        // A turnPageItem record must not also carry the turnPage type.
        if(crawlerRecord.tagsCreator().requestTags().hasRequestType(turnPageItem)){
            crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(turnPage);
            crawlerRequestRecord.tagsCreator().requestTags().addRequestType(turnPageItem);
        }
        // Preserve the original record's parse/wash intent on the retry.
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
    }


    /**
     * Converts Xiaohongshu's human-readable release times to epoch millis.
     * Handles relative forms ("刚刚"/just now, "N秒前"/seconds ago, "N分钟前"/minutes ago,
     * "N小时前"/hours ago, "N小时M分钟前"), day words ("今天"/today, "昨天"/yesterday,
     * "前天"/day before yesterday), and partial/absolute dates, falling through to
     * strict parsing against every pattern in {@link TimeForamtEnum}.
     *
     * @param dataStr the raw date string from the page
     * @return epoch milliseconds
     * @throws ParseException when no format in TimeForamtEnum matches the final string
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        String regEx="[^0-9]+"; // matches runs of non-digit characters, used to split out the numbers
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){
            int hourNumber = Integer.valueOf(pattern.split(dataStr)[0]);
            // NOTE(review): "secondNumber" actually holds the MINUTES component
            // (used with a 60*1000 multiplier below) — the name is misleading.
            int secondNumber = Integer.valueOf(pattern.split(dataStr)[1]);
            long totalMillis = hourNumber*60*60*1000L + secondNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){
            // "今天HH:mm" -> "yyyy年MM月dd日HH:mm", parsed by format1_3 below.
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){
            // Month/day without a year: assume the current year.
            // NOTE(review): when the input omits the trailing 日 (the regex allows it),
            // the resulting "yyyy年MM月dd" matches no TimeForamtEnum pattern and
            // parseDateStrictly will throw — confirm whether that input actually occurs.
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Date patterns accepted when parsing release times, grouped by separator style
     * (Chinese 年月日, dash, slash, dot, plus the +0800 RFC-ish form).
     * (Name "Foramt" is a historical typo kept for source compatibility.)
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        /** The date pattern string backing this constant (immutable). */
        private final String format;

        TimeForamtEnum(String format) {
            this.format = format;
        }

        /**
         * Returns every pattern in declaration order, for use with multi-pattern parsers.
         */
        public static String[] allFormats() {
            return Arrays.stream(values())
                    .map(entry -> entry.format)
                    .toArray(String[]::new);
        }

        /** Returns the date pattern string for this constant. */
        public String getFormat() {
            return format;
        }
    }


}
