package com.chance.cc.crawler.development.scripts.du;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.jsoup.Jsoup;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.turnPageItem;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @author bx
 * @date 2020/12/6 0006 12:52
 */
public class DUCommonCrawlerScript extends CrawlerCommonScript {

    private static Logger logger = LoggerFactory.getLogger(DUCommonCrawlerScript.class);

    // Entry URL for the hot-news feed; only registered so requests route into this script.
    public static final String duHotNewsStartUrl ="https://m.dewu.com/sns/v1/content/posts-note-list";
    // Matches hot-news list API responses (posts-note-list endpoint, any host).
    public static final String duHotNewsListUrlRegular = "https://\\S*/sns/v1/content/posts-note-list\\S*";
    // Matches talent Q&A homepage list API responses.
    public static final String duTalentListUrlRegular = "https://\\S*/sns/v1/content/question-talent-homepage\\S*";
    // Matches hot-news post detail (comments) API URLs.
    public static final String duHotNewsCommentUrlRegular = "https://m.poizon.com/mapi/posts/detail\\S*";
    // Template for a post detail request: postsId plus its md5 signature (see genSignature).
    public static final String duHotNewsCommentUrlFormat = "https://m.poizon.com/mapi/posts/detail?postsId=%s&sign=%s";
    // Matches talent question share (comment) pages.
    public static final String duTalentCommentUrlRegular = "https://m.poizon.com/question/share\\S*";
    // Template for a talent question share page, keyed by questionId.
    public static final String duTalentCommentUrlFormat = "https://m.poizon.com/question/share?questionId=%s";

    // Site labels used in dataId/parentId composition and business tags.
    public static final String hot_news_site = "hot_news";
    public static final String talent_site = "talent";

    /**
     * Extracts follow-up requests from a downloaded page. A failed download is
     * re-queued for retry; otherwise the page is routed to the parser matching
     * its URL pattern (hot-news list, hot-news comments, or talent list).
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> generatedRecords = new ArrayList<>();
        String url = crawlerRecord.getHttpRequest().getUrl();

        // Download failure: schedule a retry and skip the wash stage for this page.
        if (crawlerRecord.isDownload() && !page.isDownloadSuccess()) {
            logger.error("du download url {} proxy has error ,will retry", url);
            addCrawlerRecords(generatedRecords, crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
            return generatedRecords;
        }

        // Dispatch by URL pattern; talent comment pages are leaves and produce no links.
        if (isUrlMatch(url, duHotNewsListUrlRegular)) {
            parseDUHotNewsListLinks(generatedRecords, crawlerRecord, page);
        } else if (isUrlMatch(url, duHotNewsCommentUrlRegular)) {
            parseDUHotNewsCommentLinks(generatedRecords, crawlerRecord, page);
        } else if (isUrlMatch(url, duTalentListUrlRegular)) {
            parseDUTalentListLinks(generatedRecords, crawlerRecord, page);
        }

        return generatedRecords;
    }

    /**
     * Converts a downloaded page into result records, dispatching on the URL
     * pattern. Returns null when the URL matches none of the known patterns.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        String url = crawlerRecord.getHttpRequest().getUrl();

        if (isUrlMatch(url, duHotNewsListUrlRegular)) {
            return washHotNewsArticle(crawlerRecord, page);
        }
        if (isUrlMatch(url, duHotNewsCommentUrlRegular)) {
            return washHotNewsComment(crawlerRecord, page);
        }
        if (isUrlMatch(url, duTalentListUrlRegular)) {
            return washTalentArticle(crawlerRecord, page);
        }
        if (isUrlMatch(url, duTalentCommentUrlRegular)) {
            return washTalentComment(crawlerRecord, page);
        }

        // Unrecognised URL: nothing to wash.
        return null;
    }

    /**
     * Scans a hot-news list response and emits one signed comment-detail request
     * for every post entry (type "3") that has at least one reply.
     *
     * On any parse failure the page is marked as not washable and the error is
     * logged with full context.
     */
    private void parseDUHotNewsListLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){
        Json dataJson = new Json(crawlerRecord.getInternalDownloadPage().getRawText());
        try {
            List<String> dataList = dataJson.jsonPath($_type + ".data.list").all();
            if (dataList != null && !dataList.isEmpty()){
                for (String data : dataList) {
                    Json newsJson = new Json(data);
                    // Only type "3" entries are posts carrying a comment feed.
                    if (!"3".equals(newsJson.jsonPath($_type + ".type").get())){
                        continue;
                    }
                    int reply = Integer.parseInt(newsJson.jsonPath($_type + ".posts.reply").get());
                    if (reply <= 0) {
                        continue; // no comments to fetch for this post
                    }

                    String postsId = newsJson.jsonPath($_type + ".posts.postsId").get();
                    Map<String,String> params = new HashMap<>();
                    params.put("postsId", postsId);
                    // The detail endpoint requires an md5 signature over the sorted params + salt.
                    String sign = genSignature(params, "048a9c4943398714b356a696503d2d36");
                    String duHotNewsCommentUrl = String.format(duHotNewsCommentUrlFormat, postsId, sign);

                    CrawlerRequestRecord crawlerCommentTurnPageRequestRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRecord)
                            .httpUrl(duHotNewsCommentUrl)
                            .releaseTime(System.currentTimeMillis())
                            .needWashed(true)
                            .notFilterRecord()
                            .copyBizTags()
                            .build();
                    crawlerRequestRecords.add(crawlerCommentTurnPageRequestRecord);
                }
            }
        } catch (Exception e) {
            // Fixed: previously only e.getMessage() was logged, losing the stack trace
            // and the failing URL.
            logger.error("du hot news list parse error, url {}",
                    crawlerRecord.getHttpRequest().getUrl(), e);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Probes a hot-news comment response: if the reply list cannot be extracted,
     * the download is treated as broken and the request is re-queued for retry.
     */
    private void parseDUHotNewsCommentLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){
        try {
            // Result is intentionally discarded; this call only validates the payload.
            new Json(page.getRawText()).jsonPath($_type + ".data.replyList").all();
        } catch (Exception e) {
            logger.error("du hot news comment request {} download error:{},will retry",page.getRequest().getUrl(),e.getMessage());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Scans a talent Q&A list response and emits one share-page request per
     * question that has at least one reply. On failure the request is retried.
     */
    private void parseDUTalentListLinks(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){
        try {
            String rawText = crawlerRecord.getInternalDownloadPage().getRawText();
            List<String> entries = new Json(rawText).jsonPath($_type + ".data.list").all();
            for (String entry : entries) {
                Json questionJson = new Json(entry);
                // Questions with no replies have no comment page worth fetching.
                if (Integer.valueOf(questionJson.jsonPath($_type + ".replyCount").get()) > 0) {
                    String questionId = questionJson.jsonPath($_type + ".questionId").get();
                    crawlerRequestRecords.add(CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRecord)
                            .httpUrl(String.format(duTalentCommentUrlFormat, questionId))
                            .releaseTime(System.currentTimeMillis())
                            .needWashed(true)
                            .notFilterRecord()
                            .copyBizTags()
                            .build());
                }
            }
        } catch (Exception e) {
            logger.error("du talent qa list request {} download error:{},will retry",page.getRequest().getUrl(),e.getMessage());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false);
        }
    }

    /**
     * Washes a hot-news list page into article-level CrawlerData, one item per
     * type-"3" post. Items that fail to parse are logged and skipped.
     */
    private List<CrawlerData> washHotNewsArticle(CrawlerRequestRecord crawlerRecord, HttpPage page){
        HttpPage httpPage = crawlerRecord.getInternalDownloadPage();
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        Json dataJson = new Json(httpPage.getRawText());
        List<String> datas = dataJson.jsonPath($_type + ".data.list").all();
        if(datas != null){
            for(String data : datas){
                try {
                    Json newsJson = new Json(data);
                    // Only type "3" entries are posts articles.
                    if ("3".equals(newsJson.jsonPath($_type + ".type").get())){

                        String postsId = newsJson.jsonPath($_type + ".posts.postsId").get();
                        String create_time = newsJson.jsonPath($_type + ".posts.addTime").get();
                        CrawlerData crawlerData = CrawlerData.builder()
                                .data(crawlerRecord, page)
                                .dataId(StringUtils.joinWith("-", domain(),hot_news_site, CrawlerEnum.CrawlerDataType.article.enumVal(), postsId))
                                .url("https://m.poizon.com/hybird/h5community/column?postsId="+postsId)
                                // addTime is epoch seconds; convert to millis.
                                .releaseTime(Long.parseLong(create_time) * 1000L)
                                .addContentKV(Field_Title,newsJson.jsonPath($_type + ".posts.title").get())
                                // Post content is HTML; reduce it to plain text.
                                .addContentKV(Field_Content, Jsoup.parse(newsJson.jsonPath($_type + ".posts.content").get()).text())
                                .addContentKV(Field_Author, newsJson.jsonPath($_type + ".posts.userInfo.userName").get())
                                .addContentKV(Field_Author_Id, newsJson.jsonPath($_type + ".posts.userInfo.userId").get())
                                .addContentKV(Field_I_Views, newsJson.jsonPath($_type + ".posts.readCount").get())
                                .addContentKV(Field_I_Comments, newsJson.jsonPath($_type + ".posts.reply").get())
                                .addContentKV(Field_I_Likes, newsJson.jsonPath($_type + ".posts.fav").get())
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                                .build();
                        crawlerData.tagsCreator().bizTags().addDomain(domain());
                        crawlerData.tagsCreator().bizTags().addSite(hot_news_site);
                        crawlerData.tagsCreator().bizTags().addSiteBiz(hot_news_site);
                        crawlerDataList.add(crawlerData);
                    }
                } catch (Exception e) {
                    // Fixed: message said "data normal" and the stack trace was dropped.
                    logger.error("du hot news article data abnormal,error: {}",e.getMessage(),e);
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Washes a talent Q&A list page into article-level CrawlerData, one item per
     * question. The question title doubles as the content field (questions have
     * no separate body in this payload). Items that fail to parse are skipped.
     */
    private List<CrawlerData> washTalentArticle(CrawlerRequestRecord crawlerRecord, HttpPage page){
        HttpPage httpPage = crawlerRecord.getInternalDownloadPage();
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        Json dataJson = new Json(httpPage.getRawText());
        List<String> datas = dataJson.jsonPath($_type + ".data.list").all();
        if(datas != null){
            for(String data : datas){
                try {
                    Json talentJson = new Json(data);
                    String questionId = talentJson.jsonPath($_type + ".questionId").get();
                    String create_time = talentJson.jsonPath($_type + ".formatTime").get();
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", domain(),talent_site, CrawlerEnum.CrawlerDataType.article.enumVal(), questionId))
                            .url(String.format(duTalentCommentUrlFormat,questionId))
                            // formatTime is a human-readable string ("3分钟前", "昨天 12:30", ...).
                            .releaseTime(dateToTimestamp(create_time))
                            .addContentKV(Field_Title,talentJson.jsonPath($_type + ".title").get())
                            .addContentKV(Field_Content,talentJson.jsonPath($_type + ".title").get())
                            .addContentKV(Field_Author, talentJson.jsonPath($_type + ".userInfo.userName").get())
                            .addContentKV(Field_Author_Id, talentJson.jsonPath($_type + ".userInfo.userId").get())
                            .addContentKV(Field_I_Comments, talentJson.jsonPath($_type + ".replyCount").get())
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                            .build();
                    crawlerData.tagsCreator().bizTags().addDomain(domain());
                    crawlerData.tagsCreator().bizTags().addSite(talent_site);
                    crawlerData.tagsCreator().bizTags().addSiteBiz(talent_site);
                    crawlerDataList.add(crawlerData);
                } catch (Exception e) {
                    // Fixed: message said "data normal" and the stack trace was dropped.
                    logger.error("du talent article data abnormal,error: {}",e.getMessage(),e);
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Washes a hot-news post-detail response into comment-level CrawlerData,
     * one item per reply, linked to its parent article via parentId.
     */
    private List<CrawlerData> washHotNewsComment(CrawlerRequestRecord crawlerRecord, HttpPage page){
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        Json dataJson = new Json(page.getRawText());
        List<String> replyList = dataJson.jsonPath($_type + ".data.replyList").all();
        if(replyList != null){
            for(String reply : replyList){
                try {
                    Json replyJson = new Json(reply);
                    String postsId = replyJson.jsonPath($_type + ".postsId").get();
                    String postsReplyId = replyJson.jsonPath($_type + ".postsReplyId").get();
                    String create_time = replyJson.jsonPath($_type + ".addTime").get();
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", domain(),hot_news_site, CrawlerEnum.CrawlerDataType.comment.enumVal(), postsId,postsReplyId))
                            // parentId must mirror the dataId built in washHotNewsArticle.
                            .parentId(StringUtils.joinWith("-", domain(),hot_news_site, CrawlerEnum.CrawlerDataType.article.enumVal(), postsId))
                            .url("https://m.poizon.com/hybird/h5community/column?postsId="+postsId)
                            // addTime is epoch seconds; convert to millis.
                            .releaseTime(Long.parseLong(create_time) * 1000L)
                            .addContentKV(Field_Content, replyJson.jsonPath($_type + ".content").get())
                            .addContentKV(Field_Author, replyJson.jsonPath($_type + ".userInfo.userName").get())
                            .addContentKV(Field_Author_Id, replyJson.jsonPath($_type + ".userInfo.userId").get())
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .build();
                    crawlerData.tagsCreator().bizTags().addDomain(domain());
                    crawlerData.tagsCreator().bizTags().addSite(hot_news_site);
                    crawlerData.tagsCreator().bizTags().addSiteBiz(hot_news_site);
                    crawlerDataList.add(crawlerData);
                } catch (Exception e) {
                    // Fixed: message said "du talent comment" (copy/paste swap with
                    // washTalentComment) and the stack trace was dropped.
                    logger.error("du hot news comment data abnormal,error: {}",e.getMessage(),e);
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Washes a talent question share page (HTML) into comment-level CrawlerData,
     * one item per comment node. Since the page exposes no comment id, a stable
     * id is derived from md5(author + content).
     */
    private List<CrawlerData> washTalentComment(CrawlerRequestRecord crawlerRecord, HttpPage page){
        List<CrawlerData> crawlerDataList = new ArrayList<>();

        List<Selectable> commentNodes = page.getHtml().xpath("//div[@class=\"comment\"]/ul/li").nodes();
        String questionId = page.getHtml().xpath("//div[@class=\"ask\"]/@data-id").get();

        if (commentNodes != null && commentNodes.size() > 0){
            for (Selectable commentNode : commentNodes) {
                try {
                    String releaseTime = commentNode.xpath(".//div[@class=\"option\"]/span/text()").get();
                    String author = commentNode.xpath(".//p[@class=\"name\"]/text()").get();
                    String content = commentNode.xpath(".//p[@class=\"content\"]/text()").get();
                    String url = String.format(duTalentCommentUrlFormat,questionId);
                    String likes = commentNode.xpath(".//span[@class=\"zan\"]/text()").get();
                    // No native comment id on the page: hash author+content instead.
                    String commentId = md5(author+content);
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", domain(),talent_site, CrawlerEnum.CrawlerDataType.comment.enumVal(), questionId,commentId))
                            // parentId must mirror the dataId built in washTalentArticle.
                            .parentId(StringUtils.joinWith("-", domain(),talent_site, CrawlerEnum.CrawlerDataType.article.enumVal(), questionId))
                            .url(url)
                            .releaseTime(dateToTimestamp(releaseTime))
                            .addContentKV(Field_Content, content)
                            .addContentKV(Field_Author, author)
                            .addContentKV(Field_I_Likes,likes)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                            .build();
                    crawlerData.tagsCreator().bizTags().addDomain(domain());
                    crawlerData.tagsCreator().bizTags().addSite(talent_site);
                    crawlerData.tagsCreator().bizTags().addSiteBiz(talent_site);
                    crawlerDataList.add(crawlerData);
                } catch (Exception e) {
                    // Fixed: message said "du hot news comment" (copy/paste swap with
                    // washHotNewsComment) and the stack trace was dropped.
                    logger.error("du talent comment data abnormal,error: {}",e.getMessage(),e);
                }
            }
        }
        return crawlerDataList;
    }

    /** Domain identifier for all records/data produced by this script. */
    @Override
    public String domain() {
        return "du_app";
    }

    /**
     * Registers every URL pattern this script handles. The start url is only
     * registered so requests can enter the script; it is rejected by crawlerCheck.
     */
    @Override
    public void initUrlRegulars() {
        String[] regulars = {
                duHotNewsStartUrl,          // start url -- routing only, no real meaning
                duHotNewsListUrlRegular,
                duHotNewsCommentUrlRegular,
                duTalentListUrlRegular,
                duTalentCommentUrlRegular,
        };
        for (String regular : regulars) {
            addUrlRegular(regular);
        }
    }

    /**
     * Rejects the synthetic start url (registered only to route requests into
     * this script) and accepts every other request.
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Consistency fix: reuse duHotNewsStartUrl instead of duplicating the literal.
        return !duHotNewsStartUrl.equals(crawlerRecord.getHttpRequest().getUrl());
    }

    /** Intentionally a no-op: this script requires no post-execution processing. */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }


    /**
     * Refreshes hot-news list requests in place (so they re-download and re-wash
     * each cycle) before delegating to the default preparation.
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        boolean isHotNewsList = isUrlMatch(requestRecord.getHttpRequest().getUrl(), duHotNewsListUrlRegular);
        if (isHotNewsList) {
            fillCurrentRecord(requestRecord);
        }
        return super.prepareRequest(requestRecord, supportSourceRecords);
    }

    /**
     * Resets a hot-news list record so it is processed again this cycle:
     * fresh release time, no external download (the page arrives via the
     * internal download page), and both wash and parse stages enabled.
     */
    private void fillCurrentRecord(CrawlerRequestRecord requestRecord){
        requestRecord.setReleaseTime(System.currentTimeMillis());
//        requestRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
//        requestRecord.addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-","filter",requestRecord.getDomain(),"common","queue")));
        requestRecord.setDownload(false);
        requestRecord.setNeedWashPage(true);
        requestRecord.setNeedParsedPage(true);
    }

    /**
     * Manual signature check: parses a sample search URL's query string,
     * url-re-encodes every name/value pair, and prints the resulting signature.
     */
    public static void main(String[] args) throws UnsupportedEncodingException {
//        generateVideoCrawlerRecord();

        String requestUrl = "https://app.dewu.com/api/v1/h5/search/fire/search/list?title=%E5%86%B0%E6%B7%87%E6%B7%8B&page=0&sortType=0&sortMode=1&limit=20&showHot=1&isAggr=1";
        String query = requestUrl.split("\\?")[1];
        Map<String,String> signParams = new HashMap<>();
        for (NameValuePair pair : URLEncodedUtils.parse(query, Charset.forName("utf-8"))) {
            // genSignature decodes again internally, so pairs are stored re-encoded.
            signParams.put(URLEncoder.encode(pair.getName(),"utf-8"), URLEncoder.encode(pair.getValue(),"utf-8"));
        }
        System.out.println(genSignature(signParams, "048a9c4943398714b356a696503d2d36"));
    }

    /**
     * Debug helper: builds a start record for the hot-news feed with an empty
     * pre-attached internal page and prints its JSON, for seeding the crawler.
     */
    private static void generateVideoCrawlerRecord(){
        String domainId = "du_app";

        CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domainId, turnPageItem)
                .httpUrl(duHotNewsStartUrl)
                .build();
        itemRecord.setDownload(false);

        // Attach an empty internal page so downstream stages treat the record
        // as already downloaded.
        HttpRequest internalRequest = new HttpRequest();
        internalRequest.setUrl(itemRecord.getHttpRequest().getUrl());
        HttpPage internalPage = new HttpPage();
        internalPage.setRequest(internalRequest);
        internalPage.setRawText("");
        itemRecord.setInternalDownloadPage(internalPage);

        System.out.println(JSON.toJSONString(itemRecord));
    }

    /**
     * Builds the request signature: sorts the parameters by key ascending,
     * concatenates key + url-decoded value for each pair, appends the salt,
     * and md5-hashes the result.
     *
     * @param params request parameters (values url-encoded); may be null
     * @param salt   secret suffix appended before hashing
     * @return the hex md5 signature, "" on failure, or null when params is null
     */
    public static String genSignature(Map<String,String> params, String salt) {
        if(params == null){
            return null;
        }
        String sign = "";
        // StringBuilder instead of StringBuffer: single-threaded use, no
        // synchronization needed.
        StringBuilder sb = new StringBuilder();
        try {
            // 1. sort keys ascending; 2. concatenate key + decoded value pairs.
            for (Map.Entry<String,String> entry : new TreeMap<>(params).entrySet()) {
                // The signature field itself is excluded from the hash.
                // NOTE(review): excluded key is "sig" but the URL parameter is named
                // "sign" (see duHotNewsCommentUrlFormat) -- confirm which is intended.
                if (entry.getKey().equals("sig")) {
                    continue;
                }
                sb.append(entry.getKey()).append(URLDecoder.decode(entry.getValue(), "UTF-8"));
            }
            // 3. md5 over payload + salt yields the signature.
            sign = md5(sb.toString() + salt);
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
        return sign;
    }

    /**
     * Re-queues a failed request for retry, tracking the attempt count in the
     * "download_retry_count" business tag and giving up after 20 attempts.
     * The retry record keeps the original HTTP request and business tags but
     * drops the filter label so it is not deduplicated away.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){

        int count = 1;
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        if (crawlerBusinessTags.hasKVTag("download_retry_count")){
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            if (count >= 20){
                // Fixed: message typo ("he number") and a second "{}" placeholder
                // that had no matching argument.
                logger.error("du hot news download the number of retries exceeds the limit" +
                        ",request url {}",crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        count++;
        crawlerBusinessTags.addCustomKV("download_retry_count",count);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Converts a DU-style Chinese relative/absolute time string ("刚刚", "3分钟前",
     * "昨天 12:30", "12月6日", "12-06", ...) into epoch milliseconds.
     *
     * Relative forms are computed against the current clock; day-prefixed and
     * partial-date forms are rewritten into full date strings and then parsed
     * strictly against every pattern in TimeForamtEnum.
     *
     * @param dataStr raw time text scraped from the page
     * @return epoch milliseconds
     * @throws ParseException if the (possibly rewritten) string matches no known format
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        String regEx="[^0-9]+"; // regex matching runs of non-digit chars, used to split out the numbers
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){
            // "just now"
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){
            // "N seconds ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){
            // "N minutes ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){
            // "N hours ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){
            // "H hours M minutes ago" -- the second split value is minutes,
            // despite the variable name "secondNumber".
            int hourNumber = Integer.valueOf(pattern.split(dataStr)[0]);
            int secondNumber = Integer.valueOf(pattern.split(dataStr)[1]);
            long totalMillis = hourNumber*60*60*1000L + secondNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (Pattern.matches("\\d*天前",dataStr)){
            // "N days ago"
            int number = Integer.valueOf(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*24*60*60*1000L);
        } else if (dataStr.startsWith("今天")){
            // "today HH:mm" -> replace prefix with today's full date, parsed below
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(), TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){
            // "yesterday HH:mm" -> replace prefix with yesterday's full date
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){
            // "the day before yesterday HH:mm"
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L, TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d*月\\d*[日]*", dataStr)){
            // "M月D日" with no year -> prefix the current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d*-\\d*", dataStr)){
            // "MM-dd" with no year -> prefix the current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        }
        // Fall through: strict parse against every known pattern.
        return DateUtils.parseDateStrictly(dataStr, TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * Candidate date/time patterns for DU release-time strings, tried in
     * declaration order by DateUtils.parseDateStrictly (via allFormats()).
     * NOTE: name keeps the historical "Foramt" spelling; renaming would break callers.
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        /** The date pattern backing this constant. */
        private final String pattern;

        TimeForamtEnum(String pattern) {
            this.pattern = pattern;
        }

        /** Returns every pattern, in declaration order. */
        public static String[] allFormats() {
            TimeForamtEnum[] all = values();
            String[] patterns = new String[all.length];
            int i = 0;
            for (TimeForamtEnum value : all) {
                patterns[i++] = value.pattern;
            }
            return patterns;
        }

        public String getFormat() {
            return pattern;
        }
    }
}
