package com.chance.cc.crawler.development.scripts.xcar.carseries.koubei;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.Downloader;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.record.CrawlerResultRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Html;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.selector.Selectable;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.meta.core.bean.common.MetaResponse;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.google.common.collect.Maps;
import jdk.nashorn.api.scripting.NashornScriptEngineFactory;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.script.*;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.*;

/**
 * @author bx
 * @date 2021/1/8 0008 12:11
 */
public class XCarSeriesPublicPraiseCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(XCarSeriesPublicPraiseCrawlerScript.class);

    // Business "site"/"site_biz" tag values used to route and tag praise records.
    public static final String site= "publicPraise";
    public static final String site_biz= "praise";

    // Praise-list AJAX endpoint: the regular matches it, the format builds it (page number + series id).
    public static final String carSeriesPublicPraiseUrlRegular = "https://newcar.xcar.com.cn/auto/index.php\\S*";
    public static final String carSeriesPublicPraiseUrlFormat = "https://newcar.xcar.com.cn/auto/index.php?" +
            "r=reputation/reputation/GetAjaxKbList3&page=%d&pserid=%s&jh=0&wd=0";
    // Human-facing review page for a series; used as the article URL when washing data.
    public static final String xcarSeriesPraiseUrlFormat = "https://newcar.xcar.com.cn/%s/review.htm";

    // Series landing page: the regular matches it, the format builds it (series id).
    public static final String carSeriesUrlRegular = "https://newcar.xcar.com.cn/\\d*/";
    public static final String carSeriesUrlFormat = "https://newcar.xcar.com.cn/%s/";

    public static final String xcarPublicPraiseLogPrefix= "xcar series public praise ";

    // Compiled cipher JS (md5/sha1/sha256) for the site's cookie challenge; populated by initCompileScript().
    public Map<String,CompiledScript> compiledScriptMap;
    public ScriptEngine scriptEngine;

    /**
     * Builds the seed requests: one series landing-page request per keyword (series id)
     * delivered by the meta "keys" support source. Also attaches the locally cached auth
     * cookie and pre-compiles the cipher JS needed for the site's cookie challenge.
     *
     * @param requestRecord        the seed request template
     * @param supportSourceRecords support sources; the one whose URL contains "/v1/meta/{domain}/keys?site=" carries the keyword list
     * @return the generated series requests (empty when no keyword source is found)
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> allItemRecords = new ArrayList<>();

        // Attach the cached auth cookie (if any) so the requests pass the site's JS challenge.
        String authInfo = getLocalAuthInfo();
        if (StringUtils.isNotBlank(authInfo)){
            requestRecord.getHttpRequest().addHeader("cookie",authInfo);
        }

        // No support sources: fall back to the default preparation.
        if (supportSourceRecords == null || supportSourceRecords.isEmpty()){
            return super.prepareRequest(requestRecord,supportSourceRecords);
        }

        // Find the support record that carries the domain keyword list.
        CrawlerRequestRecord keywordSupportSourceRecord = null;
        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            if (supportSourceRecord.getHttpRequest().getUrl().contains("/v1/meta/"+domain()+"/keys?site=")){
                keywordSupportSourceRecord = supportSourceRecord;
                break;
            }
        }

        if (keywordSupportSourceRecord != null){

            try {
                initCompileScript();
            } catch (FileNotFoundException | ScriptException e) {
                // Pass the throwable so the stack trace is logged, not just the message.
                logger.error("init compile scripts error : {}",e.getMessage(),e);
            }

            // Cookies / userAgents must be initialized before the records are built.
            initAllCrawlerRecordByKeyword(requestRecord,keywordSupportSourceRecord,allItemRecords);
        } else {
            logger.error("{} start urls cant be empty!",xcarPublicPraiseLogPrefix);
        }
        return allItemRecords;
    }

    /**
     * Parses follow-up links from a downloaded page: queues the first praise page
     * for a series landing page, or the next praise page for an AJAX list page.
     * Failed downloads are re-queued for retry via {@link #addCrawlerRecords}.
     *
     * @param context current record context holding the page and its request record
     * @return the follow-up requests to schedule
     */
    public List<CrawlerRequestRecord> parseLinks(CrawlerRecordContext context) {
        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();
        HttpPage page = context.getPage();
        CrawlerRequestRecord crawlerRecord = context.getCrawlerRecord();
        if (!page.isDownloadSuccess()){
            logger.error("{} download proxy has error ,will retry",xcarPublicPraiseLogPrefix);
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord,page);
            crawlerRecord.setNeedWashPage(false); // download failed, no page to wash
            return crawlerRequestRecords;
        }

        if (page.getStatusCode() != 200){
            // Fixed argument order: prefix fills the first placeholder, status code the second (they were swapped).
            logger.error("{} download status code: {} ,will retry",xcarPublicPraiseLogPrefix,page.getStatusCode());
            if (page.getStatusCode() == 521){
                // 521: the JS cookie challenge must be re-solved -- refresh via the built-in downloader.
                cookieUpdate(context.getPageDownloader(),crawlerRecord);
            }
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord,page);
            crawlerRecord.setNeedWashPage(false); // download failed, no page to wash
            return crawlerRequestRecords;
        }

        if (isUrlMatch(page.getRequest().getUrl(),carSeriesUrlRegular)){
            initCarSeriesPraiseLinks(crawlerRequestRecords,crawlerRecord,page);
        } else if (isUrlMatch(page.getRequest().getUrl(),carSeriesPublicPraiseUrlRegular)){
            nextCarSeriesPagePraiseLinks(crawlerRequestRecords,crawlerRecord,page);
        }

        crawlerRecord.setNeedWashPage(true);
        return crawlerRequestRecords;
    }

    /**
     * Washes one praise-list page into CrawlerData articles: every
     * "home_list clearfix" div is one user praise entry, whose fields are first
     * collected into a flat map by the three helper parsers and then assembled
     * into a CrawlerData with series/model/address business tags.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDatas = new ArrayList<>();
        Html responseHtml = page.getHtml();
        
        List<Selectable> nodes = responseHtml.xpath("//div[@class=\"home_list clearfix\"]").nodes();
        for(Selectable node : nodes){

            try {
                // Gather all fields of this entry into one map, then build the data object from it.
                Map<String,String> crawlerDataMap = new HashMap<>();
                listInfos(node,crawlerDataMap);
                userScoreInfos(node,crawlerDataMap);
                otherInfos(node,crawlerDataMap);
                CrawlerData crawlerArticleData = CrawlerData.builder()
                        .data(crawlerRecord, page)
                        .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(),site, article.enumVal(), crawlerDataMap.get("contentId")))
                        .url(String.format(xcarSeriesPraiseUrlFormat,page.getRequest().getExtras().get("series_id")))
                        .releaseTime(dateToTimestamp(crawlerDataMap.get("releaseTime")))
                        .addContentKV(Field_Author, crawlerDataMap.get(Field_Author))
                        .addContentKV(Field_Title, crawlerDataMap.get("model"))
                        .addContentKV(Field_Content,crawlerDataMap.get(Field_Content))
                        .addContentKV(Field_Praise_Time_Buy,crawlerDataMap.get(Field_Praise_Time_Buy))
                        .addContentKV(Field_Praise_Address_Buy,crawlerDataMap.get(Field_Praise_Address_Buy))
                        .addContentKV(Field_Praise_Price_Buy,crawlerDataMap.get(Field_Praise_Price_Buy))
                        .addContentKV(Field_Praise_Fuel_Economy,crawlerDataMap.get(Field_Praise_Fuel_Economy))
                        .addContentKV(Field_Praise_Target,crawlerDataMap.get(Field_Praise_Target))
                        .addContentKV(Field_Praise_Vendor,crawlerDataMap.get(Field_Praise_Vendor))
                        .addContentKV(Field_Praise_values,crawlerDataMap.get(Field_Praise_values))
                        .addContentKV(Field_Images,crawlerDataMap.get(Field_Images))
                        .addContentKV(Field_I_Likes,crawlerDataMap.get(Field_I_Likes))
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .resultLabelTag(article)
                        .build();

                // Car-model tag: name plus its detail URL.
                Map<String, String> modelMap = new HashMap<>();
                modelMap.put("model_name",crawlerDataMap.get("model"));
                modelMap.put("model_url",crawlerDataMap.get("model_url"));
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Car_Model,modelMap);
                // Address tag: only one location string is extracted, so it fills both province and city.
                Map<String, String> addr = new HashMap<>();
                addr.put("province",crawlerDataMap.get(Field_Praise_Address_Buy));
                addr.put("city",crawlerDataMap.get(Field_Praise_Address_Buy));
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Addr_Info,addr);
                // Elite (essence) entries get a unique filter key and an earlier filter time.
                if("true".equals(crawlerDataMap.get(Tag_Field_Is_Elite))){
                   crawlerArticleData.setFilterKey(StringUtils.joinWith("-",crawlerArticleData.getDataId(),System.currentTimeMillis()));
                   crawlerArticleData.setFilterTime(crawlerRecord.getScheduleTime()-1000L);
                }
                // Series tag carried from the request extras set in initCarSeriesPraiseLinks.
                Map<String, String> seriesMap = new HashMap<>();
                seriesMap.put("series_name",page.getRequest().getExtras().get("series_name").toString());
                seriesMap.put("series_url",page.getRequest().getExtras().get("series_url").toString());
                seriesMap.put("series_id",page.getRequest().getExtras().get("series_id").toString());
                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Series,Arrays.asList(seriesMap));

                crawlerArticleData.tagsCreator().bizTags().addCustomKV(Tag_Field_Is_Elite,crawlerDataMap.get(Tag_Field_Is_Elite));
                crawlerArticleData.tagsCreator().bizTags().addDomain(domain());
                crawlerArticleData.tagsCreator().bizTags().addSite(site);
                crawlerArticleData.tagsCreator().bizTags().addSiteBiz(site_biz);
                crawlerDatas.add(crawlerArticleData);
            } catch (Exception e) {
                // One broken entry must not abort the rest of the page.
                logger.error("{} parse crawler data error: {},stackTrace: {}",xcarPublicPraiseLogPrefix,e.getMessage(),e.getStackTrace()[0]);
            }

        }
        return crawlerDatas;
    }

    @Override
    public void initUrlRegulars() {
        // Register every URL pattern this script handles: series landing pages and the praise AJAX endpoint.
        for (String urlRegular : new String[]{carSeriesUrlRegular, carSeriesPublicPraiseUrlRegular}) {
            addUrlRegular(urlRegular);
        }
    }

    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Accept only records whose business "site" tag matches this script's site.
        return site.equals(crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site"));
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // Intentionally a no-op: this script needs no post-execution work.
    }

    /** Crawler domain key identifying this site. */
    @Override
    public String domain() {
        return "xcar";
    }
    
    /**
     * From a series landing page, queues page 1 of the praise-list AJAX endpoint,
     * carrying the series name/id/url along as request extras.
     */
    private void initCarSeriesPraiseLinks(List<CrawlerRequestRecord> crawlerRequestRecords,
                                                                CrawlerRequestRecord crawlerRecord, HttpPage page){
        try {
            String seriesName = page.getHtml().xpath("//h1/text()").get();
            String seriesId = (String) crawlerRecord.getHttpRequest().getExtras().get("series_id");
            String seriesUrl = "https://newcar.xcar.com.cn/" + seriesId;
            CrawlerRequestRecord firstPageRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(String.format(carSeriesPublicPraiseUrlFormat, 1, seriesId))
                    .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                    .releaseTime(System.currentTimeMillis())
                    .notFilterRecord()
                    .copyBizTags()
                    .needWashed(true)
                    .build();
            // Series context travels with the request so washPage can tag each entry.
            firstPageRecord.getHttpRequest().addExtra("series_id", seriesId);
            firstPageRecord.getHttpRequest().addExtra("series_name", seriesName);
            firstPageRecord.getHttpRequest().addExtra("series_url", seriesUrl);
            firstPageRecord.getHttpRequest().addExtra("current_num", "1");
            crawlerRequestRecords.add(firstPageRecord);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * From an AJAX praise-list page, queues the next page as long as the current
     * page still contains praise entries (pagination stops on an empty page).
     */
    private void nextCarSeriesPagePraiseLinks(List<CrawlerRequestRecord> crawlerRequestRecords,
                                                                CrawlerRequestRecord crawlerRecord, HttpPage page){
        try {
            String carSeries = (String) crawlerRecord.getHttpRequest().getExtras().get("series_id");
            List<Selectable> nodes = page.getHtml().xpath("//div[@class=\"home_list clearfix\"]").nodes();
            if (nodes != null && !nodes.isEmpty()){

                int currentNum = Integer.parseInt((String) crawlerRecord.getHttpRequest().getExtras().get("current_num"));
                CrawlerRequestRecord crawlerTurnPageRequestRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(String.format(carSeriesPublicPraiseUrlFormat,currentNum+1,carSeries))
                        .httpHeads(crawlerRecord.getHttpRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                // Copy the series context and bump the page counter for the next request.
                crawlerTurnPageRequestRecord.getHttpRequest().setExtras(Maps.newHashMap(crawlerRecord.getHttpRequest().getExtras()));
                crawlerTurnPageRequestRecord.getHttpRequest().addExtra("current_num",String.valueOf(currentNum+1));
                crawlerRequestRecords.add(crawlerTurnPageRequestRecord);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Re-queues a failed request as a retry record, tracking the attempt count in
     * the record's business tags and giving up after 20 attempts.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord, HttpPage page){

        int count = 1;
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        if (crawlerBusinessTags.hasKVTag("download_retry_count")){
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag("download_retry_count").getVal();
            if (count >= 20){
                // Fixed message typo ("he number" -> "the number").
                logger.error("{} download the number of retries exceeds the limit,request url {},page rawText [{}]",
                        xcarPublicPraiseLogPrefix, crawlerRecord.getHttpRequest().getUrl(),page.getRawText());
                return;
            }
        }

        count++;
        crawlerBusinessTags.addCustomKV("download_retry_count",count);

        // Re-queue the same HttpRequest as a fresh retry record.
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .build();
        // A retry must not be deduplicated away by the filter stage.
        crawlerRequestRecord.tagsCreator().requestTags().removeRequestType(CrawlerEnum.CrawlerRequestType.filter);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Turns the meta "keys" support-source response into one series landing-page
     * request per keyword (series id).
     */
    private void initAllCrawlerRecordByKeyword(CrawlerRequestRecord requestRecord,
                                               CrawlerRequestRecord supportSourceRecord,
                                               List<CrawlerRecord> allItemRecords){
        try {
            HttpPage metaPage = supportSourceRecord.getInternalDownloadPage();
            MetaResponse metaResponse = JSON.parseObject(metaPage.getRawText(), MetaResponse.class);
            if (metaResponse.getStatus() != 0 || metaResponse.getContent() == null){
                return;
            }
            // Each content entry is a JSON-encoded CrawlerDomainKeys whose keyword is a series id.
            for (String content : (List<String>) metaResponse.getContent()) {
                String keyword = JSON.parseObject(content, CrawlerDomainKeys.class).getKeyword();
                CrawlerRequestRecord seriesRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(String.format(carSeriesUrlFormat, keyword))
                        .httpHeads(requestRecord.getHttpRequest().getHeaders())
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .notFilterRecord()
                        .build();
                seriesRecord.getHttpRequest().addExtra("series_id", keyword);
                allItemRecords.add(seriesRecord);
            }
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Parses the "list_infor" definition list of a praise entry: each &lt;dl&gt;
     * holds one labelled attribute (model, purchase time/place, price, fuel use).
     */
    private void listInfos(Selectable node,Map<String,String> crawlerDataMap){
        List<Selectable> listInforNodes = node.xpath(".//div[@class=\"list_infor\"]/dl[@class=\"clearfix\"]").nodes();
        for (Selectable listInforNode : listInforNodes) {
            String desc = listInforNode.xpath("./dt").get();
            if (desc == null){
                // Missing <dt> label: skip instead of throwing NPE on the switch below.
                continue;
            }
            switch (desc){
                case "爱车车型": // car model
                    crawlerDataMap.put("model_url","https://newcar.xcar.com.cn"+listInforNode.xpath(".//a/@href").get().trim());
                    crawlerDataMap.put("model",listInforNode.xpath(".//a/text()").get().trim());
                    break;
                case "购入时间": // purchase time
                    crawlerDataMap.put(Field_Praise_Time_Buy,listInforNode.xpath("./dd/text()").get().trim());
                    break;
                case "购车地点": // purchase place, e.g. "dealer [city]"
                    String vendor = listInforNode.xpath("./dd/text()").get().trim();
                    // The bracketed part is the buy address; the full string is the vendor.
                    Matcher matcher = Pattern.compile("\\[(.*?)]").matcher(vendor);
                    if (matcher.find()){
                        crawlerDataMap.put(Field_Praise_Address_Buy,matcher.group(1));
                    }
                    crawlerDataMap.put(Field_Praise_Vendor,vendor);
                    break;
                case "裸车价格": // bare car price
                    crawlerDataMap.put(Field_Praise_Price_Buy,listInforNode.xpath("./dd/text()").get().trim());
                    break;
                case "当前油耗": // current fuel consumption
                    crawlerDataMap.put(Field_Praise_Fuel_Economy,listInforNode.xpath("./dd/text()").get().trim());
                    break;
            }
        }
    }

    /**
     * Extracts the author nickname and, when present, the overall score headline
     * plus the per-dimension score list (joined line by line).
     */
    private void userScoreInfos(Selectable node,Map<String,String> crawlerDataMap){
        String userName = node.xpath(".//div[@class=\"name_lf\"]/dl[@class=\"clearfix\"]//span/text()").get();
        crawlerDataMap.put(Field_Author,userName);

        if (StringUtils.isNotBlank(node.xpath(".//div[@class=\"name_lf\"]//div[@class=\"score_tit\"]").get())){
            String scoreTit = node.xpath(".//div[@class=\"name_lf\"]//div[@class=\"score_tit\"]/text()").get()+
                    node.xpath(".//div[@class=\"name_lf\"]//div[@class=\"score_tit\"]/em/text()").get();
            // StringBuilder: single-threaded build-up, no synchronization needed.
            StringBuilder scoreBuffer = new StringBuilder(scoreTit);
            List<Selectable> scoreNodeList = node.xpath(".//div[@class=\"name_lf\"]//ul[@class=\"score_list\"]/li").nodes();
            for (Selectable scoreNodeInfo : scoreNodeList) {
                scoreBuffer.append("\n")
                        .append(scoreNodeInfo.xpath("./span/text()").get())
                        .append(scoreNodeInfo.xpath("./em/text()").get());
            }
            crawlerDataMap.put(Field_Praise_values,scoreBuffer.toString());
        }
    }

    /**
     * Extracts the remaining entry fields: content id, purchase targets, image
     * URLs, body text, like count, release time and the elite (essence) flag.
     */
    private void otherInfos(Selectable node,Map<String,String> crawlerDataMap){
        // Entry id comes from the praise widget's data-id attribute (may carry a leading '#').
        String contentId = node.xpath(".//div[@class=\"give_ment\"]/div[@class=\"zan_add1 praise_v1\"]/@data-id").get();
        contentId = contentId.replace("#","");
        crawlerDataMap.put("contentId", contentId);

        List<String> targets = node.xpath(".//div[@class=\"purpose clearfix\"]/em/text()").all();
        crawlerDataMap.put(Field_Praise_Target, StringUtils.joinWith(",",targets.toArray()));

        // "\\x01" is the downstream field separator for image URL lists.
        List<String> images = node.xpath(".//ul[@class=\"photo_ul\"]//a/img/@src").all();
        crawlerDataMap.put(Field_Images,StringUtils.joinWith("\\x01",images.toArray()));

        List<String> content = node.xpath(".//div[@class=\"review_post\"]/dl/*/text()").all();
        crawlerDataMap.put(Field_Content,StringUtils.joinWith("",content.toArray()));

        String like = node.xpath(".//a[@class=\"zan_ment\"]/text()").get();
        crawlerDataMap.put(Field_I_Likes,like);

        // Release time is the last whitespace-separated token of the publish line.
        String releaseTime = node.xpath(".//div[@class=\"publish\"]/text()").get().trim();
        String[] releaseTimes = releaseTime.split(" ");
        releaseTime = releaseTimes[releaseTimes.length-1];
        crawlerDataMap.put("releaseTime",releaseTime);

        // A span whose class starts with "ssen_" marks the entry as elite content.
        List<String> spans = node.xpath(".//div[@class=\"publish\"]/span/@class").all();
        crawlerDataMap.put(Tag_Field_Is_Elite,"false");
        for (String span : spans) {
            if (span.startsWith("ssen_")){
                crawlerDataMap.put(Tag_Field_Is_Elite,"true");
                break; // one marker is enough
            }
        }
    }

    /**
     * Compiles the cipher JS files (md5/sha1/sha256) with a Nashorn engine and
     * caches them in {@code compiledScriptMap} for the cookie-challenge solver.
     *
     * @throws FileNotFoundException when a cipher js file is missing
     * @throws ScriptException       when the Nashorn engine is unavailable or a script fails to compile
     */
    private void initCompileScript() throws FileNotFoundException, ScriptException {
        if (compiledScriptMap == null){
            compiledScriptMap = new HashMap<>();
        }

        // Locate the Nashorn factory explicitly so engine-level options can be passed.
        ScriptEngineManager sm = new ScriptEngineManager();
        NashornScriptEngineFactory factory = null;
        for (ScriptEngineFactory f : sm.getEngineFactories()) {
            if (f.getEngineName().equalsIgnoreCase("Oracle Nashorn")) {
                factory = (NashornScriptEngineFactory)f;
                break;
            }
        }
        // Fail fast with a clear message instead of an NPE when Nashorn is unavailable.
        if (factory == null){
            throw new ScriptException("Oracle Nashorn script engine not found");
        }
        String[] stringArray = ArrayUtils.toArray("-doe", "--global-per-engine");
        scriptEngine = factory.getScriptEngine(stringArray);
        List<String> ciphers = Arrays.asList("md5", "sha1", "sha256");
        String filePathPrefix = "/data/chance_crawler_runner/domain/xcar/cipher_js/";

        for (String cipher : ciphers) {
            String filePath = filePathPrefix+cipher+".js";
            logger.info("xcar cipher js file path {}",filePath);
            // NOTE(review): FileReader uses the platform default charset -- assumed ASCII-only js files.
            FileReader reader = new FileReader(filePath);
            try {
                compiledScriptMap.put(cipher, ((Compilable) scriptEngine).compile(reader));
            } finally {
                // Close the reader so the file handle is not leaked.
                try {
                    reader.close();
                } catch (IOException closeError) {
                    logger.warn("close cipher js file {} error: {}", filePath, closeError.getMessage());
                }
            }
        }
    }

    /**
     * Solves the site's two-step JS cookie challenge (seen as HTTP 521) and
     * caches the resulting "__jsl_clearance_s" cookie via updateLocalAuthInfo.
     */
    private void cookieUpdate(Downloader downloader, CrawlerRequestRecord crawlerRequestRecord){
        // Step 1: generate the first encrypted cookie.
        HttpPage httpPage = downloader.download(crawlerRequestRecord.getHttpRequest(), crawlerRequestRecord.getHttpConfig());
        // The challenge page ships a JS snippet; extract the "(...)" call body for evaluation.
        String js = httpPage.getRawText().substring(httpPage.getRawText().indexOf("("), httpPage.getRawText().lastIndexOf(")")+1);
        Map<String,String> cookieMap = new HashMap<>();
        try {
            String eval = (String) scriptEngine.eval(js);
            // The eval result looks like "...=VALUE;..."; keep only VALUE.
            eval =  eval.substring(eval.indexOf("=")+1, eval.indexOf(";"));
            cookieMap.put("__jsl_clearance_s",eval);
            logger.info("xcar cookie second update result __jsl_clearance_s={}",eval);
            crawlerRequestRecord.getHttpRequest().addHeader("cookie",processCookie(cookieMap));

            // Step 2: generate the second encrypted cookie.
            httpPage = downloader.download(crawlerRequestRecord.getHttpRequest(), crawlerRequestRecord.getHttpConfig());
            Matcher mtAuthor = Pattern.compile("};go\\((.*?)\\)</script>").matcher(httpPage.getRawText());
            if (mtAuthor.find()){
                String cookieGen = mtAuthor.group(1);
                Json cookieGenJson = new Json(cookieGen);
                // "ha" names the hash cipher (md5/sha1/sha256) whose precompiled script must be invoked.
                String ha = cookieGenJson.jsonPath($_type + ".ha").get();
                logger.info("xcar cookie cipher code {}",ha);
                compiledScriptMap.get(ha).eval();
                Invocable invocable = (Invocable) compiledScriptMap.get(ha).getEngine();
                String result = (String) invocable.invokeFunction("go",JSON.parseObject(cookieGen, Map.class));
                result =  result.substring(result.indexOf("=")+1, result.indexOf(";"));
                cookieMap.put("__jsl_clearance_s",result);
                result = processCookie(cookieMap);
                // Persist the finished cookie for later requests.
                updateLocalAuthInfo(result);
                logger.info("xcar cookie second update result {}",result);
            }

        } catch (Exception e) {
            // NOTE(review): only the message is logged here; the stack trace is dropped.
            logger.error("xcar cookie update error {}",e.getMessage());
        }
    }

    /**
     * Serializes a cookie map as "k1=v1;k2=v2" (no trailing separator).
     * Returns the empty string for a null/empty map instead of throwing
     * StringIndexOutOfBoundsException as the previous substring-based version did.
     */
    private String processCookie(Map<String,String> cookieMap){
        if (cookieMap == null || cookieMap.isEmpty()){
            return "";
        }
        StringBuilder cookieSB = new StringBuilder();
        for (Map.Entry<String, String> entry : cookieMap.entrySet()) {
            if (cookieSB.length() > 0){
                cookieSB.append(";");
            }
            cookieSB.append(entry.getKey())
                    .append("=")
                    .append(entry.getValue());
        }
        return cookieSB.toString();
    }

    /**
     * Converts the site's human-readable release time -- relative ("刚刚"/just now,
     * "5分钟前"/5 minutes ago, "昨天 12:30"/yesterday) or absolute ("2021-01-08") --
     * into epoch milliseconds.
     *
     * @param dataStr raw time text scraped from the page
     * @return epoch milliseconds
     * @throws ParseException when the normalized string matches none of {@link TimeForamtEnum}
     */
    public long dateToTimestamp(String dataStr) throws ParseException {
        // Splits on runs of non-digit characters to pull the numbers out of relative phrases.
        String regEx="[^0-9]+";
        Pattern pattern = Pattern.compile(regEx);

        if (dataStr.equals("刚刚")){ // "just now"
            return System.currentTimeMillis();
        } else if (Pattern.matches("\\d*秒前",dataStr)){ // "N seconds ago"
            int number = Integer.parseInt(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*1000L);
        } else if (Pattern.matches("\\d*分钟前",dataStr)){ // "N minutes ago"
            int number = Integer.parseInt(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*1000L);
        } else if (Pattern.matches("\\d*小时前",dataStr)){ // "N hours ago"
            int number = Integer.parseInt(pattern.split(dataStr)[0]);
            return (System.currentTimeMillis() - number*60*60*1000L);
        } else if (Pattern.matches("\\d*小时\\d*分钟前",dataStr)){ // "H hours M minutes ago"
            int hourNumber = Integer.parseInt(pattern.split(dataStr)[0]);
            // Renamed from "secondNumber": the second captured value is minutes.
            int minuteNumber = Integer.parseInt(pattern.split(dataStr)[1]);
            long totalMillis = hourNumber*60*60*1000L + minuteNumber*60*1000L;
            return (System.currentTimeMillis() - totalMillis);
        } else if (dataStr.startsWith("今天")){ // "today HH:mm" -> substitute today's date
            String currentTime = DateFormatUtils.format(System.currentTimeMillis(),TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("今天", currentTime);
        } else if (dataStr.startsWith("昨天")){ // "yesterday HH:mm"
            String yesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 60*60*24*1000L,TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("昨天", yesterdayTime);
        } else if (dataStr.startsWith("前天")){ // "day before yesterday HH:mm"
            String beforeYesterdayTime = DateFormatUtils.format(System.currentTimeMillis() - 2*60*60*24*1000L,TimeForamtEnum.format1.getFormat());
            dataStr = dataStr.replace("前天", beforeYesterdayTime);
        } else if (Pattern.matches("\\d{2}月\\d{2}[日]*", dataStr)){ // "MM月dd[日]" -> prepend current year
            // NOTE(review): without a trailing 日 this yields e.g. "2021年05月12", which matches
            // no TimeForamtEnum pattern -- confirm inputs always include 日.
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"年"+dataStr;
        } else if (Pattern.matches("\\d{2}-\\d{2}", dataStr)){ // "MM-dd" -> prepend current year
            Calendar cal = Calendar.getInstance();
            dataStr = cal.get(Calendar.YEAR)+"-"+dataStr;
        } else if(Pattern.matches("\\d{4}-\\d{2}",dataStr)){ // "yyyy-MM" -> assume first of month
            dataStr = dataStr + "-01";
        }
        return DateUtils.parseDateStrictly(dataStr,TimeForamtEnum.allFormats()).getTime();
    }

    /**
     * All date/time layouts a release-time string on the site may use.
     * (The enum name keeps its original spelling for compatibility with callers.)
     */
    public enum TimeForamtEnum {
        format1("yyyy年MM月dd日"),
        format1_1("yyyy年MM月dd日 HH:mm:ss"),
        format1_2("yyyy年MM月dd日 HH:mm"),
        format1_3("yyyy年MM月dd日HH:mm"),
        format1_4("yyyy年MM月dd日 HH点mm分"),
        format1_5("yyyy年MM月dd日 HH点mm分ss秒"),
        format1_6("yyyy年MM月dd日HH点mm分"),
        format1_7("yyyy年MM月dd日HH点mm分ss秒"),


        format4("yyyy-MM-dd HH:mm:ss"),
        format5("yyyy-MM-dd HH:mm"),
        format6("yyyy-MM-dd"),

        format7("yyyy/MM/dd HH:mm:ss"),
        format7_1("MM/dd/yyyy HH:mm:ss"),
        format8("yyyy/MM/dd HH:mm"),
        format9("yyyy/MM/dd"),

        format10("yyyy.MM.dd HH:mm:ss"),
        format11("yyyy.MM.dd HH:mm"),
        format12("yyyy.MM.dd"),

        format13("EEE MMM d HH:mm:ss +0800 yyyy");

        private final String format;

        TimeForamtEnum(String format) {
            this.format = format;
        }

        /** Returns every pattern, in declaration order, for use with DateUtils.parseDateStrictly. */
        public static String[] allFormats() {
            return Arrays.stream(values())
                    .map(TimeForamtEnum::getFormat)
                    .toArray(String[]::new);
        }

        public String getFormat() {
            return format;
        }
    }

    /**
     * Overrides crawlerProcess so parsed links can be internally downloaded
     * before the page is washed.
     *
     * @param context current record context
     */
    @Override
    public void crawlerProcess(CrawlerRecordContext context) {
        CrawlerRequestRecord record = context.getCrawlerRecord();
        // Phase 1: extract follow-up links (may also perform internal downloads).
        if (record.isNeedParsedPage()){
            parsePage(context);
        }
        // Phase 2: wash the page into result records when requested.
        if (record.isNeedWashPage()){
            washResult(context);
        }
    }

    /**
     * Parses links from the page, runs any internal downloads they require, and
     * hands the remaining links back to the scheduler via the context.
     */
    private void parsePage(CrawlerRecordContext context){
        List<CrawlerRequestRecord> links = parseLinks(context);
        internalDownload(context, links);
        // Post-process links whose pages were downloaded in-process.
        if (context.hasInternalDownloadLinks()){
            afterInternalDownload(context.getCrawlerRecord(), context.getInternalDownloadLinks(), links);
        }
        if (links == null || links.isEmpty()){
            logger.warn("record [{}] parsed links is null!",context.getCrawlerRecord());
            return;
        }
        for (CrawlerRequestRecord requestRecord : links){
            context.addCrawlerRecord(requestRecord);
        }
    }

    /**
     * Unused two-argument variant: link parsing happens in the context-based
     * {@code parseLinks(CrawlerRecordContext)} invoked from crawlerProcess,
     * so this override deliberately returns null.
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        return null;
    }

    /**
     * Extracts the links tagged for internal (in-process) download, removes them
     * from the normal scheduling flow, downloads each synchronously, and stores
     * them on the context for afterInternalDownload.
     */
    private void internalDownload(CrawlerRecordContext context,List<CrawlerRequestRecord> links){
        // Pull out the links tagged for internal download.
        List<CrawlerRequestRecord> internalDownloadLinks = new ArrayList<>();
        for (CrawlerRequestRecord requestRecord : links){
            if (requestRecord.tagsCreator().requestTags().hasRequestType(CrawlerEnum.CrawlerRequestType.internalDownload)){
                internalDownloadLinks.add(requestRecord);
            }
        }
        // Removed the dead null check: the local list is never null here.
        if (internalDownloadLinks.isEmpty()){
            return;
        }
        // They are handled here, so take them out of the normal scheduling flow.
        links.removeAll(internalDownloadLinks);
        // Download each link synchronously and attach the resulting page to its record.
        for (CrawlerRequestRecord requestRecord : internalDownloadLinks){
            HttpPage page = context.getPageDownloader()
                    .download(requestRecord.getHttpRequest(),requestRecord.getHttpConfig());
            requestRecord.setInternalDownloadPage(page);
        }
        context.setInternalDownloadLinks(internalDownloadLinks);
    }

    /**
     * Washes the current page into CrawlerResultRecords (one per CrawlerData
     * produced by washPage) and stores them on the context.
     */
    private void washResult(CrawlerRecordContext context){
        CrawlerRequestRecord crawlerRecord = context.getCrawlerRecord();
        HttpPage page = context.getPage();

        List<CrawlerResultRecord> requestResults = new ArrayList<>();
        List<CrawlerData> crawlerDataList = washPage(crawlerRecord,page);
        if (crawlerDataList !=null && crawlerDataList.size()>0){
            for (CrawlerData crawlerData : crawlerDataList){
                // Build the result record.
                CrawlerResultRecord crawlerResultRecord = new CrawlerResultRecord();
                if (crawlerData.getFilter()!=null){
                    // Copy the filter info when present.
                    crawlerResultRecord.setFilter(crawlerData.getFilter());
                    crawlerResultRecord.setFilterInfos(crawlerData.getFilterInfos());
                }
                crawlerResultRecord.setRecordKey(crawlerData.getDataId());
                crawlerResultRecord.setReleaseTime(crawlerData.getReleaseTime());
                // Carry over the request tags.
                crawlerResultRecord.setTags(crawlerData.getTags());
                // Carry over the filter-pipeline flag.
                crawlerResultRecord.setFilterPipelineResult(crawlerData.isFilterPipelineResult());
                crawlerResultRecord.setJsonStr(JSON.toJSONString(crawlerData));
                requestResults.add(crawlerResultRecord);
            }
        }
        context.setRequestResults(requestResults);
    }
}
