package com.chance.cc.crawler.development.scripts.youku;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.LabelTag;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.Header;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.text.ParseException;
import java.time.LocalDate;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AutoVMCommonField.Field_Author_Follows;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/4/15 14:45
 * @Description 优酷视频
 **/
public class YouKuCrawlerScript extends CrawlerCommonScript {

    private static Logger log = LoggerFactory.getLogger(YouKuCrawlerScript.class);
    // Crawler domain identifier; also used as a prefix for retry tags and data ids.
    private static final String DOMAIN = "youku";
    // Expected "site" category tag value (the check is currently disabled in crawlerCheck).
    private static final String SITE = "searchKw";
    // Tag marking a record scheduled as a retry of a failed download.
    private static final String REQUEST_AGAIN_TAG = DOMAIN + "_request_again";
    private static final String SEARCH_KW_SOURCE_URL = "searchKwSourceUrl";

    // Site entry page.
    private static final String ENTRANCE_URL = "https://www.youku.com/";
    // Regex matching search-API result URLs.
    private static final String SEARCH_URL = "http://api.appsdk.soku.com/i/s\\S*";
    // Template for a video item page (%s = video id).
    private static final String ITEM_SOURCE_URL = "https://v.youku.com/v_show/id_%s.html";
    // Regex matching video item pages.
    private static final String ITEM_URL = "https://[a-z]*.youku.com/[a-z\\_]*/id_\\S*.html";
    // Template for the comment sign-bootstrap request (%d = timestamp, %s = appKey).
    private static final String SIGN_SOURCE_URL = "https://acs.youku.com/h5/mtop.youku.ycp.comment.mainpage.get/1.0/?t=%d&sign=31e7083b9ea7f18cfa60adc4d418f059&appKey=%s";
    // Regex matching URLs produced from SIGN_SOURCE_URL.
    private static final String SIGN_URL = "https://acs.youku.com/h5/mtop.youku.ycp.comment.mainpage.get/1.0/\\?t=\\d+&sign=31e7083b9ea7f18cfa60adc4d418f059&appKey=\\d+";
    // Regex matching signed comment-API requests (first page and "module" pagination endpoint).
    private static final String COMMENT_URL = "https://acs.youku.com/h5/mtop.youku.ycp.comment.mainpage.[module.]*get/1.0/\\S*data\\S*";

    // NOTE(review): captured once at class-load time and reused as the "t" parameter and
    // sign input of every request — confirm the API accepts a stale timestamp in
    // long-running processes.
    private static final long currentTimeMillis = System.currentTimeMillis();
    // mtop application key sent with every signed request.
    private static final String appKey = "24679788";

    /**
     * Returns the domain identifier this script is registered under.
     *
     * @return the crawler domain name ("youku")
     */
    @Override
    public String domain() {
        return DOMAIN;
    }

    /**
     * Registers the URL patterns that route downloaded pages into this script.
     */
    @Override
    public void initUrlRegulars() {
        String[] patterns = {ENTRANCE_URL, SEARCH_URL, ITEM_URL, SIGN_URL, COMMENT_URL};
        for (String pattern : patterns) {
            addUrlRegular(pattern);
        }
    }

    /**
     * Input gate: decides whether a request should be processed by this script.
     * The site-tag check is currently disabled, so every request is accepted.
     *
     * @param crawlerRequestRecord the incoming request record
     * @return always {@code true}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        // NOTE(review): the original site-tag filter is disabled:
        // String siteTag = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // return siteTag.equals(SITE);
        return true;
    }

    /**
     * Prepares the outgoing request: seeds keyword records from any support
     * source whose URL carries a "keys" parameter, and attaches a random
     * User-Agent header to the main request.
     *
     * @param requestRecord        the request being prepared
     * @param supportSourceRecords auxiliary source records (keyword carriers)
     * @return records generated from the keyword sources
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> generated = new ArrayList<>();
        for (CrawlerRequestRecord source : supportSourceRecords) {
            if (source.getHttpRequest().getUrl().contains("keys")) {
                initKeyword(requestRecord, source, generated);
            }
        }

        requestRecord.getHttpRequest().addHeader("User-Agent", getRandomUA());

        return generated;
    }

    /**
     * Dispatches a downloaded page to the matching link-extraction handler based
     * on which URL pattern the request matches. Failed downloads are retried and
     * excluded from washing.
     *
     * @param crawlerRequestRecord the request that produced {@code httpPage}
     * @param httpPage             the downloaded page
     * @return follow-up requests discovered on the page
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> links = new ArrayList<>();
        String url = crawlerRequestRecord.getHttpRequest().getUrl();

        boolean downloadFailed = !httpPage.isDownloadSuccess() || httpPage.getStatusCode() != 200;
        if (downloadFailed) {
            log.error("{} status code : [{}]", url, httpPage.getStatusCode());
            requestAgainCrawlerRecord(links, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return links;
        }

        if (url.matches(SEARCH_URL)) {
            searchUrlRecord(crawlerRequestRecord, httpPage, links);
        }
        if (url.matches(ITEM_URL)) {
            itemUrlRecord(crawlerRequestRecord, httpPage, links);
        }
        if (url.matches(SIGN_URL)) {
            getCommentRecord(crawlerRequestRecord, httpPage, links);
        }
        if (url.matches(COMMENT_URL)) {
            commentUrlRecord(crawlerRequestRecord, httpPage, links);
        }
        return links;
    }


    /**
     * Rewrites a request URL's query string to point at the next result page:
     * increments the {@code page} parameter, re-encodes the {@code keyword}
     * parameter, and refreshes the "_t_" timestamp and "_s_" sign parameters.
     * All other parameters are copied through unchanged.
     *
     * @param requestUrl the current request URL (expected to carry a query string)
     * @param page       name of the page-number parameter to increment (may be blank)
     * @param keyword    name of the keyword parameter to re-encode (may be blank)
     * @return the next-page URL; {@code requestUrl} unchanged when it has no query
     */
    private String getTurnUrl(String requestUrl, String page, String keyword) {
        int queryStart = requestUrl.indexOf('?');
        if (queryStart < 0) {
            // No query string to rewrite (original threw AIOOBE here); nothing to advance.
            return requestUrl;
        }
        String time = String.valueOf(System.currentTimeMillis());
        // Truncate millis to a seconds-resolution timestamp for "_t_"/"_s_".
        time = time.substring(0, time.length() - 3);

        StringBuilder nextUrl = new StringBuilder(requestUrl.substring(0, queryStart + 1));
        // NOTE(review): original decoded with the platform default charset; preserved
        // here — confirm UTF-8 would not be the safer explicit choice.
        List<NameValuePair> params = URLEncodedUtils.parse(requestUrl.substring(queryStart + 1), Charset.defaultCharset());
        for (NameValuePair param : params) {
            String name = param.getName();
            String value = param.getValue();
            if (StringUtils.isNotBlank(page) && name.equals(page)) {
                nextUrl.append(name).append('=').append(Integer.parseInt(value) + 1);
            } else if (StringUtils.isNotBlank(keyword) && name.equals(keyword)) {
                try {
                    nextUrl.append(name).append('=').append(URLEncoder.encode(value, "UTF-8"));
                } catch (UnsupportedEncodingException e) {
                    // UTF-8 is mandated by the JVM spec, so this cannot happen in
                    // practice; keep the raw value rather than dropping the parameter.
                    log.error(e.getMessage());
                    nextUrl.append(name).append('=').append(value);
                }
            } else if ("_t_".equals(name)) {
                nextUrl.append(name).append('=').append(time);
            } else if ("_s_".equals(name)) {
                nextUrl.append(name).append('=').append(getTurnSignValue(time));
            } else {
                nextUrl.append(name).append('=').append(value);
            }
            nextUrl.append('&');
        }

        // Drop the trailing separator ('&', or the '?' itself when the query was empty).
        nextUrl.setLength(nextUrl.length() - 1);
        return nextUrl.toString();
    }

    /**
     * Handles a search-API result page: schedules the next result page and an
     * item-page request for every video hit carrying a create_time.
     *
     * @param crawlerRequestRecord record of the search request
     * @param httpPage             downloaded search JSON
     * @param parsedLinks          output list receiving follow-up requests
     */
    private void searchUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String keyword = (String) httpPage.getRequest().getExtras().get("keyword");
        String status = httpPage.getJson().jsonPath($_type + ".status").get();
        // The API can report a soft failure in the body even on HTTP 200; retry those.
        if("failed".equals(status)){
            log.error("search url download is error!will retry");
            requestAgainCrawlerRecord(parsedLinks,crawlerRequestRecord);
            return;
        }
        String requestUrl = httpPage.getRequest().getUrl();
        List<String> results = httpPage.getJson().jsonPath($_type + ".results").all();

        // Pagination: bump the "pg" parameter and re-encode the "keyword" parameter.
        String nextPage = getTurnUrl(requestUrl, "pg", "keyword");
        CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(nextPage)
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .copyResultTags()
                .build();
        turnRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        parsedLinks.add(turnRecord);


        for (String s : results) {
            JSONObject jsonObject = JSONObject.parseObject(s);
            Long releaseTimeToLong = jsonObject.getLong("create_time");
            String itemUrl = jsonObject.getString("videoid");
            // Skip hits that lack either a timestamp or a video id.
            if (releaseTimeToLong == null || StringUtils.isBlank(itemUrl)) {
                continue;
            }
            itemUrl = String.format(ITEM_SOURCE_URL, itemUrl);
            CrawlerRequestRecord itemRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRequestRecord)
                    .httpUrl(itemUrl)
                    .recordKey(itemUrl + keyword)
                    .releaseTime(releaseTimeToLong)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
            parsedLinks.add(itemRecord);

        }
    }

    /**
     * Handles a downloaded video item page: retries on an apparently broken page,
     * suppresses washing when no release time is shown, and — when comments were
     * requested and exist — schedules the comment sign-bootstrap request.
     *
     * @param crawlerRequestRecord record of the item-page request
     * @param httpPage             downloaded item page
     * @param parsedLinks          output list receiving follow-up requests
     */
    private void itemUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        // "totalComment" is embedded in the page's inline JSON; its absence is
        // treated as a bad download.
        String comments = washContent("\"totalComment\":\\d*", httpPage.getRawText());
        if (StringUtils.isBlank(comments)) {
            log.error("item page download is error!will retry!");
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            return;
        }
        // Pages without a release time are not washed.
        String releaseTime = httpPage.getHtml().xpath("//div[@class=\"desc\"]/span/text()[last()]").get();
        if(StringUtils.isBlank(releaseTime)){
            log.error("itemUrl : [{}],not to wash!",crawlerRequestRecord.getHttpRequest().getUrl());
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }
        CrawlerResultTags resultTags = crawlerRequestRecord.tagsCreator().resultTags();
        if (resultTags.hasDataType(comment)) {
            // The comment tag is consumed here; comment data itself comes from the
            // separate signed comment requests scheduled below.
            resultTags.getCategoryTag().removeLabelTag("comment");

            // comments looks like "totalComment":N — keep the count after the colon.
            comments = comments.split(":")[1];
            // Only fetch comments when the count is positive.
            if (Integer.parseInt(comments.trim()) > 0) {
                getSignRecord(crawlerRequestRecord, httpPage, parsedLinks);
            }
        }
    }


    /**
     * Schedules a sign-bootstrap request whose response headers carry the
     * "_m_h5_tk"/"_m_h5_tk_enc" cookies needed to sign comment-API calls.
     * Called both from an item page (first comment page) and from a comment
     * page (pagination).
     *
     * @param crawlerRequestRecord the originating item- or comment-page record
     * @param httpPage             page that triggered the request (not read here)
     * @param parsedLinks          output list receiving the sign request
     */
    private void getSignRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();

        String url = String.format(SIGN_SOURCE_URL, currentTimeMillis, appKey);
        CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(url)
                .recordKey(crawlerRequestRecord.getRecordKey() + url)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .notFilterRecord()
                .copyBizTags()
                .build();
        if (requestUrl.matches(ITEM_URL)) {
            // First page: carry over the comment filter configuration stashed on
            // the result tags by an earlier stage.
            CrawlerRequestRecord filterRecord = crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag().getKVTagObjVal("comment_filter_record", CrawlerRequestRecord.class);
            if (filterRecord == null) {
                log.error("filter record can not null !");
                return;
            }
            record.setFilter(filterRecord.getFilter());
            record.setFilterInfos(filterRecord.getFilterInfos());
            record.tagsCreator().resultTags().addResultDataType(comment);
            record.getHttpRequest().addExtra("articleUrl", requestUrl);
            record.getHttpRequest().addExtra("pageNum", "1");
        } else if (requestUrl.matches(COMMENT_URL)) {
            // Pagination: propagate the tags and extras (articleUrl/pageNum/lastId)
            // advanced by commentUrlRecord.
            record.tagsCreator().addCategoryTag(crawlerRequestRecord.tagsCreator().resultTags().getCategoryTag());
            record.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        }
        parsedLinks.add(record);

    }


    /**
     * Handles the sign-bootstrap response: extracts the "_m_h5_tk"/"_m_h5_tk_enc"
     * tokens from its Set-Cookie headers, builds the signed comment-API URL plus
     * the cookie header it requires, and schedules the comment request.
     *
     * @param crawlerRequestRecord record of the sign request
     * @param httpPage             downloaded sign page (tokens live in its headers)
     * @param parsedLinks          output list receiving the comment request
     */
    private void getCommentRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        Header[] responseHeaders = httpPage.getResponseHeaders();
        Map<String, String> tkOrEncByHead = getTkOrEncByHead(responseHeaders);
        String tk = tkOrEncByHead.get("tk");
        String enc = tkOrEncByHead.get("enc");
        // Both token cookies are required to sign the comment request; retry otherwise.
        if (StringUtils.isBlank(tk) || StringUtils.isBlank(enc)) {
            log.error("sign url download is error!will retry!");
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return;
        }

        String articleUrl = (String) httpPage.getRequest().getExtras().get("articleUrl");
        // Video id between the last '_' and the trailing ".html".
        String id = articleUrl.substring(articleUrl.lastIndexOf("_") + 1, articleUrl.lastIndexOf("."));
        String pageNum = (String) httpPage.getRequest().getExtras().get("pageNum");
        // Build the request URL: first page uses the mainpage endpoint, later pages
        // the "module" endpoint with page/lastId cursors.
        String url = "https://acs.youku.com/h5/mtop.youku.ycp.comment.mainpage.get/1.0/?";
        String data = "{\"app\":\"100-DDwODVkv\",\"time\":1616488297160,\"objectCode\":\"" + id + "\",\"objectType\":1,\"sign\":\"a3eef9a440b84861ab73ea4ed0cfc7c7\"}";
        if (!"1".equals(pageNum)) {
            url = "https://acs.youku.com/h5/mtop.youku.ycp.comment.mainpage.module.get/1.0/?";
            String lastId = (String) crawlerRequestRecord.getHttpRequest().getExtras().get("lastId");
            data = "{\"app\":\"100-DDwODVkv\",\"time\":1616488297160,\"objectCode\":\"" + id + "\",\"objectType\":1,\"dataSource\":\"ALL_COMMENT_DATASOURCE\",\"page\":" + pageNum + ",\"limit\":10,\"lastId\":" + lastId + ",\"sign\":\"a3eef9a440b84861ab73ea4ed0cfc7c7\"}";
        }
        String signValue = getSignValue(tk, data);
        if (StringUtils.isBlank(signValue)) {
            log.error("sign value get error!");
            return;
        }
        Map<String, Object> urlParams = new HashMap<>();
        urlParams.put("appKey", appKey);
        urlParams.put("t", currentTimeMillis);
        urlParams.put("sign", signValue);
        String dataParam = null;
        try {
            dataParam = URLEncoder.encode(data, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            log.error(e.getMessage());
        }
        urlParams.put("data", dataParam);
        String commentUrl = mapToString(urlParams, url, "=", "&");

        // Assemble the cookie header required by the comment API.
        // NOTE(review): these cookie values (and the "time"/"sign" constants inside
        // "data" above) appear captured from a live session — presumably still
        // accepted by the server, but verify periodically.
        Map<String, Object> cookieMap = new HashMap<>();
        cookieMap.put("cna", "tP/hF4OrRWkCAXL6lgpWdwlE");
        cookieMap.put("__ysuid", "1604481425963Xxi");
        cookieMap.put("UM_distinctid", "t175da5f09bd288-042a3d7bd8b0fa-c781f38-e1000-175da5f09be6ad");
        cookieMap.put("__aysid", "16056864632110aI");
        cookieMap.put("xlly_s", "1");
        cookieMap.put("modalFrequency", "{\"UUID\":\"9\"}");
        cookieMap.put("youku_history_word", "%5B%22%25E7%258E%258B%25E4%25B8%25AD%25E7%258E%258B%25E7%2594%25B5%25E8%25A7%2586%25E5%2589%25A7%22%5D");
        cookieMap.put("__ayft", "1605749794413");
        cookieMap.put("__ayscnt", "1");
        cookieMap.put("_m_h5_tk", tk);
        cookieMap.put("_m_h5_tk_enc", enc);
        cookieMap.put("P_ck_ctl", "F8C2FF80C20A5FC424C9692D9FD65534");
        cookieMap.put("__arpvid", "1605767282009xWQVly-1605767282070");
        cookieMap.put("__aypstp", "42");
        cookieMap.put("__ayspstp", "44");
        cookieMap.put("tfstk", "ctFcB_0dT-kjZMMudsGbJLiNTyjda3Iq05PYa7RKLBS_zsyEuscWUpSR6amzm5p1.");
        cookieMap.put("l", "eBx32D4uOGT5oj1WBO5wnurza77OaQAfCsPzaNbMiIncC6Zd1l9OJLKQKh6HgptRR8XVi9LM4h8SpzeTpe48-y8b-wBsSNk_-hGeCeTC.");
        cookieMap.put("isg", "BNXVBH2aJqLRDQLcE-hQbO965NGP0onkxJmWW1d4zM2srvCgGSO6tfEoeLIYrqGc");
        String cookie = mapToString(cookieMap, "", "=", ";").trim();

        CrawlerRequestRecord commentRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(commentUrl)
                .releaseTime(crawlerRequestRecord.getReleaseTime())
                .httpHead("Cookie", cookie)
                .notFilterRecord()
                .copyBizTags()
                .needWashed(true)
                .copyResultTags()
                .build();
        commentRecord.getHttpRequest().setExtras(crawlerRequestRecord.getHttpRequest().getExtras());
        parsedLinks.add(commentRecord);
    }

    /**
     * Handles a comment-API response: narrows the page JSON to the comment
     * payload for washing, then — if more comments remain and the date filter
     * allows it — advances the pageNum/lastId cursors and schedules the next
     * sign-bootstrap request. Any parsing failure marks the page as not washable
     * but still retries via a fresh sign request.
     *
     * @param crawlerRequestRecord record of the comment request
     * @param httpPage             downloaded comment JSON
     * @param parsedLinks          output list receiving follow-up requests
     */
    private void commentUrlRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        try {
            String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
            Json json = null;
            // First page nests the comment list in the last element of
            // data.data.modules; the "module" pagination endpoint returns it at
            // data.data directly.
            if (!requestUrl.contains("module")) {
                List<String> modules = httpPage.getJson().jsonPath($_type + ".data.data.modules").all();
                json = new Json(modules.get(modules.size() - 1));
            } else {
                json = new Json(httpPage.getJson().jsonPath($_type + ".data.data").get());
            }
            // Narrow the page's JSON so washComment sees only the comment payload.
            httpPage.setJson(json);
            List<String> nodes = json.jsonPath($_type + ".nodes").all();
            int pageNum = Integer.parseInt((String) crawlerRequestRecord.getHttpRequest().getExtras().get("pageNum"));

            // Decide whether another page should be fetched (10 comments per page).
            int currentTotal = pageNum * 10;
            String totalCount = json.jsonPath($_type + ".totalCount").get();
            if (currentTotal >= Integer.parseInt(totalCount)) {
                return;
            }
            CrawlerEnum.CrawlerRecordFilter filter = crawlerRequestRecord.getFilter();
            // gmtCreate of the last comment on this page — used to stop paging once
            // results fall outside the configured date window.
            String releaseTime = new Json(nodes.get(nodes.size() - 1)).jsonPath($_type + ".content.gmtCreate").get();
            if (filter == CrawlerEnum.CrawlerRecordFilter.keyOrDateRange || filter == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                List<FilterInfo> filterInfos = crawlerRequestRecord.getFilterInfos();
                Long startTime = null;
                Long endTime = null;
                for (FilterInfo filterInfo : filterInfos) {
                    if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange) {
                        long[] dateAllowRange = filterInfo.getDateAllowRange();
                        int hourFromNow = filterInfo.getHourFromNow();
                        if (dateAllowRange != null) {
                            // Explicit [start, end] window takes precedence.
                            startTime = dateAllowRange[0];
                            endTime = dateAllowRange[1];
                        } else if (hourFromNow != 0) {
                            // Rolling window: last hourFromNow hours.
                            endTime = System.currentTimeMillis();
                            startTime = endTime - DateUtils.MILLIS_PER_HOUR * hourFromNow;
                        }
                    }
                }
                if (startTime == null || StringUtils.isBlank(releaseTime)) {
                    return;
                }
                long releaseTimeToLong = Long.parseLong(releaseTime);
                if (releaseTimeToLong < startTime || releaseTimeToLong > endTime) {
                    return;
                }
            }

            // Advance the pagination cursors read by the next signed request.
            String lastId = new Json(nodes.get(nodes.size() - 1)).jsonPath($_type + ".commentId").get();
            crawlerRequestRecord.getHttpRequest().addExtra("pageNum", String.valueOf(pageNum + 1));
            crawlerRequestRecord.getHttpRequest().addExtra("lastId", lastId);
        } catch (Exception e) {
            // Malformed payload: skip washing but still retry via a fresh sign request.
            log.error("comment url download is error!");
            crawlerRequestRecord.setNeedWashPage(false);
            getSignRecord(crawlerRequestRecord, httpPage, parsedLinks);
            return;
        }

        getSignRecord(crawlerRequestRecord, httpPage, parsedLinks);
    }

    /**
     * Computes the mtop request sign: md5 of "token&amp;t&amp;appKey&amp;data",
     * where token is the part of the "_m_h5_tk" cookie before its '_'.
     *
     * @param tk   the "_m_h5_tk" cookie value ("token_expiry")
     * @param data the JSON payload being signed
     * @return the lowercase md5 hex digest, or {@code null} on failure
     */
    private String getSignValue(String tk, String data) {
        String token = tk.split("_")[0];
        String raw = token + "&" + currentTimeMillis + "&" + appKey + "&" + data;
        try {
            return md5(raw);
        } catch (Exception e) {
            log.error(e.getMessage());
            return null;
        }
    }

    /**
     * Extracts the "_m_h5_tk" and "_m_h5_tk_enc" cookie values from response
     * headers.
     *
     * @param headArray response headers to scan
     * @return map with keys "tk" and "enc"; values are "" when the cookie is
     *         absent or empty
     */
    private Map<String, String> getTkOrEncByHead(Header[] headArray) {
        Map<String, String> headsResult = new HashMap<>();
        String tk = "";
        String enc = "";
        for (Header responseHeader : headArray) {
            if (!"Set-Cookie".equals(responseHeader.getName())) {
                continue;
            }
            // First cookie attribute is "name=value".
            String[] pair = responseHeader.getValue().split(";")[0].split("=");
            if (pair.length < 2) {
                // Cookie present but valueless (e.g. "_m_h5_tk="): the original
                // indexed pair[1] unguarded and threw AIOOBE here.
                continue;
            }
            if ("_m_h5_tk".equals(pair[0])) {
                tk = pair[1];
            } else if ("_m_h5_tk_enc".equals(pair[0])) {
                enc = pair[1];
            }
        }
        headsResult.put("tk", tk);
        headsResult.put("enc", enc);
        return headsResult;
    }


    /**
     * Joins a map's entries onto a prefix string: each entry becomes
     * {@code key + link + value + sep}, with the final trailing separator removed.
     *
     * @param map      parameters to serialize (iteration order is HashMap order)
     * @param resource prefix the entries are appended to (e.g. a base URL)
     * @param link     key/value connector (e.g. "=")
     * @param sep      entry separator (e.g. "&" or ";")
     * @return the joined string without a trailing separator; {@code resource}
     *         unchanged when the map is empty
     */
    private String mapToString(Map<String, Object> map, String resource, String link, String sep) {
        if (map.isEmpty()) {
            // Original chopped one char off "resource" here (and threw on "").
            return resource;
        }
        StringBuilder result = new StringBuilder(resource);
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            result.append(entry.getKey()).append(link).append(entry.getValue()).append(sep);
        }
        // Strip the whole trailing separator — the original removed a single char,
        // which was only correct for one-character separators.
        result.setLength(result.length() - sep.length());
        return result.toString();
    }

    /**
     * Washes a downloaded page into data records according to the data types
     * requested on the record's result tags (article, interaction, comment).
     *
     * @param crawlerRecord the request record carrying the result tags
     * @param page          the downloaded page
     * @return the extracted data records
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> results = new ArrayList<>();
        CrawlerResultTags tags = crawlerRecord.tagsCreator().resultTags();

        if (tags.hasDataType(article)) {
            results.add(washArticle(crawlerRecord, page));
        }
        if (tags.hasDataType(interaction)) {
            results.add(washInteraction(crawlerRecord, page));
        }
        if (tags.hasDataType(comment)) {
            results.addAll(washComment(crawlerRecord, page));
        }

        return results;
    }

    /**
     * Washes a video item page into an "article" data record (title, author,
     * author id, follower count).
     *
     * @param crawlerRequestRecord record of the item-page request
     * @param httpPage             downloaded item page
     * @return the article data, or {@code null} when the release time fails to
     *         parse — NOTE(review): washPage adds this result without a null
     *         check; confirm downstream tolerates null entries
     */
    private CrawlerData washArticle(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String itemUrl = httpPage.getRequest().getUrl();
        // Video id between the last '_' and the trailing ".html".
        String articleKey = itemUrl.substring(itemUrl.lastIndexOf("_") + 1, itemUrl.lastIndexOf("."));

        String title = httpPage.getHtml().xpath("//span[@class=\"subtitle\"]/text()|//span[@title]/text()").get();
        String author = httpPage.getHtml().xpath("//span[@class=\"title ellipsis-style\"]/text()").get();
        String authorId = httpPage.getHtml().xpath("//a[@class=\"title-wrap\"]/@href").get();
        // lastIndexOf("uid") + 4 skips past "uid=" in the profile link.
        authorId =  StringUtils.isNotBlank(authorId) && authorId.contains("uid=") ? authorId.substring(authorId.lastIndexOf("uid") + 4) : "";
        String follows = httpPage.getHtml().xpath("//span[@class=\"subtitle ellipsis-style\"]/text()").get();
        // Follower text contains a count before the character "人"; keep the count.
        follows = StringUtils.isNotBlank(follows) ? follows.split("人")[0] : "0";
        String releaseTime = httpPage.getHtml().xpath("//div[@class=\"desc\"]/span/text()[last()]").get();

        CrawlerData crawlerData = null;
        try {
            long releaseTimeToLong = washTime(releaseTime);
            crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("article"))
                    .url(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .addContentKV(Field_Title, title)
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Author_Follows, washNum(follows))
                    .build();
        } catch (ParseException e) {
            log.error(e.getMessage());
        }
        return crawlerData;
    }

    /**
     * Washes a video item page into an "interaction" data record carrying the
     * total comment count, parented to the corresponding article record.
     *
     * @param crawlerRequestRecord record of the item-page request
     * @param httpPage             downloaded item page
     * @return the interaction data, or {@code null} when the release time fails
     *         to parse — NOTE(review): washPage adds this result without a null
     *         check; confirm downstream tolerates null entries
     */
    private CrawlerData washInteraction(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        String itemUrl = httpPage.getRequest().getUrl();
        // Video id between the last '_' and the trailing ".html".
        String articleKey = itemUrl.substring(itemUrl.lastIndexOf("_") + 1, itemUrl.lastIndexOf("."));

        String releaseTime = httpPage.getHtml().xpath("//div[@class=\"desc\"]/span/text()[last()]").get();
        // "totalComment":N is embedded in the page's inline JSON; default to "0".
        String comments = washContent("\"totalComment\":\\d*", httpPage.getRawText());
        comments = StringUtils.isNotBlank(comments) ? comments.split(":")[1] : "0";

        CrawlerData crawlerData = null;
        try {
            long releaseTimeToLong = washTime(releaseTime);
            crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, interaction.enumVal(), articleKey))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                    .url(itemUrl)
                    .releaseTime(releaseTimeToLong)
                    .addContentKV(Field_I_Comments, comments)
                    .build();
        } catch (ParseException e) {
            log.error(e.getMessage());
        }
        return crawlerData;
    }

    /**
     * Washes a comment-API payload into data records: one "comment" and one
     * nested "interaction" (likes/replies) per top-level comment, plus one
     * "comment" record per inline reply.
     *
     * @param crawlerRequestRecord record of the comment request
     * @param httpPage             comment JSON, already narrowed by commentUrlRecord
     * @return the extracted data records
     */
    public List<CrawlerData> washComment(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String requestUrl = httpPage.getRequest().getUrl();
        String articleUrl = (String) httpPage.getRequest().getExtras().get("articleUrl");
        // Video id between the last '_' and the trailing ".html".
        String articleKey = articleUrl.substring(articleUrl.lastIndexOf("_") + 1, articleUrl.lastIndexOf("."));

        List<String> commentList = httpPage.getJson().jsonPath($_type + ".nodes").all();
        for (String s : commentList) {
            JSONObject jsonObject = JSONObject.parseObject(s);
            String commentId = jsonObject.getString("commentId");
            JSONObject user = jsonObject.getJSONObject("publisher");
            String author = "";
            String authorId = "";
            if (user != null) {
                author = user.getString("nickName");
                authorId = user.getString("userId");
            }
            JSONObject content = jsonObject.getJSONObject("content");
            Long releseTimeToLong = content.getLong("gmtCreate");
            String contentB = content.getString("text");
            // NOTE(review): getJSONArray("imgs") is dereferenced unguarded here (and
            // for replies below); an absent "imgs" field would NPE — confirm the API
            // always sends it.
            List<List> imgs = content.getJSONArray("imgs").toJavaList(List.class);

            // Top-level comment record, parented to the article.
            CrawlerData crawlerData = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                    .releaseTime(releseTimeToLong)
                    .url(requestUrl)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .addContentKV(Field_Author, author)
                    .addContentKV(Field_Author_Id, authorId)
                    .addContentKV(Field_Content, contentB)
                    .addContentKV(Field_Images, imgs.toString())
                    .build();
            crawlerDataList.add(crawlerData);


            // Per-comment interaction record (likes / reply count), parented to the comment.
            JSONObject interact = jsonObject.getJSONObject("interact");
            String likes = interact.getString("likeCount");
            String comments = interact.getString("replyCount");
            CrawlerData crawlerDataInteraction = CrawlerData.builder()
                    .data(crawlerRequestRecord, httpPage)
                    .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), commentId))
                    .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), interaction.enumVal(), commentId))
                    .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("interaction"))
                    .releaseTime(releseTimeToLong)
                    .url(requestUrl)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                    .addContentKV(Field_I_Likes, likes)
                    .addContentKV(Field_I_Comments, comments)
                    .build();
            crawlerDataList.add(crawlerDataInteraction);

            JSONArray replies = jsonObject.getJSONArray("replies");
            for (Object reply : replies) {
                JSONObject replyJson = (JSONObject) reply;
                String replyId = replyJson.getString("replyId");
                String userNickName = replyJson.getString("userNickName");
                String userId = replyJson.getString("userId");
                JSONObject replyContent = replyJson.getJSONObject("content");
                String replyContentString = replyContent.getString("text");
                Long gmtCreate = replyContent.getLong("gmtCreate");
                List<String> imgs1 = replyContent.getJSONArray("imgs").toJavaList(String.class);

                // NOTE(review): replies are parented to the article rather than to
                // their comment — confirm this asymmetry with the interaction record
                // above is intentional.
                CrawlerData crawlerDataReply = CrawlerData.builder()
                        .data(crawlerRequestRecord, httpPage)
                        .parentId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, CrawlerEnum.CrawlerDataType.article.enumVal(), articleKey))
                        .dataId(StringUtils.joinWith("-", crawlerRequestRecord.getDomain(), site, comment.enumVal(), comment.enumVal(), replyId))
                        .resultLabelTag(CrawlerEnum.CrawlerDataType.valueOf("comment"))
                        .releaseTime(gmtCreate)
                        .url(requestUrl)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                        .addContentKV(Field_Author, userNickName)
                        .addContentKV(Field_Author_Id, userId)
                        .addContentKV(Field_Content, replyContentString)
                        .addContentKV(Field_Images, imgs1.toString())
                        .build();
                crawlerDataList.add(crawlerDataReply);
            }
        }
        return crawlerDataList;
    }

    /**
     * Returns the first match of {@code regx} in {@code input}, or {@code null}
     * when nothing matches.
     *
     * @param regx  the regular expression to search for
     * @param input the text to search
     * @return the first matched substring, or {@code null}
     */
    private static String washContent(String regx, String input) {
        Matcher matcher = Pattern.compile(regx).matcher(input);
        // Only the first match is wanted; the original used a while-loop that
        // returned on its first iteration.
        return matcher.find() ? matcher.group(0) : null;
    }

    /**
     * Converts a human-readable Youku timestamp — relative phrases such as
     * "3小时前" ("3 hours ago") or absolute dates — into epoch milliseconds.
     *
     * @param time the display string; blank yields 0
     * @return epoch millis (month/year offsets approximated as 30/365 days)
     * @throws ParseException when an absolute date fails every known format
     */
    private long washTime(String time) throws ParseException {
        if (StringUtils.isBlank(time)) {
            return 0;
        }

        long currentTimeMillis = System.currentTimeMillis();
        // "刚刚" = just now, "今天" = today.
        if ("刚刚".equals(time) || "今天".equals(time)) {
            return currentTimeMillis;
        }

        // "N秒前" = N seconds ago.
        if (time.contains("秒前")) {
            String[] split = time.split("秒");
            return currentTimeMillis - (DateUtils.MILLIS_PER_SECOND * (Integer.parseInt(split[0])));
        }

        // "N分钟前" = N minutes ago.
        if (time.contains("分钟前")) {
            String[] split = time.split("分钟");
            return currentTimeMillis - (DateUtils.MILLIS_PER_MINUTE * (Integer.parseInt(split[0])));
        }

        // "N小时前" = N hours ago.
        if (time.contains("小时前")) {
            String[] split = time.split("小时");
            return currentTimeMillis - (DateUtils.MILLIS_PER_HOUR * (Integer.parseInt(split[0])));
        }

        // "昨天" = yesterday.
        if ("昨天".equals(time)) {
            return currentTimeMillis - DateUtils.MILLIS_PER_DAY;
        }

        // "前天" = the day before yesterday.
        if ("前天".equals(time)) {
            return currentTimeMillis - (DateUtils.MILLIS_PER_DAY * 2);
        }

        // "N天前" = N days ago.
        if (time.contains("天前")) {
            String[] split = time.split("天");
            return currentTimeMillis - (DateUtils.MILLIS_PER_DAY * (Integer.parseInt(split[0])));
        }

        // "N周前" = N weeks ago.
        if (time.contains("周前")) {
            String[] split = time.split("周");
            return currentTimeMillis - (DateUtils.MILLIS_PER_DAY * 7 * (Integer.parseInt(split[0])));
        }

        // "N月前" = N months ago (approximated as 30 days per month).
        if (time.contains("月前")) {
            String[] split = time.split("月");
            return currentTimeMillis - (DateUtils.MILLIS_PER_DAY * 30 * (Integer.parseInt(split[0])));
        }

        // "N年前" = N years ago (approximated as 365 days per year).
        if (time.contains("年前")) {
            String[] split = time.split("年");
            return currentTimeMillis - (DateUtils.MILLIS_PER_DAY * 365 * (Integer.parseInt(split[0])));
        }

        LocalDate now = LocalDate.now();
        // "MM-dd HH:mm" dates omit the year; prefixing the current year yields e.g.
        // "202104-15 14:45", which the "yyyyMM-dd HH:mm" pattern below parses.
        if (time.matches("\\d{2}-\\d{2} \\d{2}:\\d{2}")) {
            time = now.getYear() + time;
        }
        return DateUtils.parseDate(time.trim(), "yyyyMM-dd HH:mm", "yyyy-MM-dd HH:mm", "yyyy-MM-dd").getTime();
    }

    //32位小写
    private static String md5(String str) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        md5.update((str).getBytes("UTF-8"));
        byte[] b = md5.digest();

        int i;
        StringBuffer buf = new StringBuffer("");

        for (int offset = 0; offset < b.length; offset++) {
            i = b[offset];
            if (i < 0) {
                i += 256;
            }
            if (i < 16) {
                buf.append("0");
            }
            buf.append(Integer.toHexString(i));
        }
        return buf.toString();
    }

    /**
     * Normalizes a Chinese count string to a plain integer string:
     * "1.4万" -> "14000"; blank -> "0"; anything else passes through unchanged.
     *
     * @param num raw count text, possibly blank or using the 万 (x10000) suffix
     * @return integer string without a fractional part
     */
    private static String washNum(String num) {
        if (StringUtils.isBlank(num)) {
            return "0";
        }

        if (num.contains("万")) {
            // Truncate via a long cast instead of the original
            // Double-toString-then-split-on-"." surgery, which silently produced
            // a wrong value once the double printed in scientific notation.
            double value = 10000 * Double.parseDouble(num.split("万")[0]);
            return String.valueOf((long) value);
        }

        return num;
    }

    /**
     * Post-execution hook from {@link CrawlerCommonScript}; this script needs
     * no cleanup or follow-up work, so the override is intentionally empty.
     */
    @Override
    public void afterExecute(CrawlerRecordContext crawlerRecordContext) {

    }

    /**
     * Re-queues a failed request for another download attempt, giving up after
     * five tries. The retry count travels with the record on its business tags
     * under {@code REQUEST_AGAIN_TAG}.
     *
     * @param crawlerRequestRecords output list the rebuilt record is appended to
     * @param crawlerRecord         the record whose download failed
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {
        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(REQUEST_AGAIN_TAG)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(REQUEST_AGAIN_TAG).getVal();
            if (count >= 5) {
                log.error(DOMAIN + " download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        // Presence of this tag distinguishes item-page requests from turn-page
        // requests; the two kinds are rebuilt through different builder paths.
        LabelTag type = crawlerRecord.tagsCreator().requestTags().getCategoryTag().getLabelTags().get("turn_page_item_request");
        if (requestUrl.matches(SEARCH_URL)) {
            requestUrl = getTurnUrl(requestUrl, "", "keyword");
        }

        // Both branches always assign a freshly built record, so the original's
        // post-build null check was dead code and has been removed.
        CrawlerRequestRecord crawlerRequestRecord;
        if (type == null) {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .turnPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    // append the retry count so the new record key stays unique
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .notFilterRecord()
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        } else {
            crawlerRequestRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(requestUrl)
                    .releaseTime(crawlerRecord.getReleaseTime())
                    .recordKey(crawlerRecord.getRecordKey() + count)
                    .copyBizTags()
                    .copyResultTags()
                    .build();
        }

        // Carry over the request context and processing flags, then bump the
        // retry counter before re-queueing.
        crawlerRequestRecord.getHttpRequest().setExtras(crawlerRecord.getHttpRequest().getExtras());
        crawlerRequestRecord.getHttpRequest().setHeaders(crawlerRecord.getHttpRequest().getHeaders());
        crawlerRequestRecord.setNeedWashPage(crawlerRecord.isNeedWashPage());
        crawlerRequestRecord.setNeedParsedPage(crawlerRecord.isNeedParsedPage());
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(REQUEST_AGAIN_TAG, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Builds one turn-page search request per keyword found in the support
     * source's downloaded keyword list, signing each URL with the current
     * timestamp (seconds) and its MD5-based token.
     *
     * @param requestRecord       the originating request carrying SEARCH_KW_SOURCE_URL in its extras
     * @param supportSourceRecord record whose internal download page holds the keyword JSON
     * @param crawlerRecords      output list the new turn-page records are appended to
     */
    private void initKeyword(CrawlerRequestRecord requestRecord, CrawlerRequestRecord supportSourceRecord, List<CrawlerRecord> crawlerRecords) {
        HttpPage internalDownloadPage = supportSourceRecord.getInternalDownloadPage();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        String searchSourceUrl = (String) extras.get(SEARCH_KW_SOURCE_URL);
        if (StringUtils.isBlank(searchSourceUrl)) {
            log.error("search kw source url can not null!");
            return;
        }
        Json json = internalDownloadPage.getJson();
        String msg = json.jsonPath($_type + ".msg").get();
        if (!"success".equals(msg)) {
            log.error("keyword page [{}] download error!", internalDownloadPage.getRequest().getUrl());
            return;
        }

        List<String> all = json.jsonPath($_type + ".content").all();
        for (String data : all) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            String keyword = jsonObject.getString("keyword");

            // The sign is computed over epoch SECONDS, so drop the last three
            // digits of the millisecond timestamp.
            String time = String.valueOf(System.currentTimeMillis());
            time = time.substring(0, time.length() - 3);
            String turnSignValue = getTurnSignValue(time);
            try {
                String url = String.format(searchSourceUrl, time, turnSignValue, URLEncoder.encode(keyword, "UTF-8"));
                CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(requestRecord)
                        .httpUrl(url)
                        .releaseTime(System.currentTimeMillis())
                        .notFilterRecord()
                        .copyBizTags()
                        .copyResultTags()
                        .build();
                turnRecord.tagsCreator().bizTags().addKeywords(keyword);
                turnRecord.getHttpRequest().addExtra("keyword", keyword);
                crawlerRecords.add(turnRecord);
            } catch (UnsupportedEncodingException e) {
                // Log the exception itself — the original logged only
                // e.getMessage(), discarding the stack trace.
                log.error("encode search keyword failed", e);
            }
        }
    }

    /**
     * Computes the turn-page request signature: the MD5 of the fixed
     * "GET:/i/s:&lt;time&gt;:&lt;app key&gt;" template.
     *
     * @param times epoch-seconds timestamp as a string
     * @return 32-char lowercase hex sign, or {@code null} if hashing failed
     */
    private String getTurnSignValue(String times) {
        String srcs = "GET:/i/s:" + times + ":631l1i1x3fv5vs2dxlj5v8x81jqfs2om";
        try {
            return md5(srcs);
        } catch (Exception e) {
            // Log the exception itself — the original logged only
            // e.getMessage(), discarding the stack trace.
            log.error("compute turn sign failed", e);
        }
        return null;
    }

    private static List<String> agentList = new ArrayList<>();

    static {
        agentList.add("Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 7.0; InfoPath.3; .NET CLR 3.1.40767; Trident/6.0; en-IN)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)");
        agentList.add("Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)");
        agentList.add("Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2");
        agentList.add("Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.28.3 (KHTML, like Gecko) Version/3.2.3 ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/525.28.3");
        agentList.add("Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16");
        agentList.add("Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14");
        agentList.add("Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14");
        agentList.add("Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0) Opera 12.14");
        agentList.add("Opera/12.80 (Windows NT 5.1; U; en) Presto/2.10.289 Version/12.02");
        agentList.add("Opera/9.80 (Windows NT 6.1; U; es-ES) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/9.80 (Windows NT 5.1; U; zh-sg) Presto/2.9.181 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.2;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Opera/12.0(Windows NT 5.1;U;en)Presto/22.9.168 Version/12.00");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
        agentList.add("Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0");
        agentList.add("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0");
        agentList.add("Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.13 Safari/537.36");
        agentList.add("Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4043.400");
    }

    /**
     * Returns a User-Agent string picked uniformly at random from agentList.
     *
     * Bug fix: commons-lang3 {@code RandomUtils.nextInt(start, end)} treats the
     * upper bound as EXCLUSIVE, so the original's {@code size() - 1} could never
     * select the last entry; pass {@code size()} instead.
     */
    private static String getRandomUA() {
        return agentList.get(RandomUtils.nextInt(0, agentList.size()));
    }

    /**
     * Ad-hoc manual check: prints the normalized form of "1.4万" (expected "14000").
     */
    public static void main(String[] args) {
        System.out.println(washNum("1.4万"));
    }
}
