package com.chance.cc.crawler.development.scripts.weibo;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.HttpConstant;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.downloader.HttpRequest;
import com.chance.cc.crawler.core.downloader.HttpRequestBody;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.core.tags.crawler.CrawlerBusinessTags;
import com.chance.cc.crawler.core.tags.crawler.CrawlerResultTags;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.ParseException;
import java.util.*;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;

/**
 * @Author Zhao.Hhuan
 * @Date Create in 2021/1/25 13:57
 * @Description 微博 api 临时采集
 **/
public class WeiboApiSimpleCrawlerScript extends CrawlerCommonScript {

    private static final Logger log = LoggerFactory.getLogger(WeiboApiSimpleCrawlerScript.class);

    /** Business-tag key counting how many times a failed download has been re-queued. */
    private static final String requestAgainTag = "weibo_download_retry_count";
    private static final String sourceUrl = "https://c.api.weibo.com/";

    // Regex patterns (\\S* suffix matches any query string) used to route a request
    // URL to the matching wash/parse handler.
    private static final String weiboApiUsersInfoUrl = "https://c.api.weibo.com/2/users/show_batch/other.json\\S*";

    private static final String searchContainsKwPostListUrl = "https://c.api.weibo.com/2/search/statuses/limited.json\\S*";

    private static final String usersTagsUrl = "https://c.api.weibo.com/2/tags/tags_batch/other.json\\S*";

    private static final String postInteractionUrl = "https://c.api.weibo.com/2/statuses/count/biz.json\\S*";

    private static final String postForwardsListUrl = "https://c.api.weibo.com/2/statuses/repost_timeline/all.json\\S*";

    private static final String userPostListUrl = "https://c.api.weibo.com/2/statuses/user_timeline_batch.json\\S*";

    private static final String postLikesListUrl = "https://c.api.weibo.com/2/attitudes/show/biz.json\\S*";

    private static final String historicalCreateUrl = "https://c.api.weibo.com/2/search/statuses/historical/create.json\\S*";

    // "SourceUrl" variants are the literal endpoints written into follow-up requests;
    // the non-"Source" variants are the regexes used to recognize them later.
    private static final String historicalCheckSourceUrl = "https://c.api.weibo.com/2/search/statuses/historical/check.json";
    private static final String historicalCheckUrl = "https://c.api.weibo.com/2/search/statuses/historical/check.json\\S*";

    private static final String historicalDownloadSourceUrl = "https://c.api.weibo.com/2/search/statuses/historical/download.json";
    private static final String historicalDownloadUrl = "https://c.api.weibo.com/2/search/statuses/historical/download.json\\S*";

    /**
     * Script domain definition.
     *
     * @return the crawler domain this script belongs to
     */
    @Override
    public String domain() {
        return "weibo";
    }

    /**
     * URL regular expressions that route records into this script.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular("https://c.api.weibo.com/\\S*");
    }

    /**
     * Expands a seed record pointing at a local parameter file into concrete API requests.
     *
     * <p>The seed must carry the base {@code sourceUrl} and a {@code filePath} extra.
     * Each line of the file is a JSON object holding {@code requestUrl}, {@code method}
     * and a {@code paramList} array; one request record is built per parameter map.
     *
     * @param requestRecord        seed record carrying {@code filePath}/{@code site} extras
     * @param supportSourceRecords unused supporting records
     * @return the generated request records (empty on any validation failure)
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        List<CrawlerRecord> crawlerRecords = new ArrayList<>();

        String requestUrl = requestRecord.getHttpRequest().getUrl();
        Map<String, Object> extras = requestRecord.getHttpRequest().getExtras();
        if (!sourceUrl.equals(requestUrl) || !extras.containsKey("filePath")) {
            return crawlerRecords;
        }

        String filePath = (String) extras.get("filePath");
        String site = (String) extras.get("site");
        File readFile = new File(filePath);
        // try-with-resources: the reader was previously leaked on every call.
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(new FileInputStream(readFile), StandardCharsets.UTF_8))) {
            String readLine;
            while ((readLine = bufferedReader.readLine()) != null) {
                JSONObject jsonObject = JSONObject.parseObject(readLine);
                String secretKey = "";
                String taskId = "";
                String url = jsonObject.getString("requestUrl");
                if (StringUtils.isEmpty(url)) {
                    // Lines produced by a previous wash step nest the payload
                    // under "crawlerContent" and carry the historical-task credentials.
                    jsonObject = jsonObject.getJSONObject("crawlerContent");
                    secretKey = jsonObject.getString("secret_key");
                    taskId = jsonObject.getString("task_id");
                    url = jsonObject.getString("requestUrl");
                }
                String method = jsonObject.getString("method");
                List<Map> paramList = JSON.parseArray(jsonObject.getString("paramList"), Map.class);
                for (Map map : paramList) {
                    CrawlerRequestRecord record = CrawlerRequestRecord.builder()
                            .itemPageRequest(requestRecord)
                            .releaseTime(System.currentTimeMillis())
                            .notFilterRecord()
                            .resultLabelTag(article)
                            .build();
                    HttpRequest httpRequest = new HttpRequest();
                    // recordKey is always the fully-expanded GET-style URL, even for POSTs,
                    // so it can be matched against the endpoint regexes below.
                    StringBuilder keyBuilder = new StringBuilder(url).append("?");
                    for (Object key : map.keySet()) {
                        keyBuilder.append(key).append("=").append(map.get(key)).append("&");
                    }
                    String recordKey = keyBuilder.substring(0, keyBuilder.length() - 1);
                    if (HttpConstant.Method.GET.equals(method.toUpperCase())) {
                        httpRequest.setUrl(recordKey);
                    } else if (HttpConstant.Method.POST.equals(method.toUpperCase())) {
                        httpRequest.setUrl(url);
                        httpRequest.setMethod(method);
                        httpRequest.setRequestBody(HttpRequestBody.json(JSON.toJSONString(map), "UTF-8"));
                    }

                    if (recordKey.matches(historicalCreateUrl) && !recordKey.contains("source")) {
                        log.error("创建检索历史任务必须需要参数source！");
                        return crawlerRecords;
                    }

                    if (recordKey.matches(historicalCheckUrl) || recordKey.matches(historicalDownloadUrl)) {
                        // check/download both need the task credentials plus the directory
                        // (everything up to the last backslash — NOTE(review): Windows-only
                        // path handling; confirm deploy target) the zip will be written to.
                        if (StringUtils.isEmpty(secretKey) || StringUtils.isEmpty(taskId)) {
                            log.error("historicalCheck`s  secret_key and task_id can not null!");
                            return crawlerRecords;
                        } else {
                            httpRequest.addExtra("secret_key", secretKey);
                            httpRequest.addExtra("task_id", taskId);
                            httpRequest.addExtra("filePath", filePath.substring(0, filePath.lastIndexOf("\\") + 1));
                        }
                    }

                    if (recordKey.matches(historicalCheckUrl)) {
                        // Check requests are deduplicated via a shared redis filter key.
                        record.setFilter(CrawlerEnum.CrawlerRecordFilter.key);
                        record.addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", "filter", "weibo", "api", "history", "check")));
                    }

                    if (recordKey.matches(historicalDownloadUrl)) {
                        // The download response is a binary zip, not HTML.
                        record.getHttpConfig().setResponseTextGenerateHtml(false);
                    }
                    record.setRecordKey(recordKey);
                    record.setHttpRequest(httpRequest);
                    record.tagsCreator().bizTags().addSite(site);

                    crawlerRecords.add(record);
                }
            }
        } catch (Exception e) {
            // Keep the original message but preserve the stack trace.
            log.error(e.getMessage(), e);
        }
        return crawlerRecords;
    }

    /**
     * Input gate: only records whose site tag starts with "apiSimple" run this script.
     *
     * @param crawlerRequestRecord candidate record
     * @return true if this script should process the record
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRequestRecord) {
        String site = crawlerRequestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("site");
        // Null-safe: records without a site tag are simply rejected instead of throwing NPE.
        return site != null && site.startsWith("apiSimple");
    }

    /**
     * Link-parsing phase: retries failed downloads, validates API error payloads,
     * handles pagination, and materializes historical-search zip downloads to disk.
     *
     * @param crawlerRequestRecord record being processed
     * @param httpPage             downloaded page
     * @return follow-up request records (retry or next page)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage) {
        List<CrawlerRequestRecord> parsedLinks = new ArrayList<>();
        if (!httpPage.isDownloadSuccess() || httpPage.getStatusCode() == 503) {
            log.error("weibo api url down load page is fail!will retry");
            requestAgainCrawlerRecord(parsedLinks, crawlerRequestRecord);
            crawlerRequestRecord.setNeedWashPage(false);
            return parsedLinks;
        }

        // Weibo API errors come back as a JSON object with an "error_code" field;
        // array responses and the binary download endpoint are exempt from this check.
        String requestUrl = crawlerRequestRecord.getHttpRequest().getUrl();
        if (!requestUrl.matches(historicalDownloadUrl) && !httpPage.getRawText().startsWith("[")) {
            JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());
            String error_code = jsonObject.getString("error_code");
            if (StringUtils.isNotEmpty(error_code)) {
                log.error("requestUrl [{}] is error :[{}]!", requestUrl, jsonObject.getString("error"));
                return parsedLinks;
            }
        }

        if (requestUrl.contains("page=")) {
            apiTurnPageRecord(crawlerRequestRecord, httpPage, parsedLinks);
        } else if (requestUrl.matches(historicalDownloadUrl)) {
            // Persist the downloaded password-protected zip, extract it, then expose
            // its log lines as this page's JSON so washPage can consume them.
            Map<String, Object> extras = httpPage.getRequest().getExtras();
            String task_id = (String) extras.get("task_id");
            String secret_key = (String) extras.get("secret_key");
            String filePath = (String) extras.get("filePath");
            String zipFilePath = filePath + task_id + secret_key;

            // try-with-resources: the output stream was previously leaked.
            try (FileOutputStream out = new FileOutputStream(zipFilePath + ".zip")) {
                IOUtils.write(httpPage.getBytes(), out);
            } catch (IOException e) {
                log.error(e.getMessage(), e);
                crawlerRequestRecord.setNeedWashPage(false);
                // No zip was written — extracting would only produce a second error.
                return parsedLinks;
            }

            // Extract; the archive password is task_id + secret_key per Weibo's contract.
            try {
                ZipFile zipFile = new ZipFile(zipFilePath + ".zip");
                zipFile.setPassword(task_id + secret_key);
                zipFile.extractAll(zipFilePath);
            } catch (ZipException e) {
                log.error(e.getMessage(), e);
                crawlerRequestRecord.setNeedWashPage(false);
                return parsedLinks;
            }

            // Read the extracted <task_id>.log and attach it as the page JSON.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                    new FileInputStream(new File(zipFilePath + "\\" + task_id + ".log")), StandardCharsets.UTF_8))) {
                List<String> strings = IOUtils.readLines(reader);
                httpPage.setJson(new Json(JSONObject.toJSONString(strings)));
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                crawlerRequestRecord.setNeedWashPage(false);
            }
        }
        return parsedLinks;
    }

    /**
     * Builds the next-page request for paginated API endpoints.
     *
     * <p>Rebuilds the query string with {@code page} incremented, re-encoding the
     * {@code q} keyword. Stops when {@code page * count >= total_number}.
     */
    private void apiTurnPageRecord(CrawlerRequestRecord crawlerRequestRecord, HttpPage httpPage, List<CrawlerRequestRecord> parsedLinks) {
        String requestUrl = httpPage.getRequest().getUrl();
        String[] split = requestUrl.split("\\?");
        StringBuilder nextPage = new StringBuilder(split[0]).append("?");
        int page = 0;
        int count = 0;
        List<NameValuePair> parse = URLEncodedUtils.parse(split[1], Charset.defaultCharset());
        for (NameValuePair nameValuePair : parse) {
            if ("page".equals(nameValuePair.getName())) {
                page = Integer.parseInt(nameValuePair.getValue());
                nextPage.append(nameValuePair.getName()).append("=").append(page + 1).append("&");
            } else if ("count".equals(nameValuePair.getName())) {
                count = Integer.parseInt(nameValuePair.getValue());
                nextPage.append(nameValuePair.getName()).append("=").append(count).append("&");
            } else if ("q".equals(nameValuePair.getName())) {
                // The keyword was decoded by URLEncodedUtils.parse; re-encode it.
                try {
                    nextPage.append(nameValuePair.getName()).append("=")
                            .append(URLEncoder.encode(nameValuePair.getValue(), "UTF-8")).append("&");
                } catch (UnsupportedEncodingException e) {
                    log.error(e.getMessage(), e);
                }
            } else {
                nextPage.append(nameValuePair.getName()).append("=").append(nameValuePair.getValue()).append("&");
            }
        }

        int currentCount = page * count;
        String totalNumber;
        try {
            totalNumber = JSONObject.parseObject(httpPage.getRawText()).getString("total_number");
        } catch (Exception e) {
            log.error("params error!");
            return;
        }

        // NOTE(review): a non-numeric total_number would still throw here — assumed
        // the API always returns an integer when the field is present.
        if (StringUtils.isEmpty(totalNumber) || currentCount >= Integer.parseInt(totalNumber)) {
            return;
        }
        CrawlerRequestRecord turnRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRequestRecord)
                .httpUrl(nextPage.substring(0, nextPage.length() - 1))
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .needWashed(true)
                .resultLabelTag(article)
                .build();
        // A fresh page starts with a clean retry counter.
        turnRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove(requestAgainTag);
        parsedLinks.add(turnRecord);
    }

    /**
     * Wash phase: dispatches the page to the handler matching its endpoint.
     *
     * @param crawlerRecord record being washed
     * @param page          downloaded page
     * @return extracted crawler data items
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        CrawlerResultTags crawlerResultTags = crawlerRecord.tagsCreator().resultTags();

        String requestUrl = crawlerRecord.getHttpRequest().getUrl();
        if (crawlerResultTags.hasDataType(article)) {
            if (requestUrl.matches(weiboApiUsersInfoUrl)) {
                crawlerDataList.addAll(washUserInfoData(crawlerRecord, page));
            } else if (requestUrl.matches(searchContainsKwPostListUrl)) {
                crawlerDataList.addAll(washSearchContainsKwPostData(crawlerRecord, page));
            } else if (requestUrl.matches(usersTagsUrl)) {
                crawlerDataList.addAll(washUsersTagsData(crawlerRecord, page));
            } else if (requestUrl.matches(postInteractionUrl)) {
                crawlerDataList.addAll(washPostInteractionData(crawlerRecord, page));
            } else if (requestUrl.matches(postForwardsListUrl)) {
                crawlerDataList.addAll(washPostForwardList(crawlerRecord, page));
            } else if (requestUrl.matches(userPostListUrl)) {
                crawlerDataList.addAll(washUserPostList(crawlerRecord, page));
            } else if (requestUrl.matches(postLikesListUrl)) {
                crawlerDataList.addAll(washPostLikesListData(crawlerRecord, page));
            } else if (requestUrl.matches(historicalCreateUrl)) {
                crawlerDataList.add(washHistoricalCreateData(crawlerRecord, page));
            } else if (requestUrl.matches(historicalCheckUrl)) {
                // Only emit check data once the historical task reports finished.
                if ("true".equals(page.getJson().jsonPath($_type + ".status").get())) {
                    crawlerDataList.add(washHistoricalCheckData(crawlerRecord, page));
                }
            } else if (requestUrl.matches(historicalDownloadUrl)) {
                crawlerDataList.addAll(washHistoricalDownloadData(crawlerRecord, page));
            } else {
                crawlerDataList.add(washData(crawlerRecord, page));
            }
        }

        return crawlerDataList;
    }

    /** Washes a users/show_batch response: one CrawlerData per user object. */
    private List<CrawlerData> washUserInfoData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> userList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List<String> users = httpPage.getJson().jsonPath($_type + ".users").all();

        if (users.size() < 1) {
            log.error("user info is null ! itemUrl is : " + itemUrl);
            return userList;
        }

        for (String user : users) {
            JSONObject jsonObject = JSONObject.parseObject(user);

            CrawlerData userInfoData = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            userList.add(userInfoData);
        }

        return userList;
    }

    /** Washes a tags/tags_batch response: the body is a JSON array of per-user tag objects. */
    private List<CrawlerData> washUsersTagsData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> usersTagsList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List list = httpPage.getJson().toObject(List.class);
        if (list.size() < 1) {
            log.error("user [{}] has not tags!", itemUrl);
            return usersTagsList;
        }

        for (Object o : list) {
            JSONObject jsonObject = JSONObject.parseObject(String.valueOf(o));

            CrawlerData usersTags = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            usersTagsList.add(usersTags);
        }
        return usersTagsList;
    }

    /** Washes a statuses/count response: one CrawlerData per post-interaction object. */
    private List<CrawlerData> washPostInteractionData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> postInteractionList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List list = httpPage.getJson().toObject(List.class);
        if (list.size() < 1) {
            log.error("post [{}] interaction is null!", itemUrl);
            return postInteractionList;
        }
        for (Object o : list) {
            JSONObject jsonObject = JSONObject.parseObject(String.valueOf(o));

            CrawlerData postInteraction = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            postInteractionList.add(postInteraction);
        }

        return postInteractionList;
    }

    /** Washes a repost_timeline response: one CrawlerData per repost. */
    private List<CrawlerData> washPostForwardList(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> postForwardList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List<String> reposts = httpPage.getJson().jsonPath($_type + ".reposts").all();
        if (reposts.size() < 1) {
            log.error("post [{}] forwards is null !", itemUrl);
            return postForwardList;
        }

        for (String repost : reposts) {
            JSONObject jsonObject = JSONObject.parseObject(repost);

            CrawlerData postForward = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            postForwardList.add(postForward);
        }

        return postForwardList;
    }

    /** Washes an attitudes/show response: one CrawlerData per like ("attitude"). */
    private List<CrawlerData> washPostLikesListData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> postLikeList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List<String> attitudes = httpPage.getJson().jsonPath($_type + ".attitudes").all();
        if (attitudes.size() < 1) {
            log.error("post [{}] likes is null !", itemUrl);
            return postLikeList;
        }

        for (String attitude : attitudes) {
            JSONObject jsonObject = JSONObject.parseObject(attitude);

            CrawlerData postLikes = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            postLikeList.add(postLikes);
        }

        return postLikeList;
    }

    /** Washes a user_timeline_batch response: one CrawlerData per status. */
    private List<CrawlerData> washUserPostList(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> userPostList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List<String> statues = httpPage.getJson().jsonPath($_type + ".statuses").all();
        for (String statue : statues) {
            JSONObject jsonObject = JSONObject.parseObject(statue);

            CrawlerData userPostData = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            userPostList.add(userPostData);
        }

        return userPostList;
    }

    /** Washes a search/statuses/limited response: one CrawlerData per matching status. */
    private List<CrawlerData> washSearchContainsKwPostData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> postList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        List<String> statues = httpPage.getJson().jsonPath($_type + ".statuses").all();
        for (String statue : statues) {
            JSONObject jsonObject = JSONObject.parseObject(statue);
            CrawlerData userInfoData = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(jsonObject.toJSONString())
                    .build();

            postList.add(userInfoData);
        }

        return postList;
    }

    /**
     * Washes a historical/create response into a record describing the follow-up
     * "check" request, including the MD5 signature Weibo requires
     * (md5(source + secret_key + timestamp), 10-digit-second timestamp).
     */
    private CrawlerData washHistoricalCreateData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        Json json = httpPage.getJson();
        String task_id = json.jsonPath($_type + ".task_id").get();
        String id = json.jsonPath($_type + ".id").get();
        String q = json.jsonPath($_type + ".q").get();
        String ids = json.jsonPath($_type + ".ids").get();
        String starttime = json.jsonPath($_type + ".starttime").get();
        String endtime = json.jsonPath($_type + ".endtime").get();
        String secret_key = json.jsonPath($_type + ".secret_key").get();

        // Recover the original request parameters (source, access_token) from the URL.
        List<Map<String, Object>> paramList = new ArrayList<>();
        Map<String, String> paramsMap = new HashMap<>();
        List<NameValuePair> parse = URLEncodedUtils.parse(itemUrl.split("\\?")[1], Charset.defaultCharset());
        for (NameValuePair nameValuePair : parse) {
            paramsMap.put(nameValuePair.getName(), nameValuePair.getValue());
        }
        String timestamp = String.valueOf(System.currentTimeMillis()).substring(0, 10);
        String signature = md5Util(paramsMap.get("source") + secret_key + timestamp, "32小写");
        Map<String, Object> checkParamsMap = new HashMap<>();
        checkParamsMap.put("access_token", paramsMap.get("access_token"));
        checkParamsMap.put("task_id", task_id);
        checkParamsMap.put("timestamp", timestamp);
        checkParamsMap.put("signature", signature);
        paramList.add(checkParamsMap);
        return CrawlerData.builder()
                .data(requestRecord, httpPage)
                .url(itemUrl)
                .addContentKV("method", HttpConstant.Method.GET)
                .addContentKV("paramList", JSONObject.toJSONString(paramList))
                .addContentKV("requestUrl", historicalCheckSourceUrl)
                .addContentKV("task_id", task_id)
                .addContentKV("id", id)
                .addContentKV("q", q)
                .addContentKV("ids", ids)
                .addContentKV("starttime", starttime)
                .addContentKV("endtime", endtime)
                .addContentKV("createTime", String.valueOf(System.currentTimeMillis()))
                .addContentKV("secret_key", secret_key)
                .build();
    }

    /**
     * Washes a finished historical/check response into a record describing the
     * follow-up "download" request; the check parameters are reused verbatim.
     */
    private CrawlerData washHistoricalCheckData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();
        JSONObject jsonObject = JSONObject.parseObject(httpPage.getRawText());

        List<Map<String, Object>> paramList = new ArrayList<>();
        Map<String, Object> paramMap = new HashMap<>();
        List<NameValuePair> parse = URLEncodedUtils.parse(itemUrl.split("\\?")[1], Charset.defaultCharset());
        for (NameValuePair nameValuePair : parse) {
            paramMap.put(nameValuePair.getName(), nameValuePair.getValue());
        }
        paramList.add(paramMap);

        return CrawlerData.builder()
                .data(requestRecord, httpPage)
                // Fix: join(...) treated "-" as an element and used no separator,
                // producing "-<url><task_id>"; joinWith matches the convention used
                // for the redis filter key and yields "<url>-<task_id>".
                .dataId(StringUtils.joinWith("-", itemUrl.split("\\?")[0], paramMap.get("task_id")))
                .url(itemUrl)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                .addContentKV("id", jsonObject.getString("id"))
                .addContentKV("q", jsonObject.getString("q"))
                .addContentKV("ids", jsonObject.getString("ids"))
                .addContentKV("province", jsonObject.getString("province"))
                .addContentKV("city", jsonObject.getString("city"))
                .addContentKV("type", jsonObject.getString("type"))
                .addContentKV("hasv", jsonObject.getString("hasv"))
                .addContentKV("onlynum", jsonObject.getString("onlynum"))
                .addContentKV("task_id", jsonObject.getString("task_id"))
                .addContentKV("status", jsonObject.getString("status"))
                .addContentKV("count", jsonObject.getString("count"))
                .addContentKV("method", HttpConstant.Method.GET)
                .addContentKV("requestUrl", historicalDownloadSourceUrl)
                .addContentKV("paramList", JSON.toJSONString(paramList))
                .addContentKV("secret_key", (String) httpPage.getRequest().getExtras().get("secret_key"))
                .build();
    }

    /**
     * Washes the extracted historical-download log lines (attached to the page as a
     * JSON string array in parseLinks): one CrawlerData per line, tagged with the
     * task credentials.
     */
    private List<CrawlerData> washHistoricalDownloadData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        List<CrawlerData> histroyDownloadList = new ArrayList<>();
        String itemUrl = httpPage.getRequest().getUrl();

        Map<String, Object> extras = httpPage.getRequest().getExtras();
        String task_id = (String) extras.get("task_id");
        String secret_key = (String) extras.get("secret_key");
        List<String> dataList = JSON.parseArray(httpPage.getJson().get(), String.class);
        if (dataList.size() < 1) {
            log.error("the result data of task [{}] is null ! ", task_id);
            return histroyDownloadList;
        }

        for (String data : dataList) {
            JSONObject jsonObject = JSONObject.parseObject(data);
            // Was: a dead `new HashMap<>()` immediately overwritten.
            Map<String, Object> contentMap = jsonObject.toJavaObject(Map.class);
            contentMap.put("task_id", task_id);
            contentMap.put("secret_key", secret_key);

            CrawlerData historicalDownload = CrawlerData.builder()
                    .data(requestRecord, httpPage)
                    .url(itemUrl)
                    .content(JSON.toJSONString(contentMap))
                    .build();

            histroyDownloadList.add(historicalDownload);
        }

        return histroyDownloadList;
    }

    /** Fallback wash: forwards the raw response body unchanged. */
    private CrawlerData washData(CrawlerRequestRecord requestRecord, HttpPage httpPage) {
        String itemUrl = httpPage.getRequest().getUrl();

        return CrawlerData.builder()
                .data(requestRecord, httpPage)
                .url(itemUrl)
                .content(httpPage.getRawText())
                .build();
    }

    /**
     * Re-queues a failed download, tracking attempts in a business tag and
     * giving up after 5 retries.
     */
    private void requestAgainCrawlerRecord(List<CrawlerRequestRecord> crawlerRequestRecords, CrawlerRequestRecord crawlerRecord) {

        CrawlerBusinessTags crawlerBusinessTags = crawlerRecord.tagsCreator().bizTags();
        int count = 0;
        if (crawlerBusinessTags.hasKVTag(requestAgainTag)) {
            count = (int) crawlerBusinessTags.getCategoryTag().getKVTag(requestAgainTag).getVal();
            if (count >= 5) {
                // Fix: the original format had two {} placeholders but only one argument.
                log.error("Weibo search keyword download page the number of retries exceeds the limit" +
                        ",request url {}", crawlerRecord.getHttpRequest().getUrl());
                return;
            }
        }

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(crawlerRecord.getHttpRequest().getUrl())
                // Suffix the attempt number so the retry is not deduplicated away.
                .recordKey(crawlerRecord.getRecordKey() + count)
                .notFilterRecord()
                .copyBizTags()
                .releaseTime(System.currentTimeMillis())
                .resultLabelTag(article)
                .build();

        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(requestAgainTag, ++count);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Hex MD5 helper used for the historical-task signature.
     *
     * @param password input string to digest (hashed as UTF-8 bytes)
     * @param method   output mode key: "32小写"/"32大写" = full digest lower/upper case,
     *                 "16小写"/"16大写" = middle 16 hex chars lower/upper case
     * @return the formatted digest, or "0" when MD5 is unavailable or the mode is unknown
     */
    private static String md5Util(String password, String method) {
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            // Explicit charset: the signature must not depend on the platform default.
            md.update(password.getBytes(StandardCharsets.UTF_8));
            byte[] digest = md.digest();
            StringBuilder buf = new StringBuilder();
            for (byte b : digest) {
                int i = b & 0xff;
                if (i < 16) {
                    buf.append("0");
                }
                buf.append(Integer.toHexString(i));
            }
            String result = buf.toString();
            if ("32小写".equals(method)) {
                return result;
            } else if ("32大写".equals(method)) {
                return result.toUpperCase();
            } else if ("16小写".equals(method)) {
                return result.substring(8, 24);
            } else if ("16大写".equals(method)) {
                return result.substring(8, 24).toUpperCase();
            }
        } catch (NoSuchAlgorithmException e) {
            log.error("MD5 algorithm unavailable", e);
        }
        return "0";
    }

    @Override
    public void afterExecute(CrawlerRecordContext context) {
        // No post-processing required for this script.
    }

    /** Ad-hoc manual check for the start-time format used by historical tasks. */
    public static void main(String[] args) {
        try {
            System.out.println(DateUtils.parseDate("2021-1-3", "yyyy-MM-dd").getTime());
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }
}
