package com.chance.cc.crawler.development.scripts.douyin.api;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.meta.core.bean.common.MetaResponse;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.google.common.collect.Maps;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Field_Keword;

/**
 * @author bx
 * @date 2020/12/6 0006 12:52
 */
public class DYAccountCrawlerScript extends CrawlerCommonScript {

    private static Logger logger = LoggerFactory.getLogger(DYAccountCrawlerScript.class);
    // Routing identifiers: records tagged with this site are handled by this script
    // (see crawlerCheck), and domain() reports this domain.
    public static final String site = "api-account";
    public static final String domain = "api-dy";

    // Downloader used for signature-hook calls; lazily captured in beforeDownload().
    private Downloader httpDownload;
    private static Object httpDownloadObj = new Object(); // lock guarding httpDownload initialization
    private static HttpConfig dySignatureConfig = HttpConfig.me("signature");

    // URL pattern matching the account post-list API, and the concrete endpoint used.
    public static final String accountUrlRegular = "https://\\S*/aweme/v1/aweme/post/\\S*";
    public static final String accountUrl = "https://api3-core-c-lf.amemv.com/aweme/v1/aweme/post/?";

    // Query-string template for the post-list API; %s slots in order:
    // max_cursor, sec_user_id, _rticket (ms), ts (s).
    public static final String accountUrlParamFormat = "source=0&user_avatar_shrink=96_96&video_cover_shrink=248_330" +
            "&publish_video_strategy_type=2&max_cursor=%s" +
            "&sec_user_id=%s&count=20" +
            "&is_order_flow=0&page_from=2&longitude=121.5888983509045&latitude=31.25251444047919" +
            "&location_permission=true&_rticket=%s&cpu_support64=true&host_abi=armeabi-v7a" +
            "&appTheme=dark&ac=wifi&ts=%s&";

    // Comment-list API endpoint and query template; %s slots in order:
    // aweme_id, cursor, aweme_author, _rticket (ms), ts (s).
    public static final String commentUrl = "https://api5-normal-c-lf.amemv.com/aweme/v2/comment/list/?";
    public static final String commentUrlParamFormat = "aweme_id=%s" +
            "&cursor=%s" +
            "&count=20&address_book_access=2&gps_access=2&forward_page_type=1&channel_id=0&city=310000&hotsoon_filtered_count=0&hotsoon_has_more=0&follower_count=0&is_familiar=0&page_source=0&user_avatar_shrink=96_96" +
            "&aweme_author=%s" +
            "&manifest_version_code=140101" +
            "&_rticket=%s" +
            "&app_type=normal&iid=2005967845199176&channel=huawei_1&device_type=DIG-AL00" +
            "&language=zh&cpu_support64=true&host_abi=arm64-v8a&resolution=720x1280" +
            "&openudid=c839e5a342685cde&update_version_code=14109900&cdid=75eeadf2-a178-4f34-b9db-ba44ea4470cf" +
            "&appTheme=dark&os_api=23&dpi=480&ac=wifi&device_id=51959513713&mcc_mnc=46011&os_version=6.0" +
            "&version_code=140100&app_name=aweme&version_name=14.1.0&device_brand=HUAWEI&ssmix=a&device_platform=android&aid=1128" +
            "&ts=%s";

    // NOTE(review): presumably the byte table for the X-Gorgon signing algorithm (see the
    // commented-out xGorgon call in initAllCrawlerRecordByKeyword) -- unused in this file; confirm before removing.
    public static final String byteTable = "D6 28 3B 71 70 76 BE 1B A4 FE 19 57 5E 6C BC 21 B2 14 37 7D 8C A2 FA 67 55 6A 95 E3 FA 67 78 ED 8E 55 33 89 A8 CE 36 B3 5C D6 B2 6F 96 C4 34 B9 6A EC 34 95 C4 FA 72 FF B8 42 8D FB EC 70 F0 85 46 D8 B2 A1 E0 CE AE 4B 7D AE A4 87 CE E3 AC 51 55 C4 36 AD FC C4 EA 97 70 6A 85 37 6A C8 68 FA FE B0 33 B9 67 7E CE E3 CC 86 D6 9F 76 74 89 E9 DA 9C 78 C5 95 AA B0 34 B3 F2 7D B2 A2 ED E0 B5 B6 88 95 D1 51 D6 9E 7D D1 C8 F9 B7 70 CC 9C B6 92 C5 FA DD 9F 28 DA C7 E0 CA 95 B2 DA 34 97 CE 74 FA 37 E9 7D C4 A2 37 FB FA F1 CF AA 89 7D 55 AE 87 BC F5 E9 6A C4 68 C7 FA 76 85 14 D0 D0 E5 CE FF 19 D6 E5 D6 CC F1 F4 6C E9 E7 89 B2 B7 AE 28 89 BE 5E DC 87 6C F7 51 F2 67 78 AE B3 4B A2 B3 21 3B 55 F8 B3 76 B2 CF B3 B3 FF B3 5E 71 7D FA FC FF A8 7D FE D8 9C 1B C4 6A F9 88 B5 E5";
    public static final String NULL_MD5_STRING = "00000000000000000000000000000000"; // unused in this file
    // Round-robin queue of device oauth-info JSON strings; filled by initAuthorInfos(),
    // consumed and re-appended by pollOneParam().
    private LinkedBlockingQueue<String> commonParamsQueue = new LinkedBlockingQueue<>();

    // Template for the x-common-params-v2 header; %s slots in order: cdid, channel,
    // device_brand, device_id, device_type, iid, openudid, os_api, os_version, resolution.
    public static final String common_param_v2 = "aid=1128&app_name=aweme&app_type=normal&cdid=%s&channel=%s&device_brand=%s" +
            "&device_id=%s&device_platform=android&device_type=%s&dpi=280&iid=%s&language=zh&manifest_version_code=140101" +
            "&openudid=%s&os_api=%s&os_version=%s&resolution=%s&ssmix=a&update_version_code=14109900" +
            "&version_code=140100&version_name=14.1.0";

    // Round-robin queue of app distribution channels used when building common params.
    private static LinkedBlockingQueue<String> channels = new LinkedBlockingQueue<>();
    static {
        channels.add("xinyou_dy_and7");
        channels.add("fe_lynx_main_user_card_vertical");
        channels.add("huawei_1");
        channels.add("xiaomi_1128_64");
        channels.add("huawei_2");
        channels.add("huawei_3");
        channels.add("huawei_4");
        channels.add("huawei_5");
        channels.add("huawei_6");
        channels.add("huawei_7");
        channels.add("huawei_8");
        channels.add("huawei_9");
        channels.add("huawei_10");
    }
    // Base URL of the signature hook service called by dySignature().
    private static final String hookApi = "http://192.168.1.212:8383";
    // User-Agent template; the two %s slots are filled with random hex fragments
    // standing in for the Cronet/TTNet and Quic version numbers (see prepareRequest).
    public static final String useragentFormat = "com.ss.android.ugc.aweme/140101 (Linux; U; Android 8.0.0; zh_CN; MI 5s;" +
            " Build/OPR1.170623.032; Cronet/TTNetVersion:%s 2020-12-16 QuicVersion:%s 2020-10-14)";
    /**
     * Parses a downloaded account post-list page: queues cover-image internal downloads
     * for every aweme entry and, when the response reports more results, builds the
     * next-page request. Any download or parse failure re-queues the original record
     * for retry via {@link #addCrawlerRecords} and disables page washing.
     *
     * @param crawlerRecord the request record that produced this page
     * @param page          the downloaded HTTP response page
     * @return follow-up request records (image downloads, the next page, or a retry of this record)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();

        String params = crawlerRecord.getHttpRequest().getHeaders().get("x-common-params-v2");
        String useragent = crawlerRecord.getHttpRequest().getHeaders().get("User-Agent");
        if (StringUtils.isBlank(params)){
            logger.error("dy account common param queue is null!");
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed; skip page washing
            return crawlerRequestRecords;
        }

        if (!page.isDownloadSuccess()){
            logger.error("dy account video request download has error status code {},page raw text [{}],will retry!",
                    page.getStatusCode(),page.getRawText());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed; skip page washing
            return crawlerRequestRecords;
        }

        try{
            // "暂时没有更多了" ("no more for now") marks the end of the account's post list.
            if(page.getRawText().contains("\"status_msg\":\"暂时没有更多了\"")){
                crawlerRecord.setNeedWashPage(false);
                return crawlerRequestRecords;
            }
            String keyword = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("sec_uid");
            Json accountResultJson = new Json(page.getRawText());
            String statusCode = accountResultJson.jsonPath($_type + ".status_code").get();
            String hasMore = accountResultJson.jsonPath($_type + ".has_more").get();

            // Collect video cover URLs hosted on douyinpic.com for internal download.
            List<String> awemeList = accountResultJson.jsonPath($_type + ".aweme_list").all();
            List<String> images = new ArrayList<>();
            if (awemeList != null && awemeList.size() > 0){
                for (String aweme : awemeList) {
                    try {
                        Json awemeJson = new Json(aweme);
                        List<String> url_list = awemeJson.jsonPath($_type + ".video.cover.url_list").all();
                        if (url_list != null){
                            for (String url : url_list) {
                                if (url.contains("douyinpic.com")){
                                    images.add(url);
                                }
                            }
                        }
                    } catch (Exception e) {
                        // Best-effort per item: a malformed aweme entry must not abort the
                        // whole page -- but don't swallow it silently either.
                        logger.warn("dy account skip malformed aweme item: {}", e.getMessage());
                    }
                }
            } else {
                logger.error("dy account {} download awmem list is null!,page raw text {}"
                        ,page.getRequest().getHeaders().get("x-common-params-v2"),page.getRawText());
                throw new Exception(page.getRequest().getHeaders().get("x-common-params-v2") +
                        " download awmem list is null!");
            }

            if (images.size() > 0 ){
                for (String image : images) {
                    internalDownloadImg(crawlerRequestRecords,crawlerRecord,image,useragent);
                }
            }

            if ("0".equals(statusCode) && "1".equals(hasMore)){
                String maxCursor = accountResultJson.jsonPath($_type + ".max_cursor").get();
                if ("0".equals(maxCursor)){ // a next page exists but the cursor is invalid; retry
                    throw new Exception("dy account next page cursor error:"+maxCursor);
                }
                long rticket = System.currentTimeMillis();
                long ts = rticket/1000;

                // A successful page resets the retry counter carried in the biz tags.
                if (crawlerRecord.tagsCreator().bizTags().hasKVTag("download_retry_count")){
                    crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKvTags().remove("download_retry_count");
                }

                String urlParam = String.format(accountUrlParamFormat, maxCursor, keyword, rticket, ts);

                CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                        .turnPageRequest(crawlerRecord)
                        .httpUrl(accountUrl + urlParam)
                        .httpHeads(page.getRequest().getHeaders())
                        .needWashed(true)
                        .releaseTime(System.currentTimeMillis())
                        .copyBizTags()
                        .build();
                crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("download_retry_count",0);
                crawlerRequestRecord.getHttpConfig().setHttpSite("account");
                crawlerRequestRecords.add(crawlerRequestRecord);
            }
        }catch (Exception e){
            // Pass the throwable to SLF4J so the stack trace is retained.
            logger.error("dy account video request url {} download error, page raw text {}"
                    ,page.getRequest().getUrl(),page.getRawText(),e);
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed; skip page washing
        }

        return crawlerRequestRecords;
    }

    /**
     * Builds an internal-download request for a single video cover image and appends
     * it to the given record list. Blank URLs are ignored.
     * The original declared {@code throws NoSuchAlgorithmException} although nothing
     * in the body can throw it; the dead checked-exception clause has been removed.
     *
     * @param crawlerRequestRecords output list the new record is appended to
     * @param crawlerRecord         the parent record the image request derives from
     * @param url                   the image URL; only non-blank values are used
     * @param useragent             the User-Agent header to send with the image request
     */
    private void internalDownloadImg(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url,String useragent) {
        if (StringUtils.isNotBlank(url)){
            CrawlerRequestRecord imgRecord = CrawlerRequestRecord.builder()
                    .itemPageRequest(crawlerRecord)
                    .httpUrl(url)
                    .httpHead("Host","p6.douyinpic.com")
                    .httpHead("User-Agent",useragent)
                    .releaseTime(System.currentTimeMillis())
                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                    .build();
            // Raw image bytes: do not wrap the response body into generated HTML.
            imgRecord.getHttpConfig().setResponseTextGenerateHtml(false);
            crawlerRequestRecords.add(imgRecord);
        }
    }

    /**
     * Washes an account post-list page into result data: one "article" CrawlerData
     * per aweme entry, plus an "article_ids" record feeding the comment crawler when
     * the entry has at least one comment. Entries that fail to parse are logged and
     * skipped; the rest of the page is still processed.
     *
     * @param crawlerRecord the request record that produced this page
     * @param page          the downloaded HTTP response page
     * @return the extracted crawler data (possibly empty, never null)
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        String commentRecordFilterInfo = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("comment_record_filter_info");
        if (commentRecordFilterInfo == null){
            logger.warn("dy account comment record filter info is null!");
        }
        Json searchJson = new Json(page.getRawText());
        List<String> datas = searchJson.jsonPath($_type + ".aweme_list").all();
        if (datas != null){
            for(String data : datas){
                try {
                    Json dataJson = new Json(data);
                    String aweme_id = dataJson.jsonPath($_type + ".aweme_id").get();
                    String create_time = dataJson.jsonPath($_type + ".create_time").get();
                    // Prefer the author's unique_id, falling back to short_id when blank.
                    String uniqueId = dataJson.jsonPath(".author.unique_id").get();
                    if (StringUtils.isBlank(uniqueId)){
                        uniqueId = dataJson.jsonPath(".author.short_id").get();
                    }
                    CrawlerData crawlerData = CrawlerData.builder()
                            .data(crawlerRecord, page)
                            .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), aweme_id))
                            .url("https://www.iesdouyin.com/share/video/"+aweme_id)
                            .releaseTime(Long.valueOf(create_time) * 1000L) // create_time is epoch seconds
                            .content(data)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                            .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                            .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                            .flowInPipelineTag("article_result")
                            .build();
                    crawlerData.setFilterPipelineResult(true);
                    crawlerData.tagsCreator().bizTags().addCustomKV(Tag_Field_Keword,uniqueId);
                    crawlerDataList.add(crawlerData);

                    // Emit an id-list record for the comment crawler when comments exist.
                    try {
                        Integer commentCount = Integer.valueOf(dataJson.jsonPath($_type + ".statistics.comment_count").get());
                        String sec_uid = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("sec_uid");
                        if(commentCount > 0){
                            CrawlerData crawlerArticleIdListData = CrawlerData.builder()
                                    .data(crawlerRecord, page)
                                    .dataId(StringUtils.joinWith("-",crawlerRecord.getDomain(), article.enumVal(),"ids", aweme_id))
                                    .releaseTime(Long.valueOf(create_time) * 1000L)
                                    .addContentKV("aweme_id",aweme_id)
                                    .addContentKV("aweme_author",sec_uid)
                                    .addContentKV("comment_record_filter_info",commentRecordFilterInfo)
                                    .url("https://www.iesdouyin.com/share/video/"+aweme_id)
                                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                    .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                    .resultLabelTag(article)
                                    .flowInPipelineTag("article_ids")
                                    .build();
                            crawlerArticleIdListData.setFilterPipelineResult(true);
                            crawlerDataList.add(crawlerArticleIdListData);
                        }
                    } catch (Exception e) {
                        // Pass the throwable so the stack trace is retained (was message-only).
                        logger.error("dy account video comment generator error: {}",e.getMessage(),e);
                    }

                } catch (Exception e) {
                    logger.error("dy account video content data normal,error: {}",e.getMessage(),e);
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * @return this script's crawler domain identifier; uses the {@link #domain}
     *         constant instead of duplicating the "api-dy" literal.
     */
    @Override
    public String domain() {
        return domain;
    }

    /**
     * Registers the account post-list URL pattern. The pattern exists solely so that
     * matching requests are routed into this script; it has no further meaning.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(accountUrlRegular);
    }

    /**
     * A record belongs to this script when its business-tag site matches {@link #site}.
     *
     * @param crawlerRecord the record being routed
     * @return true when the record's site tag equals {@code site}
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        // Direct boolean return instead of if (cond) return true; return false;
        return site.equals(crawlerRecord.tagsCreator().bizTags().site());
    }

    /**
     * No post-execution work is required for this script; intentionally a no-op.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Pre-download hook. With no support-source records: signs the URL through the
     * signature hook service, attaches a randomized User-Agent and a rotated
     * x-common-params-v2 header, throttles, then delegates to the parent. With
     * support-source records: seeds account requests from the keyword meta service
     * and (re)fills the common-params queue from the oauth service.
     *
     * @param requestRecord        the record about to be downloaded
     * @param supportSourceRecords auxiliary records (keyword meta / oauth info); may be null
     * @return seeded item records, or the parent's result for a plain download
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        requestRecord.setNeedWashPage(true);
        List<CrawlerRecord> allItemRecords = new ArrayList<>();
        if (supportSourceRecords == null || supportSourceRecords.size() <1){
            Map<String, String> dataMap = Maps.newHashMap();
            Map<String, String> signatureMap = dySignature( requestRecord.getHttpRequest().getUrl(), dataMap);
            requestRecord.getHttpRequest().getHeaders().putAll(signatureMap);
            // Random hex fragments fill the Cronet/Quic version slots of the UA template.
            String useragent = String.format(useragentFormat,RandomStringUtils.random(8,"0123456789abcdf"),RandomStringUtils.random(8,"0123456789abcdf"));
            requestRecord.getHttpRequest().getHeaders().put("User-Agent",useragent);
            requestRecord.getHttpRequest().getHeaders().put("x-common-params-v2",pollOneParam());
            downloadSleep();
            return super.prepareRequest(requestRecord,supportSourceRecords);
        }

        for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
            if (isUrlMatch(supportSourceRecord.getHttpRequest().getUrl(),
                    "http://\\S*/v1/meta/douyin/keys\\?site=\\S*")){
                initAllCrawlerRecordByKeyword(requestRecord,supportSourceRecord,allItemRecords);// records can only be seeded after cookies/userAgents are initialized
            }
            if (supportSourceRecord.getHttpRequest().getUrl().contains("/crawler/oauth/api/v1/douyin/userOauthInfos")){
                commonParamsQueue.clear();
                initAuthorInfos(supportSourceRecord); // initialize device auth info
            }
        }
        return allItemRecords;
    }

    /**
     * Parses the oauth service response and refills {@link #commonParamsQueue} with
     * the oauthInfo JSON of every "device_account" entry.
     *
     * @param supportSourceRecord the support record carrying the oauth service response
     */
    private void initAuthorInfos(CrawlerRequestRecord supportSourceRecord){
        List<String> commonParams = new ArrayList<>();
        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            Json rawText = new Json(httpPage.getRawText());
            String status = rawText.jsonPath($_type + ".status").get();
            List<String> contents = rawText.jsonPath($_type + ".content").all();

            if ("0".equals(status) && contents != null && contents.size() > 0){

                List<Map> userOauthInfos = new Json(contents.get(0)).toList(Map.class);
                for (Map userOauthInfo : userOauthInfos) {
                    String oauthInfo = String.valueOf(userOauthInfo.get("oauthInfo"));
                    Map oauthInfoMap = JSON.parseObject(oauthInfo, Map.class);

                    // Keep only device-level accounts; other categories are skipped.
                    String category =oauthInfoMap.get("category").toString();
                    if ("device_account".equals(category)){
                        commonParams.add(oauthInfo);
                    }
                }
            }
            if (commonParams.size() > 0){
                // Reverse before queueing so the service's first entry ends at the tail.
                Collections.reverse(commonParams);
                commonParamsQueue.addAll(commonParams);
            }
            logger.info("account devices count {}",commonParamsQueue.size());
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Sleeps for a random 3-5 seconds between downloads to throttle the request rate.
     * Restores the thread's interrupt flag if the sleep is interrupted.
     */
    private void downloadSleep(){
        Random rand = new Random();
        // nextFloat() is in [0, 1), so sleepTime lands in [3000, 5000) ms.
        long sleepTime = (long)((rand.nextFloat()+1.5)*2000L);
        // Was "xhs download sleep ..." -- copy-paste from another script; this is the dy account script.
        logger.info("dy account download sleep time {}",sleepTime);
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status for callers
            logger.error(e.getMessage());
        }
    }

    /**
     * Rotates one device oauth-info entry off {@link #commonParamsQueue} (re-appending
     * it for round-robin reuse) and formats it into an x-common-params-v2 header value,
     * pairing it with the next channel from {@link #channels}.
     *
     * @return the formatted common-params string, or "" when no entry/channel is
     *         available within the 3-second poll timeout or formatting fails
     */
    private String pollOneParam(){
        String param = "";
        String oauthInfo = "";
        try {

            oauthInfo = commonParamsQueue.poll(3, TimeUnit.SECONDS);
            if (oauthInfo == null){
                // Queue empty or not yet filled by initAuthorInfos. The original code
                // hit an NPE on add(null) here (silently absorbed by the broad catch);
                // return the empty param explicitly instead.
                logger.error("dy account common params queue is empty!");
                return param;
            }
            commonParamsQueue.add(oauthInfo); // re-append: round-robin rotation

            Map oauthInfoMap = JSON.parseObject(oauthInfo, Map.class);
            String channel = channels.poll(3, TimeUnit.SECONDS);
            if (channel == null){
                // Same null-poll hazard as above; preserve the original outcome ("").
                logger.error("dy account channel queue is empty!");
                return param;
            }
            channels.add(channel); // re-append: round-robin rotation
            param = String.format(common_param_v2
                    ,oauthInfoMap.get("cdid").toString()
                    ,channel
                    ,oauthInfoMap.get("device_manufacturer").toString()
                    ,oauthInfoMap.get("device_id").toString()
                    ,oauthInfoMap.get("device_model").toString()
                    ,oauthInfoMap.get("iid").toString()
                    ,oauthInfoMap.get("openudid").toString()
                    ,oauthInfoMap.get("os_api").toString()
                    ,oauthInfoMap.get("os_version").toString()
                    ,oauthInfoMap.get("resolution").toString());
            logger.info("surplus common param:{}",commonParamsQueue.size());
        } catch (Exception e) {
            // Pass the throwable so the stack trace is retained.
            logger.error("poll common params error:{},oauth info:[{}]",e.getMessage(),oauthInfo,e);
        }
        return param;
    }

    /**
     * Calls the external signature hook service to sign a Douyin URL, returning the
     * signature headers as a map. On failure the call is retried every 60 seconds
     * until a successful non-blank response is obtained.
     *
     * NOTE(review): the retry loop has no upper bound and can block indefinitely if
     * the hook service stays down, and the final parse may receive a null body when
     * the retry itself fails (fastjson then returns null). Both behaviors are
     * preserved from the original.
     *
     * @param dy_url  the Douyin API URL to sign
     * @param dataMap extra form data forwarded to the hook as JSON
     * @return the parsed signature header map (may be null on unrecoverable failure)
     */
    private Map<String,String> dySignature(String dy_url,Map<String,String> dataMap){
        String signatureUrl = hookApi + "/frida/dy/signature";

        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(signatureUrl);
        Map<String,Object> dyDataMap = new HashMap<>();
        dyDataMap.put("dy_url",dy_url);
        dyDataMap.put("map_data_json",JSON.toJSONString(dataMap));
        httpRequest.setRequestBody(HttpRequestBody.form(dyDataMap,"utf-8"));
        httpRequest.setMethod("POST");
        String rawText = null;
        HttpPage download = null;
        try {
            download = httpDownload.download(httpRequest, dySignatureConfig);
            rawText = download.getRawText();
            // An HTML error page instead of JSON means the hook misfired; force a retry.
            if(rawText.contains("DOCTYPE HTML PUBLIC")){
                throw new Exception("DOCTYPE HTML PUBLIC");
            }
        } catch (Exception e) {
            try {
                while (download == null || !download.isDownloadSuccess() || StringUtils.isBlank(rawText)){
                    Thread.sleep(60000);
                    download = httpDownload.download(httpRequest, dySignatureConfig);
                    rawText = download.getRawText();
                }
            } catch (Exception ex) {
                // Was ex.printStackTrace(): route through the logger, keep the stack trace.
                logger.error("dy signature retry failed: {}", ex.getMessage(), ex);
            }
        }
        return JSON.parseObject(rawText,Map.class);
    }

    /**
     * Lazily captures the context's page downloader for use by {@link #dySignature}.
     * NOTE(review): this is double-checked locking on a non-volatile field
     * ({@code httpDownload}), which is not a safe publication idiom; consider
     * declaring the field volatile.
     */
    @Override
    public void beforeDownload(CrawlerRecordContext context) {
        if (httpDownload == null){
            synchronized (httpDownloadObj) {
                if (httpDownload == null){
                    httpDownload = context.getPageDownloader();
                }
            }
        }
        super.beforeDownload(context);
    }

    /**
     * Seeds one first-page post-list request per account keyword fetched from the
     * meta key service. Each keyword entry supplies the account's sec_uid (used in
     * the request URL and stored as a business tag) and unique_id (added as a
     * keyword tag). Dead commented-out X-Gorgon code and the unused {@code iid}
     * local from the original have been removed.
     *
     * @param requestRecord       the triggering start record; headers and tags are copied from it
     * @param supportSourceRecord the meta-service support record carrying the keyword payload
     * @param allItemRecords      output list the seeded records are appended to
     */
    private void initAllCrawlerRecordByKeyword(CrawlerRequestRecord requestRecord,
                                               CrawlerRequestRecord supportSourceRecord,
                                               List<CrawlerRecord> allItemRecords){
        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            MetaResponse metaResponse = JSON.parseObject(httpPage.getRawText(), MetaResponse.class);
            if (metaResponse.getStatus() == 0 && metaResponse.getContent() != null){
                List<String> contents = (List<String>) metaResponse.getContent();
                long rticket = System.currentTimeMillis();
                long ts = rticket/1000;
                for (String content : contents) {
                    CrawlerDomainKeys crawlerDomainKeys = JSON.parseObject(content, CrawlerDomainKeys.class);
                    String keywordJsonStr = crawlerDomainKeys.getKeyword();
                    Json keywordJson = new Json(keywordJsonStr);
                    String sec_uid = keywordJson.jsonPath($_type + ".sec_uid").get();
                    String unique_id = keywordJson.jsonPath($_type + ".unique_id").get();
                    // First page of the account: cursor 0.
                    String urlParam = String.format(accountUrlParamFormat, 0, sec_uid, rticket,ts);

                    CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(requestRecord)
                            .httpUrl(accountUrl+urlParam)
                            .httpHeads(requestRecord.getHttpRequest().getHeaders())
                            .releaseTime(System.currentTimeMillis())
                            .copyBizTags()
                            .needWashed(true)
                            .notFilterRecord()
                            .build();
                    crawlerRequestRecord.getHttpConfig().setHttpSite("account_new");
                    crawlerRequestRecord.getHttpConfig().setDisableCookie(true);
                    crawlerRequestRecord.tagsCreator().bizTags().addKeywords(unique_id);
                    crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("sec_uid",sec_uid);
                    allItemRecords.add(crawlerRequestRecord);
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Re-queues the given record for retry: builds a copy of the original request
     * (same HTTP request, fresh release time, business tags copied, not filtered)
     * and appends it to the output list. Retries are currently unbounded; the old
     * retry-counting logic was already disabled and has been removed as dead code.
     *
     * @param crawlerRequestRecords output list the retry record is appended to
     * @param crawlerRecord         the failed record to retry
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .needWashed(true)
                .build();
        crawlerRequestRecords.add(crawlerRequestRecord);
    }
}
