package com.chance.cc.crawler.development.scripts.douyin.api;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerRecordContext;
import com.chance.cc.crawler.core.downloader.*;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.pipeline.result.CrawlerData;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.core.script.crawler.CrawlerCommonScript;
import com.chance.cc.crawler.core.selector.Json;
import com.chance.cc.crawler.meta.core.bean.common.MetaResponse;
import com.chance.cc.crawler.meta.core.bean.crawler.CrawlerDomainKeys;
import com.google.common.collect.Maps;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.URLEncoder;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerDataType.article;
import static org.codehaus.groovy.runtime.EncodingGroovyMethods.md5;

/**
 * @author bx
 * @date 2020/12/6 0006 12:52
 */
public class DYSearchCrawlerScript extends CrawlerCommonScript {

    private static final Logger logger = LoggerFactory.getLogger(DYSearchCrawlerScript.class);
    /** Biz-tag "site" value this script accepts (see {@link #crawlerCheck}). */
    public static final String site = "api-search";
    /** Crawl domain identifier returned by {@link #domain()}. */
    public static final String domain = "api-dy";

    // Page downloader, lazily captured from the first CrawlerRecordContext in
    // beforeDownload(). FIX: marked volatile so the one-time publication is
    // safely visible to other threads (it is read without a lock elsewhere).
    private volatile Downloader httpDownload;
    private static final Object httpDownloadObj = new Object();
    private static final HttpConfig dySignatureConfig = HttpConfig.me("signature");

    public static final String searchUrlRegular = "https://search3-search-lf.amemv.com/aweme/v1/general/search/single/\\S*";
    public static final String searchStartUrl = "https://search3-search-lf.amemv.com/aweme/v1/general/search/single/";
    // Search URL template; format placeholders: iid, openudid, device_id, cdid, oaid.
    public static final String searchUrlFormat =  "https://search3-search-lf.amemv.com/aweme/v1/general/search/single/?" +
            "manifest_version_code=120301" +
            "&app_type=normal&iid=%s&channel=xinyou_dy_and7" +
            "&device_type=MI+5s&language=zh&cpu_support64=true&host_abi=armeabi-v7a&resolution=1080*1920" +
            "&openudid=%s&update_version_code=12309900" +
            "&appTheme=dark&os_api=26&dpi=480" +
            "&ac=wifi&device_id=%s&os_version=8.0.0&version_code=120300&app_name=aweme" +
            "&version_name=12.3.0&device_brand=Xiaomi&ssmix=a&device_platform=android&aid=1128" +
            "&cdid=%s&oaid=%s";

    // Search POST body template; format placeholders: keyword (URL-encoded),
    // offset/cursor, search_id (impr_id of the previous page), sort_type.
    public static final String searchPostParam = "keyword=%s" +
            "&offset=%s" +
            "&count=10&is_pull_refresh=0&search_source=search_history&hot_search=0&latitude=31.252495172715925&longitude=121.58899635593458&query_correct_type=1" +
            "&search_id=%s" +
            "&is_filter_search=1&sort_type=%s&publish_time=0&disable_synthesis=0&multi_mod=0&single_filter_aladdin=0&client_width=360&client_height=640&dynamic_type=0&epidemic_card_type=0&enter_from=homepage_hot&mode=&anchor_item_id=&backtrace=f8zSdzdPBhl57DL%%2BQz8rLw%%3D%%3D&json=&address_book_access=2&location_access=1";

    // Comment-list endpoint and its query template; placeholders:
    // aweme_id, cursor, aweme_author (sec_uid), _rticket, ts.
    public static final String commentUrl = "https://api5-normal-c-lf.amemv.com/aweme/v2/comment/list/?";
    public static final String commentUrlParamFormat = "aweme_id=%s" +
            "&cursor=%s" +
            "&count=20&address_book_access=2&gps_access=2&forward_page_type=1&channel_id=0&city=310000&hotsoon_filtered_count=0&hotsoon_has_more=0&follower_count=0&is_familiar=0&page_source=0&user_avatar_shrink=96_96" +
            "&aweme_author=%s" +
            "&manifest_version_code=140101" +
            "&_rticket=%s" +
            "&app_type=normal&iid=2005967845199176&channel=huawei_1&device_type=DIG-AL00" +
            "&language=zh&cpu_support64=true&host_abi=arm64-v8a&resolution=720x1280" +
            "&openudid=c839e5a342685cde&update_version_code=14109900&cdid=75eeadf2-a178-4f34-b9db-ba44ea4470cf" +
            "&appTheme=dark&os_api=23&dpi=480&ac=wifi&device_id=51959513713&mcc_mnc=46011&os_version=6.0" +
            "&version_code=140100&app_name=aweme&version_name=14.1.0&device_brand=HUAWEI&ssmix=a&device_platform=android&aid=1128" +
            "&ts=%s";

    // Per-job-flag queues of prepared keyword search requests (filled in
    // prepareRequest, drained by fillRecordRequest).
    final Map<String, LinkedBlockingQueue<CrawlerRequestRecord>> recordMap = new HashMap<>();
    final Set<String> availableUsers = new HashSet<>(); // available (authorized) user list
    private final LinkedBlockingQueue<String> devicesQueue = new LinkedBlockingQueue<>();

    public static HttpConfig searchHttpConfig = HttpConfig.me(domain);
    public static HttpConfig commentHttpConfig = HttpConfig.me(domain);
    static {
        searchHttpConfig.setHttpSite("search");
        commentHttpConfig.setHttpSite("comment");
    }
    // User-Agent template; placeholders are random hex strings standing in for
    // the TTNet and Quic version identifiers.
    public static final String useragentFormat = "com.ss.android.ugc.aweme/140101 (Linux; U; Android 8.0.0; zh_CN; MI 5s;" +
            " Build/OPR1.170623.032; Cronet/TTNetVersion:%s 2020-12-16 QuicVersion:%s 2020-10-14)";
    // Frida hook service used by dySignature() to compute request signatures.
    private static final String hookApi = "http://192.168.1.212:8383";
    /**
     * Parses a search-result page: schedules avatar-image downloads, builds the
     * next-page request while results remain, and rolls over to the next queued
     * keyword once the current one is exhausted. Download failures re-enqueue
     * the same request for retry via {@link #addCrawlerRecords}.
     *
     * @param crawlerRecord the record whose response is being parsed
     * @param page          the downloaded search-result page
     * @return follow-up requests (next page, avatar downloads, or next keyword)
     */
    @Override
    public List<CrawlerRequestRecord> parseLinks(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        String useragent = crawlerRecord.getHttpRequest().getHeaders().get("User-Agent");
        List<CrawlerRequestRecord> crawlerRequestRecords = new ArrayList<>();
        if (!page.isDownloadSuccess()){
            // FIX: the original log statement had a {} placeholder but no argument.
            logger.error("dy search request url {} download has error ,will retry", crawlerRecord.getHttpRequest().getUrl());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // download failed, no page to wash
            return crawlerRequestRecords;
        }

        try{
            // "暂时没有更多了" = "no more results for now": the current keyword is
            // exhausted, so move on to the next queued keyword.
            if(page.getRawText().contains("\"status_msg\":\"暂时没有更多了\"")){
                crawlerRecord.setNeedWashPage(false);
                CrawlerRequestRecord crawlerRequestRecord = fillRecordRequest(crawlerRecord);
                if (crawlerRequestRecord != null){
                    crawlerRequestRecords.add(crawlerRequestRecord);
                }
                return crawlerRequestRecords;
            }

            Json searchResultJson = new Json(page.getRawText());
            String statusCode = searchResultJson.jsonPath($_type + ".status_code").get();
            String hasMore = searchResultJson.jsonPath($_type + ".has_more").get();
            String keyword = crawlerRecord.tagsCreator().bizTags().keywordsList().get(0);
            if ("0".equals(statusCode)){
                if ("1".equals(hasMore) && !nextPageFilter(crawlerRecord, page)){
                    // Collect author avatar URLs from each result and schedule
                    // internal image downloads for them.
                    Json searchJson = new Json(page.getRawText());
                    List<String> images = new ArrayList<>();
                    List<String> awemeList = searchJson.jsonPath($_type + ".data").all();
                    for (String aweme : awemeList) {
                        try {
                            Json awemeJson = new Json(aweme);
                            List<String> url_list = awemeJson.jsonPath($_type + ".aweme_info.author.avatar_larger.url_list").all();
                            if (url_list != null){
                                for (String url : url_list) {
                                    if (url.contains("p6.douyinpic.com")){
                                        images.add(url);
                                    }
                                }
                            }
                        } catch (Exception e) {
                            logger.error("dy search generate comment record error!");
                        }
                    }
                    if (images.size() > 0 ){
                        for (String image : images) {
                            internalDownloadImg(crawlerRequestRecords,crawlerRecord,image,useragent);
                        }
                    }
                    // Build the next-page request: cursor and search_id (impr_id)
                    // come from the page just downloaded.
                    // (FIX: removed an unused local `ts` that was computed here.)
                    String cursor = searchResultJson.jsonPath($_type + ".cursor").get();
                    String imprId = searchResultJson.jsonPath($_type + ".log_pb.impr_id").get();
                    String sortType = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("sort_type");
                    String urlParam = String.format(searchPostParam, URLEncoder.encode(keyword,"utf-8"), cursor, imprId,sortType);
                    String stub = md5(urlParam).toUpperCase(); // X-SS-STUB is the MD5 of the POST body
                    String searchUrl = searchUrlFormat();
                    CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(crawlerRecord)
                            .httpUrl(searchUrl)
                            .httpHeads(page.getRequest().getHeaders())
                            .httpHead("X-SS-STUB", stub)
                            .releaseTime(System.currentTimeMillis())
                            .needWashed(true)
                            .copyBizTags()
                            .recordKey(searchUrl+keyword) // avoid record-key collisions across keywords
                            .build();
                    crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("download_retry_count",0);
                    HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
                    httpRequest.setMethod("POST");
                    httpRequest.setRequestBody(HttpRequestBody.custom(urlParam.getBytes(),"application/x-www-form-urlencoded", "UTF-8"));

                    // NOTE(review): this sets the site on the INCOMING record's
                    // config, not on the newly built record — confirm intended.
                    crawlerRecord.getHttpConfig().setHttpSite("search");
                    crawlerRequestRecords.add(crawlerRequestRecord);


                }else { // no next page: keyword finished, generate the next keyword record
                    CrawlerRequestRecord crawlerRequestRecord = fillRecordRequest(crawlerRecord);
                    if (crawlerRequestRecord != null){
                        crawlerRequestRecords.add(crawlerRequestRecord);
                    }
                }
            }
        }catch (Exception e){
            // FIX: dropped printStackTrace(); the logger call below already records e.
            logger.error("dy user {} search request  {} download error：{}, page raw text {}"
                    ,crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("dy_user")
                    ,JSON.toJSONString(page.getRequest()),e,page.getRawText());
            addCrawlerRecords(crawlerRequestRecords,crawlerRecord);
            crawlerRecord.setNeedWashPage(false); // treat as download failure; skip washing
        }

        return crawlerRequestRecords;
    }

    /**
     * Queues an internal-download request for an author avatar image.
     * Blank URLs are ignored. The response is binary image data, so HTML
     * generation from the response text is disabled on the record's config.
     */
    private void internalDownloadImg(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord,String url,String useragent) throws NoSuchAlgorithmException {
        if (StringUtils.isBlank(url)){
            return; // nothing to download
        }
        CrawlerRequestRecord avatarRecord = CrawlerRequestRecord.builder()
                .itemPageRequest(crawlerRecord)
                .httpUrl(url)
                .httpHead("Host","p6.douyinpic.com")
                .httpHead("User-Agent",useragent)
                .releaseTime(System.currentTimeMillis())
                .requestLabelTag(CrawlerEnum.CrawlerRequestType.internalDownload)
                .build();
        avatarRecord.getHttpConfig().setResponseTextGenerateHtml(false);
        crawlerRequestRecords.add(avatarRecord);
    }

    /**
     * Washes a downloaded page into result data. Every page this script
     * handles is a search-result page, so this delegates directly to the
     * search-video washer.
     */
    @Override
    public List<CrawlerData> washPage(CrawlerRequestRecord crawlerRecord, HttpPage page) {
        List<CrawlerData> washed = washSearchVideo(crawlerRecord, page);
        return washed;
    }

    /**
     * Extracts video results from a search page. For each entry of type "1"
     * (a video), emits an "article_result" CrawlerData with the raw entry as
     * content, and — when the video has comments — a companion "article_ids"
     * CrawlerData carrying the ids needed to later crawl its comment list.
     * Per-entry failures are logged and skipped so one bad item never drops
     * the whole page.
     */
    private List<CrawlerData> washSearchVideo(CrawlerRequestRecord crawlerRecord, HttpPage page){
        List<CrawlerData> crawlerDataList = new ArrayList<>();
        // Filter config to propagate onto comment records; absence is tolerated.
        String commentRecordFilterInfo = crawlerRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("comment_record_filter_info");
        if (commentRecordFilterInfo == null){
            logger.warn("dy search comment record filter info is null!");
        }
        Json searchJson = new Json(page.getRawText());
        List<String> datas = searchJson.jsonPath($_type + ".data").all();
        if(datas != null){
            for(String data : datas){
                try {
                    Json dataJson = new Json(data);
                    // type "1" marks a video entry; other types (users, topics, ...) are skipped.
                    if ("1".equals(dataJson.jsonPath($_type + ".type").get())){
                        String aweme_id = dataJson.jsonPath($_type + ".aweme_info.aweme_id").get();
                        String create_time = dataJson.jsonPath($_type + ".aweme_info.create_time").get();
                        // Main article result: the whole raw entry is the content.
                        CrawlerData crawlerData = CrawlerData.builder()
                                .data(crawlerRecord, page)
                                .dataId(StringUtils.joinWith("-", crawlerRecord.getDomain(), CrawlerEnum.CrawlerDataType.article.enumVal(), aweme_id))
                                .url("https://www.iesdouyin.com/share/video/"+aweme_id)
                                .releaseTime(Long.valueOf(create_time ) * 1000L)
                                .content(data)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                                .flowInPipelineTag("article_result")
                                .build();
                        crawlerData.setFilterPipelineResult(true);
                        crawlerDataList.add(crawlerData);

                        // Comment id record: only emitted when the video has comments.
                        try {
                            Integer commentCount = Integer.valueOf(dataJson.jsonPath($_type + ".aweme_info.statistics.comment_count").get());
                            if(commentCount > 0){
                                String sec_uid = dataJson.jsonPath($_type + ".aweme_info.author.sec_uid").get();
                                CrawlerData crawlerArticleIdListData = CrawlerData.builder()
                                        .data(crawlerRecord, page)
                                        .dataId(StringUtils.joinWith("-",crawlerRecord.getDomain(), article.enumVal(),"ids", aweme_id))
                                        .releaseTime(Long.valueOf(create_time ) * 1000L)
                                        .addContentKV("aweme_id",aweme_id)
                                        .addContentKV("aweme_author",sec_uid)
                                        .addContentKV("comment_record_filter_info",commentRecordFilterInfo)
                                        .url("https://www.iesdouyin.com/share/video/"+aweme_id)
                                        .resultLabelTag(article)
                                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.result)
                                        .requestLabelTag(CrawlerEnum.CrawlerRequestType.filter)
                                        .flowInPipelineTag("article_ids")
                                        .build();
                                crawlerArticleIdListData.setFilterPipelineResult(true);
                                crawlerDataList.add(crawlerArticleIdListData);
                            }
                        } catch (Exception e) {
                            logger.error("dy search video comment generator error: {}",e.getMessage());
                        }
                    }
                } catch (Exception e) {
                    logger.error("dy search video content data normal,error: {}",e.getMessage());
                }
            }
        }
        return crawlerDataList;
    }

    /**
     * Decides whether to STOP paging for the current keyword.
     * Only dateRange filtering is supported: paging continues (returns false)
     * only while at least one video on the current page falls inside the
     * allowed time window; any misconfiguration also stops paging.
     *
     * @return true to filter out (stop requesting) the next page
     */
    private boolean nextPageFilter(CrawlerRequestRecord crawlerRequestRecord, HttpPage page){

        // Video-detail filtering currently supports time filtering only.
        if (crawlerRequestRecord.getFilter() != CrawlerEnum.CrawlerRecordFilter.dateRange){
            logger.error("Filtering types other than time filtering are not supported!");
            return true;
        }
        if (crawlerRequestRecord.getFilterInfos() == null || crawlerRequestRecord.getFilterInfos().size() < 1 ){
            logger.error("Filtering information must be configured!");
            return true;
        }
        FilterInfo dateRangeFilterInfo = null;
        for (FilterInfo filterInfo : crawlerRequestRecord.getFilterInfos()) {
            if (filterInfo.getFilter() == CrawlerEnum.CrawlerRecordFilter.dateRange){
                int hourFromNow = filterInfo.getHourFromNow();
                long endTime = filterInfo.getEndTime();
                if (hourFromNow > 0){
                    // NOTE(review): the window start is endTime minus hourFromNow
                    // hours, but the window end is "now" rather than endTime —
                    // confirm this asymmetry is intentional.
                    long start = endTime - 1000L*60*60*hourFromNow;
                    long[] dateRange = new long[2];
                    dateRange[0] = start;
                    dateRange[1] = System.currentTimeMillis();
                    filterInfo.setDateAllowRange(dateRange);
                }
                dateRangeFilterInfo = filterInfo;
            }
        }

        if (dateRangeFilterInfo == null){
            logger.error("Time filtering information must be configured!");
            return true;
        }

        List<CrawlerData> crawlerData = washSearchVideo(crawlerRequestRecord, page);
        if (crawlerData == null || crawlerData.size() <1){
            logger.warn("dy search wash video data is null!");
            return true;
        }

        Json searchJson = new Json(page.getRawText());
        List<String> datas = searchJson.jsonPath($_type + ".data").all();
        if(datas != null){
            for(String data : datas){
                try {
                    Json dataJson = new Json(data);
                    if ("1".equals(dataJson.jsonPath($_type + ".type").get())){
                        String create_time = dataJson.jsonPath($_type + ".aweme_info.create_time").get();
                        long releaseTime = Long.valueOf(create_time ) * 1000L;
                        // Keep paging as soon as ONE item lies within the window.
                        if (releaseTime >= dateRangeFilterInfo.getDateAllowRange()[0]
                                && releaseTime <= dateRangeFilterInfo.getDateAllowRange()[1]){
                            return false;
                        }
                    }
                } catch (Exception e) {
                    // FIX: was an empty catch; log so malformed entries are visible.
                    logger.warn("dy search nextPageFilter skipped malformed entry: {}", e.getMessage());
                }
            }
        }
        return true;
    }

    /**
     * Identifies the crawl domain this script serves ("api-dy").
     */
    @Override
    public String domain() {
        return DYSearchCrawlerScript.domain;
    }

    /**
     * Registers the start-URL pattern. It exists only to route records into
     * this script; the pattern itself carries no other meaning.
     */
    @Override
    public void initUrlRegulars() {
        addUrlRegular(searchUrlRegular);
    }

    /**
     * Accepts only records whose biz-tag site matches this script's site
     * ("api-search").
     */
    @Override
    public boolean crawlerCheck(CrawlerRequestRecord crawlerRecord) {
        String recordSite = crawlerRecord.tagsCreator().bizTags().site();
        // Idiom fix: return the comparison directly instead of if/return true/false.
        return site.equals(recordSite);
    }

    /**
     * Post-execution hook. Intentionally a no-op: this script needs no
     * per-record cleanup after execution.
     */
    @Override
    public void afterExecute(CrawlerRecordContext context) {

    }

    /**
     * Prepares a search request before download. When support-source records
     * are present, this (re)loads the authorized user and device pools, builds
     * the per-keyword request queue, and fans the queued keywords out across
     * all available users. It always attaches a signature and a randomized
     * User-Agent to the incoming record and throttles via downloadSleep().
     *
     * @return one record per (user, keyword) pairing, or null when no support
     *         sources were supplied (NOTE(review): a null — not empty — list;
     *         confirm callers tolerate that)
     */
    @Override
    public List<CrawlerRecord> prepareRequest(CrawlerRequestRecord requestRecord, List<CrawlerRequestRecord> supportSourceRecords) {
        requestRecord.setNeedWashPage(true);
        List<CrawlerRecord> allItemRecords = null;

        if (supportSourceRecords != null && supportSourceRecords.size() >1){
            allItemRecords = new ArrayList<>();
            CrawlerRequestRecord kwSupportSourceRecord = null;
            for (CrawlerRequestRecord supportSourceRecord : supportSourceRecords) {
                if (supportSourceRecord.getHttpRequest().getUrl().contains("/v1/meta/douyin/keys")){
                    kwSupportSourceRecord = supportSourceRecord;
                }
                if (supportSourceRecord.getHttpRequest().getUrl().contains("/crawler/oauth/api/v1/douyin/userOauthInfos")){
                    availableUsers.clear();
                    devicesQueue.clear();
                    initAuthorInfos(supportSourceRecord); // reload auth/device info
                }
            }
            if (kwSupportSourceRecord != null){
                // Job flag distinguishes task kinds: daily, supplement, etc.
                String jobFlag = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("job_flag");
                LinkedBlockingQueue<CrawlerRequestRecord> requestRecords = recordMap.get(jobFlag);
                if (requestRecords == null){
                    requestRecords = new LinkedBlockingQueue<>();
                    recordMap.put(jobFlag, requestRecords);
                }
                requestRecords.clear();
                // Cookies/user-agents must be initialized before records are built.
                initAllCrawlerRecordByKeyword(requestRecord,kwSupportSourceRecord,requestRecords);
            }

            // Distribute the queued keywords across the available users.
            for (String oauthInfo : availableUsers) {
                Map<?, ?> oauthInfoMap = new Json(oauthInfo).toObject(Map.class);
                String userCookie = oauthInfoMap.get("cookie").toString();
                String account = oauthInfoMap.get("account").toString();

                CrawlerRequestRecord record = fillRecordRequest(requestRecord,account, userCookie);
                if (record != null){
                    record.tagsCreator().bizTags().addCustomKV("dy_user",account);
                    allItemRecords.add(record);
                }
            }
        }

        // Attach the computed signature headers and a randomized User-Agent.
        Map<String, String> signatureMap = dySignature( requestRecord.getHttpRequest().getUrl(), Maps.newHashMap());
        requestRecord.getHttpRequest().getHeaders().putAll(signatureMap);
        String useragent = String.format(useragentFormat, RandomStringUtils.random(8,"0123456789abcdf"),RandomStringUtils.random(8,"0123456789abcdf"));
        requestRecord.getHttpRequest().getHeaders().put("User-Agent",useragent);
        downloadSleep();
        return allItemRecords;
    }
    /**
     * Sleeps a random 3.0–5.0 seconds between downloads to throttle the
     * request rate.
     */
    private void downloadSleep(){
        Random rand = new Random();
        long sleepTime = (long)((rand.nextFloat()+1.5)*2000L);
        // FIX: the log message previously said "xhs"; this is the douyin (dy) script.
        logger.info("dy download sleep time {}",sleepTime);
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage());
        }
    }

    /**
     * Asks the Frida hook service to sign a douyin URL, returning the header
     * map to merge into the outgoing request. On failure it retries every 60s
     * until a non-blank response is obtained (deliberately persistent: without
     * a signature no request can succeed).
     *
     * @param dy_url  the douyin request URL to sign
     * @param dataMap extra data forwarded to the signature service as JSON
     */
    private Map<String,String> dySignature(String dy_url,Map<String,String> dataMap){
        String signatureUrl = hookApi + "/frida/dy/signature";

        HttpRequest httpRequest = new HttpRequest();
        httpRequest.setUrl(signatureUrl);
        Map<String,Object> dyDataMap = new HashMap<>();
        dyDataMap.put("dy_url",dy_url);
        dyDataMap.put("map_data_json",JSON.toJSONString(dataMap));
        httpRequest.setRequestBody(HttpRequestBody.form(dyDataMap,"utf-8"));
        httpRequest.setMethod("POST");
        String rawText = null;
        HttpPage download = null;
        try {
            download = httpDownload.download(httpRequest, dySignatureConfig);
            rawText = download.getRawText();
            // An HTML doctype means the hook service returned an error page.
            if(rawText.contains("DOCTYPE HTML PUBLIC")){
                throw new Exception("DOCTYPE HTML PUBLIC");
            }
        } catch (Exception e) {
            try {
                // Retry until the signature service responds with usable text.
                while (download == null || !download.isDownloadSuccess() || StringUtils.isBlank(rawText)){
                    Thread.sleep(60000);
                    download = httpDownload.download(httpRequest, dySignatureConfig);
                    rawText = download.getRawText();
                }
            } catch (InterruptedException ie) {
                // FIX: restore the interrupt flag instead of printStackTrace().
                Thread.currentThread().interrupt();
                logger.error("dy signature retry interrupted", ie);
            } catch (Exception ex) {
                // FIX: log via SLF4J instead of printStackTrace().
                logger.error("dy signature retry failed", ex);
            }
        }
        // NOTE(review): rawText can still be null/blank if the retry loop was
        // aborted; parseObject then returns null — confirm callers tolerate it.
        return JSON.parseObject(rawText,Map.class);
    }

    /**
     * Captures the page downloader from the first context seen, then defers to
     * the superclass.
     */
    @Override
    public void beforeDownload(CrawlerRecordContext context) {
        // FIX: the original used double-checked locking on a field that was not
        // declared volatile, which is not a safe publication pattern. Since this
        // runs once per download, an unconditional synchronized block is both
        // correct and cheap.
        synchronized (httpDownloadObj) {
            if (httpDownload == null){
                httpDownload = context.getPageDownloader();
            }
        }
        super.beforeDownload(context);
    }

    /**
     * Re-enqueues a failed request as a fresh turn-page record so it is
     * retried. A new device-specific search URL is substituted so the retry
     * does not reuse the device parameters of the failed attempt.
     * (Removed long-dead commented-out retry-cap code.)
     *
     * NOTE(review): there is no retry cap here — a permanently failing request
     * will be re-enqueued forever; consider restoring a bounded retry count.
     */
    private void addCrawlerRecords(List<CrawlerRequestRecord> crawlerRequestRecords,CrawlerRequestRecord crawlerRecord){
        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .turnPageRequest(crawlerRecord)
                .httpRequest(crawlerRecord.getHttpRequest())
                .releaseTime(System.currentTimeMillis())
                .copyBizTags()
                .notFilterRecord()
                .needWashed(true)
                .build();
        String searchUrl = searchUrlFormat();
        crawlerRequestRecord.getHttpRequest().setUrl(searchUrl);
        crawlerRequestRecords.add(crawlerRequestRecord);
    }

    /**
     * Builds one POST search record per keyword from the keyword support
     * source and enqueues them for later distribution to users.
     *
     * @param requestRecord       the incoming record supplying headers and biz tags
     * @param supportSourceRecord support record whose internal-download page
     *                            holds the keyword list (MetaResponse JSON)
     * @param requestRecords      queue the built records are added to
     */
    private void initAllCrawlerRecordByKeyword(CrawlerRequestRecord requestRecord,
                                               CrawlerRequestRecord supportSourceRecord,
                                               LinkedBlockingQueue<CrawlerRequestRecord> requestRecords){
        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            MetaResponse metaResponse = JSON.parseObject(httpPage.getRawText(), MetaResponse.class);

            // status 0 = success; content is a list of serialized CrawlerDomainKeys.
            if (metaResponse.getStatus() == 0 && metaResponse.getContent() != null){
                List<String> contents = (List<String>) metaResponse.getContent();
                for (String content : contents) {

                    CrawlerDomainKeys crawlerDomainKeys = JSON.parseObject(content, CrawlerDomainKeys.class);
                    String keyword = crawlerDomainKeys.getKeyword();
                    String sortType = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("sort_type");
                    if (StringUtils.isBlank(sortType)){ // 2: newest first, 0: comprehensive, 1: most liked
                        sortType = "2"; // default: newest first
                    }
                    requestRecord.tagsCreator().bizTags().addCustomKV("sort_type",sortType);
                    // First page: offset 0 and an empty search_id.
                    String urlParam = String.format(searchPostParam, URLEncoder.encode(keyword,"utf-8"), 0, "",sortType);
                    String searchUrl = searchUrlFormat();
                    CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                            .turnPageRequest(requestRecord)
                            .httpUrl(searchUrl)
                            .httpHeads(requestRecord.getHttpRequest().getHeaders())
                            .releaseTime(System.currentTimeMillis())
                            .copyBizTags()
                            .needWashed(true)
                            .notFilterRecord()
                            .recordKey(searchUrl+keyword) // avoid record-key collisions across keywords
                            .build();
                    HttpRequest httpRequest = crawlerRequestRecord.getHttpRequest();
                    httpRequest.setMethod("POST");
                    httpRequest.setRequestBody(HttpRequestBody.custom(urlParam.getBytes(),"application/x-www-form-urlencoded", "UTF-8"));

                    crawlerRequestRecord.getHttpConfig().setHttpSite("search_new");
                    crawlerRequestRecord.getHttpConfig().setDisableCookie(true);
                    crawlerRequestRecord.tagsCreator().bizTags().addKeywords(keyword);
                    requestRecords.add(crawlerRequestRecord);
                }
            }
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Loads authorized-user and device pools from the oauth support record.
     * Entries with category "search" become available users; entries with
     * category "device_search" are queued (in reverse order) as devices for
     * searchUrlFormat() to round-robin through.
     */
    private void initAuthorInfos(CrawlerRequestRecord supportSourceRecord){
        List<String> devices = new ArrayList<>(); // available device list
        try {
            HttpPage httpPage = supportSourceRecord.getInternalDownloadPage();
            Json rawText = new Json(httpPage.getRawText());
            String status = rawText.jsonPath($_type + ".status").get();
            List<String> contents = rawText.jsonPath($_type + ".content").all();

            if ("0".equals(status) && contents != null && contents.size() > 0){

                List<Map> userOauthInfos = new Json(contents.get(0)).toList(Map.class);
                for (Map userOauthInfo : userOauthInfos) {
                    String oauthInfo = String.valueOf(userOauthInfo.get("oauthInfo"));
                    Json oauthInfoJson = new Json(oauthInfo);
                    String category = oauthInfoJson.jsonPath($_type + ".category").get();
                    if ("search".equals(category)){
                        availableUsers.add(oauthInfo);
                    }
                    if ("device_search".equals(category)){
                        devices.add(oauthInfo);
                    }
                }
                logger.info("search user count {}",availableUsers.size());
            }
            if (devices.size() > 0){
                // Reversed so devices are consumed in the opposite of list order.
                Collections.reverse(devices);
                devicesQueue.addAll(devices);
            }
            logger.info("search devices count {}",devicesQueue.size());
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
    }

    /**
     * Pulls the next queued keyword record, reusing the cookie and dy_user of
     * the given record, and stamps it with a reset retry counter.
     * Returns null when no queued record is available.
     */
    private CrawlerRequestRecord fillRecordRequest(CrawlerRequestRecord requestRecord){
        String userCookie = requestRecord.getHttpRequest().getHeaders().get("Cookie");
        String user = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("dy_user");
        CrawlerRequestRecord next = fillRecordRequest(requestRecord, user, userCookie);
        if (next == null){
            return null;
        }
        next.tagsCreator().bizTags().addCustomKV("dy_user", user);
        next.tagsCreator().bizTags().addCustomKV("download_retry_count", 0);
        return next;
    }

    /**
     * Pulls the next queued keyword record for this record's job flag and
     * binds it to the given user: sets the Cookie header and the X-SS-STUB
     * (MD5 of the POST body). Returns null when the queue yields nothing
     * within 3 seconds or on any error.
     * (Removed long-dead commented-out X-Gorgon signing code.)
     */
    private CrawlerRequestRecord fillRecordRequest(CrawlerRequestRecord requestRecord,String account,String userCookie ){
        // Job flag distinguishes task kinds: daily, supplement, etc.
        String jobFlag = requestRecord.tagsCreator().bizTags().getCategoryTag().getKVTagStrVal("job_flag");
        LinkedBlockingQueue<CrawlerRequestRecord> crawlerRequestRecords = recordMap.get(jobFlag);
        CrawlerRequestRecord record = null;
        try {
            // NOTE(review): crawlerRequestRecords is null when no queue exists for
            // this jobFlag; the resulting NPE is swallowed by the catch below and
            // null is returned — confirm that is the intended behavior.
            record = crawlerRequestRecords.poll(3, TimeUnit.SECONDS);
            if (record != null){
                Map<String, String> headers = record.getHttpRequest().getHeaders();
                String urlParam = new String(record.getHttpRequest().getRequestBody().getBody());
                String stub = md5(urlParam).toUpperCase();
                if (headers != null ){
                    headers.put("Cookie",userCookie);
                    headers.put("X-SS-STUB", stub);
                }
                logger.info("dy user {} search kw {}",account,record.tagsCreator().bizTags().keywordsList().get(0));
            }
        } catch (Exception e) {
            logger.error(e.getMessage(),e);
        }
        return record;
    }

    /**
     * Builds a search URL from the next pooled device's parameters, returning
     * the device to the queue afterwards (round-robin reuse).
     *
     * @throws IllegalStateException when no device is available within the
     *         poll timeout (FIX: previously this fell through to an NPE deep
     *         in JSON parsing, or LinkedBlockingQueue.add(null))
     */
    private String searchUrlFormat(){
        String device = pollOneDevice();
        if (StringUtils.isBlank(device)) {
            throw new IllegalStateException("no search device available in devicesQueue");
        }
        Map<?, ?> map = JSON.parseObject(device, Map.class);
        devicesQueue.add(device); // put the device back for the next caller
        return String.format(searchUrlFormat,map.get("iid"), map.get("openudid"), map.get("device_id"), map.get("cdid"), map.get("oaid"));
    }

    /**
     * Takes one device JSON string from the pool, waiting up to 3 seconds.
     * Returns null when the queue stays empty past the timeout (poll()
     * overwrites the "" initializer with null), or "" if interrupted.
     */
    private String pollOneDevice(){
        String device = "";
        try {
            device = devicesQueue.poll(3, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag and keep the stack trace in the log.
            Thread.currentThread().interrupt();
            logger.error(e.getMessage(), e);
        }
        return device;
    }
}
