package com.chance.cc.crawler.prod.command.job.domain.vm.dcdapp.realtime;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpConfig;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterInfo;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.queue.crawler.CrawlerQueueConfig;
import com.chance.cc.crawler.core.record.CrawlerRecord;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.vm.dcdapp.DCDAppCommonCrawlerSchedulerJob;
import org.apache.commons.lang3.StringUtils;

import java.util.ArrayList;
import java.util.List;

import static com.chance.cc.crawler.core.CrawlerEnum.CrawlerRequestType.*;
import static com.chance.cc.crawler.development.scripts.allfeild.AICCommonField.Tag_Site_Info;

/**
 * Hourly article-link accumulation job for the Dongchedi ("dcdapp") storytelling
 * (评书) section. Assembles the realtime crawl job definition and publishes it to
 * the meta service as a scheduled crawler job.
 */
public class DcdappWebStoryTellingRealtimeCrawlerSchedulerJob extends DCDAppCommonCrawlerSchedulerJob {

    private static final String DOMAIN = "dcdapp";
    /** Crawl cadence tier; combined with the site name to build queue/trigger names. */
    private static final String CRAWLER_LEVEL = "realtime";
    /** Meta-service site whose keys feed the keyword support record. */
    private static final String META_SITE = "series";
    /** Look-back window for the comment date-range filter: 1.2 hours = 4,320,000 ms. */
    private static final long LOOKBACK_MILLIS = (long) (1.2 * 60 * 60 * 1000);

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the crawl job and publishes it to the meta service as a scheduled job.
     *
     * @return the job definition that was published
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo() {
        CrawlerJob crawlerJob = crawlerSchedulejob();

        // Publish the scheduled crawl job to the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(crawlerJob));
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setNote("懂车帝评书板块每小时累计文章链接");
        crawlerScheduleJob.setCrawlerKey(crawlerJob.generateCrawlerKey());
        HttpPage page = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布crawler作业：" + page.getRawText());
        return crawlerJob;
    }

    /**
     * Assembles the full crawl job: the start-page request for the storytelling
     * section, a keyword support record, a per-post result-sync support record,
     * and the result pipelines (redis queue, file logs, kafka).
     *
     * @return the assembled crawl job definition
     */
    public static CrawlerJob crawlerSchedulejob() {
        String url = "https://www.dongchedi.com/";
        String scriptSite = "dynamic";
        String site = "storyTelling";
        String siteBiz = StringUtils.joinWith("-", site, CRAWLER_LEVEL);

        long currentTime = System.currentTimeMillis();
        // Was: Long.parseLong(String.valueOf(1.2 * 60 * 60 * 1000).split("\\.")[0]).
        // A plain constant cast yields the same 4,320,000 ms without round-tripping
        // through a String (which silently breaks once the double value is large
        // enough to print in scientific notation).
        long startTime = currentTime - LOOKBACK_MILLIS;

        // Stop turning pages after 10 pages.
        FilterInfo turnFilter = new FilterInfo();
        turnFilter.setCurCount(0);
        turnFilter.setCountTotalNum(10);

        CrawlerRequestRecord crawlerRequestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(DOMAIN, CrawlerEnum.CrawlerRequestType.turnPage)
                .domain(DOMAIN)
                .httpUrl(url)
                .httpConfig(HttpConfig.me(DOMAIN))
                .filter(CrawlerEnum.CrawlerRecordFilter.keyOrDateRange)
                // Dedup against a redis key set plus a 7-day date-range window.
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-", "filter", domain, site, siteBiz, "queue")))
                .addFilterInfo(FilterUtils.dateRangeFilterInfo(24 * 7, null))
                .releaseTime(System.currentTimeMillis())
                .turnPageFilterInfo(turnFilter)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.article)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.interaction)
                .resultLabelTag(CrawlerEnum.CrawlerDataType.comment)
                .build();

        crawlerRequestRecord.tagsCreator().bizTags().addDomain(DOMAIN);
        crawlerRequestRecord.tagsCreator().bizTags().addSite(scriptSite);
        crawlerRequestRecord.tagsCreator().bizTags().addSiteBiz(siteBiz);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV(Tag_Site_Info, site);
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("domainName", "dongchedi");

        // Date-range filter for comment records (the last 1.2 hours).
        // NOTE(review): this record is built but never attached to the job below —
        // confirm whether it should be wired in (e.g. as a support record) or removed.
        CrawlerRecord filterCrawlerRecord = new CrawlerRecord();
        filterCrawlerRecord.setFilter(CrawlerEnum.CrawlerRecordFilter.dateRange);
        filterCrawlerRecord.addFilterInfo(FilterUtils.dateRangeFilterInfo(0, new long[]{startTime, currentTime}));

        // Attach the storytelling-section request parameters.
        storytelling(crawlerRequestRecord);

        // Support record: pulls series keywords from the meta service.
        CrawlerRequestRecord keywordRecord = CrawlerRequestRecord.builder()
                .startPageRequest("dcdapp_series_keyword", turnPageItem)
                .httpUrl("http://" + metaServiceIp + ":" + metaServicePort + "/v1/meta/" + domain + "/keys?site=" + META_SITE)
                .requestLabelTag(supportSource)
                .requestLabelTag(internalDownload)
                .build();

        // Per-post results are pushed onto this redis list.
        String resultPostQueueName = "dcdapp_storyTelling_item_url";
        CrawlerQueueConfig crawlerPostQueueConfig = new CrawlerQueueConfig(
                resultPostQueueName,
                CrawlerQueueConfig.Content.result,
                CrawlerQueueConfig.Storage.redis,
                CrawlerQueueConfig.Structure.list);

        // Support record: syncs each post result back through the meta service.
        // NOTE(review): the record name "sina_post_result" looks copy-pasted from a
        // sina job — confirm whether a dcdapp-specific name is intended before changing
        // it, since it may be a live routing key.
        CrawlerRequestRecord syncCrawlerPostRecord = CrawlerRequestRecord.builder()
                .startPageRequest("sina_post_result", turnPageItem)
                .httpUrl("http://" + metaServiceIp + ":" + metaServicePort + "/crawler/domain/common/api/v1/" + domain + "/search/results/sync?resultQueue=" + resultPostQueueName + "&site=" + resultPostQueueName)
                .requestLabelTag(supportCallback)
                .requestLabelTag(internalDownload)
                .build();

        return CrawlerJob.builder()
                .crawlerJobThreadNumber(10)
                .triggerInfo(
                        domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),
                        StringUtils.joinWith("-", site, siteBiz, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domain, site, siteBiz, "queue")))
                .queueResultPipeline("redis", crawlerPostQueueConfig)
                .fileResultPipeline("redis", "/data/chance_crawler_runner/logs/node/dcdapp/dcdapp_storyTelling_redis.log", false)
                .fileResultPipeline("kafka", "/data/chance_crawler_runner/logs/node/dcdapp/dcdapp_storyTelling_kafka.log", false)
                .kafkaResultPipeline("kafka", kafkaTopicForRealTimeJob, null)
                .requestRecord(crawlerRequestRecord)
                .supportRecord(keywordRecord)
                .supportRecord(syncCrawlerPostRecord)
                .build();
    }

    /**
     * Adds the custom request tags specific to the storytelling section.
     *
     * @param crawlerRequestRecord the start-page request record to tag
     */
    private static void storytelling(CrawlerRequestRecord crawlerRequestRecord) {
        // Section identifier for the storytelling module.
        List<String> moduleTag = new ArrayList<>();
        moduleTag.add("dakareyi");
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("moduleTag", moduleTag);

        // Sort mode for the section (same token as the module tag).
        List<String> moduleSort = new ArrayList<>();
        moduleSort.add("dakareyi");
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("moduleSort", moduleSort);

        // Paging cursor seed in epoch seconds — presumably the earliest "behot" time
        // to fetch from; TODO confirm against the downstream script.
        List<String> min_behot_time = new ArrayList<>();
        min_behot_time.add("1627374400");
        crawlerRequestRecord.tagsCreator().bizTags().addCustomKV("min_behot_time", min_behot_time);
    }

}
