package com.chance.cc.crawler.prod.command.job.domain.news.ifeng.realtime;

import com.alibaba.fastjson.JSON;
import com.chance.cc.crawler.core.CrawlerEnum;
import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.core.filter.FilterUtils;
import com.chance.cc.crawler.core.record.CrawlerRequestRecord;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.meta.core.bean.job.CrawlerScheduleJob;
import com.chance.cc.crawler.prod.command.job.domain.news.NewsCommonScript;
import org.apache.commons.lang3.StringUtils;

/**
 * Publishes the ifeng (凤凰网) car-channel realtime crawl schedule job: builds the
 * {@code CrawlerJob} definition (trigger, request queue, kafka result pipeline, seed
 * request) and registers it with the meta service.
 *
 * @author songding
 * @since 2021/10/22
 */
public class IFengCarRealtimeCrawlerScheduleJob extends NewsCommonScript {

    /** Crawl domain identifier; public because other jobs/callers may reference it. */
    public static final String domain = "ifeng";

    // Private constants renamed to the conventional UPPER_SNAKE_CASE; values unchanged.
    private static final String CRAWLER_LEVEL = "realtime";

    private static final String SITE_BIZ = "realtime";

    private static final String SITE = "car";

    public static void main(String[] args) {
        publishCrawlerScheduleJobInfo();
    }

    /**
     * Builds the crawler job definition and publishes it to the meta service as a
     * schedule job.
     *
     * @return the {@link CrawlerJob} that was serialized into the published schedule job
     */
    public static CrawlerJob publishCrawlerScheduleJobInfo() {

        CrawlerJob crawlerJob = crawlerScheduler();

        // Publish the scheduled task to the meta service.
        CrawlerScheduleJob crawlerScheduleJob = new CrawlerScheduleJob();
        crawlerScheduleJob.setJobType(CrawlerMetaConstant.ScheduleCrawlerJobType.crawler.enumVal());
        crawlerScheduleJob.setDomain(domain);
        crawlerScheduleJob.setCrawlerJob(JSON.toJSONString(crawlerJob));
        crawlerScheduleJob.setNote("凤凰新闻汽车板块 汽车车系id等信息每月实时采集任务");
        crawlerScheduleJob.setCrawlerKey(crawlerJob.generateCrawlerKey());
        HttpPage httpPage = metaServiceCommand.addOrUpdateCrawlerScheduleJob(crawlerScheduleJob);
        System.out.println("发布任务：" + httpPage.getRawText());

        return crawlerJob;

    }

    /**
     * Assembles the {@link CrawlerJob}: cron trigger, redis request queue, kafka result
     * pipeline, worker thread count, and the seed request record.
     *
     * @return the fully built crawler job
     */
    public static CrawlerJob crawlerScheduler() {

        // Seed request for the article crawl.
        CrawlerRequestRecord requestRecord = doSearchNewsCrawler();

        return CrawlerJob.builder()
                .triggerInfo(domain,
                        CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                        System.currentTimeMillis(),// trigger name becomes ifeng-car-realtime-realtime; domain prefix is added automatically
                        StringUtils.joinWith("-", SITE, CRAWLER_LEVEL, CrawlerMetaConstant.ScheduleJobTriggerJob_Realtime))
                .crawlerRequestQueue(CrawlerMetaConstant.redisRequestQueue(StringUtils.joinWith("-", "crawler", domain, SITE, SITE_BIZ, CRAWLER_LEVEL, "queue")))
                //.fileResultPipeline("kafka","/data/chance_crawler_runner/logs/node/ifeng.log",false)
                .kafkaResultPipeline(null, kafkaTopic, null)
                .crawlerJobThreadNumber(10)// worker thread count
                .requestRecord(requestRecord)
                 //.supportRecord(keyRecord) keyword record (currently unused)
                .build();

    }

    /**
     * Builds the initial seed {@link CrawlerRequestRecord} pointing at the ifeng car
     * model home page, with redis-backed (non-local) key deduplication and the
     * domain/site/site-biz business tags attached.
     *
     * @return the seed request record
     */
    public static CrawlerRequestRecord doSearchNewsCrawler() {

        CrawlerRequestRecord requestRecord = CrawlerRequestRecord.builder()
                .startPageRequest(domain, CrawlerEnum.CrawlerRequestType.turnPage) // turnPage request type
                .domain(domain)
                .httpUrl("https://ncar.auto.ifeng.com/model/home")
                .recordKey("https://ncar.auto.ifeng.com/model/home")
                .releaseTime(System.currentTimeMillis())
                .filter(CrawlerEnum.CrawlerRecordFilter.key)
                // Deduplicate by key stored in redis (shared, not local to this worker).
                .addFilterInfo(FilterUtils.redisFilterKeyInfo(StringUtils.joinWith("-","filter",domain,SITE,SITE_BIZ,"queue")))
                .build();

        requestRecord.tagsCreator().bizTags().addDomain(domain);

        requestRecord.tagsCreator().bizTags().addSite(SITE);

        requestRecord.tagsCreator().bizTags().addSiteBiz(SITE_BIZ);

        return requestRecord;
    }

}
