package com.chance.cc.crawler.prod.command.trigger.domain.news.xchuxing;

import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;
import com.chance.cc.crawler.prod.command.job.domain.news.xchuxing.geely.XChuXingSearchKwTraceGeelyCrawlerSchedulerJob;
import com.chance.cc.crawler.prod.command.job.domain.news.xici.searchKw.XiCiTraceTraceCrawlerSchedulerJob;
import com.chance.cc.crawler.prod.command.trigger.CommonCrawlerSchedulerTrigger;

/**
 * Publishes schedule-trigger configuration for the XChuXing (新出行) community
 * crawler jobs. Running {@link #main(String[])} registers (or updates) the
 * cron trigger for the Geely search-keyword 7-day trace crawler.
 *
 * @author ZhaoHhuan (18638575967@163.com), 2021/11/11
 */
public class XChuXingCommonCrawlerScheduleTrigger extends CommonCrawlerSchedulerTrigger {

    public static void main(String[] args) {
        publishXChuXingSearchKwTraceGeelyCrawlerScheduleTriggerInfo();
    }

    /**
     * Registers (or updates) the cron-based schedule trigger for the XChuXing
     * search-keyword 7-day trace crawler in the Geely domain. The trigger
     * fires once per day at 17:00 (cron {@code "0 0 17 * * ?"}).
     */
    public static void publishXChuXingSearchKwTraceGeelyCrawlerScheduleTriggerInfo() {
        CrawlerJob crawlerJob = XChuXingSearchKwTraceGeelyCrawlerSchedulerJob.crawlerSchedulejob();

        // NOTE(review): metaServiceCommand_17 is not declared in this file —
        // presumably inherited from CommonCrawlerSchedulerTrigger; confirm there.
        metaServiceCommand_17.addOrUpdateScheduleCrawlerTrigger(
                crawlerJob.generateCrawlerKey(),
                crawlerJob.generateCrawlerTriggerKey(),
                XChuXingSearchKwTraceGeelyCrawlerSchedulerJob.domain,
                CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                "0 0 17 * * ?", // Quartz cron: every day at 17:00
                "新出行社区关键词回溯7天采集任务执行，每天下午5点执行"
        );
    }
}
