package com.chance.cc.crawler.development.command.trigger.domain.jingdong;

import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.development.command.job.domain.jingdong.JDH5CommentCrawlerScheduleJob;
import com.chance.cc.crawler.development.command.trigger.CrawlerJobTriggerCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;

/**
 * @author bx
 * @date 2020/12/7 0007 11:15
 */
public class JDH5CommentScheduleTrigger {

    /** Domain identifier for JD ("jingdong") crawls; passed to the trigger service. */
    private static final String DOMAIN_ID = "jingdong";

    // NOTE(review): trigger-service host/port are hard-coded; consider externalizing
    // to configuration so environments other than 192.168.1.215 can be targeted.
    private static final CrawlerJobTriggerCommand crawlerJobTriggerCommand =
            new CrawlerJobTriggerCommand("192.168.1.215", 9599);

    /** Utility launcher class — all members are static; not meant to be instantiated. */
    private JDH5CommentScheduleTrigger() {
    }

    /**
     * Builds the JD H5 comment crawler job and registers its cron-scheduled
     * trigger on the remote trigger service.
     */
    public static void main(String[] args) {
        CrawlerJob crawlerJob =
                JDH5CommentCrawlerScheduleJob.jdCrawlerJob(JDH5CommentCrawlerScheduleJob.domainId);
        publishTBCommentScheduleTriggerInfo(
                crawlerJob.generateCrawlerKey(), crawlerJob.generateCrawlerTriggerKey());
    }

    /**
     * Adds or updates the scheduled crawler trigger (cron: daily at 01:10) for the
     * given crawler on the remote trigger service and prints the raw response.
     *
     * <p>NOTE(review): the method name says "TB" but this class targets JD — likely a
     * copy-paste leftover from a taobao variant. Name kept unchanged for backward
     * compatibility with any existing callers; rename in a coordinated change.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the crawler's trigger
     */
    public static void publishTBCommentScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        HttpPage page = crawlerJobTriggerCommand.addOrUpdateScheduleCrawlerTrigger(
                crawlerKey,
                crawlerTriggerKey,
                DOMAIN_ID,
                CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                "0 10 1 * * ?", // Quartz cron: fire every day at 01:10
                "京东评论每天凌晨1:10采集一次");
        System.out.println("添加采集调度： "+page.getRawText());
    }
}
