package com.chance.cc.crawler.development.command.trigger.domain.du;

import com.chance.cc.crawler.core.CrawlerJob;
import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.development.command.job.domain.du.DUCommonCrawlerScheduleJob;
import com.chance.cc.crawler.development.command.trigger.CrawlerJobTriggerCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;

import static com.chance.cc.crawler.development.command.publish.douyin.DYPublishCrawlerJob.metaServerIP;
import static com.chance.cc.crawler.development.command.publish.douyin.DYPublishCrawlerJob.metaServerPort;

/**
 * @author bx
 * @date 2020/12/7 0007 11:15
 */
/**
 * Entry point that registers (or updates) the schedule trigger for the DU
 * ("毒app") common crawler job on the meta server.
 *
 * <p>Reuses the meta-server address published by {@code DYPublishCrawlerJob}
 * via static imports ({@code metaServerIP}/{@code metaServerPort}).
 */
public class DUCrawlerScheduleTrigger {

    /**
     * Shared command client bound to the meta server; created once and never
     * reassigned, hence {@code final}.
     */
    private static final CrawlerJobTriggerCommand crawlerJobTriggerCommand =
            new CrawlerJobTriggerCommand(metaServerIP, metaServerPort);

    /** Utility class with only static members — not meant to be instantiated. */
    private DUCrawlerScheduleTrigger() {
    }

    public static void main(String[] args) {
        publishDYSearchScheduleTriggerInfo();
    }

    /**
     * Builds the DU common crawler schedule job and registers a simple
     * schedule trigger for it on the meta server, then prints the raw
     * response for manual verification.
     *
     * <p>NOTE(review): the method name says "DY" (Douyin) but this class and
     * the job it publishes are "DU" — looks like a copy-paste leftover.
     * Kept as-is because renaming a public method would break callers;
     * consider deprecating in favor of a correctly named method.
     */
    public static void publishDYSearchScheduleTriggerInfo() {
        CrawlerJob crawlerJob = DUCommonCrawlerScheduleJob.crawlerSchduler();
        HttpPage page = crawlerJobTriggerCommand.addOrUpdateScheduleCrawlerTrigger(
                crawlerJob.generateCrawlerKey(),
                crawlerJob.generateCrawlerTriggerKey(),
                DUCommonCrawlerScheduleJob.domainId,
                CrawlerMetaConstant.ScheduleJobTrigger_Simple,
                "",
                "毒app采集任务执行");
        System.out.println("添加采集调度： " + page.getRawText());
    }
}
