package com.chance.cc.crawler.development.command.trigger.domain.dxy;

import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.development.command.trigger.CrawlerJobTriggerCommand;
import com.chance.cc.crawler.meta.core.bean.CrawlerMetaConstant;

/**
 * @author lt
 * @version 1.0
 * @date 2021-02-05 18:12:42
 * @email okprog@sina.com
 */
public class DXYSchedulerTriggerInfo {

    /** Crawler domain identifier for DXY (丁香园). */
    private static final String DOMAIN = "dxy";

    /** Cron expression: fire every 2 hours, on the hour. */
    private static final String CRON_EVERY_TWO_HOURS = "0 0 0/2 * * ?";

    /** Cron expression: fire once daily at 02:00. */
    private static final String CRON_DAILY_AT_2AM = "0 0 2 * * ?";

    /**
     * Command client used to publish trigger info to the scheduler service.
     * NOTE(review): host/port are hard-coded to an internal address — consider
     * externalizing to configuration.
     */
    private static final CrawlerJobTriggerCommand crawlerJobTriggerCommand =
            new CrawlerJobTriggerCommand("192.168.1.215", 9599);

    /** Utility class — not instantiable. */
    private DXYSchedulerTriggerInfo() {
    }

    /**
     * Publishes (adds or updates) a schedule trigger and logs the service response.
     * Shared implementation for all public publish methods.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the trigger to add/update
     * @param triggerType       trigger type constant (cron or simple)
     * @param cronExpression    cron expression; empty for simple triggers
     * @param description       human-readable description of the schedule
     */
    private static void publishTrigger(String crawlerKey,
                                       String crawlerTriggerKey,
                                       String triggerType,
                                       String cronExpression,
                                       String description) {
        HttpPage httpPage = crawlerJobTriggerCommand.addOrUpdateScheduleCrawlerTrigger(
                crawlerKey,
                crawlerTriggerKey,
                DOMAIN,
                triggerType,
                cronExpression,
                description
        );
        System.out.println("添加采集调度信息：" + httpPage.getRawText());
        System.out.println("triggerKey: " + crawlerTriggerKey);
    }

    /**
     * Publishes the DXY news (real-time) crawler trigger info; runs every 2 hours.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the trigger to add/update
     */
    public static void publishDXYNewsCrawlerScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        publishTrigger(
                crawlerKey,
                crawlerTriggerKey,
                CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                CRON_EVERY_TWO_HOURS,
                "丁香园咨询实时采集任务执行，每两小时执行一次"
        );
    }

    /**
     * Publishes the DXY news backtrace (7-day) crawler trigger info; runs daily at 02:00.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the trigger to add/update
     */
    public static void publishDXYNewsTraceCrawlerScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        publishTrigger(
                crawlerKey,
                crawlerTriggerKey,
                CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                CRON_DAILY_AT_2AM,
                "丁香园咨询回溯7天采集任务执行，每天执行一次"
        );
    }

    /**
     * Publishes the DXY forum (real-time) crawler trigger info; runs every 2 hours.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the trigger to add/update
     */
    public static void publishDXYForumCrawlerScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        publishTrigger(
                crawlerKey,
                crawlerTriggerKey,
                CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                CRON_EVERY_TWO_HOURS,
                "丁香园论坛实时采集任务执行，每两小时执行一次"
        );
    }

    /**
     * Publishes the DXY forum backtrace (7-day) crawler trigger info; runs daily at 02:00.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the trigger to add/update
     */
    public static void publishDXYForumTraceCrawlerScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        publishTrigger(
                crawlerKey,
                crawlerTriggerKey,
                CrawlerMetaConstant.ScheduleJobTrigger_Cron,
                CRON_DAILY_AT_2AM,
                "丁香园论坛回溯7天采集任务执行，每天执行一次"
        );
    }

    /**
     * Publishes a DXY test trigger info using a simple (non-cron) trigger.
     *
     * @param crawlerKey        key identifying the crawler job
     * @param crawlerTriggerKey key identifying the trigger to add/update
     */
    public static void publishDXYTestCrawlerScheduleTriggerInfo(String crawlerKey, String crawlerTriggerKey) {
        publishTrigger(
                crawlerKey,
                crawlerTriggerKey,
                CrawlerMetaConstant.ScheduleJobTrigger_Simple,
                "",
                "丁香园测试"
        );
    }
}
