package com.chance.cc.crawler.development.command.node.autohome;

import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.development.command.job.domain.autohome.*;
import com.chance.cc.crawler.development.command.node.CrawlerNodeCommand;
import org.junit.Test;

/**
 * @author lt
 * @version 1.0
 * @date 2020-12-11 18:33:48
 * @email okprog@sina.com
 */
/**
 * Manual operations helper for AutoHome crawler jobs on a crawler node.
 *
 * <p>Run {@link #main(String[])} to kill a specific scheduled crawler job:
 * uncomment exactly one of the {@code killCrawler} lines for the job you want
 * to stop. {@link #nodeStatus()} is a manual smoke check that prints the
 * node's current status.
 */
public class AutoHomeCrawlerNodeOperator {
    /** Crawler domain identifier shared by all AutoHome schedule jobs. */
    private static final String DOMAIN = "autohome";
    // NOTE(review): node address/port are hard-coded; consider reading them
    // from configuration or program arguments.
    private static final CrawlerNodeCommand CRAWLER_NODE_COMMAND =
            new CrawlerNodeCommand("192.168.1.215", 9505);

    public static void main(String[] args) {
        // Kill the AutoHome "latest forum replies" crawler job.
//        HttpPage httpPage = CRAWLER_NODE_COMMAND.killCrawler(AutoHomeWebForumReplyCrawlerScheduleJob.autoHomeScheduler(DOMAIN));

        // Kill the AutoHome news (article) crawler job.
//        HttpPage httpPage = CRAWLER_NODE_COMMAND.killCrawler(AutoHomeWebArticleCrawlerScheduleJob.autoHomeScheduler(DOMAIN));

        // Kill the AutoHome news three-day trace-back reply crawler job.
//        HttpPage httpPage = CRAWLER_NODE_COMMAND.killCrawler(AutoHomeWebArticleTraceCrawlerScheduleJob.autoHomeScheduler(DOMAIN));

        // Kill the AutoHome "latest forum replies" crawler job.
        // (The original comment claimed this was the "latest posts" job, but
        //  the job killed here is AutoHomeWebForumReplyCrawlerScheduleJob —
        //  the same reply job as the first option above.)
        HttpPage httpPage = CRAWLER_NODE_COMMAND.killCrawler(
                AutoHomeWebForumReplyCrawlerScheduleJob.autoHomeScheduler(DOMAIN));
        // Guard against a null response so a failed kill doesn't end in an NPE.
        if (httpPage != null) {
            System.out.println("Kill Job ：" + httpPage.getRawText());
        } else {
            System.out.println("Kill Job returned no response");
        }
    }

    /** Prints the crawler node's current status (manual smoke check, not a real assertion-based test). */
    @Test
    public void nodeStatus() {
        HttpPage httpPage = CRAWLER_NODE_COMMAND.nodeStatus();
        System.out.println(httpPage);
    }
}
