package com.chance.cc.crawler.development.command.node.douban;

import com.chance.cc.crawler.core.downloader.HttpPage;
import com.chance.cc.crawler.development.command.job.domain.douban.searchKw.DoubanSearchKwSimpleCrawlerSchedulerJob;
import com.chance.cc.crawler.development.command.node.CrawlerNodeCommand;
import org.junit.Test;

/**
 * Operational helper for managing the Douban crawler on a remote node.
 *
 * <p>{@code main} sends a kill command for the Douban search-keyword crawler job to the
 * node at 192.168.1.215:9505 and prints the node's raw response; {@link #nodeStatus()}
 * queries and prints the node's current status.
 *
 * <p>NOTE(review): this mixes a {@code main} entry point with a JUnit {@code @Test} —
 * presumably run ad hoc from the IDE; confirm it is excluded from automated test runs.
 */
public class DouBanCrawlerNodeOperator {
    // Command channel to the target crawler node (host, port).
    private static final CrawlerNodeCommand crawlerNodeCommand = new CrawlerNodeCommand("192.168.1.215", 9505);

    /**
     * Kills the Douban search-keyword crawler job on the remote node and prints the
     * node's raw response.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // Bug fix: the original discarded killCrawler's return value and then
        // dereferenced a null HttpPage, guaranteeing a NullPointerException.
        HttpPage httpPage = crawlerNodeCommand.killCrawler(DoubanSearchKwSimpleCrawlerSchedulerJob.crawlerSchedulejob());
        System.out.println("Kill Job : " + httpPage.getRawText());
    }

    /** Queries the remote node's status and prints the response page. */
    @Test
    public void nodeStatus() {
        HttpPage httpPage = crawlerNodeCommand.nodeStatus();
        System.out.println(httpPage);
    }
}
