import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pipeline.KafkalPipeline;
import pipeline.MysqlPipeline;
import processor.QichachaPageProcessor;
import proxy.IpPoolProxyProvider;
import scheduler.ReCrawlRedisPriorityScheduler;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.Task;
import downloader.RandomHeaderDownloader;
import us.codecraft.webmagic.scheduler.RedisPriorityScheduler;
import util.Config;
import util.ReadFile;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class Run {

    private static final Logger LOG = LoggerFactory.getLogger(Run.class);

    /**
     * Bootstraps the Qichacha (企查查) crawler: resets the Redis duplicate-check
     * set for the site, builds a {@code Spider} with a Redis priority scheduler,
     * a random-header downloader backed by an IP-pool proxy provider and a Kafka
     * pipeline, then runs it with the configured number of worker threads.
     *
     * <p>Runs indefinitely ({@code setExitWhenComplete(false)}), waiting for new
     * URLs even when the scheduler queue drains.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {

        // Task handle used only to address the crawl's Redis key namespace via
        // its UUID; getSite() is never consulted by resetDuplicateCheck, so
        // returning null here is intentional.
        Task task = new Task() {
            @Override
            public String getUUID() {
                return "www.qichacha.com";
            }

            @Override
            public Site getSite() {
                return null;
            }
        };

        // Clear the duplicate-removal set so previously seen URLs can be re-crawled.
        RedisPriorityScheduler scheduler = new RedisPriorityScheduler(Config.getCrawler_REDIS());
        scheduler.resetDuplicateCheck(task);

        Spider spider = Spider.create(new QichachaPageProcessor())
                .setScheduler(scheduler);

        // Rotate request headers and route through the proxy pool to reduce blocking.
        RandomHeaderDownloader randomHeaderDownloader = new RandomHeaderDownloader();
        randomHeaderDownloader.setProxyProvider(new IpPoolProxyProvider());
        spider.setDownloader(randomHeaderDownloader);

        // Extracted results are published to Kafka downstream.
        spider.addPipeline(new KafkalPipeline());

        // Keep the spider alive after the queue empties; new URLs may arrive later.
        spider.setExitWhenComplete(false);

        // parseInt yields a primitive int directly — no boxing via Integer.valueOf.
        int threads = Integer.parseInt(Config.getCrawler_THREAD_NUMS());
        LOG.info("Starting Qichacha spider with {} threads", threads);
        spider.thread(threads).run();
    }
}
