package com.crawler.execute;

import com.crawler.api.CrawlerDownloader;
import com.crawler.api.CrawlerParser;
import com.crawler.api.Crawlerscheduler;
import com.crawler.config.CrawlerConfig;
import com.crawler.model.CrawlerPageInfo;
import com.crawler.model.CrawlerRequestInfo;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import java.util.concurrent.TimeUnit;


@Service
@Slf4j
public class DefaultCrawlerExecutor {

    /** Delay between loop iterations, in milliseconds (both idle and busy). */
    private static final long POLL_INTERVAL_MS = 1000L;

    @Autowired
    private CrawlerConfig crawlerConfig; // NOTE(review): not referenced in this class — confirm it is still needed

    @Autowired
    @Qualifier("jsoupCrawlerDownloader")
    private CrawlerDownloader jsoupCrawlerDownloader;

    @Autowired
    private CrawlerParser crawlerParser;

    @Autowired
    private Crawlerscheduler crawlerscheduler;

    /**
     * Runs the crawl loop on an async executor thread: repeatedly polls the
     * scheduler for a pending request, downloads the page and hands it to the
     * parser, throttling each iteration by {@link #POLL_INTERVAL_MS}.
     *
     * <p>Interruption is handled inside the loop: an exception escaping an
     * {@code @Async void} method is silently dropped by the task executor, so
     * instead we restore the interrupt flag and exit the loop cleanly. The
     * {@code throws} clause is retained for caller compatibility, but this
     * method no longer propagates {@link InterruptedException}.
     *
     * @throws InterruptedException never (retained for binary/source compatibility)
     */
    @Async
    public void execute() throws InterruptedException {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                CrawlerRequestInfo crawlerRequestInfo = crawlerscheduler.poll();
                if (crawlerRequestInfo != null) {
                    // Only log when there is actual work, to avoid flooding the
                    // log once per second while the queue is empty.
                    log.info("**************爬虫开启********************");
                    // 下载页面
                    CrawlerPageInfo download = jsoupCrawlerDownloader.download(crawlerRequestInfo);
                    // 解析页面 — skip when the download produced nothing.
                    if (download != null) {
                        crawlerParser.parse(download);
                    } else {
                        log.warn("download returned no page for request {}", crawlerRequestInfo);
                    }
                    // 持久化
                    log.info("**************爬虫结束********************");
                }
                // Throttle the loop whether or not work was found.
                TimeUnit.MILLISECONDS.sleep(POLL_INTERVAL_MS);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop; rethrowing from an
                // @Async void method would be swallowed by the executor.
                Thread.currentThread().interrupt();
            }
        }
        log.info("crawler loop stopped");
    }

}
