package com.xu.crawler.core;

import com.xu.crawler.dao.entity.FailRecordDO;
import com.xu.crawler.dao.entity.TargetUrlsDO;
import com.xu.crawler.listener.DownloadFailListener;
import com.xu.crawler.pipeline.MongoPipeline;
import com.xu.crawler.processor.ScmpPageProcessor;
import com.xu.crawler.service.FailRecordService;
import com.xu.crawler.service.TargetUrlsService;
import com.xu.crawler.service.impl.TargetUrlsServiceImpl;
import org.assertj.core.util.Lists;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.enums.SourceEnum;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Scheduled crawl tasks: periodically runs the SCMP spider against the
 * configured target URLs (cron triggers are currently commented out).
 * <p>
 * Created by xuguangwu486
 */
@Component
public class ScheduleTasks {

    @Resource
    private FailRecordService failRecordService;
    @Resource
    private TargetUrlsService targetUrlsService;

    // Initialized to an empty list so scmpData() cannot NPE while the
    // queryTargetUrls() call in init() remains disabled.
    private List<TargetUrlsDO> targetUrls = Lists.newArrayList();

    @PostConstruct
    public void init() {
//        targetUrls = targetUrlsService.queryTargetUrls();
    }

//    @Scheduled(cron = "0 43 12 * * ?")
//    @Scheduled(cron = "0 */1 * * * ?")
    private void scmpData() {
        if (targetUrls.isEmpty()) {
            // Nothing to crawl until init() is re-enabled / targets are loaded.
            return;
        }
        Spider spider = Spider.create(new ScmpPageProcessor());
        spider.addPipeline(new MongoPipeline());
        spider.setSpiderListeners(Lists.newArrayList(new DownloadFailListener()));
        // Register every distinct SCMP URL first, then start the spider ONCE.
        // The previous version called run() inside forEach, which blocks on the
        // first URL and then tries to restart an already-finished Spider for
        // each remaining one. Constant-first equals also avoids an NPE when a
        // record has a null sourceId.
        targetUrls.stream()
                .filter(targetUrl -> SourceEnum.SCMP.getSourceId().equals(targetUrl.getSourceId()))
                .map(TargetUrlsDO::getUrl)
                .distinct()
                .forEach(spider::addUrl);
        spider.thread(10).run();
//        spider.addUrl("https://www.scmp.com/sitemap_news.xml").thread(10).run();
//        spider.addUrl("https://www.scmp.com/sitemap_business.xml").thread(10).run();
    }

}
