package com.leo.csdnspider.tasks;

import com.leo.csdnspider.dao.UserDao;
import com.leo.csdnspider.entity.User;
import com.leo.csdnspider.pipeline.*;
import com.leo.csdnspider.processor.*;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Repository;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.Pipeline;
import us.codecraft.webmagic.processor.PageProcessor;

import java.util.List;

/**
 * Scheduled crawler tasks: builds one CSDN blog URL per registered user and
 * runs one WebMagic spider per data category (base info, archive, class,
 * popular, latest comment), persisting results through the matching pipeline.
 *
 * <p>Each crawl is synchronous ({@link Spider#run()}): the original code
 * called the asynchronous {@code start()} and then {@code stop()} immediately,
 * which could flag the spider stopped before it crawled anything.
 *
 * @author Leo
 * @since 9/3/2019 4:11 PM
 */
@Component
public class MainScheduled {

    /** Root of every crawled profile URL; the CSDN username is appended. */
    private static final String BASE_URL = "https://blog.csdn.net/";

    /** Shared cron for all periodic crawls: at minute 0, 10, 20, ... of every hour. */
    private static final String EVERY_TEN_MINUTES = "0 0/10 * * * ?";

    private static final Logger logger = Logger.getLogger(MainScheduled.class);

    @Autowired
    private BaseInfoPipeline baseInfoPipeline;

    @Autowired
    private ArchiveInfoPipeline archiveInfoPipeline;

    @Autowired
    private ClassInfoPipeline classInfoPipeline;

    @Autowired
    private PopularInfoPipeline popularInfoPipeline;

    @Autowired
    private LatestCommentPipeline latestCommentPipeline;

    @Autowired
    private UserDao userDao;

    /**
     * Runs every category spider once, sequentially, over the same URL set.
     * Single-threaded per spider, matching the original one-shot behaviour.
     */
    public void startAll() {
        // Query the user list once and reuse it for all five crawls
        // (the original re-queried the DB for the PopularInfo spider only).
        String[] urls = getAllURL();

        runSpider("BaseInfo", new BaseInfoProcessor(), baseInfoPipeline, urls, 1);
        runSpider("ArchiveInfo", new ArchiveInfoProcessor(), archiveInfoPipeline, urls, 1);
        runSpider("ClassInfo", new ClassInfoProcessor(), classInfoPipeline, urls, 1);
        runSpider("PopularInfo", new PopularInfoProcessor(), popularInfoPipeline, urls, 1);
        runSpider("LatestComment", new LatestCommentProcessor(), latestCommentPipeline, urls, 1);
    }

    /**
     * Configures and runs a single spider to completion.
     *
     * @param label       human-readable crawl name, used only for logging
     * @param processor   page processor that parses the crawled pages
     * @param pipeline    pipeline that persists the extracted results
     * @param urls        seed URLs to crawl (one per user)
     * @param threadCount number of crawler threads
     */
    private void runSpider(String label, PageProcessor processor, Pipeline pipeline,
                           String[] urls, int threadCount) {
        logger.info("开始执行 " + label + " 爬虫---");
        Spider spider = Spider.create(processor);
        spider.addUrl(urls);
        spider.addPipeline(pipeline);
        spider.thread(threadCount);
        spider.setExitWhenComplete(true);
        // run() blocks until the crawl finishes. The former start()/stop()
        // pair raced the async crawl thread and could abort it immediately.
        spider.run();
    }

    /**
     * 获取所有要爬取的URl
     *
     * @return 拼接好的URL (one blog URL per user in the database)
     */
    private String[] getAllURL() {
        List<User> users = userDao.findAll();
        String[] urls = new String[users.size()];
        for (int i = 0; i < users.size(); i++) {
            urls[i] = BASE_URL + users.get(i).getUsername();
            logger.info("获取URl --->" + urls[i]);
        }
        return urls;
    }

    /**
     * 基础信息定时器
     */
    @Scheduled(cron = EVERY_TEN_MINUTES)
    public void baseInfoScheduled() {
        runSpider("BaseInfo", new BaseInfoProcessor(), baseInfoPipeline, getAllURL(), 5);
    }

    /**
     * 归档信息定时器
     */
    @Scheduled(cron = EVERY_TEN_MINUTES)
    public void archiveInfoScheduled() {
        runSpider("ArchiveInfo", new ArchiveInfoProcessor(), archiveInfoPipeline, getAllURL(), 5);
    }

    /**
     * 分类信息定时器
     */
    @Scheduled(cron = EVERY_TEN_MINUTES)
    public void classInfoScheduled() {
        runSpider("ClassInfo", new ClassInfoProcessor(), classInfoPipeline, getAllURL(), 5);
    }

    /**
     * 热门文章定时器
     */
    @Scheduled(cron = EVERY_TEN_MINUTES)
    public void popularInfoScheduled() {
        runSpider("PopularInfo", new PopularInfoProcessor(), popularInfoPipeline, getAllURL(), 5);
    }

    /**
     * 最新评论定时器
     */
    @Scheduled(cron = EVERY_TEN_MINUTES)
    public void latestCommentScheduled() {
        runSpider("LatestComment", new LatestCommentProcessor(), latestCommentPipeline, getAllURL(), 5);
    }
}
