package com.wisnews.service;

import com.wisnews.pojo.sec.vo.NewsFor163Pipeline;
import com.wisnews.util.spider.WebMagic;
import com.wisnews.util.spider.WebMagic2;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import us.codecraft.webmagic.Spider;

import java.util.Set;

/**
 * @Description：创建爬虫
 * @Author: yuyd
 * @Email: yyd8358@foxmail.com
 * @Date: 2020/12/26 14:17
 * @Version 1.0
 */
@Service
@Slf4j
public class CrawlDataAutoSchedService {

    @Autowired
    private NewsFor163Pipeline newsFor163Pipeline;

    @Autowired
    private WebMagic webMagic;

    @Autowired
    @Qualifier(value = "webMagic2")
    private WebMagic2 webMagic2;

    // Raw type kept on purpose to match whatever RedisTemplate bean the
    // project's Redis configuration registers — NOTE(review): confirm the
    // bean's generic signature before parameterizing this field.
    @Autowired
    private RedisTemplate redisTemplate;

    // One field per running spider. The previous implementation reused a
    // single field for both Spider.create(...) calls, which overwrote the
    // reference to the first spider and left it impossible to stop.
    private Spider legacySpider;
    private Spider currentSpider;

    /**
     * Starts both 163.com news crawlers asynchronously.
     * <p>
     * First evicts the cached news entries from Redis (keys matching
     * {@code news::*}), then launches the legacy crawler ({@link WebMagic},
     * yields little data after 2021) and the new crawler ({@link WebMagic2}).
     * Both spiders run via {@code runAsync()} so this method returns without
     * blocking; it is additionally {@link Async} so the caller's thread is
     * never tied up.
     */
    @Async
    public void crawlData() {

        // Evict cached news pages (keys prefixed with "news::") before re-crawling.
        // keys() can return null/empty depending on the template configuration,
        // and delete(null) would throw — so guard before deleting.
        Set keys = redisTemplate.keys("news::*");
        if (keys != null && !keys.isEmpty()) {
            redisTemplate.delete(keys);
        }

        // Legacy crawler — yields little data after 2021, kept for coverage.
        legacySpider = Spider.create(webMagic);
        legacySpider.addUrl("http://news.163.com/special/0001386F/rank_whole.html")
                .addPipeline(newsFor163Pipeline)
                .thread(2)
                .runAsync(); // async start; run() would block the current thread

        // New crawler.
        currentSpider = Spider.create(webMagic2);
        currentSpider.addUrl("http://news.163.com/special/0001386F/rank_whole.html")
                .addPipeline(newsFor163Pipeline)
                .thread(5)
                .runAsync();
    }

    /**
     * Stops both crawlers if they have been started.
     * Safe to call before {@link #crawlData()} — previously this method
     * would NPE on an unset field and could only stop the second spider.
     */
    public void stopcrawlData() {
        if (legacySpider != null) {
            legacySpider.stop();
        }
        if (currentSpider != null) {
            currentSpider.stop();
        }
    }

}
