package com.xm.task;

import com.xm.pojo.Bilibili;
import org.jsoup.Jsoup;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.downloader.HttpClientDownloader;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.proxy.Proxy;
import us.codecraft.webmagic.proxy.SimpleProxyProvider;
import us.codecraft.webmagic.scheduler.QueueScheduler;
import us.codecraft.webmagic.scheduler.component.HashSetDuplicateRemover;

@Component
public class JobCrawler implements PageProcessor {

    /** Bilibili up-user search page for the target uploader (keyword is URL-encoded). */
    private final String url =
            "https://search.bilibili.com/upuser?keyword=%E6%8B%89%E8%8D%89%E8%8E%93%E7%9A%84%E8%A5%BF%E7%93%9CJUN";

    /** Crawl settings: 10 s timeout, 3 retries, 3 s pause between retries. */
    private final Site site = Site.me()
            .setTimeOut(10 * 1000)
            .setRetrySleepTime(3000)
            .setRetryTimes(3);

    /** Pipeline that persists the extracted {@link Bilibili} bean. */
    @Autowired
    private SpringDataPipeline springDataPipeline;

    /**
     * Extracts the uploader's display name, profile link and fan-count text from
     * the search-result page and hands a populated {@link Bilibili} bean to the
     * pipeline via {@code page.putField}.
     *
     * <p>FIX: the original declared {@code throws InterruptedException}, but
     * {@link PageProcessor#process} declares no checked exceptions, so an
     * override may not add one — removed.
     *
     * @param page the downloaded page handed in by the WebMagic engine
     */
    @Override
    public void process(Page page) {
        System.out.println("开始爬虫-----");
        Bilibili bilibili = new Bilibili();
        bilibili.setUpName(page.getHtml().css("div#user-list ul div.up-face a").$("a", "title").get());
        bilibili.setUrl(page.getHtml().css("div#user-list ul div.up-face a").links().get());
        // Re-parse with Jsoup to strip the surrounding markup, keeping only the
        // plain "… 粉丝" text of the matched span.
        bilibili.setFances(Jsoup.parse(
                page.getHtml().css("div#user-list ul li div.up-info.clearfix span")
                        .regex(".*粉丝.*").toString()).text());
        page.putField("bilibili", bilibili);
    }

    /** @return the crawl configuration used by the WebMagic engine */
    @Override
    public Site getSite() {
        return site;
    }

    /**
     * Scheduled entry point: crawls {@link #url} 100 ms after startup and then
     * 10 s after each run completes ({@code fixedDelay}, so runs never overlap
     * even though {@code Spider.run()} blocks).
     *
     * <p>FIX: the original passed {@code new JobCrawler()} to
     * {@code Spider.create(...)}, creating a second instance that Spring never
     * injects; {@code this} — the managed bean — is used instead.
     */
    @Scheduled(initialDelay = 100, fixedDelay = 10 * 1000)
    public void process() {
        System.out.println("开始定时任务-----");
        // Route all requests through a fixed HTTP proxy.
        // NOTE(review): single hard-coded proxy endpoint — confirm it is still live.
        HttpClientDownloader downloader = new HttpClientDownloader();
        downloader.setProxyProvider(SimpleProxyProvider.from(new Proxy("183.166.70.223", 9999)));

        Spider.create(this)
                .addUrl(url)
                .thread(5)
                .setDownloader(downloader)
                .addPipeline(springDataPipeline)
                .setScheduler(new QueueScheduler().setDuplicateRemover(new HashSetDuplicateRemover()))
                .run();
    }
}
