package tk.mybatis.springboot.Timer;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.virjar.dungproxy.client.webmagic.DungProxyDownloader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import tk.mybatis.springboot.service.SpringerService;
import tk.mybatis.springboot.spider.entity.LiteratureEntity;
import tk.mybatis.springboot.spider.pipeline.ErrorPipeline;
import tk.mybatis.springboot.spider.pipeline.LiteraturePipeline;
import tk.mybatis.springboot.spider.processor.LiteraturePageProcessor;
import tk.mybatis.springboot.spider.utils.HttpClientUtils;
import us.codecraft.webmagic.Spider;

import java.util.List;


@Component
public class Job {

    /** Number of journal entries per page of the ScienceDirect browse endpoint. */
    private static final int PAGE_SIZE = 50;

    /** Worker-thread count used by each crawler instance. */
    private static final int SPIDER_THREADS = 20;

    @Autowired
    private LiteraturePipeline literaturePipeline;
    @Autowired
    private ErrorPipeline errorPipeline;

    @Autowired
    private SpringerService springerService;


    /**
     * Scheduled crawl of ScienceDirect journal listings: pages through the
     * browse endpoint, extracts each journal id ("I" field) and runs a
     * blocking crawler per journal via {@link LiteraturePipeline}.
     *
     * NOTE(review): despite the method name, the cron expression fires at
     * 10:42 on the 9th day of every month — confirm which schedule is intended.
     * Only pages 6..8 are crawled; the commented-out loop below covered the
     * full range (3937 journals / 50 per page).
     */
    @Scheduled(cron = "0 42 10 9 * ?")
    public void twoOClockPerDay() {
        System.out.println("===定时任务启动了");
        System.out.println("========Science【启动】喽！=========");
//        for (int i = 1; i < Math.ceil((float) 3937 / 50); i++) {
        for (int i = 6; i <= 8; i++) {
            HttpClientUtils httpClientUtils = new HttpClientUtils(
                    "http://www.sciencedirect.com/science/browsescroll/journals/all/begidx/" + PAGE_SIZE * i + "/rwpos/0");
            String json = httpClientUtils.getData();
            // Guard against a failed fetch: JSON.parseArray(null) returns null,
            // and the original code would then NPE on jsonArray.size().
            if (json == null || json.isEmpty()) {
                System.out.println("======page " + i + " fetch failed, skipping======");
                continue;
            }
            JSONArray jsonArray = JSON.parseArray(json);
            if (jsonArray == null) {
                continue;
            }
            for (int j = 0; j < jsonArray.size(); j++) {
                // getJSONObject avoids the unchecked (JSONObject) cast of the original.
                JSONObject jsonObject = jsonArray.getJSONObject(j);
                String id = jsonObject.getString("I");
                System.out.println("======id======" + id);

                Spider.create(new LiteraturePageProcessor())
                        .addPipeline(literaturePipeline)
//                        .setDownloader(new DungProxyDownloader())
                        .addUrl("http://www.sciencedirect.com/science/journal/" + id)
                        .thread(SPIDER_THREADS)
                        // Blocking run(), so the summary line below reflects the real total.
                        .run();

            }
        }
        System.out.println("========Science信息小爬虫【结束】喽！ 总共 " + LiteraturePageProcessor.num + " 条信息=========");
    }


    /**
     * Re-crawls previously failed links: fetches the stored error links,
     * deletes each record, then launches an asynchronous crawler (with proxy
     * downloader) against the stored URL, piped through {@link ErrorPipeline}.
     *
     * NOTE(review): spiders are started with runAsync(), so the count printed
     * at the end is read before the crawls finish and will under-report —
     * confirm whether a blocking run (as in twoOClockPerDay) was intended.
     */
    //    @Scheduled(cron = "0 30 15 * * ?")
    public void cycleErrorLink() {
        System.out.println("===开始重新爬取错误链接======");
        List<LiteratureEntity> list = springerService.getErrorLink();
        // Defensive: a null result from the service would NPE the loop below.
        if (list == null) {
            return;
        }
        for (LiteratureEntity entity : list) {
            springerService.delete(entity);
            Spider.create(new LiteraturePageProcessor())
                    .addPipeline(errorPipeline)
                    .setDownloader(new DungProxyDownloader())
                    // getCompany() holds the URL to retry — confirm field semantics.
                    .addUrl(entity.getCompany())
                    .thread(SPIDER_THREADS)
                    .runAsync();
        }

        System.out.println("========爬取错误链接【结束】喽！ 总共 " + LiteraturePageProcessor.num + " 条信息=========");
    }


}