package com.example.magic;


import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.ConsolePipeline;
import us.codecraft.webmagic.pipeline.JsonFilePipeline;
import us.codecraft.webmagic.processor.PageProcessor;
import us.codecraft.webmagic.scheduler.QueueScheduler;

import java.io.IOException;
// Data-crawling logic: fetches JD product-review JSON and hands it off for cleaning.

@Controller
public class App implements PageProcessor {

    // Crawl-site settings: target domain plus a 2-second delay between
    // requests so we do not hammer the server.
    private Site site = Site.me().setDomain("111.206.227.156")
            .setSleepTime(2000);

    @Override
    public Site getSite() {
        return site;
    }

    /**
     * Stores the raw JSON body of each fetched page under the "ten" field;
     * the configured pipelines (JSON file + console) persist and print it.
     *
     * @param page the page just downloaded by the spider
     */
    @Override
    public void process(Page page) {
        page.putField("ten", page.getJson().toString());
    }

    /**
     * Controller endpoint that runs the crawl synchronously, then invokes the
     * JSON-cleaning step over the files written by {@code JsonFilePipeline}.
     *
     * @return the view name {@code "magic"}
     * @throws IOException            if the cleaning step fails on file I/O
     * @throws ClassNotFoundException propagated from the cleaning step
     * @throws InterruptedException   if the crawl thread is interrupted
     */
    @RequestMapping("spader")
    public String and() throws IOException, ClassNotFoundException, InterruptedException {
        // Keep both URLs: url_init is page 0; url_pattern is the paging
        // template (append the page number) for crawling further pages.
        String url_init = "https://club.jd.com/comment/productPageComments.action?callback=fetchJSON_comment98&productId=10040682942358&score=0&sortType=5&page=0&pageSize=10&isShadowSku=0&rid=0&fold=1";
        String url_pattern = "https://club.jd.com/comment/productPageComments.action?callback=fetchJSON_comment98&productId=10040682942358&score=0&sortType=5&pageSize=10&isShadowSku=0&rid=0&fold=1&page=";
        // Output directory consumed by JsonFilePipeline (Windows-style path).
        String output = "E://123//";

        QueueScheduler scheduler = new QueueScheduler();

        // Reuse this instance as the PageProcessor instead of allocating a
        // second App; behavior is identical since App holds no per-request
        // state, and it avoids a redundant object.
        Spider spider = Spider.create(this).addUrl(url_init)
                .setScheduler(scheduler)
                .addPipeline(new JsonFilePipeline(output))
                .addPipeline(new ConsolePipeline());

        // Currently crawls a single page; raise the loop bound and switch to
        // url_pattern + i to page through additional results. Note the
        // QueueScheduler deduplicates, so re-pushing url_init is harmless.
        for (int i = 1; i <= 1; i++) {
            Request request = new Request();
            //request.setUrl(url_pattern + i);
            request.setUrl(url_init);
            scheduler.push(request, spider);
        }
        spider.thread(5).run();

        System.out.println("数据抓取完成");
        // Clean the crawled JSON files written by the pipeline above.
        QingXiJson qingXiJson = new QingXiJson();
        qingXiJson.and();

        return "magic";
    }
}
