package com.hui.platform.falseworkapi.service.business.webmagic.controller;

import com.hui.platform.falseworkapi.common.web.annotation.ApiResult;
import com.hui.platform.falseworkapi.service.business.webmagic.pipeline.BaiduNewsPipeline;
import com.hui.platform.falseworkapi.service.business.webmagic.processor.BaiDuPageProcessor;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.pipeline.FilePipeline;

/**
 * REST endpoints that launch WebMagic crawlers.
 *
 * @author hui
 * @since 2020-04-19
 */
@Api(tags = "WebMagic爬虫-相关接口")
@ApiResult
@Validated
@RestController
@RequestMapping("/webMagic")
public class WebMagicController {

    /** Entry URL the Baidu News crawl starts from. */
    private static final String BAIDU_NEWS_URL = "http://news.baidu.com/";

    /** Default output directory used when the caller supplies no path. */
    private static final String DEFAULT_SAVE_DIR = "E:\\data\\webmagic\\baiduNews\\";

    /** Number of worker threads the spider runs with. */
    private static final int SPIDER_THREADS = 3;

    @Autowired
    private BaiduNewsPipeline baiduNewsPipeline;

    /**
     * Crawls Baidu News and persists the results to disk.
     *
     * <p>When {@code localAddr} is non-empty the results go only to a
     * {@link FilePipeline} rooted at that path. Otherwise they go to a
     * timestamped folder under {@link #DEFAULT_SAVE_DIR} and additionally
     * through {@code baiduNewsPipeline}.
     * NOTE(review): the custom-path branch skips {@code baiduNewsPipeline};
     * confirm whether that asymmetry is intentional.
     *
     * <p>NOTE(review): because this method is {@code @Async}, Spring discards
     * its {@code Boolean} return value — the HTTP client does not receive
     * {@code true}. Consider returning {@code void} or a
     * {@code CompletableFuture<Boolean>} if the result matters to callers.
     *
     * @param localAddr optional local save directory; blank/absent selects the default
     * @return {@code true} (discarded by the async proxy, see note above)
     */
    @Async
    @GetMapping("/getBaiduNews")
    @ApiOperation("抓取百度新闻")
    public Boolean getBaiduNews(@ApiParam(value = "本地保存地址(不传则默认保存在E:\\data\\webmagic\\baiduNews 下)") @RequestParam(required = false) String localAddr) {
        // Build the spider once; only the pipeline wiring differs per branch.
        Spider spider = Spider.create(new BaiDuPageProcessor())
                // Start crawling from the Baidu News front page.
                .addUrl(BAIDU_NEWS_URL);
        if (localAddr != null && !localAddr.isEmpty()) {
            // Caller-specified save location: file output only.
            spider.addPipeline(new FilePipeline(localAddr));
        } else {
            // Default: timestamped folder plus the custom news pipeline.
            spider.addPipeline(new FilePipeline(DEFAULT_SAVE_DIR + System.currentTimeMillis()))
                    .addPipeline(baiduNewsPipeline);
        }
        // Blocks until the crawl finishes (runs on the @Async executor thread).
        spider.thread(SPIDER_THREADS).run();
        return true;
    }
}
