package com.xu.crawler.api;

import com.xu.crawler.dao.entity.TargetUrlsDO;
import com.xu.crawler.enums.DataStatusEnum;
import com.xu.crawler.enums.URLMapperEnum;
import com.xu.crawler.listener.DownloadFailListener;
import com.xu.crawler.pipeline.MongoPipeline;
import com.xu.crawler.processor.ScmpPageProcessor;
import com.xu.crawler.service.TargetUrlsService;
import org.assertj.core.util.Lists;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import us.codecraft.webmagic.Spider;
import us.codecraft.webmagic.enums.SourceEnum;
import us.codecraft.webmagic.pipeline.FilePipeline;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;

@RestController
public class NewsController {

    /** Number of worker threads the crawler uses for a run. */
    private static final int SPIDER_THREAD_COUNT = 10;

    @Resource
    private TargetUrlsService targetUrlsService;

    /** Seed URLs built once at startup; read-only after {@link #init()}. */
    private List<TargetUrlsDO> targetUrls = Lists.newArrayList();

    /**
     * Builds the in-memory list of SCMP sitemap seed URLs.
     *
     * <p>NOTE(review): the DB-backed source ({@code targetUrlsService.queryTargetUrls()})
     * is commented out — presumably this hard-coded list is a temporary stand-in;
     * confirm before deleting the service dependency.
     */
    @PostConstruct
    public void init() {
//        targetUrls = targetUrlsService.queryTargetUrls();
        List<String> scmpUrls = Lists.newArrayList(
                "https://www.scmp.com/sitemap_announcements.xml",
                "https://www.scmp.com/sitemap_infographics.xml",
                "https://www.scmp.com/sitemap_business.xml",
                "https://www.scmp.com/sitemap_comment.xml",
                "https://www.scmp.com/sitemap_tech.xml",
                "https://www.scmp.com/sitemap_culture.xml");
        scmpUrls.forEach(tempUrl -> {
            TargetUrlsDO targetUrlsDO = new TargetUrlsDO();
            targetUrlsDO.setDataStatus(DataStatusEnum.YES.getDataStatus());
            // Epoch millis; System.currentTimeMillis() avoids allocating a legacy Date.
            targetUrlsDO.setCreateTime(System.currentTimeMillis());
            targetUrlsDO.setSourceId(SourceEnum.SCMP.getSourceId());
            targetUrlsDO.setHost(URLMapperEnum.SOUTH_CHINA_HOST.toString());
            targetUrlsDO.setUrl(tempUrl);
            targetUrls.add(targetUrlsDO);
        });
    }

    /**
     * Kicks off a crawl of all SCMP seed URLs and returns immediately.
     *
     * <p>Fix: the original called {@code spider.addUrl(url).thread(10).run()}
     * inside the forEach, which (a) blocked the HTTP request thread until the
     * crawl finished and (b) re-ran the same {@code Spider} instance once per
     * URL — WebMagic spiders are single-run. All seed URLs are now registered
     * first, then the spider is started once in the background via
     * {@code runAsync()}.
     *
     * @return the literal string {@code "SUCCESS"} once the crawl is started
     */
    @RequestMapping(value = "/crawler/pullNews", method = RequestMethod.GET)
    public String pullNews() {
        Spider spider = Spider.create(new ScmpPageProcessor());
//        spider.addPipeline(new MongoPipeline());
        spider.addPipeline(new FilePipeline());
        spider.setSpiderListeners(Lists.newArrayList(new DownloadFailListener()));
        // Constant-first equals is null-safe if an entity has no sourceId.
        // distinct() on the URL string replaces collect(toSet()) on entities,
        // which only deduplicated if TargetUrlsDO overrode equals/hashCode.
        targetUrls.stream()
                .filter(t -> SourceEnum.SCMP.getSourceId().equals(t.getSourceId()))
                .map(TargetUrlsDO::getUrl)
                .distinct()
                .forEach(spider::addUrl);
        spider.thread(SPIDER_THREAD_COUNT).runAsync();

        return "SUCCESS";
    }

}
