package cn.sunxyz.spider;

import cn.sunxyz.spider.downloader.Downloader;
import cn.sunxyz.spider.downloader.JSoupDownloader;
import cn.sunxyz.spider.parser.Parser;
import cn.sunxyz.spider.parser.impl.UrlParser;
import cn.sunxyz.spider.pipeline.ConsolePipeLine;
import cn.sunxyz.spider.pipeline.Pipeline;
import cn.sunxyz.spider.queue.UrlQueueManager;
import cn.sunxyz.spider.queue.support.DefaultUrlQueueManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Stream;

/**
 * Created by yangrd on 2018/6/14
 **/
public class Spider implements DispatcherSpider {

    private static final Logger LOGGER = LoggerFactory.getLogger(Spider.class);

    // Components are public static so callers can swap implementations before start().
    public static Downloader downloader;

    public static Parser<Set<String>> linkParser;

    public static UrlQueueManager urlQueueManager;

    public static Pipeline<Object> pipeline;

    // Regex an extracted link must match to be enqueued (focused crawling).
    public static String extractPattern;

    public static int extractDepth; // crawl-depth limit — reserved, not enforced yet

    static {
        downloader = new JSoupDownloader();
        linkParser = new UrlParser();
        urlQueueManager = new DefaultUrlQueueManager();
        pipeline = new ConsolePipeLine<>();

        // Focused-crawler defaults.
        extractDepth = 3;
        extractPattern = "https://.*";
    }

    private Stream<String> urlStream;

    /**
     * Seeds the crawl queue with a single start URL (queue-driven mode:
     * links discovered on crawled pages that match {@link #extractPattern}
     * are enqueued and crawled in turn).
     *
     * @param seed the first URL to crawl
     * @return this spider, for chaining
     */
    public Spider init(String seed) {
        urlQueueManager.push(seed);
        return this;
    }

    /**
     * Crawls a fixed, caller-supplied stream of URLs instead of the queue.
     * In this mode no link discovery takes place — exactly the given URLs are fetched.
     *
     * @param urlStream the URLs to fetch
     * @return this spider, for chaining
     */
    public Spider init(Stream<String> urlStream) {
        this.urlStream = urlStream;
        return this;
    }

    /**
     * Runs the crawl: either a single pass over the fixed URL stream, or
     * repeated passes draining the queue until no URLs remain.
     */
    @Override
    public void start() {
        if (urlStreamNotNull()) {
            run(urlStream);
        } else {
            int size = urlQueueManager.size();
            while (size > 0) {
                LOGGER.debug("run urlQueue size :=> {}", size);
                run(urlStream(size));
                size = urlQueueManager.size(); // re-read: run() may have enqueued new links
            }
        }
    }

    /**
     * Downloads every URL in the stream, hands each successfully fetched page to the
     * pipeline and, in queue-driven mode, enqueues matching links found on the page.
     */
    private void run(Stream<String> urlStream) {
        urlStream.peek(LOGGER::debug).map(downloader::download).filter(Page::isSuccess).forEach(page -> {
            // Discover links only when crawling from the queue. The original condition was
            // inverted: queue-driven crawls never pushed links (stopping after the seed),
            // while fixed-stream crawls pushed into a queue that is never drained.
            if (!urlStreamNotNull()) {
                linkParser.parser(page).stream().filter(this::yes).forEach(urlQueueManager::push);
            }
            pipeline.process(page.getDocument().title(), page);
        });
    }

    /** Pops the next {@code size} queued URLs as a parallel stream. */
    private Stream<String> urlStream(int size) {
        return Stream.generate(urlQueueManager::pop).limit(size).parallel();
    }

    /** A link is accepted when it is non-null and matches {@link #extractPattern}. */
    private boolean yes(String url) {
        // Null check must run first: Pattern.matches throws NPE on a null input,
        // so the original ordering made the nonNull guard unreachable.
        return Objects.nonNull(url) && isUrlMatch(url); // && hasDeepHeight(url);
    }

    private boolean isUrlMatch(String url) {
        // NOTE(review): extractPattern is a mutable public field, so the regex is
        // recompiled on every call; cache a compiled Pattern if it ever becomes final.
        return Pattern.matches(extractPattern, url);
    }

    /**
     * Depth check — reserved, not used yet. Counts {@code /}-separated segments
     * ("https://host/a/b" splits into 5 parts, 3 of which are scheme/empty/host)
     * and accepts URLs within {@link #extractDepth} path segments; a trailing
     * slash adds one extra empty-free segment, hence the +4 branch.
     *
     * @param url absolute URL to test
     * @return true when the URL is shallow enough to crawl
     */
    private boolean hasDeepHeight(String url) {
        if (url.lastIndexOf("/") == url.length() - 1) {
            return url.split("/").length <= extractDepth + 4;
        } else {
            return url.split("/").length <= extractDepth + 3;
        }
    }

    /** True when a fixed URL stream was supplied via {@link #init(Stream)}. */
    private boolean urlStreamNotNull() {
        return urlStream != null;
    }
}
